From c9d76d93ae8b82fe7e3d8257d700cc625f5b9ff3 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Tue, 19 Sep 2017 21:08:26 +0100
Subject: [PATCH 1/2] Update plugins, including scala-maven-plugin, to latest
 versions. Update checkstyle to 8.2. Remove bogus checkstyle config and
 enable it. Fix existing and new Java checkstyle errors.

---
 R/pkg/DESCRIPTION                                  |  2 +-
 assembly/pom.xml                                   |  2 +-
 .../apache/spark/network/util/ByteUnit.java        | 12 ++++----
 .../network/util/NettyMemoryMetrics.java           |  2 +-
 common/sketch/pom.xml                              | 27 ++++++++----------
 common/unsafe/pom.xml                              | 27 ++++++++----------
 .../apache/spark/unsafe/types/UTF8String.java      | 27 ++++++++++--------
 .../spark/unsafe/types/UTF8StringSuite.java        |  3 +-
 core/pom.xml                                       |  1 +
 .../apache/spark/io/ReadAheadInputStream.java      | 21 ++++++++------
 .../unsafe/sort/UnsafeSorterSpillReader.java       |  5 ++--
 .../spark/io/ReadAheadInputStreamSuite.java        |  3 +-
 dev/checkstyle.xml                                 | 28 +++++++++----------
 dev/test-dependencies.sh                           |  2 +-
 pom.xml                                            | 28 +++++++++++--------
 .../sources/v2/reader/DataSourceV2Reader.java      |  9 +++---
 .../v2/reader/SupportsScanUnsafeRow.java           |  5 ++--
 .../sources/v2/JavaAdvancedDataSourceV2.java       |  4 ++-
 18 files changed, 107 insertions(+), 101 deletions(-)

diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index d1c846c04827..c70839805222 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -57,5 +57,5 @@ Collate:
     'types.R'
     'utils.R'
     'window.R'
-RoxygenNote: 5.0.1
+RoxygenNote: 6.0.1
 VignetteBuilder: knitr
diff --git a/assembly/pom.xml b/assembly/pom.xml
index cd8366a17552..01fe354235e5 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -187,7 +187,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-assembly-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.1.0</version>
         <executions>
           <execution>
             <id>dist</id>
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
index e097714bbc6d..984575acaf51 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
@@ -17,12 +17,12 @@
 package org.apache.spark.network.util;
 
 public enum ByteUnit {
-  BYTE (1),
-  KiB (1024L),
-  MiB ((long) Math.pow(1024L, 2L)),
-  GiB ((long) Math.pow(1024L, 3L)),
-  TiB ((long) Math.pow(1024L, 4L)),
-  PiB ((long) Math.pow(1024L, 5L));
+  BYTE(1),
+  KiB(1024L),
+  MiB((long) Math.pow(1024L, 2L)),
+  GiB((long) Math.pow(1024L, 3L)),
+  TiB((long) Math.pow(1024L, 4L)),
+  PiB((long) Math.pow(1024L, 5L));
 
   ByteUnit(long multiplier) {
     this.multiplier = multiplier;
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java b/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
index 969d9f65fb40..77f807cdb541 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/NettyMemoryMetrics.java
@@ -44,7 +44,7 @@ public class NettyMemoryMetrics implements MetricSet {
   private final String metricPrefix;
 
   @VisibleForTesting
-  final static Set<String> VERBOSE_METRICS = new HashSet<>();
+  static final Set<String> VERBOSE_METRICS = new HashSet<>();
   static {
     VERBOSE_METRICS.addAll(Arrays.asList(
       "numAllocations",
diff --git a/common/sketch/pom.xml b/common/sketch/pom.xml
index ca929d8f253d..2d59c71cc375 100644
--- a/common/sketch/pom.xml
+++ b/common/sketch/pom.xml
@@ -56,20 +56,17 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>net.alchim31.maven</groupId>
-          <artifactId>scala-maven-plugin</artifactId>
-          <version>3.2.2</version>
-          <configuration>
-            <javacArgs combine.children="append">
-              <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
-              <javacArg>-XDignore.symbol.file</javacArg>
-            </javacArgs>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <configuration>
+          <javacArgs combine.children="append">
+            <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
+            <javacArg>-XDignore.symbol.file</javacArg>
+          </javacArgs>
+        </configuration>
+      </plugin>
+    </plugins>
   </build>
 </project>
diff --git a/common/unsafe/pom.xml b/common/unsafe/pom.xml
index 772b0ef82f01..a3772a262008 100644
--- a/common/unsafe/pom.xml
+++ b/common/unsafe/pom.xml
@@ -93,20 +93,17 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>net.alchim31.maven</groupId>
-          <artifactId>scala-maven-plugin</artifactId>
-          <version>3.2.2</version>
-          <configuration>
-            <javacArgs combine.children="append">
-              <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
-              <javacArg>-XDignore.symbol.file</javacArg>
-            </javacArgs>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <configuration>
+          <javacArgs combine.children="append">
+            <!-- This option is needed to suppress warnings from sun.misc.Unsafe usage -->
+            <javacArg>-XDignore.symbol.file</javacArg>
+          </javacArgs>
+        </configuration>
+      </plugin>
+    </plugins>
   </build>
 </project>
diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index 76db0fb91e48..ce4a06bde80c 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -513,9 +513,9 @@ public UTF8String trim() {
 
   /**
    * Based on the given trim string, trim this string starting from both ends
-   * This method searches for each character in the source string, removes the character if it is found
-   * in the trim string, stops at the first not found. It calls the trimLeft first, then trimRight.
-   * It returns a new string in which both ends trim characters have been removed.
+   * This method searches for each character in the source string, removes the character if it is
+   * found in the trim string, stops at the first not found. It calls the trimLeft first, then
+   * trimRight. It returns a new string in which both ends trim characters have been removed.
    * @param trimString the trim character string
    */
   public UTF8String trim(UTF8String trimString) {
@@ -540,8 +540,9 @@ public UTF8String trimLeft() {
 
   /**
    * Based on the given trim string, trim this string starting from left end
-   * This method searches each character in the source string starting from the left end, removes the character if it
-   * is in the trim string, stops at the first character which is not in the trim string, returns the new string.
+   * This method searches each character in the source string starting from the left end, removes
+   * the character if it is in the trim string, stops at the first character which is not in the
+   * trim string, returns the new string.
    * @param trimString the trim character string
    */
   public UTF8String trimLeft(UTF8String trimString) {
@@ -552,7 +553,8 @@ public UTF8String trimLeft(UTF8String trimString) {
     int trimIdx = 0;
 
     while (srchIdx < numBytes) {
-      UTF8String searchChar = copyUTF8String(srchIdx, srchIdx + numBytesForFirstByte(this.getByte(srchIdx)) - 1);
+      UTF8String searchChar = copyUTF8String(
+        srchIdx, srchIdx + numBytesForFirstByte(this.getByte(srchIdx)) - 1);
       int searchCharBytes = searchChar.numBytes;
       // try to find the matching for the searchChar in the trimString set
       if (trimString.find(searchChar, 0) >= 0) {
@@ -587,8 +589,9 @@ public UTF8String trimRight() {
 
   /**
    * Based on the given trim string, trim this string starting from right end
-   * This method searches each character in the source string starting from the right end, removes the character if it
-   * is in the trim string, stops at the first character which is not in the trim string, returns the new string.
+   * This method searches each character in the source string starting from the right end,
+   * removes the character if it is in the trim string, stops at the first character which is not
+   * in the trim string, returns the new string.
    * @param trimString the trim character string
    */
   public UTF8String trimRight(UTF8String trimString) {
@@ -608,11 +611,13 @@ public UTF8String trimRight(UTF8String trimString) {
       numChars ++;
     }
 
-    // index trimEnd points to the first no matching byte position from the right side of the source string.
+    // index trimEnd points to the first no matching byte position from the right side of
+    // the source string.
     int trimEnd = numBytes - 1;
     while (numChars > 0) {
-      UTF8String searchChar =
-        copyUTF8String(stringCharPos[numChars - 1], stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
+      UTF8String searchChar = copyUTF8String(
+        stringCharPos[numChars - 1],
+        stringCharPos[numChars - 1] + stringCharLen[numChars - 1] - 1);
       if (trimString.find(searchChar, 0) >= 0) {
         trimEnd -= stringCharLen[numChars - 1];
       } else {
diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
index f0860018d564..7b03d2c650fc 100644
--- a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
+++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java
@@ -777,7 +777,8 @@ public void trimRightWithTrimString() {
     assertEquals(fromString("cc"), fromString("ccbaaaa").trimRight(fromString("ba")));
     assertEquals(fromString(""), fromString("aabbbbaaa").trimRight(fromString("ab")));
     assertEquals(fromString(" he"), fromString(" hello ").trimRight(fromString(" ol")));
-    assertEquals(fromString("oohell"), fromString("oohellooo../*&").trimRight(fromString("./,&%*o")));
+    assertEquals(fromString("oohell"),
+      fromString("oohellooo../*&").trimRight(fromString("./,&%*o")));
 
     assertEquals(EMPTY_UTF8, fromString(" ").trimRight(fromString(" ")));
 
diff --git a/core/pom.xml b/core/pom.xml
index 431967e057e6..da68abd855c7 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -483,6 +483,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
+        <version>1.6.0</version>
         <executions>
           <execution>
             <id>sparkr-pkg</id>
diff --git a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
index 618bd42d0e65..5b45d268ace8 100644
--- a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
+++ b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
@@ -32,11 +32,12 @@
 
 /**
  * {@link InputStream} implementation which asynchronously reads ahead from the underlying input
- * stream when specified amount of data has been read from the current buffer. It does it by maintaining
- * two buffer - active buffer and read ahead buffer. Active buffer contains data which should be returned
- * when a read() call is issued. The read ahead buffer is used to asynchronously read from the underlying
- * input stream and once the current active buffer is exhausted, we flip the two buffers so that we can
- * start reading from the read ahead buffer without being blocked in disk I/O.
+ * stream when specified amount of data has been read from the current buffer. It does it by
+ * maintaining two buffers - active buffer and read ahead buffer. Active buffer contains data
+ * which should be returned when a read() call is issued. The read ahead buffer is used to
+ * asynchronously read from the underlying input stream and once the current active buffer is
+ * exhausted, we flip the two buffers so that we can start reading from the read ahead buffer
+ * without being blocked in disk I/O.
  */
 public class ReadAheadInputStream extends InputStream {
 
@@ -83,7 +84,8 @@ public class ReadAheadInputStream extends InputStream {
 
   private final InputStream underlyingInputStream;
 
-  private final ExecutorService executorService = ThreadUtils.newDaemonSingleThreadExecutor("read-ahead");
+  private final ExecutorService executorService =
+      ThreadUtils.newDaemonSingleThreadExecutor("read-ahead");
 
   private final Condition asyncReadComplete = stateChangeLock.newCondition();
 
@@ -98,13 +100,14 @@ public class ReadAheadInputStream extends InputStream {
    * @param readAheadThresholdInBytes If the active buffer has less data than the read-ahead
    *                                  threshold, an async read is triggered.
    */
-  public ReadAheadInputStream(InputStream inputStream, int bufferSizeInBytes, int readAheadThresholdInBytes) {
+  public ReadAheadInputStream(
+      InputStream inputStream, int bufferSizeInBytes, int readAheadThresholdInBytes) {
     Preconditions.checkArgument(bufferSizeInBytes > 0,
         "bufferSizeInBytes should be greater than 0, but the value is " + bufferSizeInBytes);
     Preconditions.checkArgument(readAheadThresholdInBytes > 0 &&
             readAheadThresholdInBytes < bufferSizeInBytes,
-        "readAheadThresholdInBytes should be greater than 0 and less than bufferSizeInBytes, but the" +
-            "value is " + readAheadThresholdInBytes);
+        "readAheadThresholdInBytes should be greater than 0 and less than bufferSizeInBytes, " +
+            "but the value is " + readAheadThresholdInBytes);
     activeBuffer = ByteBuffer.allocate(bufferSizeInBytes);
     readAheadBuffer = ByteBuffer.allocate(bufferSizeInBytes);
     this.readAheadThresholdInBytes = readAheadThresholdInBytes;
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
index 1e760b0b5198..e2f48e5508af 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
@@ -76,9 +76,8 @@ public UnsafeSorterSpillReader(
         SparkEnv.get() == null ? 0.5 :
             SparkEnv.get().conf().getDouble("spark.unsafe.sorter.spill.read.ahead.fraction", 0.5);
 
-    final boolean readAheadEnabled =
-        SparkEnv.get() == null ? false :
-        SparkEnv.get().conf().getBoolean("spark.unsafe.sorter.spill.read.ahead.enabled", true);
+    final boolean readAheadEnabled = SparkEnv.get() != null &&
+        SparkEnv.get().conf().getBoolean("spark.unsafe.sorter.spill.read.ahead.enabled", true);
 
     final InputStream bs =
         new NioBufferedFileInputStream(file, (int) bufferSizeBytes);
diff --git a/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java b/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
index 5008f93b7e40..918ddc4517ec 100644
--- a/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
+++ b/core/src/test/java/org/apache/spark/io/ReadAheadInputStreamSuite.java
@@ -28,6 +28,7 @@ public class ReadAheadInputStreamSuite extends GenericFileInputStreamSuite {
   @Before
   public void setUp() throws IOException {
     super.setUp();
-    inputStream = new ReadAheadInputStream(new NioBufferedFileInputStream(inputFile), 8 * 1024, 4 * 1024);
+    inputStream = new ReadAheadInputStream(
+        new NioBufferedFileInputStream(inputFile), 8 * 1024, 4 * 1024);
   }
 }
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index fd73ca73ee7e..68aee4d4c97e 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -52,20 +52,6 @@
         <property name="file" value="dev/checkstyle-suppressions.xml"/>
     </module>
 
-    <!--
-    If you wish to turn off checking for a section of code, you can put a comment in the source
-    before and after the section, with the following syntax:
-
-      // checkstyle.off: XXX (such as checkstyle.off: NoFinalizer)
-      ...  // stuff that breaks the styles
-      // checkstyle.on: XXX (such as checkstyle.on: NoFinalizer)
-    -->
-    <module name="SuppressionCommentFilter">
-        <property name="offCommentFormat" value="checkstyle.off\: ([\w\|]+)"/>
-        <property name="onCommentFormat" value="checkstyle.on\: ([\w\|]+)"/>
-        <property name="checkFormat" value="$1"/>
-    </module>
-
     <module name="FileTabCharacter">
         <property name="eachLine" value="true"/>
     </module>
@@ -81,6 +67,19 @@
     </module>
 
     <module name="TreeWalker">
+        <!--
+        If you wish to turn off checking for a section of code, you can put a comment in the source
+        before and after the section, with the following syntax:
+
+          // checkstyle.off: XXX (such as checkstyle.off: NoFinalizer)
+          ...  // stuff that breaks the styles
+          // checkstyle.on: XXX (such as checkstyle.on: NoFinalizer)
+        -->
+        <module name="SuppressionCommentFilter">
+            <property name="offCommentFormat" value="checkstyle.off\: ([\w\|]+)"/>
+            <property name="onCommentFormat" value="checkstyle.on\: ([\w\|]+)"/>
+            <property name="checkFormat" value="$1"/>
+        </module>
         <module name="OuterTypeFilename"/>
         <module name="IllegalTokenText">
             <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
@@ -182,6 +181,5 @@
         <module name="UnusedImports"/>
         <module name="RedundantImport"/>
         <module name="RedundantModifier"/>
-        <module name="FileContentsHolder"/>
     </module>
 </module>
diff --git a/dev/test-dependencies.sh b/dev/test-dependencies.sh
index 114b116d84cd..c7714578bd00 100755
--- a/dev/test-dependencies.sh
+++ b/dev/test-dependencies.sh
@@ -46,7 +46,7 @@ OLD_VERSION=$($MVN -q \
     -Dexec.executable="echo" \
     -Dexec.args='${project.version}' \
     --non-recursive \
-    org.codehaus.mojo:exec-maven-plugin:1.5.0:exec)
+    org.codehaus.mojo:exec-maven-plugin:1.6.0:exec)
 if [ $? != 0 ]; then
   echo -e "Error while getting version string from Maven:\n$OLD_VERSION"
   exit 1
diff --git a/pom.xml b/pom.xml
index 0bbbf20a76d6..e292db941937 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1969,7 +1969,7 @@
         <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-enforcer-plugin</artifactId>
-          <version>1.4.1</version>
+          <version>3.0.0-M1</version>
          <executions>
            <execution>
              <id>enforce-versions</id>
@@ -2012,7 +2012,7 @@
        <plugin>
          <groupId>net.alchim31.maven</groupId>
          <artifactId>scala-maven-plugin</artifactId>
-          <version>3.2.2</version>
+          <version>3.3.1</version>
          <executions>
            <execution>
              <id>eclipse-add-source</id>
@@ -2061,7 +2061,7 @@
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-compiler-plugin</artifactId>
-          <version>3.6.1</version>
+          <version>3.7.0</version>
          <configuration>
            <source>${java.version}</source>
            <target>${java.version}</target>
@@ -2078,7 +2078,7 @@
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-surefire-plugin</artifactId>
-          <version>2.19.1</version>
+          <version>2.20.1</version>
          <!-- Note config is repeated in scalatest config -->
          <configuration>
            <includes>
@@ -2222,7 +2222,7 @@
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-javadoc-plugin</artifactId>
-          <version>2.10.4</version>
+          <version>3.0.0-M1</version>
          <configuration>
            <additionalparam>-Xdoclint:all -Xdoclint:-missing</additionalparam>
            <tags>
@@ -2262,17 +2262,17 @@
        <plugin>
          <groupId>org.codehaus.mojo</groupId>
          <artifactId>exec-maven-plugin</artifactId>
-          <version>1.5.0</version>
+          <version>1.6.0</version>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-assembly-plugin</artifactId>
-          <version>3.0.0</version>
+          <version>3.1.0</version>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-shade-plugin</artifactId>
-          <version>3.0.0</version>
+          <version>3.1.0</version>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
@@ -2287,7 +2287,7 @@
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-dependency-plugin</artifactId>
-          <version>3.0.0</version>
+          <version>3.0.2</version>
          <executions>
            <execution>
              <id>default-cli</id>
@@ -2487,10 +2487,7 @@
          <artifactId>maven-checkstyle-plugin</artifactId>
          <version>2.17</version>
          <configuration>
-            <verbose>false</verbose>
-            <failOnViolation>false</failOnViolation>
            <includeTestSourceDirectory>true</includeTestSourceDirectory>
-            <failOnWarning>false</failOnWarning>
            <sourceDirectories>${basedir}/src/main/java,${basedir}/src/main/scala</sourceDirectories>
            <testSourceDirectories>${basedir}/src/test/java</testSourceDirectories>
            <configLocation>dev/checkstyle.xml</configLocation>
@@ -2498,6 +2495,13 @@
            <encoding>${project.build.sourceEncoding}</encoding>
            <outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
          </configuration>
+          <dependencies>
+            <dependency>
+              <groupId>com.puppycrawl.tools</groupId>
+              <artifactId>checkstyle</artifactId>
+              <version>8.2</version>
+            </dependency>
+          </dependencies>
          <executions>
            <execution>
              <goals>
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
index 48feb049c1de..fb4d5c0d7ae4 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/DataSourceV2Reader.java
@@ -21,15 +21,14 @@
 
 import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.Row;
-import org.apache.spark.sql.sources.v2.DataSourceV2Options;
-import org.apache.spark.sql.sources.v2.ReadSupport;
-import org.apache.spark.sql.sources.v2.ReadSupportWithSchema;
 import org.apache.spark.sql.types.StructType;
 
 /**
  * A data source reader that is returned by
- * {@link ReadSupport#createReader(DataSourceV2Options)} or
- * {@link ReadSupportWithSchema#createReader(StructType, DataSourceV2Options)}.
+ * {@link org.apache.spark.sql.sources.v2.ReadSupport#createReader(
+ * org.apache.spark.sql.sources.v2.DataSourceV2Options)} or
+ * {@link org.apache.spark.sql.sources.v2.ReadSupportWithSchema#createReader(
+ * StructType, org.apache.spark.sql.sources.v2.DataSourceV2Options)}.
  * It can mix in various query optimization interfaces to speed up the data scan. The actual scan
  * logic should be delegated to {@link ReadTask}s that are returned by {@link #createReadTasks()}.
  *
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
index 829f9a078760..d5eada808a16 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsScanUnsafeRow.java
@@ -23,8 +23,6 @@
 import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
-import org.apache.spark.sql.sources.v2.reader.DataSourceV2Reader;
-import org.apache.spark.sql.sources.v2.reader.ReadTask;
 
 /**
  * A mix-in interface for {@link DataSourceV2Reader}. Data source readers can implement this
@@ -39,7 +37,8 @@ public interface SupportsScanUnsafeRow extends DataSourceV2Reader {
 
   @Override
   default List<ReadTask<Row>> createReadTasks() {
-    throw new IllegalStateException("createReadTasks should not be called with SupportsScanUnsafeRow.");
+    throw new IllegalStateException(
+      "createReadTasks should not be called with SupportsScanUnsafeRow.");
   }
 
   /**
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java b/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
index 50900e98dedb..7aacf0346d2f 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/sources/v2/JavaAdvancedDataSourceV2.java
@@ -32,7 +32,9 @@
 
 public class JavaAdvancedDataSourceV2 implements DataSourceV2, ReadSupport {
 
-  class Reader implements DataSourceV2Reader, SupportsPushDownRequiredColumns, SupportsPushDownFilters {
+  class Reader implements DataSourceV2Reader, SupportsPushDownRequiredColumns,
+      SupportsPushDownFilters {
+
     private StructType requiredSchema = new StructType().add("i", "int").add("j", "int");
     private Filter[] filters = new Filter[0];
 

From 93ff67576566e93eaca3220507049166133ad4b1 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Tue, 19 Sep 2017 21:09:38 +0100
Subject: [PATCH 2/2] Undo inadvertent R doc change

---
 R/pkg/DESCRIPTION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index c70839805222..d1c846c04827 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -57,5 +57,5 @@ Collate:
     'types.R'
     'utils.R'
     'window.R'
-RoxygenNote: 6.0.1
+RoxygenNote: 5.0.1
 VignetteBuilder: knitr