
Commit 75c83a4 (parent: 1859701)

passed test compile

9 files changed: +40 -35 lines

mllib/src/main/scala/org/apache/spark/mllib/linalg/Vectors.scala

Lines changed: 6 additions & 0 deletions

@@ -54,6 +54,12 @@ trait Vector extends Serializable {
    * Converts the instance to a breeze vector.
    */
   private[mllib] def toBreeze: BV[Double]
+
+  /**
+   * Gets the value of the ith element.
+   * @param i index
+   */
+  private[mllib] def apply(i: Int): Double = toBreeze(i)
 }
 
 /**
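
The new accessor's default implementation delegates to the Breeze conversion, so each call pays for a toBreeze unless a concrete subclass overrides it. A minimal sketch of how it behaves (the demo package and object are hypothetical; since apply is private[mllib], the call site must itself live under org.apache.spark.mllib):

    package org.apache.spark.mllib.demo  // hypothetical subpackage, so private[mllib] is visible

    import org.apache.spark.mllib.linalg.Vectors

    object VectorApplyDemo {
      def main(args: Array[String]): Unit = {
        val v = Vectors.dense(1.0, 2.0, 3.0)
        // v(1) resolves to the default apply above, i.e. toBreeze(1)
        assert(v(1) == 2.0)
      }
    }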

mllib/src/test/java/org/apache/spark/mllib/classification/JavaNaiveBayesSuite.java

Lines changed: 7 additions & 6 deletions

@@ -19,6 +19,7 @@
 
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.linalg.Vectors;
 import org.apache.spark.mllib.regression.LabeledPoint;
 import org.junit.After;
 import org.junit.Assert;
@@ -45,12 +46,12 @@ public void tearDown() {
   }
 
   private static final List<LabeledPoint> POINTS = Arrays.asList(
-    new LabeledPoint(0, new double[] {1.0, 0.0, 0.0}),
-    new LabeledPoint(0, new double[] {2.0, 0.0, 0.0}),
-    new LabeledPoint(1, new double[] {0.0, 1.0, 0.0}),
-    new LabeledPoint(1, new double[] {0.0, 2.0, 0.0}),
-    new LabeledPoint(2, new double[] {0.0, 0.0, 1.0}),
-    new LabeledPoint(2, new double[] {0.0, 0.0, 2.0})
+    new LabeledPoint(0, Vectors.dense(1.0, 0.0, 0.0)),
+    new LabeledPoint(0, Vectors.dense(2.0, 0.0, 0.0)),
+    new LabeledPoint(1, Vectors.dense(0.0, 1.0, 0.0)),
+    new LabeledPoint(1, Vectors.dense(0.0, 2.0, 0.0)),
+    new LabeledPoint(2, Vectors.dense(0.0, 0.0, 1.0)),
+    new LabeledPoint(2, Vectors.dense(0.0, 0.0, 2.0))
   );
 
   private int validatePrediction(List<LabeledPoint> points, NaiveBayesModel model) {
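
All six training points now go through the Vectors.dense factory rather than raw double[] arrays, since LabeledPoint takes a Vector after this change. A rough Scala mirror of the same construction (the diffs below also use the Array[Double] overload of dense, so both forms are available):

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.regression.LabeledPoint

    // label first, then a dense feature vector built from varargs
    val points = Seq(
      LabeledPoint(0.0, Vectors.dense(1.0, 0.0, 0.0)),
      LabeledPoint(1.0, Vectors.dense(0.0, 1.0, 0.0)),
      LabeledPoint(2.0, Vectors.dense(0.0, 0.0, 1.0)))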

mllib/src/test/java/org/apache/spark/mllib/linalg/JavaVectorsSuite.java

Lines changed: 3 additions & 3 deletions

@@ -19,10 +19,10 @@
 
 import java.io.Serializable;
 
-import com.google.common.collect.Lists;
-
 import scala.Tuple2;
 
+import com.google.common.collect.Lists;
+
 import org.junit.Test;
 import static org.junit.Assert.*;
 
@@ -36,7 +36,7 @@ public void denseArrayConstruction() {
 
   @Test
   public void sparseArrayConstruction() {
-    Vector v = Vectors.sparse(3, Lists.newArrayList(
+    Vector v = Vectors.sparse(3, Lists.<Tuple2<Integer, Double>>newArrayList(
       new Tuple2<Integer, Double>(0, 2.0),
       new Tuple2<Integer, Double>(2, 3.0)));
     assertArrayEquals(new double[]{2.0, 0.0, 3.0}, v.toArray(), 0.0);
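
The explicit type witness on Lists.<Tuple2<Integer, Double>>newArrayList is presumably what made this test compile (the commit message reads "passed test compile"); without it, javac's inference apparently does not settle on the element type the sparse overload expects. On the Scala side, the same vector can be built from (index, value) pairs, roughly:

    import org.apache.spark.mllib.linalg.Vectors

    // size 3, nonzeros at indices 0 and 2
    val v = Vectors.sparse(3, Seq((0, 2.0), (2, 3.0)))
    assert(v.toArray.sameElements(Array(2.0, 0.0, 3.0)))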

mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala

Lines changed: 1 addition & 1 deletion

@@ -112,7 +112,7 @@ class LogisticRegressionSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
     val testData = LogisticRegressionSuite.generateLogisticInput(A, B, nPoints, 42)
 
     val initialB = -1.0
-    val initialWeights = Array(initialB)
+    val initialWeights = Vectors.dense(initialB)
 
     val testRDD = sc.parallelize(testData, 2)
     testRDD.cache()

mllib/src/test/scala/org/apache/spark/mllib/classification/NaiveBayesSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -19,11 +19,11 @@ package org.apache.spark.mllib.classification
 
 import scala.util.Random
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
 import org.apache.spark.mllib.regression.LabeledPoint
 import org.apache.spark.mllib.util.LocalSparkContext
+import org.apache.spark.mllib.linalg.Vectors
 
 object NaiveBayesSuite {
 
@@ -54,7 +54,7 @@ object NaiveBayesSuite {
         if (rnd.nextDouble() < _theta(y)(j)) 1 else 0
       }
 
-      LabeledPoint(y, xi)
+      LabeledPoint(y, Vectors.dense(xi))
     }
   }
 }

mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala

Lines changed: 3 additions & 3 deletions

@@ -20,14 +20,14 @@ package org.apache.spark.mllib.classification
 import scala.util.Random
 import scala.collection.JavaConversions._
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
 import org.jblas.DoubleMatrix
 
 import org.apache.spark.SparkException
 import org.apache.spark.mllib.regression._
 import org.apache.spark.mllib.util.LocalSparkContext
+import org.apache.spark.mllib.linalg.Vectors
 
 object SVMSuite {
 
@@ -54,7 +54,7 @@ object SVMSuite {
         intercept + 0.01 * rnd.nextGaussian()
       if (yD < 0) 0.0 else 1.0
     }
-    y.zip(x).map(p => LabeledPoint(p._1, p._2))
+    y.zip(x).map(p => LabeledPoint(p._1, Vectors.dense(p._2)))
   }
 
 }
@@ -110,7 +110,7 @@ class SVMSuite extends FunSuite with LocalSparkContext {
 
     val initialB = -1.0
     val initialC = -1.0
-    val initialWeights = Array(initialB,initialC)
+    val initialWeights = Vectors.dense(initialB, initialC)
 
     val testRDD = sc.parallelize(testData, 2)
     testRDD.cache()

mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala

Lines changed: 6 additions & 7 deletions

@@ -20,13 +20,12 @@ package org.apache.spark.mllib.optimization
 import scala.util.Random
 import scala.collection.JavaConversions._
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
 
-import org.apache.spark.SparkContext
 import org.apache.spark.mllib.regression._
 import org.apache.spark.mllib.util.LocalSparkContext
+import org.apache.spark.mllib.linalg.Vectors
 
 object GradientDescentSuite {
 
@@ -58,7 +57,7 @@ object GradientDescentSuite {
       if (yVal > 0) 1 else 0
     }
 
-    val testData = (0 until nPoints).map(i => LabeledPoint(y(i), Array(x1(i))))
+    val testData = (0 until nPoints).map(i => LabeledPoint(y(i), Vectors.dense(Array(x1(i)))))
     testData
   }
 }
@@ -83,11 +82,11 @@ class GradientDescentSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
     // Add a extra variable consisting of all 1.0's for the intercept.
     val testData = GradientDescentSuite.generateGDInput(A, B, nPoints, 42)
     val data = testData.map { case LabeledPoint(label, features) =>
-      label -> Array(1.0, features: _*)
+      label -> Vectors.dense(1.0, features.toArray: _*)
     }
 
     val dataRDD = sc.parallelize(data, 2).cache()
-    val initialWeightsWithIntercept = Array(1.0, initialWeights: _*)
+    val initialWeightsWithIntercept = Vectors.dense(0.0, initialWeights: _*)
 
     val (_, loss) = GradientDescent.runMiniBatchSGD(
       dataRDD,
@@ -113,13 +112,13 @@ class GradientDescentSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
     // Add a extra variable consisting of all 1.0's for the intercept.
     val testData = GradientDescentSuite.generateGDInput(2.0, -1.5, 10000, 42)
     val data = testData.map { case LabeledPoint(label, features) =>
-      label -> Array(1.0, features: _*)
+      label -> Vectors.dense(1.0, features.toArray: _*)
    }
 
     val dataRDD = sc.parallelize(data, 2).cache()
 
     // Prepare non-zero weights
-    val initialWeightsWithIntercept = Array(1.0, 0.5)
+    val initialWeightsWithIntercept = Vectors.dense(1.0, 0.5)
 
     val regParam0 = 0
     val (newWeights0, loss0) = GradientDescent.runMiniBatchSGD(
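
Both hunks keep the suite's existing bias-term trick: the intercept is folded into the weight vector by prepending a constant 1.0 feature, now built with Vectors.dense instead of Array. A rough standalone sketch of that pattern (the point's values are made up):

    import org.apache.spark.mllib.linalg.Vectors
    import org.apache.spark.mllib.regression.LabeledPoint

    val p = LabeledPoint(1.0, Vectors.dense(2.0, 3.0))
    // Prepend a constant 1.0 feature so that weights(0) can act as the intercept.
    val withBias = (p.label, Vectors.dense(1.0, p.features.toArray: _*))

Note also that the first hunk changes the initial intercept weight from 1.0 to 0.0 while switching to Vectors.dense.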

mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala

Lines changed: 2 additions & 6 deletions

@@ -17,11 +17,9 @@
 
 package org.apache.spark.mllib.regression
 
-
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext
+import org.apache.spark.mllib.linalg.Vectors
 import org.apache.spark.mllib.util.{LinearDataGenerator, LocalSparkContext}
 
 class LassoSuite extends FunSuite with LocalSparkContext {
@@ -51,7 +49,6 @@ class LassoSuite extends FunSuite with LocalSparkContext {
     ls.optimizer.setStepSize(1.0).setRegParam(0.01).setNumIterations(20)
 
     val model = ls.run(testRDD)
-
     val weight0 = model.weights(0)
     val weight1 = model.weights(1)
     assert(model.intercept >= 1.9 && model.intercept <= 2.1, model.intercept + " not in [1.9, 2.1]")
@@ -79,7 +76,7 @@ class LassoSuite extends FunSuite with LocalSparkContext {
 
     val initialB = -1.0
     val initialC = -1.0
-    val initialWeights = Array(initialB,initialC)
+    val initialWeights = Vectors.dense(Array(initialB, initialC))
 
     val testRDD = sc.parallelize(testData, 2)
     testRDD.cache()
@@ -88,7 +85,6 @@ class LassoSuite extends FunSuite with LocalSparkContext {
     ls.optimizer.setStepSize(1.0).setRegParam(0.01).setNumIterations(20)
 
     val model = ls.run(testRDD, initialWeights)
-
     val weight0 = model.weights(0)
     val weight1 = model.weights(1)
     assert(model.intercept >= 1.9 && model.intercept <= 2.1, model.intercept + " not in [1.9, 2.1]")

mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala

Lines changed: 10 additions & 7 deletions

@@ -40,11 +40,12 @@ class LinearRegressionSuite extends FunSuite with LocalSparkContext {
     linReg.optimizer.setNumIterations(1000).setStepSize(1.0)
 
     val model = linReg.run(testRDD)
-
     assert(model.intercept >= 2.5 && model.intercept <= 3.5)
-    assert(model.weights.length === 2)
-    assert(model.weights(0) >= 9.0 && model.weights(0) <= 11.0)
-    assert(model.weights(1) >= 9.0 && model.weights(1) <= 11.0)
+
+    val weights = model.weights
+    assert(weights.size === 2)
+    assert(weights(0) >= 9.0 && weights(0) <= 11.0)
+    assert(weights(1) >= 9.0 && weights(1) <= 11.0)
 
     val validationData = LinearDataGenerator.generateLinearInput(
       3.0, Array(10.0, 10.0), 100, 17)
@@ -67,9 +68,11 @@ class LinearRegressionSuite extends FunSuite with LocalSparkContext {
     val model = linReg.run(testRDD)
 
     assert(model.intercept === 0.0)
-    assert(model.weights.length === 2)
-    assert(model.weights(0) >= 9.0 && model.weights(0) <= 11.0)
-    assert(model.weights(1) >= 9.0 && model.weights(1) <= 11.0)
+
+    val weights = model.weights
+    assert(weights.size === 2)
+    assert(weights(0) >= 9.0 && weights(0) <= 11.0)
+    assert(weights(1) >= 9.0 && weights(1) <= 11.0)
 
     val validationData = LinearDataGenerator.generateLinearInput(
       0.0, Array(10.0, 10.0), 100, 17)
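
With weights now a Vector rather than an Array, the assertions read the dimension through size instead of length. A small sketch of the difference; element reads via apply work inside the test because it lives under the mllib package (apply is private[mllib] at this commit), while external code can go through toArray:

    import org.apache.spark.mllib.linalg.Vectors

    val weights = Vectors.dense(10.0, 10.0)  // stand-in for model.weights
    assert(weights.size == 2)                // was weights.length for Array
    // toArray gives element access without relying on private[mllib] apply
    assert(weights.toArray(0) >= 9.0 && weights.toArray(0) <= 11.0)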
