Skip to content

Commit

Permalink
[SPARK-6093] [MLLIB] Add RegressionMetrics in PySpark/MLlib
Browse files Browse the repository at this point in the history
https://issues.apache.org/jira/browse/SPARK-6093

Author: Yanbo Liang <[email protected]>

Closes apache#5941 from yanboliang/spark-6093 and squashes the following commits:

6934af3 [Yanbo Liang] change to @property
aac3bc5 [Yanbo Liang] Add RegressionMetrics in PySpark/MLlib
  • Loading branch information
yanboliang authored and mengxr committed May 7, 2015
1 parent 068c315 commit 1712a7c
Show file tree
Hide file tree
Showing 2 changed files with 85 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.Logging
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, MultivariateOnlineSummarizer}
import org.apache.spark.sql.DataFrame

/**
* :: Experimental ::
Expand All @@ -32,6 +33,14 @@ import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Multivariate
@Experimental
class RegressionMetrics(predictionAndObservations: RDD[(Double, Double)]) extends Logging {

/**
* An auxiliary constructor taking a DataFrame.
* @param predictionAndObservations a DataFrame with two double columns:
* prediction and observation
*/
private[mllib] def this(predictionAndObservations: DataFrame) =
this(predictionAndObservations.map(r => (r.getDouble(0), r.getDouble(1))))

/**
* Use MultivariateOnlineSummarizer to calculate summary statistics of observations and errors.
*/
Expand Down
78 changes: 76 additions & 2 deletions python/pyspark/mllib/evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ class BinaryClassificationMetrics(JavaModelWrapper):
>>> scoreAndLabels = sc.parallelize([
... (0.1, 0.0), (0.1, 1.0), (0.4, 0.0), (0.6, 0.0), (0.6, 1.0), (0.6, 1.0), (0.8, 1.0)], 2)
>>> metrics = BinaryClassificationMetrics(scoreAndLabels)
>>> metrics.areaUnderROC()
>>> metrics.areaUnderROC
0.70...
>>> metrics.areaUnderPR()
>>> metrics.areaUnderPR
0.83...
>>> metrics.unpersist()
"""
Expand All @@ -47,13 +47,15 @@ def __init__(self, scoreAndLabels):
java_model = java_class(df._jdf)
super(BinaryClassificationMetrics, self).__init__(java_model)

@property
def areaUnderROC(self):
    """
    Area under the receiver operating characteristic (ROC) curve,
    as computed by the underlying JVM metrics object.
    """
    auc = self.call("areaUnderROC")
    return auc

@property
def areaUnderPR(self):
"""
Computes the area under the precision-recall curve.
Expand All @@ -67,6 +69,78 @@ def unpersist(self):
self.call("unpersist")


class RegressionMetrics(JavaModelWrapper):
    """
    Evaluator for regression.

    >>> predictionAndObservations = sc.parallelize([
    ...     (2.5, 3.0), (0.0, -0.5), (2.0, 2.0), (8.0, 7.0)])
    >>> metrics = RegressionMetrics(predictionAndObservations)
    >>> metrics.explainedVariance
    0.95...
    >>> metrics.meanAbsoluteError
    0.5...
    >>> metrics.meanSquaredError
    0.37...
    >>> metrics.rootMeanSquaredError
    0.61...
    >>> metrics.r2
    0.94...
    """

    def __init__(self, predictionAndObservations):
        """
        :param predictionAndObservations: an RDD of (prediction, observation) pairs.
        """
        sc = predictionAndObservations.ctx
        sql_ctx = SQLContext(sc)
        # Ship the pairs to the JVM as a two-column DataFrame of doubles so the
        # Scala RegressionMetrics auxiliary constructor can consume them.
        df = sql_ctx.createDataFrame(predictionAndObservations, schema=StructType([
            StructField("prediction", DoubleType(), nullable=False),
            StructField("observation", DoubleType(), nullable=False)]))
        java_class = sc._jvm.org.apache.spark.mllib.evaluation.RegressionMetrics
        java_model = java_class(df._jdf)
        super(RegressionMetrics, self).__init__(java_model)

    @property
    def explainedVariance(self):
        # Raw docstring: '\h' in a plain string is an invalid escape sequence.
        r"""
        Returns the explained variance regression score.
        explainedVariance = 1 - variance(y - \hat{y}) / variance(y)
        """
        return self.call("explainedVariance")

    @property
    def meanAbsoluteError(self):
        """
        Returns the mean absolute error, which is a risk function corresponding to the
        expected value of the absolute error loss or l1-norm loss.
        """
        return self.call("meanAbsoluteError")

    @property
    def meanSquaredError(self):
        """
        Returns the mean squared error, which is a risk function corresponding to the
        expected value of the squared error loss or quadratic loss.
        """
        return self.call("meanSquaredError")

    @property
    def rootMeanSquaredError(self):
        """
        Returns the root mean squared error, which is defined as the square root of
        the mean squared error.
        """
        return self.call("rootMeanSquaredError")

    @property
    def r2(self):
        """
        Returns R^2^, the coefficient of determination.
        """
        return self.call("r2")


def _test():
import doctest
from pyspark import SparkContext
Expand Down

0 comments on commit 1712a7c

Please sign in to comment.