
Commit ba0ba1e

add unit tests for pipeline
1 parent: 0586c7b · commit: ba0ba1e

2 files changed: 117 additions, 1 deletion


python/pyspark/ml/__init__.py

Lines changed: 2 additions & 1 deletion
@@ -174,7 +174,8 @@ def fit(self, dataset, params={}):
                 else:  # must be an Estimator
                     model = stage.fit(dataset, paramMap)
                     transformers.append(model)
-                    dataset = model.transform(dataset, paramMap)
+                    if i < indexOfLastEstimator:
+                        dataset = model.transform(dataset, paramMap)
             else:
                 transformers.append(stage)
         return PipelineModel(transformers)
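
The added guard means that during fit() the model produced by the last Estimator is never asked to transform the training data, since no later stage consumes that output; intermediate models still transform it so that downstream stages are fitted on the right input. Below is a minimal, dependency-free sketch of the surrounding loop (hypothetical stand-ins for the pyspark.ml classes, for illustration only, not the actual source):

    # Simplified sketch of the fit() loop above (hypothetical stand-ins for
    # the pyspark.ml classes; only meant to illustrate the
    # indexOfLastEstimator guard).

    class Transformer(object):
        def transform(self, dataset):
            raise NotImplementedError

    class Estimator(object):
        def fit(self, dataset):
            raise NotImplementedError  # returns a fitted Transformer (a model)

    def fit_pipeline(stages, dataset):
        # Find the last Estimator; stages after it are passed through unfitted.
        index_of_last_estimator = -1
        for i, stage in enumerate(stages):
            if isinstance(stage, Estimator):
                index_of_last_estimator = i
        transformers = []
        for i, stage in enumerate(stages):
            if i <= index_of_last_estimator:
                if isinstance(stage, Transformer):
                    transformers.append(stage)
                    dataset = stage.transform(dataset)
                else:  # must be an Estimator
                    model = stage.fit(dataset)
                    transformers.append(model)
                    if i < index_of_last_estimator:
                        # Only models that feed a later stage need to
                        # transform the training data; the last one is skipped.
                        dataset = model.transform(dataset)
            else:
                transformers.append(stage)
        return transformers  # the stages of the resulting PipelineModel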

python/pyspark/ml/tests.py

Lines changed: 115 additions & 0 deletions
@@ -0,0 +1,115 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Unit tests for Spark ML Python APIs.
+"""
+
+import sys
+
+if sys.version_info[:2] <= (2, 6):
+    try:
+        import unittest2 as unittest
+    except ImportError:
+        sys.stderr.write('Please install unittest2 to test with Python 2.6 or earlier')
+        sys.exit(1)
+else:
+    import unittest
+
+from pyspark.tests import ReusedPySparkTestCase as PySparkTestCase
+from pyspark.sql import SchemaRDD
+from pyspark.ml import Transformer, Estimator, Model, Pipeline
+from pyspark.ml.param import Param
+
+
+class MockDataset(SchemaRDD):
+
+    def __init__(self):
+        self.index = 0
+
+
+class MockTransformer(Transformer):
+
+    def __init__(self):
+        super(MockTransformer, self).__init__()
+        self.fake = Param(self, "fake", "fake", None)
+        self.dataset_index = None
+        self.fake_param_value = None
+
+    def transform(self, dataset, params={}):
+        self.dataset_index = dataset.index
+        if self.fake in params:
+            self.fake_param_value = params[self.fake]
+        dataset.index += 1
+        return dataset
+
+
+class MockEstimator(Estimator):
+
+    def __init__(self):
+        super(MockEstimator, self).__init__()
+        self.fake = Param(self, "fake", "fake", None)
+        self.dataset_index = None
+        self.fake_param_value = None
+        self.model = None
+
+    def fit(self, dataset, params={}):
+        self.dataset_index = dataset.index
+        if self.fake in params:
+            self.fake_param_value = params[self.fake]
+        model = MockModel()
+        self.model = model
+        return model
+
+
+class MockModel(MockTransformer, Model):
+
+    def __init__(self):
+        super(MockModel, self).__init__()
+
+
+class PipelineTests(PySparkTestCase):
+
+    def test_pipeline(self):
+        dataset = MockDataset()
+        estimator0 = MockEstimator()
+        transformer1 = MockTransformer()
+        estimator2 = MockEstimator()
+        transformer3 = MockTransformer()
+        pipeline = Pipeline() \
+            .setStages([estimator0, transformer1, estimator2, transformer3])
+        pipeline_model = pipeline.fit(dataset, {estimator0.fake: 0, transformer1.fake: 1})
+        self.assertEqual(0, estimator0.dataset_index)
+        self.assertEqual(0, estimator0.fake_param_value)
+        model0 = estimator0.model
+        self.assertEqual(0, model0.dataset_index)
+        self.assertEqual(1, transformer1.dataset_index)
+        self.assertEqual(1, transformer1.fake_param_value)
+        self.assertEqual(2, estimator2.dataset_index)
+        model2 = estimator2.model
+        self.assertIsNone(model2.dataset_index, "The model produced by the last estimator should "
+                                                "not be called during fit.")
+        dataset = pipeline_model.transform(dataset)
+        self.assertEqual(2, model0.dataset_index)
+        self.assertEqual(3, transformer1.dataset_index)
+        self.assertEqual(4, model2.dataset_index)
+        self.assertEqual(5, transformer3.dataset_index)
+        self.assertEqual(6, dataset.index)
+
+
+if __name__ == "__main__":
+    unittest.main()
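
The assertions encode a simple counting scheme: every mock transform() bumps dataset.index by one, fits do not, and the last estimator's model is only applied once the fitted PipelineModel is used. A tiny standalone trace of that bookkeeping (hypothetical and dependency-free; the real test uses the pyspark mocks above):

    # Hypothetical, dependency-free trace of the index bookkeeping that
    # test_pipeline asserts; not part of the commit.

    class FakeDataset(object):
        def __init__(self):
            self.index = 0

    def mock_transform(dataset):
        """Stand-in for a mock transform(): record the current index, then advance it."""
        seen = dataset.index
        dataset.index += 1
        return seen

    dataset = FakeDataset()
    # During pipeline.fit(): estimator0 fits at index 0 and its model transforms,
    # transformer1 transforms, estimator2 fits at index 2 but its model is NOT
    # applied (it is the last estimator), and transformer3 is not touched at all.
    assert mock_transform(dataset) == 0   # model0.transform during fit
    assert mock_transform(dataset) == 1   # transformer1.transform during fit
    # During pipeline_model.transform(): every stage runs, in order.
    assert mock_transform(dataset) == 2   # model0
    assert mock_transform(dataset) == 3   # transformer1
    assert mock_transform(dataset) == 4   # model2, its first transform
    assert mock_transform(dataset) == 5   # transformer3
    assert dataset.index == 6             # final value checked by the test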
