 20  20   individual modules.
 21  21   """
 22  22   import os
     23  +import sys
 23  24   import pydoc
 24  25   import shutil
 25  26   import tempfile
 26       -import unittest
     27  +
     28  +if sys.version_info[:2] <= (2, 6):
     29  +    try:
     30  +        import unittest2 as unittest
     31  +    except ImportError:
     32  +        sys.stderr.write('Please install unittest2 to test with Python 2.6 or earlier')
     33  +        sys.exit(1)
     34  +else:
     35  +    import unittest
 27  36
 28  37   from pyspark.sql import SQLContext, IntegerType, Row, ArrayType, StructType, StructField, \
 29  38       UserDefinedType, DoubleType
@@ -83,18 +92,16 @@ def setUpClass(cls):
 83  92           ReusedPySparkTestCase.setUpClass()
 84  93           cls.tempdir = tempfile.NamedTemporaryFile(delete=False)
 85  94           os.unlink(cls.tempdir.name)
     95  +        cls.sqlCtx = SQLContext(cls.sc)
     96  +        cls.testData = [Row(key=i, value=str(i)) for i in range(100)]
     97  +        rdd = cls.sc.parallelize(cls.testData)
     98  +        cls.df = cls.sqlCtx.inferSchema(rdd)
 86  99
 87 100       @classmethod
 88 101       def tearDownClass(cls):
 89 102           ReusedPySparkTestCase.tearDownClass()
 90 103           shutil.rmtree(cls.tempdir.name, ignore_errors=True)
 91 104
 92       -    def setUp(self):
 93       -        self.sqlCtx = SQLContext(self.sc)
 94       -        self.testData = [Row(key=i, value=str(i)) for i in range(100)]
 95       -        rdd = self.sc.parallelize(self.testData)
 96       -        self.df = self.sqlCtx.inferSchema(rdd)
 97       -
 98 105       def test_udf(self):
 99 106           self.sqlCtx.registerFunction("twoArgs", lambda x, y: len(x) + y, IntegerType())
100 107           [row] = self.sqlCtx.sql("SELECT twoArgs('test', 1)").collect()