Skip to content

Commit c0cbe0b

Browse files
author
Davies Liu
committed
fix tests
1 parent 8466d1d commit c0cbe0b

File tree

2 files changed

+4
-4
lines changed

2 files changed

+4
-4
lines changed

python/pyspark/sql/tests.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ def test_serialize_nested_array_and_map(self):
186186
self.assertEqual("2", row.d)
187187

188188
def test_infer_schema(self):
189-
d = [Row(l=[], d={}),
189+
d = [Row(l=[], d={}, s=None),
190190
Row(l=[Row(a=1, b='s')], d={"key": Row(c=1.0, d="2")}, s="")]
191191
rdd = self.sc.parallelize(d)
192192
df = self.sqlCtx.createDataFrame(rdd)

python/pyspark/sql/types.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -604,7 +604,7 @@ def _infer_type(obj):
604604
ExamplePointUDT
605605
"""
606606
if obj is None:
607-
raise ValueError("Can not infer type for None")
607+
return NullType()
608608

609609
if hasattr(obj, '__UDT__'):
610610
return obj.__UDT__
@@ -1014,7 +1014,7 @@ def _verify_type(obj, dataType):
10141014
return
10151015

10161016
_type = type(dataType)
1017-
assert _type in _acceptable_types, "unkown datatype: %s" % dataType
1017+
assert _type in _acceptable_types, "unknown datatype: %s" % dataType
10181018

10191019
# subclass of them can not be deserialized in JVM
10201020
if type(obj) not in _acceptable_types[_type]:
@@ -1032,7 +1032,7 @@ def _verify_type(obj, dataType):
10321032

10331033
elif isinstance(dataType, StructType):
10341034
if len(obj) != len(dataType.fields):
1035-
raise ValueError("Length of object (%d) does not match with"
1035+
raise ValueError("Length of object (%d) does not match with "
10361036
"length of fields (%d)" % (len(obj), len(dataType.fields)))
10371037
for v, f in zip(obj, dataType.fields):
10381038
_verify_type(v, f.dataType)

0 commit comments

Comments (0)