
Commit 832443c

Author: Andrew Or (committed)
Merge branch 'master' of github.com:apache/spark into viz2
2 parents: 429e9e1 + 1ffa8cb

File tree: 15 files changed, +1174 −29 lines


core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 1 addition & 1 deletion
@@ -701,7 +701,7 @@ object SparkSubmit {
 }
 
 /** Provides utility functions to be used inside SparkSubmit. */
-private[deploy] object SparkSubmitUtils {
+private[spark] object SparkSubmitUtils {
 
   // Exposed for testing
   var printStream = SparkSubmit.printStream
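
For context, private[spark] widens access from the org.apache.spark.deploy package to everything under org.apache.spark, which is what lets code outside the deploy package reach SparkSubmitUtils. A minimal, self-contained Scala sketch of the difference (simplified, hypothetical object and package members, not the real SparkSubmitUtils):

package org.apache.spark {
  package deploy {
    // Accessible from anywhere under org.apache.spark.
    // Were this private[deploy], the reference in VisibilityDemo below would not compile.
    private[spark] object SubmitUtilsSketch {
      def describe: String = "visible across the whole spark package"
    }
  }

  package repl {
    object VisibilityDemo {
      def demo: String = deploy.SubmitUtilsSketch.describe
    }
  }
}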

docs/building-spark.md

Lines changed: 0 additions & 4 deletions
@@ -66,7 +66,6 @@ Because HDFS is not protocol-compatible across versions, if you want to read fro
     <tr><th>Hadoop version</th><th>Profile required</th></tr>
   </thead>
   <tbody>
-    <tr><td>0.23.x</td><td>hadoop-0.23</td></tr>
     <tr><td>1.x to 2.1.x</td><td>(none)</td></tr>
     <tr><td>2.2.x</td><td>hadoop-2.2</td></tr>
     <tr><td>2.3.x</td><td>hadoop-2.3</td></tr>
@@ -82,9 +81,6 @@ mvn -Dhadoop.version=1.2.1 -DskipTests clean package
 
 # Cloudera CDH 4.2.0 with MapReduce v1
 mvn -Dhadoop.version=2.0.0-mr1-cdh4.2.0 -DskipTests clean package
-
-# Apache Hadoop 0.23.x
-mvn -Phadoop-0.23 -Dhadoop.version=0.23.7 -DskipTests clean package
 {% endhighlight %}
 
 You can enable the "yarn" profile and optionally set the "yarn.version" property if it is different from "hadoop.version". Spark only supports YARN versions 2.2.0 and later.

docs/hadoop-third-party-distributions.md

Lines changed: 0 additions & 3 deletions
@@ -29,9 +29,6 @@ the _exact_ Hadoop version you are running to avoid any compatibility errors.
       <tr><th>Release</th><th>Version code</th></tr>
       <tr><td>CDH 4.X.X (YARN mode)</td><td>2.0.0-cdh4.X.X</td></tr>
       <tr><td>CDH 4.X.X</td><td>2.0.0-mr1-cdh4.X.X</td></tr>
-      <tr><td>CDH 3u6</td><td>0.20.2-cdh3u6</td></tr>
-      <tr><td>CDH 3u5</td><td>0.20.2-cdh3u5</td></tr>
-      <tr><td>CDH 3u4</td><td>0.20.2-cdh3u4</td></tr>
     </table>
   </td>
   <td>

make-distribution.sh

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ while (( "$#" )); do
     --hadoop)
       echo "Error: '--hadoop' is no longer supported:"
       echo "Error: use Maven profiles and options -Dhadoop.version and -Dyarn.version instead."
-      echo "Error: Related profiles include hadoop-0.23, hdaoop-2.2, hadoop-2.3 and hadoop-2.4."
+      echo "Error: Related profiles include hadoop-2.2, hadoop-2.3 and hadoop-2.4."
       exit_with_usage
       ;;
     --with-yarn)

pom.xml

Lines changed: 0 additions & 14 deletions
@@ -1614,20 +1614,6 @@
        http://hadoop.apache.org/docs/ra.b.c/hadoop-project-dist/hadoop-common/dependency-analysis.html
   -->
 
-    <profile>
-      <id>hadoop-0.23</id>
-      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a Hadoop 0.23.X issue -->
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
-        </dependency>
-      </dependencies>
-      <properties>
-        <hadoop.version>0.23.10</hadoop.version>
-      </properties>
-    </profile>
-
     <profile>
       <id>hadoop-2.2</id>
       <properties>

python/pyspark/ml/tuning.py

Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import itertools

__all__ = ['ParamGridBuilder']


class ParamGridBuilder(object):
    """
    Builder for a param grid used in grid search-based model selection.

    >>> from classification import LogisticRegression
    >>> lr = LogisticRegression()
    >>> output = ParamGridBuilder().baseOn({lr.labelCol: 'l'}) \
            .baseOn([lr.predictionCol, 'p']) \
            .addGrid(lr.regParam, [1.0, 2.0, 3.0]) \
            .addGrid(lr.maxIter, [1, 5]) \
            .addGrid(lr.featuresCol, ['f']) \
            .build()
    >>> expected = [ \
        {lr.regParam: 1.0, lr.featuresCol: 'f', lr.maxIter: 1, lr.labelCol: 'l', lr.predictionCol: 'p'}, \
        {lr.regParam: 2.0, lr.featuresCol: 'f', lr.maxIter: 1, lr.labelCol: 'l', lr.predictionCol: 'p'}, \
        {lr.regParam: 3.0, lr.featuresCol: 'f', lr.maxIter: 1, lr.labelCol: 'l', lr.predictionCol: 'p'}, \
        {lr.regParam: 1.0, lr.featuresCol: 'f', lr.maxIter: 5, lr.labelCol: 'l', lr.predictionCol: 'p'}, \
        {lr.regParam: 2.0, lr.featuresCol: 'f', lr.maxIter: 5, lr.labelCol: 'l', lr.predictionCol: 'p'}, \
        {lr.regParam: 3.0, lr.featuresCol: 'f', lr.maxIter: 5, lr.labelCol: 'l', lr.predictionCol: 'p'}]
    >>> len(output) == len(expected)
    True
    >>> all([m in expected for m in output])
    True
    """

    def __init__(self):
        self._param_grid = {}

    def addGrid(self, param, values):
        """
        Sets the given parameters in this grid to fixed values.
        """
        self._param_grid[param] = values

        return self

    def baseOn(self, *args):
        """
        Sets the given parameters in this grid to fixed values.
        Accepts either a parameter dictionary or a list of (parameter, value) pairs.
        """
        if isinstance(args[0], dict):
            self.baseOn(*args[0].items())
        else:
            for (param, value) in args:
                self.addGrid(param, [value])

        return self

    def build(self):
        """
        Builds and returns all combinations of parameters specified
        by the param grid.
        """
        keys = self._param_grid.keys()
        grid_values = self._param_grid.values()
        return [dict(zip(keys, prod)) for prod in itertools.product(*grid_values)]


if __name__ == "__main__":
    import doctest
    doctest.testmod()

python/run-tests

Lines changed: 1 addition & 0 deletions
@@ -98,6 +98,7 @@ function run_ml_tests() {
     echo "Run ml tests ..."
     run_test "pyspark/ml/feature.py"
     run_test "pyspark/ml/classification.py"
+    run_test "pyspark/ml/tuning.py"
     run_test "pyspark/ml/tests.py"
 }
 

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala

Lines changed: 2 additions & 0 deletions
@@ -27,6 +27,8 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
  */
 class NoSuchTableException extends Exception
 
+class NoSuchDatabaseException extends Exception
+
 /**
  * An interface for looking up relations by name.  Used by an [[Analyzer]].
  */

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala

Lines changed: 31 additions & 6 deletions
@@ -17,12 +17,31 @@
 
 package org.apache.spark.sql.catalyst
 
-import java.io.{PrintWriter, ByteArrayOutputStream, FileInputStream, File}
+import java.io._
 
 import org.apache.spark.util.Utils
 
 package object util {
 
+  /** Silences output to stderr or stdout for the duration of f */
+  def quietly[A](f: => A): A = {
+    val origErr = System.err
+    val origOut = System.out
+    try {
+      System.setErr(new PrintStream(new OutputStream {
+        def write(b: Int) = {}
+      }))
+      System.setOut(new PrintStream(new OutputStream {
+        def write(b: Int) = {}
+      }))
+
+      f
+    } finally {
+      System.setErr(origErr)
+      System.setOut(origOut)
+    }
+  }
+
   def fileToString(file: File, encoding: String = "UTF-8"): String = {
     val inStream = new FileInputStream(file)
     val outStream = new ByteArrayOutputStream
@@ -42,10 +61,9 @@ package object util {
     new String(outStream.toByteArray, encoding)
   }
 
-  def resourceToString(
-      resource:String,
-      encoding: String = "UTF-8",
-      classLoader: ClassLoader = Utils.getSparkClassLoader): String = {
+  def resourceToBytes(
+      resource: String,
+      classLoader: ClassLoader = Utils.getSparkClassLoader): Array[Byte] = {
     val inStream = classLoader.getResourceAsStream(resource)
     val outStream = new ByteArrayOutputStream
     try {
@@ -61,7 +79,14 @@ package object util {
     finally {
       inStream.close()
     }
-    new String(outStream.toByteArray, encoding)
+    outStream.toByteArray
+  }
+
+  def resourceToString(
+      resource:String,
+      encoding: String = "UTF-8",
+      classLoader: ClassLoader = Utils.getSparkClassLoader): String = {
+    new String(resourceToBytes(resource, classLoader), encoding)
   }
 
   def stringToFile(file: File, str: String): File = {
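
A minimal usage sketch of the two helpers added above, quietly and resourceToBytes/resourceToString (the resource name below is hypothetical, for illustration only):

import org.apache.spark.sql.catalyst.util._

object CatalystUtilSketch {
  def main(args: Array[String]): Unit = {
    // Evaluate a block while discarding anything it writes to stdout or stderr.
    val answer = quietly {
      println("this output is suppressed")
      42
    }

    // Read a classpath resource as raw bytes, or decode it to a String.
    // "queries/sample.sql" is a made-up resource; a missing resource would fail here.
    val bytes: Array[Byte] = resourceToBytes("queries/sample.sql")
    val text: String = resourceToString("queries/sample.sql", "UTF-8")

    println(s"answer=$answer, ${bytes.length} bytes, ${text.length} chars")
  }
}
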
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala

Lines changed: 149 additions & 0 deletions
@@ -0,0 +1,149 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.hive.client

import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException}

case class HiveDatabase(
    name: String,
    location: String)

abstract class TableType { val name: String }
case object ExternalTable extends TableType { override val name = "EXTERNAL_TABLE" }
case object IndexTable extends TableType { override val name = "INDEX_TABLE" }
case object ManagedTable extends TableType { override val name = "MANAGED_TABLE" }
case object VirtualView extends TableType { override val name = "VIRTUAL_VIEW" }

case class HiveStorageDescriptor(
    location: String,
    inputFormat: String,
    outputFormat: String,
    serde: String)

case class HivePartition(
    values: Seq[String],
    storage: HiveStorageDescriptor)

case class HiveColumn(name: String, hiveType: String, comment: String)
case class HiveTable(
    specifiedDatabase: Option[String],
    name: String,
    schema: Seq[HiveColumn],
    partitionColumns: Seq[HiveColumn],
    properties: Map[String, String],
    serdeProperties: Map[String, String],
    tableType: TableType,
    location: Option[String] = None,
    inputFormat: Option[String] = None,
    outputFormat: Option[String] = None,
    serde: Option[String] = None) {

  @transient
  private[client] var client: ClientInterface = _

  private[client] def withClient(ci: ClientInterface): this.type = {
    client = ci
    this
  }

  def database: String = specifiedDatabase.getOrElse(sys.error("database not resolved"))

  def isPartitioned: Boolean = partitionColumns.nonEmpty

  def getAllPartitions: Seq[HivePartition] = client.getAllPartitions(this)

  // Hive does not support backticks when passing names to the client.
  def qualifiedName: String = s"$database.$name"
}

/**
 * An externally visible interface to the Hive client.  This interface is shared across both the
 * internal and external classloaders for a given version of Hive and thus must expose only
 * shared classes.
 */
trait ClientInterface {
  /**
   * Runs a HiveQL command using Hive, returning the results as a list of strings.  Each row will
   * result in one string.
   */
  def runSqlHive(sql: String): Seq[String]

  /** Returns the names of all tables in the given database. */
  def listTables(dbName: String): Seq[String]

  /** Returns the name of the active database. */
  def currentDatabase: String

  /** Returns the metadata for the specified database, throwing an exception if it doesn't exist. */
  def getDatabase(name: String): HiveDatabase = {
    getDatabaseOption(name).getOrElse(throw new NoSuchDatabaseException)
  }

  /** Returns the metadata for a given database, or None if it doesn't exist. */
  def getDatabaseOption(name: String): Option[HiveDatabase]

  /** Returns the specified table, or throws [[NoSuchTableException]]. */
  def getTable(dbName: String, tableName: String): HiveTable = {
    getTableOption(dbName, tableName).getOrElse(throw new NoSuchTableException)
  }

  /** Returns the metadata for the specified table, or None if it doesn't exist. */
  def getTableOption(dbName: String, tableName: String): Option[HiveTable]

  /** Creates a table with the given metadata. */
  def createTable(table: HiveTable): Unit

  /** Updates the given table with new metadata. */
  def alterTable(table: HiveTable): Unit

  /** Creates a new database with the given name. */
  def createDatabase(database: HiveDatabase): Unit

  /** Returns all partitions for the given table. */
  def getAllPartitions(hTable: HiveTable): Seq[HivePartition]

  /** Loads a static partition into an existing table. */
  def loadPartition(
      loadPath: String,
      tableName: String,
      partSpec: java.util.LinkedHashMap[String, String], // Hive relies on LinkedHashMap ordering
      replace: Boolean,
      holdDDLTime: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean): Unit

  /** Loads data into an existing table. */
  def loadTable(
      loadPath: String, // TODO URI
      tableName: String,
      replace: Boolean,
      holdDDLTime: Boolean): Unit

  /** Loads new dynamic partitions into an existing table. */
  def loadDynamicPartitions(
      loadPath: String,
      tableName: String,
      partSpec: java.util.LinkedHashMap[String, String], // Hive relies on LinkedHashMap ordering
      replace: Boolean,
      numDP: Int,
      holdDDLTime: Boolean,
      listBucketingEnabled: Boolean): Unit

  /** Used for testing only.  Removes all metadata from this instance of Hive. */
  def reset(): Unit
}
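
For orientation, a short hedged sketch of how a caller might drive this interface; the client parameter stands in for any concrete ClientInterface implementation, which this file deliberately does not provide:

import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.hive.client._

object ClientInterfaceSketch {
  // Combines the exception-throwing lookup (getDatabase) with the
  // Option-returning one (getTableOption) to summarize a table.
  def describeTable(client: ClientInterface, db: String, table: String): String = {
    val database = client.getDatabase(db) // throws NoSuchDatabaseException if absent
    client.getTableOption(db, table) match {
      case Some(t) =>
        s"${t.name} in ${database.name}: ${t.schema.size} columns, partitioned = ${t.isPartitioned}"
      case None =>
        throw new NoSuchTableException
    }
  }
}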
