Skip to content

Commit f896d09

Browse files
ekrivokonmapr (Egor Krivokon)
authored and committed
MapR [SPARK-879] Add changes to examples module to build for Spark-3.x (apache#817)
1 parent 9928ef7 commit f896d09

File tree

4 files changed

+7
-107
lines changed

4 files changed

+7
-107
lines changed

examples/pom.xml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -109,11 +109,11 @@
109109
<artifactId>scopt_${scala.binary.version}</artifactId>
110110
<version>3.7.1</version>
111111
</dependency>
112-
<dependency>
113-
<groupId>${hive.parquet.group}</groupId>
114-
<artifactId>parquet-hadoop-bundle</artifactId>
115-
<scope>provided</scope>
116-
</dependency>
112+
<!-- <dependency>-->
113+
<!-- <groupId>com.twitter</groupId>-->
114+
<!-- <artifactId>parquet-hadoop-bundle</artifactId>-->
115+
<!-- <scope>provided</scope>-->
116+
<!-- </dependency>-->
117117
<!-- <dependency>-->
118118
<!-- <groupId>org.apache.spark</groupId>-->
119119
<!-- <artifactId>spark-streaming-kafka-producer_${scala.binary.version}</artifactId>-->

examples/src/main/scala/org/apache/spark/examples/sql/hive/SparkHiveExample.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ object SparkHiveExample {
3030
case class Record(key: Int, value: String)
3131
// $example off:spark_hive$
3232

33-
def main(args: Array[String]): Unit = {
3433
val kv1Stream = SparkHiveExample.getClass.getResourceAsStream("/kv1.txt")
3534
val kv1File = File.createTempFile("kv1", "txt")
3635
kv1File.deleteOnExit()

examples/src/main/scala/org/apache/spark/examples/streaming/KafkaProducerExample.scala

Lines changed: 0 additions & 99 deletions
This file was deleted.

examples/src/main/scala/org/apache/spark/examples/streaming/V09DirectKafkaWordCount.scala renamed to examples/src/main/scala/org/apache/spark/examples/streaming/V010DirectKafkaWordCount.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import org.apache.kafka.clients.consumer.ConsumerConfig
2222

2323
import org.apache.spark.SparkConf
2424
import org.apache.spark.streaming.{Seconds, StreamingContext}
25-
import org.apache.spark.streaming.kafka09.{
25+
import org.apache.spark.streaming.kafka010.{
2626
ConsumerStrategies,
2727
KafkaUtils,
2828
LocationStrategies
@@ -46,7 +46,7 @@ import org.apache.spark.streaming.kafka09.{
4646
* topic1,topic2 my-consumer-group latest batch-interval pollTimeout
4747
*/
4848

49-
object V09DirectKafkaWordCount {
49+
object V010DirectKafkaWordCount {
5050
def main(args: Array[String]) {
5151
if (args.length < 4) {
5252
System.err.println(s"""

0 commit comments

Comments (0)