Skip to content

Commit 23f751a

Browse files
author
Davies Liu
committed
Fill in SparkR examples in programming guide
sqlCtx -> sqlContext
1 parent 470d745 commit 23f751a

File tree

11 files changed

+1073
-302
lines changed

11 files changed

+1073
-302
lines changed

R/pkg/R/DataFrame.R

Lines changed: 82 additions & 82 deletions
Large diffs are not rendered by default.

R/pkg/R/SQLContext.R

Lines changed: 80 additions & 81 deletions
Large diffs are not rendered by default.

R/pkg/R/pairRDD.R

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -313,7 +313,7 @@ setMethod("groupByKey",
313313
#' @rdname reduceByKey
314314
#' @aliases reduceByKey,RDD,integer-method
315315
setMethod("reduceByKey",
316-
signature(x = "RDD", combineFunc = "ANY", numPartitions = "integer"),
316+
signature(x = "RDD", combineFunc = "ANY", numPartitions = "numeric"),
317317
function(x, combineFunc, numPartitions) {
318318
reduceVals <- function(part) {
319319
vals <- new.env()
@@ -327,7 +327,7 @@ setMethod("reduceByKey",
327327
convertEnvsToList(keys, vals)
328328
}
329329
locallyReduced <- lapplyPartition(x, reduceVals)
330-
shuffled <- partitionBy(locallyReduced, numPartitions)
330+
shuffled <- partitionBy(locallyReduced, as.integer(numPartitions))
331331
lapplyPartition(shuffled, reduceVals)
332332
})
333333

R/pkg/R/sparkR.R

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -222,19 +222,19 @@ sparkR.init <- function(
222222
#' @examples
223223
#'\dontrun{
224224
#' sc <- sparkR.init()
225-
#' sqlCtx <- sparkRSQL.init(sc)
225+
#' sqlContext <- sparkRSQL.init(sc)
226226
#'}
227227

228228
sparkRSQL.init <- function(jsc) {
229229
if (exists(".sparkRSQLsc", envir = .sparkREnv)) {
230230
return(get(".sparkRSQLsc", envir = .sparkREnv))
231231
}
232232

233-
sqlCtx <- callJStatic("org.apache.spark.sql.api.r.SQLUtils",
233+
sqlContext <- callJStatic("org.apache.spark.sql.api.r.SQLUtils",
234234
"createSQLContext",
235235
jsc)
236-
assign(".sparkRSQLsc", sqlCtx, envir = .sparkREnv)
237-
sqlCtx
236+
assign(".sparkRSQLsc", sqlContext, envir = .sparkREnv)
237+
sqlContext
238238
}
239239

240240
#' Initialize a new HiveContext.
@@ -246,7 +246,7 @@ sparkRSQL.init <- function(jsc) {
246246
#' @examples
247247
#'\dontrun{
248248
#' sc <- sparkR.init()
249-
#' sqlCtx <- sparkRHive.init(sc)
249+
#' sqlContext <- sparkRHive.init(sc)
250250
#'}
251251

252252
sparkRHive.init <- function(jsc) {

R/pkg/inst/profile/shell.R

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -24,8 +24,8 @@
2424
library(SparkR)
2525
sc <- sparkR.init(Sys.getenv("MASTER", unset = ""))
2626
assign("sc", sc, envir=.GlobalEnv)
27-
sqlCtx <- sparkRSQL.init(sc)
28-
assign("sqlCtx", sqlCtx, envir=.GlobalEnv)
27+
sqlContext <- sparkRSQL.init(sc)
28+
assign("sqlContext", sqlContext, envir=.GlobalEnv)
2929
cat("\n Welcome to SparkR!")
30-
cat("\n Spark context is available as sc, SQL context is available as sqlCtx\n")
30+
cat("\n Spark context is available as sc, SQL context is available as sqlContext\n")
3131
}

0 commit comments

Comments (0)