Skip to content

Commit 9aa06c5

Browse files
committed
Merge pull request #7 from marmbrus/docFixes
Fix broken scala doc links / warnings.
2 parents b1acb36 + 7eff191 commit 9aa06c5

24 files changed

+123
-95
lines changed

build.sbt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@ version := "0.1-SNAPSHOT"
88

99
scalaVersion := "2.10.3"
1010

11+
scalacOptions ++= Seq("-deprecation", "-feature", "-unchecked")
12+
1113
resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
1214

1315
libraryDependencies += "org.apache.spark" %% "spark-core" % "0.9.0-incubating-SNAPSHOT"

src/main/scala/catalyst/analysis/Analyzer.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -67,8 +67,8 @@ class Analyzer(catalog: Catalog, registry: FunctionRegistry, caseSensitive: Bool
6767
}
6868

6969
/**
70-
* Replaces [[UnresolvedAttribute]]s with concrete [[AttributeReference]]s
71-
* from a logical plan node's children.
70+
* Replaces [[UnresolvedAttribute]]s with concrete
71+
* [[expressions.AttributeReference AttributeReferences]] from a logical plan node's children.
7272
*/
7373
object ResolveReferences extends Rule[LogicalPlan] {
7474
def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
@@ -85,7 +85,7 @@ class Analyzer(catalog: Catalog, registry: FunctionRegistry, caseSensitive: Bool
8585
}
8686

8787
/**
88-
* Replaces [[UnresolvedFunction]]s with concrete [[Expression]]s.
88+
* Replaces [[UnresolvedFunction]]s with concrete [[expressions.Expression Expressions]].
8989
*/
9090
object ResolveFunctions extends Rule[LogicalPlan] {
9191
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
@@ -141,7 +141,7 @@ class Analyzer(catalog: Catalog, registry: FunctionRegistry, caseSensitive: Bool
141141
}
142142

143143
/**
144-
* Returns true if [[exprs]] contains a star.
144+
* Returns true if `exprs` contains a [[Star]].
145145
*/
146146
protected def containsStar(exprs: Seq[NamedExpression]): Boolean =
147147
exprs.collect { case _: Star => true }.nonEmpty

src/main/scala/catalyst/analysis/typeCoercion.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -43,11 +43,11 @@ object ConvertNaNs extends Rule[LogicalPlan] {
4343
* Loosely based on rules from "Hadoop: The Definitive Guide" 2nd edition, by Tom White
4444
*
4545
* The implicit conversion rules can be summarized as follows:
46-
* $ - Any integral numeric type can be implicitly converted to a wider type.
47-
* $ - All the integral numeric types, FLOAT, and (perhaps surprisingly) STRING can be implicitly
46+
* - Any integral numeric type can be implicitly converted to a wider type.
47+
* - All the integral numeric types, FLOAT, and (perhaps surprisingly) STRING can be implicitly
4848
* converted to DOUBLE.
49-
* $ - TINYINT, SMALLINT, and INT can all be converted to FLOAT.
50-
* $ - BOOLEAN types cannot be converted to any other type.
49+
* - TINYINT, SMALLINT, and INT can all be converted to FLOAT.
50+
* - BOOLEAN types cannot be converted to any other type.
5151
*
5252
* String conversions are handled by the PromoteStrings rule.
5353
*/

src/main/scala/catalyst/dsl.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
package catalyst
22

3+
import scala.language.implicitConversions
4+
35
import analysis.UnresolvedAttribute
46
import expressions._
57
import plans._

src/main/scala/catalyst/errors/package.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ package object errors {
1717
}
1818

1919
/**
20-
* Wraps any exceptions that are thrown while executing [[f]] in an [[OptimizationException]], attaching the provided
21-
* [[tree]].
20+
* Wraps any exceptions that are thrown while executing `f` in an [[OptimizationException]],
21+
* attaching the provided `tree`.
2222
*/
2323
def attachTree[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: => A): A = {
2424
try f catch {
@@ -27,8 +27,8 @@ package object errors {
2727
}
2828

2929
/**
30-
* Executes [[f]] which is expected to throw an OptimizationException. The first tree encountered in the stack
31-
* of exceptions of type [[TreeType]] is returned.
30+
* Executes `f` which is expected to throw an OptimizationException. The first tree encountered in
31+
* the stack of exceptions of type `TreeType` is returned.
3232
*/
3333
def getTree[TreeType <: TreeNode[_]](f: => Unit): TreeType = ??? // TODO: Implement
3434
}

src/main/scala/catalyst/examples/SchemaRddExample.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,14 @@ object SchemaRddExample {
1515
("12/2/2013", "WARN: blah blah") :: Nil
1616
)
1717

18+
val dateRegEx = "(\\d+)\\/(\\d+)\\/(\\d+)".r
1819
/**
1920
* Example using the symbol based API. In this example, the attribute names that are passed to
2021
* the first constructor are resolved during catalyst's analysis phase. Then at runtime only
2122
* the requested attributes are passed to the UDF. Since this analysis occurs at runtime,
2223
* the developer must manually annotate their function with the correct argument types.
2324
*/
24-
val filtered = testLogs.filter('date)((date: String) => new java.util.Date(date).getDay == 1)
25+
val filtered = testLogs.filter('date) { case dateRegEx(_,day,_) => day.toInt == 1 }
2526
filtered.toRdd.collect.foreach(println)
2627

2728

@@ -35,7 +36,7 @@ object SchemaRddExample {
3536
* being resolved at runtime. Thus, we cannot return typed results. As such all dynamic calls
3637
* always return strings.
3738
*/
38-
val filtered2 = testLogs.filter(row => new java.util.Date(row.date).getDay == 1)
39+
val filtered2 = testLogs.filter( _.date match { case dateRegEx(_,day,_) => day.toInt == 1 } )
3940
filtered2.toRdd.collect.foreach(println)
4041
}
4142
}

src/main/scala/catalyst/examples/ViewsExample.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
package catalyst
2+
package examples
23

34
import catalyst.analysis.UnresolvedRelation
45
import catalyst.plans.Inner

src/main/scala/catalyst/execution/MetastoreCatalog.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ import org.apache.hadoop.hive.conf.HiveConf
55
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Partition, Table, StorageDescriptor, SerDeInfo}
66
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient
77
import org.apache.hadoop.hive.ql.plan.TableDesc
8-
import org.apache.hadoop.hive.serde2.Deserializer
8+
import org.apache.hadoop.hive.serde2.AbstractDeserializer
99
import org.apache.hadoop.mapred.InputFormat
1010

1111
import analysis.Catalog
@@ -100,7 +100,7 @@ case class MetastoreRelation(databaseName: String, tableName: String, alias: Opt
100100
}
101101

102102
val tableDesc = new TableDesc(
103-
Class.forName(table.getSd.getSerdeInfo.getSerializationLib).asInstanceOf[Class[Deserializer]],
103+
Class.forName(table.getSd.getSerdeInfo.getSerializationLib).asInstanceOf[Class[AbstractDeserializer]],
104104
Class.forName(table.getSd.getInputFormat).asInstanceOf[Class[InputFormat[_,_]]],
105105
Class.forName(table.getSd.getOutputFormat),
106106
hiveQlTable.getMetadata

src/main/scala/catalyst/execution/SharkContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ class SharkContext(
3030
import SharkContext._
3131

3232
/**
33-
* Execute the command and return the results as a sequence. Each element
33+
* Execute the command using Hive and return the results as a sequence. Each element
3434
* in the sequence is one row.
3535
*/
3636
def runHive(cmd: String, maxRows: Int = 1000): Seq[String] = {

src/main/scala/catalyst/execution/SharkInstance.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ package catalyst
22
package execution
33

44
import java.io.File
5+
import scala.language.implicitConversions
56

67
import analysis.{SimpleAnalyzer, Analyzer}
78
import frontend.hive._

0 commit comments

Comments (0)