Skip to content
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions .bazelrc
Original file line number Diff line number Diff line change
@@ -1,5 +1,17 @@
try-import %workspace%/.bazelrc.local

# To build with Scala 2.12, pass "--config scala_2.12" to "bazel build"
common:scala_2.12 --repo_env=SCALA_VERSION=2.12.18
common:scala_2.12 --define=SCALA_VERSION=2.12.18
common:scala_2.13 --repo_env=SCALA_VERSION=2.13.12
common:scala_2.13 --define=SCALA_VERSION=2.13.12
Comment on lines +4 to +7
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💡 Verification agent

🧩 Analysis chain

Verify Scala version compatibility.

Let's check if these are the latest compatible versions.


🌐 Web query:

What are the latest stable versions of Scala 2.12 and 2.13?

💡 Result:

As of February 2025, the latest stable versions of Scala are:

| Series | Latest Version | Release Date | Notable Features/Changes |
| --- | --- | --- | --- |
| Scala 2.13 | 2.13.16 | January 15, 2025 | Breaking change: `.tail`/`.init` now throw exceptions on empty strings[1]; JDK 24 support via ASM upgrade[1]; JLine upgrades for REPL on Windows[1] |
| Scala 2.12 | 2.12.20 | September 4, 2024 | Security vulnerability fixes[7]; compatibility with newer JVMs[7]; minimal maintenance continues for sbt compatibility[4] |

Key details:

  • Scala 2.13.x remains actively maintained with regular updates aligning with Scala 3 features and JDK support[1][7]
  • Scala 2.12.x receives security patches and JVM compatibility updates but minimal feature development[4][7]
  • Both versions maintain binary compatibility within their respective series[5][7]

For new projects, consider migrating to Scala 3.x series which offers improved features and tooling support[3][4]. Existing projects should upgrade to these latest versions for security fixes and JDK compatibility improvements[7][10].

Citations:


Action: Update Scala Versions in .bazelrc

  • Currently set: 2.12.18 and 2.13.12.
  • Latest stable: 2.12.20 and 2.13.16 (Feb 2025).
  • Verify if updating is feasible and document any migration details.


# Default scala version to 2.12
# To set a different default Scala version, add the following to .bazelrc.local:
# common --config scala_2.12
common --repo_env=SCALA_VERSION=2.13.12
common --define=SCALA_VERSION=2.13.12

build --java_language_version=11
build --java_runtime_version=11
build --remote_cache=https://storage.googleapis.com/zipline-bazel-cache
Expand Down
3 changes: 2 additions & 1 deletion .scalafmt.conf
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
version = 2.5.0
version = 3.8.3
runner.dialect = scala212
align.openParenCallSite = true
align.openParenDefnSite = true
danglingParentheses.defnSite = false
Expand Down
22 changes: 5 additions & 17 deletions WORKSPACE
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
workspace(name = "chronon")

# Scala version used across the project
SCALA_VERSION = "2.12.18"

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# Load scala version from the config
load("//:scala_config.bzl", "scala_version")
scala_version(name = "scala_config")
load("@scala_config//:version.bzl", "SCALA_VERSION")

# Contains useful bazel utility functions and rules
http_archive(
name = "bazel_skylib",
Expand Down Expand Up @@ -74,22 +76,8 @@ http_archive(

# Initialize Scala with specific version support
load("@io_bazel_rules_scala//:scala_config.bzl", "scala_config")

scala_config(scala_version = SCALA_VERSION)

load("@io_bazel_rules_scala//scala:scala_maven_import_external.bzl", "scala_maven_import_external")

scala_maven_import_external(
name = "scala_compiler_source_2_12_18",
artifact = "org.scala-lang:scala-compiler:%s:sources" % SCALA_VERSION,
artifact_sha256 = "f79ee80f140218253f2a38c9d73f8a9b552d06afce7a5f61cf08079a388e21df",
licenses = ["notice"],
server_urls = [
"https://repo1.maven.org/maven2",
"https://mirror.bazel.build/repo1.maven.org/maven2",
],
)

load("@io_bazel_rules_scala//scala:scala.bzl", "scala_repositories")
scala_repositories()

Expand Down
10 changes: 8 additions & 2 deletions aggregator/BUILD.bazel
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
scala_library(
name = "lib",
srcs = glob(["src/main/**/*.scala"]),
format = True,
format = select({
"//tools/config:scala_2_13": False, # Disable for 2.13
"//conditions:default": True, # Enable for other versions
}),
visibility = ["//visibility:public"],
deps = [
"//api:lib",
Expand Down Expand Up @@ -50,7 +53,10 @@ test_deps = [
scala_library(
name = "test_lib",
srcs = glob(["src/test/**/*.scala"]),
format = True,
format = select({
"//tools/config:scala_2_13": False, # Disable for 2.13
"//conditions:default": True, # Enable for other versions
}),
visibility = ["//visibility:public"],
deps = test_deps,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,12 @@ object StatsGenerator {
val comparisonSketch = KllFloatsSketch.heapify(Memory.wrap(comparison.asInstanceOf[Array[Byte]]))
val binsToDoubles = (0 to bins).map(_.toDouble / bins).toArray
val keySet =
referenceSketch.getQuantiles(binsToDoubles).union(comparisonSketch.getQuantiles(binsToDoubles)).distinct.sorted
referenceSketch
.getQuantiles(binsToDoubles)
.union(comparisonSketch.getQuantiles(binsToDoubles))
.distinct
.sorted
.toArray
val referencePMF = regularize(referenceSketch.getPMF(keySet), eps)
val comparisonPMF = regularize(comparisonSketch.getPMF(keySet), eps)
var psi = 0.0
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import org.junit.Assert._
import org.scalatest.flatspec.AnyFlatSpec

import java.util
import scala.jdk.CollectionConverters._
import ai.chronon.api.ScalaJavaConversions._

class ApproxHistogramTest extends AnyFlatSpec {
it should "histogram" in {
Expand Down Expand Up @@ -144,10 +144,10 @@ class ApproxHistogramTest extends AnyFlatSpec {
assertTrue(ir.sketch.isDefined)

val normalized = approxHistogram.denormalize(approxHistogram.normalize(ir))
assertEquals(expected, approxHistogram.finalize(normalized).asScala)
assertEquals(expected, approxHistogram.finalize(normalized).toScala)
}

def toHashMap[T](map: Map[T, Long]): util.HashMap[T, Long] = new util.HashMap[T, Long](map.asJava)
def toHashMap[T](map: Map[T, Long]): util.HashMap[T, Long] = new util.HashMap[T, Long](map.toJava)

def makeIr[T](agg: ApproxHistogram[T], counts: Map[T, Long]): ApproxHistogramIr[T] = {
val values = counts.toSeq.sortBy(_._2)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import org.junit.Assert._
import org.scalatest.flatspec.AnyFlatSpec

import java.util
import scala.jdk.CollectionConverters._
import ai.chronon.api.ScalaJavaConversions._

class FrequentItemsTest extends AnyFlatSpec {
it should "non power of two and truncate" in {
Expand Down Expand Up @@ -157,5 +157,5 @@ class FrequentItemsTest extends AnyFlatSpec {
(sketch, ir)
}

def toHashMap[T](map: Map[T, Long]): java.util.HashMap[T, Long] = new java.util.HashMap[T, Long](map.asJava)
def toHashMap[T](map: Map[T, Long]): java.util.HashMap[T, Long] = new java.util.HashMap[T, Long](map.toJava)
}
10 changes: 8 additions & 2 deletions api/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,10 @@ scala_library(
"src/main/**/*.scala",
"src/main/**/*.java",
]),
format = True,
format = select({
"//tools/config:scala_2_13": False, # Disable for 2.13
"//conditions:default": True, # Enable for other versions
}),
visibility = ["//visibility:public"],
deps = [
":thrift_java",
Expand Down Expand Up @@ -70,7 +73,10 @@ test_deps = [
scala_library(
name = "test-lib",
srcs = glob(["src/test/**/*.scala"]),
format = True,
format = select({
"//tools/config:scala_2_13": False, # Disable for 2.13
"//conditions:default": True, # Enable for other versions
}),
visibility = ["//visibility:public"],
deps = test_deps,
)
Expand Down
2 changes: 1 addition & 1 deletion api/src/main/scala/ai/chronon/api/Extensions.scala
Original file line number Diff line number Diff line change
Expand Up @@ -917,7 +917,7 @@ object Extensions {

def outputColumnsByGroup: Map[String, Array[String]] = {
val preDeriveCols = (joinPartColumns ++ externalPartColumns)
val preDerivedWithoutRenamed = preDeriveCols.mapValues(_.filterNot(renamedColumns.contains))
val preDerivedWithoutRenamed = preDeriveCols.mapValues(_.filterNot(renamedColumns.contains)).toMap
val derivedColumns: Array[String] = Option(join.derivations) match {
case Some(derivations) => derivations.toScala.map { _.getName }.filter(_ == "*").toArray
case None => Array.empty
Expand Down
13 changes: 2 additions & 11 deletions api/src/main/scala/ai/chronon/api/ScalaJavaConversions.scala
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package ai.chronon.api

import scala.collection.parallel.ParSeq
import scala.jdk.CollectionConverters._
import scala.collection.Seq

object ScalaJavaConversions {

Expand All @@ -17,7 +17,7 @@ object ScalaJavaConversions {
if (list == null) {
null
} else {
list.asScala
list.asScala.toSeq
}
}

Expand Down Expand Up @@ -65,15 +65,6 @@ object ScalaJavaConversions {
}
}
}
implicit class IterableOps[T](it: Iterable[T]) {
def parallel: ParSeq[T] = {
if (it == null) {
null
} else {
it.toSeq.par
}
}
}
implicit class MapOps[K, V](map: java.util.Map[K, V]) {
def toScala: Map[K, V] = {
if (map == null) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package ai.chronon.api.test

import ai.chronon.api.Constants
import ai.chronon.api.ScalaJavaConversions.JListOps
import ai.chronon.api.ScalaJavaConversions._
import ai.chronon.api.ThriftJsonCodec
import ai.chronon.observability.TileDriftSeries
import ai.chronon.observability.TileSummarySeries
Expand All @@ -10,7 +10,6 @@ import org.scalatest.matchers.should.Matchers

import java.lang.{Double => JDouble}
import java.lang.{Long => JLong}
import scala.jdk.CollectionConverters.asScalaBufferConverter

class TileSeriesSerializationTest extends AnyFlatSpec with Matchers {

Expand Down Expand Up @@ -40,7 +39,7 @@ class TileSeriesSerializationTest extends AnyFlatSpec with Matchers {
val series =
ThriftJsonCodec.fromJsonStr[TileDriftSeries](json, true, classOf[TileDriftSeries])(manifest[TileDriftSeries])

val drifts = series.getPercentileDriftSeries.asScala.toList
val drifts = series.getPercentileDriftSeries.toScala
drifts.size should be(5)
drifts(0) should be(0.1)
drifts(1) should be(Constants.magicNullDouble)
Expand Down Expand Up @@ -70,7 +69,7 @@ class TileSeriesSerializationTest extends AnyFlatSpec with Matchers {
val series = ThriftJsonCodec.fromJsonStr[TileSummarySeries](json, true, classOf[TileSummarySeries])(
manifest[TileSummarySeries])

val counts = series.getCount.asScala.toList
val counts = series.getCount.toScala
counts.size should be(5)
counts(0) should be(100L)
counts(1) should be(Constants.magicNullLong)
Expand Down
15 changes: 10 additions & 5 deletions cloud_aws/BUILD.bazel
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
scala_library(
name = "cloud_aws_lib",
srcs = glob(["src/main/**/*.scala"]),
format = select({
"//tools/config:scala_2_13": False, # Disable for 2.13
"//conditions:default": True, # Enable for other versions
}),
visibility = ["//visibility:public"],
format = True,
deps = [
maven_artifact("software.amazon.awssdk:dynamodb"),
maven_artifact("software.amazon.awssdk:regions"),
Expand All @@ -12,10 +15,10 @@ scala_library(
maven_artifact("com.google.guava:guava"),
maven_artifact("org.slf4j:slf4j-api"),
maven_scala_artifact("org.scala-lang.modules:scala-collection-compat"),
"//spark:lib",
"//online:lib",
"//api:lib",
"//api:thrift_java",
"//online:lib",
"//spark:lib",
],
)

Expand All @@ -37,12 +40,14 @@ test_deps = [
scala_library(
name = "test_lib",
srcs = glob(["src/test/**/*.scala"]),
format = True,
format = select({
"//tools/config:scala_2_13": False, # Disable for 2.13
"//conditions:default": True, # Enable for other versions
}),
visibility = ["//visibility:public"],
deps = test_deps,
)


scala_test_suite(
name = "tests",
srcs = glob(["src/test/**/*.scala"]),
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ai.chronon.integrations.aws

import ai.chronon.api.Constants
import ai.chronon.api.ScalaJavaConversions._
import ai.chronon.online.KVStore
import ai.chronon.online.KVStore.GetResponse
import ai.chronon.online.KVStore.ListRequest
Expand Down Expand Up @@ -33,10 +34,11 @@ import java.time.Instant
import java.util
import java.util.concurrent.ConcurrentHashMap
import scala.concurrent.Future
import scala.jdk.CollectionConverters._
import scala.util.Success
import scala.util.Try

import scala.collection.Seq

object DynamoDBKVStoreConstants {
// Read capacity units to configure DynamoDB table with
val readCapacityUnits = "read-capacity"
Expand Down Expand Up @@ -97,8 +99,8 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {

val request =
CreateTableRequest.builder
.attributeDefinitions(keyAttributes.toList.asJava)
.keySchema(keySchema.toList.asJava)
.attributeDefinitions(keyAttributes.toList.toJava)
.keySchema(keySchema.toList.toJava)
.provisionedThroughput(ProvisionedThroughput.builder.readCapacityUnits(rcu).writeCapacityUnits(wcu).build)
.tableName(dataset)
.build
Expand Down Expand Up @@ -130,7 +132,7 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
val (getLookups, queryLookups) = requests.partition(r => r.startTsMillis.isEmpty)
val getItemRequestPairs = getLookups.map { req =>
val keyAttributeMap = primaryKeyMap(req.keyBytes)
(req, GetItemRequest.builder.key(keyAttributeMap.asJava).tableName(req.dataset).build)
(req, GetItemRequest.builder.key(keyAttributeMap.toJava).tableName(req.dataset).build)
}

val queryRequestPairs = queryLookups.map { req =>
Expand All @@ -149,7 +151,7 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
dynamoDbClient.getItem(getItemReq).item()
}

val response = item.map(i => List(i).asJava)
val response = item.map(i => List(i).toJava)
val resultValue: Try[Seq[TimedValue]] = extractTimedValues(response, defaultTimestamp)
GetResponse(req, resultValue)
}
Expand Down Expand Up @@ -183,7 +185,7 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {

val scanBuilder = ScanRequest.builder.tableName(request.dataset).limit(listLimit)
val scanRequest = maybeExclusiveStartKeyAttribute match {
case Some(value) => scanBuilder.exclusiveStartKey(Map(partitionKeyColumn -> value).asJava).build
case Some(value) => scanBuilder.exclusiveStartKey(Map(partitionKeyColumn -> value).toJava).build
case _ => scanBuilder.build
}

Expand All @@ -195,7 +197,7 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
val noPagesLeftResponse = ListResponse(request, resultElements, Map.empty)
val listResponse = tryScanResponse match {
case Success(scanResponse) if scanResponse.hasLastEvaluatedKey =>
val lastEvalKey = scanResponse.lastEvaluatedKey().asScala.get(partitionKeyColumn)
val lastEvalKey = scanResponse.lastEvaluatedKey().toScala.get(partitionKeyColumn)
lastEvalKey match {
case Some(av) => ListResponse(request, resultElements, Map(continuationKey -> av.b().asByteArray()))
case _ => noPagesLeftResponse
Expand All @@ -218,7 +220,7 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
req.tsMillis.map(ts => Map(sortKeyColumn -> AttributeValue.builder.n(ts.toString).build)).getOrElse(Map.empty)

val putItemReq =
PutItemRequest.builder.tableName(req.dataset).item((attributeMap ++ tsMap).asJava).build()
PutItemRequest.builder.tableName(req.dataset).item((attributeMap ++ tsMap).toJava).build()
(req.dataset, putItemReq)
}

Expand Down Expand Up @@ -272,8 +274,8 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
private def extractTimedValues(response: Try[util.List[util.Map[String, AttributeValue]]],
defaultTimestamp: Long): Try[Seq[TimedValue]] = {
response.map { ddbResponseList =>
ddbResponseList.asScala.map { ddbResponseMap =>
val responseMap = ddbResponseMap.asScala
ddbResponseList.toScala.map { ddbResponseMap =>
val responseMap = ddbResponseMap.toScala
if (responseMap.isEmpty)
throw new Exception("Empty response returned from DynamoDB")

Expand All @@ -290,8 +292,8 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
private def extractListValues(tryScanResponse: Try[ScanResponse]): Try[Seq[ListValue]] = {
tryScanResponse.map { response =>
val ddbResponseList = response.items()
ddbResponseList.asScala.map { ddbResponseMap =>
val responseMap = ddbResponseMap.asScala
ddbResponseList.toScala.map { ddbResponseMap =>
val responseMap = ddbResponseMap.toScala
if (responseMap.isEmpty)
throw new Exception("Empty response returned from DynamoDB")

Expand Down Expand Up @@ -333,8 +335,8 @@ class DynamoDBKVStoreImpl(dynamoDbClient: DynamoDbClient) extends KVStore {
QueryRequest.builder
.tableName(request.dataset)
.keyConditionExpression(s"$partitionAlias = :partitionKeyValue AND $timeAlias BETWEEN :start AND :end")
.expressionAttributeNames(attrNameAliasMap.asJava)
.expressionAttributeValues(attrValuesMap.asJava)
.expressionAttributeNames(attrNameAliasMap.toJava)
.expressionAttributeValues(attrValuesMap.toJava)
.build
}
}
Loading