Description
Hello,
I have an ElastiCache (Redis) cluster set up with SSL (in-transit encryption) enabled. The same code runs fine against a cache without SSL, but fails with the error below when run against the SSL-enabled cache.
Below is my Spark conf:
SparkConf sparkConf = new SparkConf().setAppName(appName);
sparkConf.set("spark.redis.host", cacheHost);
sparkConf.set("spark.redis.port", "6379");
sparkConf.set("spark.redis.ssl", "true");
SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate();
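For context, a minimal self-contained version of the job looks like this. The endpoint and the spark.redis.auth token are placeholders, not my real values, and the auth line is only relevant if an AUTH token is configured on the cluster:

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class RedisSslWrite {
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf().setAppName("redis-ssl-test");
        // Placeholder; in my case this is the ElastiCache primary endpoint.
        sparkConf.set("spark.redis.host", "my-cache.xxxxxx.use1.cache.amazonaws.com");
        sparkConf.set("spark.redis.port", "6379");
        sparkConf.set("spark.redis.ssl", "true");
        // Uncomment only if the cluster has an AUTH token configured:
        // sparkConf.set("spark.redis.auth", "<auth-token>");

        SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate();

        // A one-row DataFrame written through the spark-redis data source;
        // the failure happens as soon as the write triggers createRelation.
        Dataset<Row> df = spark.sql("SELECT 'k1' AS name, 'v1' AS value");
        df.write()
          .format("org.apache.spark.sql.redis")
          .option("table", "test")
          .save();
    }
}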
Here is the full exception:
redis.clients.jedis.exceptions.JedisConnectionException: java.net.SocketTimeoutException: Read timed out
at redis.clients.jedis.util.RedisInputStream.ensureFill(RedisInputStream.java:205)
at redis.clients.jedis.util.RedisInputStream.readByte(RedisInputStream.java:43)
at redis.clients.jedis.Protocol.process(Protocol.java:155)
at redis.clients.jedis.Protocol.read(Protocol.java:220)
at redis.clients.jedis.Connection.readProtocolWithCheckingBroken(Connection.java:318)
at redis.clients.jedis.Connection.getBinaryBulkReply(Connection.java:255)
at redis.clients.jedis.Connection.getBulkReply(Connection.java:245)
at redis.clients.jedis.BinaryJedis.info(BinaryJedis.java:2942)
at com.redislabs.provider.redis.RedisConfig.clusterEnabled(RedisConfig.scala:194)
at com.redislabs.provider.redis.RedisConfig.getNodes(RedisConfig.scala:317)
at com.redislabs.provider.redis.RedisConfig.getHosts(RedisConfig.scala:233)
at com.redislabs.provider.redis.RedisConfig.&lt;init&gt;(RedisConfig.scala:132)
at org.apache.spark.sql.redis.RedisSourceRelation.&lt;init&gt;(RedisSourceRelation.scala:34)
at org.apache.spark.sql.redis.DefaultSource.createRelation(DefaultSource.scala:21)
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:156)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:285)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:271)
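Judging by the stack trace, the connector opens a plain Jedis connection and calls INFO while building RedisConfig (RedisConfig.clusterEnabled). To check whether the TLS handshake itself is the problem, a standalone Jedis check along these lines exercises the same call, independent of Spark (host is again a placeholder for the cluster endpoint):

import redis.clients.jedis.Jedis;

public class JedisSslSmokeTest {
    public static void main(String[] args) {
        // Placeholder; replace with the actual ElastiCache primary endpoint.
        String host = "my-cache.xxxxxx.use1.cache.amazonaws.com";
        // The third constructor argument enables TLS, matching spark.redis.ssl=true.
        try (Jedis jedis = new Jedis(host, 6379, true)) {
            // spark-redis issues INFO while constructing RedisConfig, so this
            // reproduces the call that times out in the stack trace above.
            System.out.println(jedis.info());
        }
    }
}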