@@ -84,21 +84,11 @@ class SparkHadoopUtil extends Logging {
84
84
// the behavior of the old implementation of this code, for backwards compatibility.
85
85
if (conf != null ) {
86
86
// Explicitly check for S3 environment variables
87
- val keyId = System .getenv(" AWS_ACCESS_KEY_ID" )
88
- val accessKey = System .getenv(" AWS_SECRET_ACCESS_KEY" )
89
- if (keyId != null && accessKey != null ) {
90
- hadoopConf.set(" fs.s3.awsAccessKeyId" , keyId)
91
- hadoopConf.set(" fs.s3n.awsAccessKeyId" , keyId)
92
- hadoopConf.set(" fs.s3a.access.key" , keyId)
93
- hadoopConf.set(" fs.s3.awsSecretAccessKey" , accessKey)
94
- hadoopConf.set(" fs.s3n.awsSecretAccessKey" , accessKey)
95
- hadoopConf.set(" fs.s3a.secret.key" , accessKey)
96
-
97
- val sessionToken = System .getenv(" AWS_SESSION_TOKEN" )
98
- if (sessionToken != null ) {
99
- hadoopConf.set(" fs.s3a.session.token" , sessionToken)
100
- }
101
- }
87
+ initWithAWSConf(hadoopConf)
88
+
89
+ // Check for Qiniu access key / secret key in the environment
90
+ initWithQiniuConf(hadoopConf)
91
+
102
92
// Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
103
93
conf.getAll.foreach { case (key, value) =>
104
94
if (key.startsWith(" spark.hadoop." )) {
@@ -110,6 +100,33 @@ class SparkHadoopUtil extends Logging {
110
100
}
111
101
}
112
102
103
+ private def initWithAWSConf (hadoopConf : Configuration ) = {
104
+ if (System .getenv(" AWS_ACCESS_KEY_ID" ) != null &&
105
+ System .getenv(" AWS_SECRET_ACCESS_KEY" ) != null ) {
106
+ val keyId = System .getenv(" AWS_ACCESS_KEY_ID" )
107
+ val accessKey = System .getenv(" AWS_SECRET_ACCESS_KEY" )
108
+
109
+ hadoopConf.set(" fs.s3.awsAccessKeyId" , keyId)
110
+ hadoopConf.set(" fs.s3n.awsAccessKeyId" , keyId)
111
+ hadoopConf.set(" fs.s3a.access.key" , keyId)
112
+ hadoopConf.set(" fs.s3.awsSecretAccessKey" , accessKey)
113
+ hadoopConf.set(" fs.s3n.awsSecretAccessKey" , accessKey)
114
+ hadoopConf.set(" fs.s3a.secret.key" , accessKey)
115
+ }
116
+ }
117
+
118
+ private def initWithQiniuConf (hadoopConf : Configuration ) = {
119
+ if (System .getenv(" QINIU_ACCESS_KEY" ) != null &&
120
+ System .getenv(" QINIU_SECRET_KEY" ) != null ) {
121
+ val accessKey = System .getenv(" QINIU_ACCESS_KEY" )
122
+ val secretKey = System .getenv(" QINIU_SECRET_KEY" )
123
+
124
+ hadoopConf.set(" fs.qiniu.access.key" , accessKey)
125
+ hadoopConf.set(" fs.qiniu.secret.key" , secretKey)
126
+ logInfo(" Success init conf with qiniu ak sk." )
127
+ }
128
+ }
129
+
113
130
/**
114
131
* Return an appropriate (subclass) of Configuration. Creating a config can initialize some Hadoop
115
132
* subsystems.
0 commit comments