@@ -139,7 +139,7 @@ class WriteAheadLogSuite extends FunSuite with BeforeAndAfter with BeforeAndAfte
 
   test("WriteAheadLogManager - write rotating logs") {
     // Write data using manager
-    val dataToWrite = generateRandomData(10)
+    val dataToWrite = generateRandomData()
     val dir = pathForTest
     writeDataUsingManager(dir, dataToWrite)
 
@@ -154,7 +154,7 @@ class WriteAheadLogSuite extends FunSuite with BeforeAndAfter with BeforeAndAfte
     // Write data manually for testing reading through manager
     val dir = pathForTest
     val writtenData = (1 to 10).map { i =>
-      val data = generateRandomData(10)
+      val data = generateRandomData()
       val file = dir + s"/log-$i-$i"
       writeDataManually(data, file)
       data
@@ -171,7 +171,7 @@ class WriteAheadLogSuite extends FunSuite with BeforeAndAfter with BeforeAndAfte
 
   test("WriteAheadLogManager - recover past logs when creating new manager") {
     // Write data with manager, recover with new manager and verify
-    val dataToWrite = generateRandomData(100)
+    val dataToWrite = generateRandomData()
     val dir = pathForTest
     writeDataUsingManager(dir, dataToWrite)
     val logFiles = getLogFilesInDirectory(dir)
@@ -183,7 +183,7 @@ class WriteAheadLogSuite extends FunSuite with BeforeAndAfter with BeforeAndAfte
   test("WriteAheadLogManager - cleanup old logs") {
     // Write data with manager, recover with new manager and verify
     val dir = pathForTest
-    val dataToWrite = generateRandomData(100)
+    val dataToWrite = generateRandomData()
     val fakeClock = new ManualClock
     val manager = new WriteAheadLogManager(dir, hadoopConf,
       rollingIntervalSecs = 1, callerName = "WriteAheadLogSuite", clock = fakeClock)
@@ -239,8 +239,10 @@ object WriteAheadLogSuite {
 
   def writeDataUsingManager(logDirectory: String, data: Seq[String]) {
     val fakeClock = new ManualClock
+    fakeClock.setTime(1000000)
     val manager = new WriteAheadLogManager(logDirectory, hadoopConf,
       rollingIntervalSecs = 1, callerName = "WriteAheadLogSuite", clock = fakeClock)
+    // Ensure that 500 does not get sorted after 2000, so put a high base value.
     data.foreach { item =>
       fakeClock.addToTime(500)
       manager.writeToLog(item)
@@ -290,8 +292,8 @@ object WriteAheadLogSuite {
     data
   }
 
-  def generateRandomData(numItems: Int = 50, itemSize: Int = 50): Seq[String] = {
-    (1 to numItems).map {
+  def generateRandomData(): Seq[String] = {
+    (1 to 50).map {
       _.toString
     }
   }
@@ -300,11 +302,8 @@ object WriteAheadLogSuite {
     val logDirectoryPath = new Path(directory)
     val fileSystem = HdfsUtils.getFileSystemForPath(logDirectoryPath, hadoopConf)
 
-    implicit def fileStatusOrdering[A <: FileStatus]: Ordering[A] = Ordering
-      .by(f => f.getModificationTime)
-
     if (fileSystem.exists(logDirectoryPath) && fileSystem.getFileStatus(logDirectoryPath).isDir) {
-      fileSystem.listStatus(logDirectoryPath).sorted.map {
+      fileSystem.listStatus(logDirectoryPath).map {
        _.getPath.toString
      }
    } else {
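
Note on the fakeClock.setTime(1000000) addition in writeDataUsingManager: the patch's own comment says a high base value is needed so that "500 does not get sorted after 2000", i.e. so that timestamps compared as strings keep their chronological order. Below is a minimal, standalone Scala sketch of that effect; it is not part of the patch, and the file names and object name are made up for illustration only.

object TimestampSortingSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical log file names built from raw clock times (low base value).
    // "log-500-1000" sorts after "log-2000-2500" because '5' > '2' as characters.
    val lowBase = Seq("log-500-1000", "log-1000-1500", "log-2000-2500")
    println(lowBase.sorted)   // List(log-1000-1500, log-2000-2500, log-500-1000)

    // With a high base time (e.g. 1000000), every timestamp has the same width,
    // so lexicographic order matches chronological order.
    val highBase = Seq("log-1000500-1001000", "log-1001000-1001500", "log-1002000-1002500")
    println(highBase.sorted)  // already in chronological order
  }
}

This is presumably also why the explicit modification-time ordering removed from getLogFilesInDirectory is no longer needed for these tests.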