
Commit 097bfb6

Merge remote-tracking branch 'origin/main' into fix-resumetoken-timetravel
2 parents: 5c3e6ed + a60f2c7

File tree: 6 files changed (+71, -6 lines)


.changeset/dull-pumas-punch.md

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+---
+'@powersync/service-module-mongodb-storage': patch
+'@powersync/service-errors': patch
+'@powersync/lib-service-mongodb': patch
+'@powersync/service-core': patch
+'@powersync/service-image': patch
+---
+
+[MongoDB Storage] Improve error messages for checksum query timeouts

libs/lib-mongodb/src/db/errors.ts

Lines changed: 22 additions & 1 deletion
@@ -1,4 +1,9 @@
-import { DatabaseConnectionError, ErrorCode, ServiceError } from '@powersync/lib-services-framework';
+import {
+  DatabaseConnectionError,
+  DatabaseQueryError,
+  ErrorCode,
+  ServiceError
+} from '@powersync/lib-services-framework';
 import { isMongoServerError } from './mongo.js';
 import { MongoNetworkError, MongoServerSelectionError } from 'mongodb';

@@ -58,6 +63,22 @@ export function mapConnectionError(err: any): ServiceError {
   }
 }

+export function mapQueryError(err: any, context: string): ServiceError {
+  if (ServiceError.isServiceError(err)) {
+    return err;
+  } else if (isMongoServerError(err)) {
+    if (err.codeName == 'MaxTimeMSExpired') {
+      return new DatabaseQueryError(ErrorCode.PSYNC_S2403, `Query timed out ${context}`, err);
+    }
+
+    // Fallback
+    return new DatabaseQueryError(ErrorCode.PSYNC_S2404, `MongoDB server error ${context}: ${err.codeName}`, err);
+  } else {
+    // Fallback
+    return new DatabaseQueryError(ErrorCode.PSYNC_S2404, `MongoDB connection error ${context}`, err);
+  }
+}
+
 function isNetworkError(err: any): err is MongoNetworkError {
   return err?.name === 'MongoNetworkError';
 }
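For reference, a minimal sketch of how mapQueryError is meant to be used at a call site. The runFind helper, its parameters, and the import path are assumptions for illustration; only mapQueryError itself and the MaxTimeMSExpired-to-PSYNC_S2403 mapping come from this commit.

import { Collection, Document } from 'mongodb';
// Assumed import path; mapQueryError is defined in libs/lib-mongodb/src/db/errors.ts.
import { mapQueryError } from '@powersync/lib-service-mongodb';

// Hypothetical helper: run a find with a server-side time limit and convert any
// driver error into a ServiceError. A MaxTimeMSExpired server error maps to
// PSYNC_S2403 (query timeout); other server errors map to PSYNC_S2404.
async function runFind(collection: Collection<Document>, filter: Document, maxTimeMS: number) {
  try {
    return await collection.find(filter, { maxTimeMS }).toArray();
  } catch (e) {
    throw mapQueryError(e, 'while reading documents');
  }
}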

libs/lib-mongodb/src/db/mongo.ts

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ export const MONGO_SOCKET_TIMEOUT_MS = 60_000;
  *
  * Must be less than MONGO_SOCKET_TIMEOUT_MS to ensure proper error handling.
  */
-export const MONGO_OPERATION_TIMEOUT_MS = 30_000;
+export const MONGO_OPERATION_TIMEOUT_MS = 40_000;

 /**
  * Same as above, but specifically for clear operations.
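The comment above documents the invariant this bump preserves: the operation timeout must stay below MONGO_SOCKET_TIMEOUT_MS (60 seconds) so the server aborts a slow query before the driver gives up on the socket. A minimal sketch of a startup guard for that invariant, assuming both constants are exported by the package; the check itself is not part of this commit.

// Assumed import path for the two timeout constants.
import { MONGO_OPERATION_TIMEOUT_MS, MONGO_SOCKET_TIMEOUT_MS } from '@powersync/lib-service-mongodb';

// Hypothetical sanity check: fail fast if the operation timeout ever reaches the socket timeout.
if (MONGO_OPERATION_TIMEOUT_MS >= MONGO_SOCKET_TIMEOUT_MS) {
  throw new Error('MONGO_OPERATION_TIMEOUT_MS must be less than MONGO_SOCKET_TIMEOUT_MS');
}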

modules/module-mongodb-storage/src/storage/implementation/MongoSyncBucketStorage.ts

Lines changed: 12 additions & 4 deletions
@@ -348,7 +348,10 @@ export class MongoSyncBucketStorage
         // 1. We can calculate the document size accurately without serializing again.
         // 2. We can delay parsing the results until it's needed.
         // We manually use bson.deserialize below
-        raw: true
+        raw: true,
+
+        // Limit the time for the operation to complete, to avoid getting connection timeouts
+        maxTimeMS: lib_mongo.db.MONGO_OPERATION_TIMEOUT_MS
       }
     ) as unknown as mongo.FindCursor<Buffer>;

@@ -357,7 +360,9 @@ export class MongoSyncBucketStorage
     // to the lower of the batch count and size limits.
     // This is similar to using `singleBatch: true` in the find options, but allows
     // detecting "hasMore".
-    let { data, hasMore: batchHasMore } = await readSingleBatch(cursor);
+    let { data, hasMore: batchHasMore } = await readSingleBatch(cursor).catch((e) => {
+      throw lib_mongo.mapQueryError(e, 'while reading bucket data');
+    });
     if (data.length == batchLimit) {
       // Limit reached - could have more data, despite the cursor being drained.
       batchHasMore = true;
@@ -486,9 +491,12 @@ export class MongoSyncBucketStorage
           }
         }
       ],
-      { session: undefined, readConcern: 'snapshot' }
+      { session: undefined, readConcern: 'snapshot', maxTimeMS: lib_mongo.db.MONGO_OPERATION_TIMEOUT_MS }
     )
-      .toArray();
+      .toArray()
+      .catch((e) => {
+        throw lib_mongo.mapQueryError(e, 'while reading checksums');
+      });

     return new Map<string, storage.PartialChecksum>(
       aggregate.map((doc) => {
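Both hunks follow the same pattern: set maxTimeMS so the MongoDB server cancels long-running work itself, and route any resulting driver error through mapQueryError. Reduced to a standalone sketch below; the collection and pipeline are placeholders and the lib_mongo import path is an assumption.

import * as lib_mongo from '@powersync/lib-service-mongodb';
import { Collection, Document } from 'mongodb';

// Placeholder aggregation showing the pattern from the hunks above: a server-side time
// limit plus error mapping, so a slow checksum query surfaces as PSYNC_S2403 rather
// than a generic socket/connection timeout.
async function aggregateWithTimeout(collection: Collection<Document>, pipeline: Document[]) {
  return await collection
    .aggregate(pipeline, {
      readConcern: 'snapshot',
      maxTimeMS: lib_mongo.db.MONGO_OPERATION_TIMEOUT_MS
    })
    .toArray()
    .catch((e) => {
      throw lib_mongo.mapQueryError(e, 'while reading checksums');
    });
}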

packages/service-errors/src/codes.ts

Lines changed: 11 additions & 0 deletions
@@ -428,6 +428,17 @@ export enum ErrorCode {
    */
   PSYNC_S2402 = 'PSYNC_S2402',

+  /**
+   * Query timed out. Could be due to a large query or a temporary load issue on the storage database.
+   * Retry the request.
+   */
+  PSYNC_S2403 = 'PSYNC_S2403',
+
+  /**
+   * Query failure on the storage database. See error details for more information.
+   */
+  PSYNC_S2404 = 'PSYNC_S2404',
+
   // ## PSYNC_S23xx: Sync API errors - Postgres Storage

   // ## PSYNC_S3xxx: Service configuration issues
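The doc comment on PSYNC_S2403 recommends a retry, while PSYNC_S2404 carries no such hint. A hedged sketch of how a caller might act on that distinction; the withQueryRetry helper and the way the code is read off the caught error are assumptions, not part of this commit.

// Hypothetical caller-side handling: retry timed-out storage queries a few times,
// surface any other query failure immediately.
async function withQueryRetry<T>(run: () => Promise<T>, attempts = 3): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await run();
    } catch (e: any) {
      lastError = e;
      // PSYNC_S2403: query timeout on the storage database, treated as retryable here.
      // PSYNC_S2404 (or anything else): not retried.
      if (e?.code !== 'PSYNC_S2403') {
        throw e;
      }
    }
  }
  throw lastError;
}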

packages/service-errors/src/errors.ts

Lines changed: 16 additions & 0 deletions
@@ -243,3 +243,19 @@ export class DatabaseConnectionError extends ServiceError {
     this.cause = cause;
   }
 }
+
+export class DatabaseQueryError extends ServiceError {
+  public cause: any;
+
+  constructor(code: ErrorCode, message: string, cause?: any) {
+    super({
+      code: code,
+      status: 500,
+      description: message,
+      // Cause is always logged. Return details via the API only in development mode
+      details: process.env.NODE_ENV !== 'production' && cause != null ? `cause: ${cause.message}` : undefined,
+      stack: process.env.NODE_ENV !== 'production' ? cause.stack : undefined
+    });
+    this.cause = cause;
+  }
+}
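A short sketch of constructing the new error directly, mirroring how mapQueryError uses it above; driverError is a placeholder for whatever the MongoDB driver threw, and the import path is an assumption.

// Assumed package entry point for the service-errors package.
import { DatabaseQueryError, ErrorCode } from '@powersync/service-errors';

// Placeholder for an error caught from the MongoDB driver.
const driverError = new Error('operation exceeded time limit');

// The cause is always logged; its message is only returned in the details field
// (and its stack attached) when NODE_ENV is not 'production'.
const error = new DatabaseQueryError(
  ErrorCode.PSYNC_S2403,
  'Query timed out while reading checksums',
  driverError
);
console.log(error.cause === driverError); // true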
