Skip to content

Commit aa52a25

Browse files
committed
Refactor batching logic
1 parent 29811db commit aa52a25

File tree

2 files changed

+85
-69
lines changed

2 files changed

+85
-69
lines changed

src/__tests__/dataloader.test.js

Lines changed: 18 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -106,6 +106,24 @@ describe('Primary API', () => {
106106
expect(loadCalls).toEqual([ [ 1 ] ]);
107107
});
108108

109+
it('coalesces identical requests across sized batches', async () => {
110+
const [ identityLoader, loadCalls ] = idLoader<number>({ maxBatchSize: 2 });
111+
112+
const promise1a = identityLoader.load(1);
113+
const promise2 = identityLoader.load(2);
114+
const promise1b = identityLoader.load(1);
115+
const promise3 = identityLoader.load(3);
116+
117+
const [ value1a, value2, value1b, value3 ] =
118+
await Promise.all([ promise1a, promise2, promise1b, promise3 ]);
119+
expect(value1a).toBe(1);
120+
expect(value2).toBe(2);
121+
expect(value1b).toBe(1);
122+
expect(value3).toBe(3);
123+
124+
expect(loadCalls).toEqual([ [ 1, 2 ], [ 3 ] ]);
125+
});
126+
109127
it('caches repeated requests', async () => {
110128
const [ identityLoader, loadCalls ] = idLoader<string>();
111129

src/index.js

Lines changed: 67 additions & 69 deletions
Original file line number | Diff line number | Diff line change
@@ -54,14 +54,14 @@ class DataLoader<K, V, C = K> {
5454
this._batchLoadFn = batchLoadFn;
5555
this._options = options;
5656
this._promiseCache = getValidCacheMap(options);
57-
this._queue = [];
57+
this._batch = null;
5858
}
5959

6060
// Private
6161
_batchLoadFn: BatchLoadFn<K, V>;
6262
_options: ?Options<K, V, C>;
6363
_promiseCache: ?CacheMap<C, Promise<V>>;
64-
_queue: LoaderQueue<K, V>;
64+
_batch: Batch<K, V> | null;
6565

6666
/**
6767
* Loads a key, returning a `Promise` for the value represented by that key.
@@ -76,7 +76,7 @@ class DataLoader<K, V, C = K> {
7676

7777
// Determine options
7878
var options = this._options;
79-
var shouldBatch = !options || options.batch !== false;
79+
var batch = getCurrentBatch(this);
8080
var cache = this._promiseCache;
8181
var cacheKey = getCacheKey(options, key);
8282

@@ -88,23 +88,11 @@ class DataLoader<K, V, C = K> {
8888
}
8989
}
9090

91-
// Otherwise, produce a new Promise for this value.
91+
// Otherwise, produce a new Promise for this key, and enqueue it to be
92+
// dispatched along with the current batch.
93+
batch.keys.push(key);
9294
var promise = new Promise((resolve, reject) => {
93-
// Enqueue this Promise to be dispatched.
94-
this._queue.push({ key, resolve, reject });
95-
96-
// Determine if a dispatch of this queue should be scheduled.
97-
// A single dispatch should be scheduled per queue at the time when the
98-
// queue changes from "empty" to "full".
99-
if (this._queue.length === 1) {
100-
if (shouldBatch) {
101-
// If batching, schedule a task to dispatch the queue.
102-
enqueuePostPromiseJob(() => dispatchQueue(this));
103-
} else {
104-
// Otherwise dispatch the (queue of one) immediately.
105-
dispatchQueue(this);
106-
}
107-
}
95+
batch.callbacks.push({ resolve, reject });
10896
});
10997

11098
// If caching, cache this promise.
@@ -239,43 +227,61 @@ var enqueuePostPromiseJob =
239227
// Private: cached resolved Promise instance
240228
var resolvedPromise;
241229

242-
// Private: given the current state of a Loader instance, perform a batch load
243-
// from its current queue.
244-
function dispatchQueue<K, V>(loader: DataLoader<K, V, any>) {
245-
// Take the current loader queue, replacing it with an empty queue.
246-
var queue = loader._queue;
247-
loader._queue = [];
248-
249-
// If a maxBatchSize was provided and the queue is longer, then segment the
250-
// queue into multiple batches, otherwise treat the queue as a single batch.
251-
var maxBatchSize = loader._options && loader._options.maxBatchSize;
252-
if (maxBatchSize && maxBatchSize > 0 && maxBatchSize < queue.length) {
253-
for (var i = 0; i < queue.length / maxBatchSize; i++) {
254-
dispatchQueueBatch(
255-
loader,
256-
queue.slice(i * maxBatchSize, (i + 1) * maxBatchSize)
257-
);
258-
}
259-
} else {
260-
dispatchQueueBatch(loader, queue);
230+
// Private: Describes a batch of requests
231+
type Batch<K, V> = {
232+
hasDispatched: boolean,
233+
keys: Array<K>,
234+
callbacks: Array<{
235+
resolve: (value: V) => void;
236+
reject: (error: Error) => void;
237+
}>
238+
}
239+
240+
// Private: Either returns the current batch, or creates and schedules a
241+
// dispatch of a new batch for the given loader.
242+
function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
243+
var options = loader._options;
244+
var maxBatchSize =
245+
(options && options.maxBatchSize) ||
246+
(options && options.batch === false ? 1 : 0);
247+
248+
// If there is an existing batch which has not yet dispatched and is within
249+
// the limit of the batch size, then return it.
250+
var existingBatch = loader._batch;
251+
if (
252+
existingBatch !== null &&
253+
!existingBatch.hasDispatched &&
254+
(maxBatchSize === 0 || existingBatch.keys.length < maxBatchSize)
255+
) {
256+
return existingBatch;
261257
}
258+
259+
// Otherwise, create a new batch for this loader.
260+
var newBatch = { hasDispatched: false, keys: [], callbacks: [] };
261+
262+
// Store it on the loader so it may be reused.
263+
loader._batch = newBatch;
264+
265+
// Then schedule a task to dispatch this batch of requests.
266+
enqueuePostPromiseJob(() => dispatchBatch(loader, newBatch));
267+
268+
return newBatch;
262269
}
263270

264-
function dispatchQueueBatch<K, V>(
271+
function dispatchBatch<K, V>(
265272
loader: DataLoader<K, V, any>,
266-
queue: LoaderQueue<K, V>
273+
batch: Batch<K, V>
267274
) {
268-
// Collect all keys to be loaded in this dispatch
269-
var keys = queue.map(({ key }) => key);
275+
// Mark this batch as having been dispatched.
276+
batch.hasDispatched = true;
270277

271-
// Call the provided batchLoadFn for this loader with the loader queue's keys.
272-
var batchLoadFn = loader._batchLoadFn;
273-
// Call with the loader as the `this` context.
274-
var batchPromise = batchLoadFn.call(loader, keys);
278+
// Call the provided batchLoadFn for this loader with the batch's keys and
279+
// with the loader as the `this` context.
280+
var batchPromise = loader._batchLoadFn(batch.keys);
275281

276282
// Assert the expected response from batchLoadFn
277283
if (!batchPromise || typeof batchPromise.then !== 'function') {
278-
return failedDispatch(loader, queue, new TypeError(
284+
return failedDispatch(loader, batch, new TypeError(
279285
'DataLoader must be constructed with a function which accepts ' +
280286
'Array<key> and returns Promise<Array<value>>, but the function did ' +
281287
`not return a Promise: ${String(batchPromise)}.`
@@ -293,41 +299,40 @@ function dispatchQueueBatch<K, V>(
293299
`not return a Promise of an Array: ${String(values)}.`
294300
);
295301
}
296-
if (values.length !== keys.length) {
302+
if (values.length !== batch.keys.length) {
297303
throw new TypeError(
298304
'DataLoader must be constructed with a function which accepts ' +
299305
'Array<key> and returns Promise<Array<value>>, but the function did ' +
300306
'not return a Promise of an Array of the same length as the Array ' +
301307
'of keys.' +
302-
`\n\nKeys:\n${String(keys)}` +
308+
`\n\nKeys:\n${String(batch.keys)}` +
303309
`\n\nValues:\n${String(values)}`
304310
);
305311
}
306312

307-
// Step through the values, resolving or rejecting each Promise in the
308-
// loaded queue.
309-
queue.forEach(({ resolve, reject }, index) => {
310-
var value = values[index];
313+
// Step through values, resolving or rejecting each Promise in the batch.
314+
for (var i = 0; i < batch.callbacks.length; i++) {
315+
var value = values[i];
311316
if (value instanceof Error) {
312-
reject(value);
317+
batch.callbacks[i].reject(value);
313318
} else {
314-
resolve(value);
319+
batch.callbacks[i].resolve(value);
315320
}
316-
});
317-
}).catch(error => failedDispatch(loader, queue, error));
321+
}
322+
}).catch(error => failedDispatch(loader, batch, error));
318323
}
319324

320325
// Private: do not cache individual loads if the entire batch dispatch fails,
321326
// but still reject each request so they do not hang.
322327
function failedDispatch<K, V>(
323328
loader: DataLoader<K, V, any>,
324-
queue: LoaderQueue<K, V>,
329+
batch: Batch<K, V>,
325330
error: Error
326331
) {
327-
queue.forEach(({ key, reject }) => {
328-
loader.clear(key);
329-
reject(error);
330-
});
332+
for (var i = 0; i < batch.keys.length; i++) {
333+
loader.clear(batch.keys[i]);
334+
batch.callbacks[i].reject(error);
335+
}
331336
}
332337

333338
// Private: produce a cache key for a given key (and options)
@@ -362,13 +367,6 @@ function getValidCacheMap<K, V, C>(
362367
return cacheMap;
363368
}
364369

365-
// Private
366-
type LoaderQueue<K, V> = Array<{
367-
key: K;
368-
resolve: (value: V) => void;
369-
reject: (error: Error) => void;
370-
}>;
371-
372370
// Private
373371
function isArrayLike(x: mixed): boolean {
374372
return (

0 commit comments

Comments (0)