Follow-ups to graphql#42
- Add maxBatchSize to type definitions.
- Add some additional comments.
- Optimize common path.
- Fix lint and type checking errors.
leebyron committed Sep 23, 2016
1 parent baf07db commit a972eb5
Showing 3 changed files with 24 additions and 7 deletions.
README.md (2 changes: 1 addition & 1 deletion)
@@ -160,7 +160,7 @@ Create a new `DataLoader` given a batch loading function and options.
 - *batch*: Default `true`. Set to `false` to disable batching, instead
   immediately invoking `batchLoadFn` with a single load key.

-- *maxBatchSize*: Default infinite. Limits the number of items that get
+- *maxBatchSize*: Default `Infinity`. Limits the number of items that get
   passed in to the `batchLoadFn`.

 - *cache*: Default `true`. Set to `false` to disable caching, instead
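As a usage sketch (not part of this commit; the batch function and loader name are illustrative), the option documented above can be exercised like this:

```js
var DataLoader = require('dataloader');

// Illustrative batch function: resolves each key to a stub object.
var userLoader = new DataLoader(
  keys => Promise.resolve(keys.map(key => ({ id: key }))),
  // Cap each call to the batch function at two keys.
  { maxBatchSize: 2 }
);

// Five loads enqueued in the same tick are dispatched as three
// batches: [1, 2], [3, 4], and [5].
[1, 2, 3, 4, 5].forEach(key => userLoader.load(key));
```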
src/index.d.ts (6 changes: 6 additions & 0 deletions)
@@ -30,6 +30,12 @@ type Options<K, V> = {
    */
   batch?: boolean,

+  /**
+   * Default `Infinity`. Limits the number of items that get
+   * passed in to the `batchLoadFn`.
+   */
+  maxBatchSize?: number;
+
   /**
    * Default `true`. Set to `false` to disable caching,
    * instead creating a new Promise and new key in
src/index.js (23 changes: 17 additions & 6 deletions)
@@ -16,10 +16,10 @@ type BatchLoadFn<K, V> = (keys: Array<K>) => Promise<Array<V | Error>>;
 // custom cache instance.
 type Options<K, V> = {
   batch?: boolean;
+  maxBatchSize?: number;
   cache?: boolean;
   cacheKeyFn?: (key: any) => any;
   cacheMap?: CacheMap<K, Promise<V>>;
-  maxBatchSize?: number;
 };

 // If a custom cache is provided, it must be of this type (a subset of ES6 Map).
@@ -222,14 +222,25 @@ function dispatchQueue<K, V>(loader: DataLoader<K, V>) {
   var queue = loader._queue;
   loader._queue = [];

-  var maxBatchSize = loader._options && loader._options.maxBatchSize ||
-    queue.length;
-  for (var i = 0; i < queue.length / maxBatchSize; i++) {
-    loadKeys(loader, queue.slice(i * maxBatchSize, (i + 1) * maxBatchSize));
+  // If a maxBatchSize was provided and the queue is longer, then segment the
+  // queue into multiple batches, otherwise treat the queue as a single batch.
+  var maxBatchSize = loader._options && loader._options.maxBatchSize;
+  if (maxBatchSize && maxBatchSize > 0 && maxBatchSize < queue.length) {
+    for (var i = 0; i < queue.length / maxBatchSize; i++) {
+      dispatchQueueBatch(
+        loader,
+        queue.slice(i * maxBatchSize, (i + 1) * maxBatchSize)
+      );
+    }
+  } else {
+    dispatchQueueBatch(loader, queue);
   }
 }

-function loadKeys<K, V>(loader: DataLoader<K, V>, queue: LoaderQueue<K, V>) {
+function dispatchQueueBatch<K, V>(
+  loader: DataLoader<K, V>,
+  queue: LoaderQueue<K, V>
+) {
   // Collect all keys to be loaded in this dispatch
   var keys = queue.map(({ key }) => key);
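The loop bound `queue.length / maxBatchSize` is fractional, so the loop body runs `Math.ceil(queue.length / maxBatchSize)` times without an explicit `ceil` call. A self-contained sketch of the same segmentation (the `segment` helper is illustrative, not part of the library):

```js
// Mirrors the dispatchQueue slicing above: split `items` into
// consecutive slices of at most `maxBatchSize` elements.
function segment(items, maxBatchSize) {
  var batches = [];
  for (var i = 0; i < items.length / maxBatchSize; i++) {
    batches.push(items.slice(i * maxBatchSize, (i + 1) * maxBatchSize));
  }
  return batches;
}

console.log(segment([1, 2, 3, 4, 5], 2)); // [ [ 1, 2 ], [ 3, 4 ], [ 5 ] ]
```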
