All core concepts from the Batching Guide apply to async batching as well.
While the synchronous batcher works well for many use cases, async batching provides additional capabilities that are particularly useful when you need to capture return values from batch processing, handle errors from asynchronous operations, or track whether a batch is currently executing.
TanStack Pacer provides async batching through the AsyncBatcher class and the asyncBatch function. Unlike the synchronous version, the async batcher handles Promises and provides robust error handling capabilities.
The asyncBatch function provides a simple way to create an async batching function:
import { asyncBatch } from '@tanstack/pacer'
const processAsyncBatch = asyncBatch<number>(
async (items) => {
// Process the batch asynchronously
const results = await Promise.all(
items.map(item => processApiCall(item))
)
return results
},
{
maxSize: 3,
wait: 2000,
onSuccess: (results, batcher) => {
console.log('Batch completed successfully:', results)
console.log('Total successes:', batcher.store.state.successCount)
},
onError: (error, failedItems, batcher) => {
console.error('Batch failed:', error)
console.log('Failed items:', failedItems)
console.log('Total errors:', batcher.store.state.errorCount)
}
}
)
// Add items to be batched
processAsyncBatch(1)
processAsyncBatch(2)
processAsyncBatch(3) // Triggers batch processing
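Here, processApiCall is a placeholder for whatever per-item async work you need (it is also used in the examples below); a hypothetical stand-in might look like:
// Hypothetical per-item async operation used by the examples in this guide
async function processApiCall(item: number): Promise<string> {
  const response = await fetch(`/api/items/${item}`, { method: 'POST' })
  return response.text()
}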
For more control over async batch behavior, use the AsyncBatcher class directly:
import { AsyncBatcher } from '@tanstack/pacer'
const batcher = new AsyncBatcher<number>(
async (items) => {
// Process the batch asynchronously
const results = await Promise.all(
items.map(item => processApiCall(item))
)
return results
},
{
maxSize: 5,
wait: 3000,
onSuccess: (results, batcher) => {
console.log('Batch succeeded:', results)
},
onError: (error, failedItems, batcher) => {
console.error('Batch failed:', error)
console.log('Failed items:', failedItems)
}
}
)
// Access current state via TanStack Store
console.log(batcher.store.state.successCount) // Number of successful batch executions
console.log(batcher.store.state.errorCount) // Number of failed batch executions
console.log(batcher.store.state.isExecuting) // Whether a batch is currently executing
console.log(batcher.store.state.lastResult) // Result from most recent batch
// Add items to the batch
batcher.addItem(1)
batcher.addItem(2)
// Control batch execution
batcher.stop() // Stop processing
batcher.start() // Resume processing
Unlike the synchronous batcher, which returns void, the async version lets you capture and use the return value from your batch function:
const batcher = new AsyncBatcher<string>(
async (items) => {
const results = await processBatch(items)
return results
},
{
maxSize: 5,
onSuccess: (results, batcher) => {
// Handle the returned results
console.log('Batch results:', results)
}
}
)
The async batcher provides comprehensive error handling capabilities:
const batcher = new AsyncBatcher<number>(
async (items) => {
// This might throw an error
const results = await riskyBatchOperation(items)
return results
},
{
maxSize: 3,
onError: (error, failedItems, batcher) => {
// Handle batch errors
console.error('Batch processing failed:', error)
console.log('Items that failed:', failedItems)
console.log('Total error count:', batcher.store.state.errorCount)
},
throwOnError: false, // Don't throw errors, just handle them
onSuccess: (results, batcher) => {
console.log('Batch succeeded:', results)
console.log('Total success count:', batcher.store.state.successCount)
},
onSettled: (batcher) => {
// Called after every batch (success or failure)
console.log('Batch settled. Total batches:', batcher.store.state.settleCount)
}
}
)
The async batcher tracks when batches are actively executing:
const batcher = new AsyncBatcher<number>(
async (items) => {
console.log('Starting batch execution...')
const results = await longRunningBatchOperation(items)
console.log('Batch execution completed')
return results
},
{
maxSize: 5,
onItemsChange: (batcher) => {
console.log('Is executing:', batcher.store.state.isExecuting)
console.log('Items in queue:', batcher.store.state.size)
}
}
)
The AsyncBatcher supports these async-specific callbacks:
- onSuccess: called after each successful batch execution, receiving the batch result and the batcher instance
- onError: called when a batch throws, receiving the error, the items that failed, and the batcher instance
- onSettled: called after every batch execution, whether it succeeded or failed
The async batcher provides flexible error handling through the throwOnError option:
const batcher = new AsyncBatcher<number>(
async (items) => {
// This might throw an error
throw new Error('Batch processing failed')
},
{
maxSize: 3,
onError: (error, failedItems, batcher) => {
console.error('Handling error:', error)
},
throwOnError: true, // Will throw errors even with onError handler
// throwOnError: false, // Will swallow errors (default if onError is provided)
// throwOnError: undefined, // Uses default behavior based on onError presence
}
)
Like the synchronous batcher, the async batcher supports dynamic options:
const batcher = new AsyncBatcher<number>(
async (items) => {
return await processBatch(items)
},
{
// Dynamic batch size based on success rate
maxSize: (batcher) => {
const successRate = batcher.store.state.successCount / Math.max(1, batcher.store.state.settleCount)
return successRate > 0.8 ? 10 : 5 // Larger batches if success rate is high
},
// Dynamic wait time based on error count
wait: (batcher) => {
return batcher.store.state.errorCount > 5 ? 5000 : 2000 // Wait longer if errors are frequent
}
}
)
The async batcher supports flushing pending batches to trigger processing immediately:
const batcher = new AsyncBatcher(asyncBatchFn, { maxSize: 10, wait: 5000 })
batcher.addItem('item1')
batcher.addItem('item2')
console.log(batcher.store.state.isPending) // true
// Flush immediately instead of waiting
const result = await batcher.flush()
console.log('Flush result:', result)
console.log(batcher.store.state.isEmpty) // true (batch was processed)
The AsyncBatcher class uses TanStack Store for reactive state management, providing real-time access to batch execution state, error tracking, and processing statistics. All state lives in a TanStack Store and can be read via asyncBatcher.store.state. If you are using a framework adapter such as React or Solid, however, you should not read state from there directly. Instead, read it from asyncBatcher.state and provide a selector callback as the third argument to the useAsyncBatcher hook to opt in to state tracking, as shown below.
Framework adapters support a selector argument that allows you to specify which state changes will trigger re-renders. This optimizes performance by preventing unnecessary re-renders when irrelevant state changes occur.
By default, asyncBatcher.state is empty ({}) because no selector is provided. This object is where reactive state from TanStack Store's useStore is stored, and you must opt in to state tracking by providing a selector function.
// Default behavior - no reactive state subscriptions
const batcher = useAsyncBatcher(asyncBatchFn, { maxSize: 5, wait: 1000 })
console.log(batcher.state) // {}
// Opt-in to re-render when isExecuting changes
const batcher = useAsyncBatcher(
asyncBatchFn,
{ maxSize: 5, wait: 1000 },
(state) => ({ isExecuting: state.isExecuting })
)
console.log(batcher.state.isExecuting) // Reactive value
// Multiple state properties
const batcher = useAsyncBatcher(
asyncBatchFn,
{ maxSize: 5, wait: 1000 },
(state) => ({
isExecuting: state.isExecuting,
successCount: state.successCount,
errorCount: state.errorCount
})
)
You can provide initial state values when creating an async batcher. This is commonly used to restore state from persistent storage:
// Load initial state from localStorage
const savedState = localStorage.getItem('async-batcher-state')
const initialState = savedState ? JSON.parse(savedState) : {}
const batcher = new AsyncBatcher(asyncBatchFn, {
maxSize: 5,
wait: 1000,
initialState
})
The store is reactive and supports subscriptions:
const batcher = new AsyncBatcher(asyncBatchFn, { maxSize: 5, wait: 1000 })
// Subscribe to state changes
const unsubscribe = batcher.store.subscribe((state) => {
// do something with the state like persist it to localStorage
})
// Unsubscribe when done
unsubscribe()
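For example, a minimal persistence sketch that combines the subscription with the initialState option shown earlier (assuming the persisted fields are JSON-serializable and asyncBatchFn is defined elsewhere):
import { AsyncBatcher } from '@tanstack/pacer'
const savedState = localStorage.getItem('async-batcher-state')
const persistedBatcher = new AsyncBatcher(asyncBatchFn, {
  maxSize: 5,
  wait: 1000,
  // Restore whatever was saved on a previous visit
  initialState: savedState ? JSON.parse(savedState) : {},
})
// Persist a few counters whenever the state changes
persistedBatcher.store.subscribe(() => {
  const { successCount, errorCount, settleCount } = persistedBatcher.store.state
  localStorage.setItem(
    'async-batcher-state',
    JSON.stringify({ successCount, errorCount, settleCount })
  )
})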
Note: This is unnecessary when using a framework adapter, because the underlying useStore hook already handles the subscription. You can also import useStore from TanStack Store yourself to turn batcher.store.state into reactive state with a custom selector wherever you need it.
const batcher = useAsyncBatcher(asyncBatchFn, { maxSize: 5, wait: 1000 })
// you could manually use the `useStore` hook to subscribe to state changes in whatever scope you want
const state = useStore(batcher.store, (state) => ({
successCount: state.successCount,
}))
console.log(state)
The AsyncBatcherState includes fields such as:
- successCount: number of successful batch executions
- errorCount: number of failed batch executions
- settleCount: number of batch executions that have settled (success or failure)
- isExecuting: whether a batch is currently executing
- isPending: whether the batcher is waiting to process a pending batch
- isEmpty: whether the batcher currently has no items queued
- size: number of items currently queued
- lastResult: the result returned by the most recent batch
The async batcher tracks items that failed during batch processing:
const batcher = new AsyncBatcher<number>(
async (items) => {
// This might fail for some items
if (items.some(item => item < 0)) {
throw new Error('Negative numbers not allowed')
}
return await processBatch(items)
},
{
maxSize: 3,
onError: (error, failedItems, batcher) => {
console.log('Failed items:', failedItems)
console.log('All failed items:', batcher.peekFailedItems())
}
}
)
Each framework adapter provides hooks that build on top of the core async batching functionality and integrate with the framework's state management system. A hook such as useAsyncBatcher (or its equivalent) is available for each framework, as sketched below.
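As a rough sketch with the React adapter (assuming the @tanstack/react-pacer package and a hypothetical sendEvents function that posts the batched items to an API):
import { useAsyncBatcher } from '@tanstack/react-pacer'
// Hypothetical batch function: posts all queued events in one request
async function sendEvents(events: Array<string>): Promise<number> {
  const response = await fetch('/api/events', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(events),
  })
  return response.status
}
function EventTracker() {
  // Opt in to re-renders only for the state this component renders
  const batcher = useAsyncBatcher(
    sendEvents,
    { maxSize: 10, wait: 2000 },
    (state) => ({
      successCount: state.successCount,
      isExecuting: state.isExecuting,
    }),
  )
  return (
    <button onClick={() => batcher.addItem('clicked')}>
      {batcher.state.isExecuting ? 'Sending...' : `Sent ${batcher.state.successCount} batches`}
    </button>
  )
}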
For core batching concepts and synchronous batching, see the Batching Guide.