import { IExecuteFunctions } from 'n8n-core';
import {
	IDataObject,
	INodeExecutionData,
	INodeType,
	INodeTypeDescription,
} from 'n8n-workflow';

export class SplitInBatches implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Split In Batches',
		name: 'splitInBatches',
		icon: 'fa:th-large',
		group: ['organization'],
		version: 1,
		description: 'Split data into batches and iterate over each batch',
		defaults: {
			name: 'SplitInBatches',
			color: '#007755',
		},
		inputs: ['main'],
		outputs: ['main'],
		properties: [
			{
				displayName: 'You may not need this node — n8n nodes automatically run once for each input item. <a href="https://docs.n8n.io/getting-started/key-concepts/looping.html#using-loops-in-n8n" target="_blank">More info</a>',
				name: 'splitInBatchesNotice',
				type: 'notice',
				default: '',
			},
			{
				displayName: 'Batch Size',
				name: 'batchSize',
				type: 'number',
				typeOptions: {
					minValue: 1,
				},
				default: 10,
				description: 'The number of items to return with each call',
			},
			{
				displayName: 'Options',
				name: 'options',
				type: 'collection',
				placeholder: 'Add Option',
				default: {},
				options: [
					{
						displayName: 'Reset',
						name: 'reset',
						type: 'boolean',
						default: false,
						description: 'Whether the node should be reset and re-initialized with the current input data',
					},
				],
			},
		],
	};
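
	// On the first run (or after a reset) the input items are stored in the node context and the first
	// batch is returned; on every following run the next batch is taken from the context until none are left.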
	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][] | null> {
		// Get the input data and create a shallow copy so that items can be
		// removed without affecting the original array
		const items = this.getInputData().slice();
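
		// The node context persists across runs of this node within one execution,
		// so the remaining items can be kept there between batches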
		const nodeContext = this.getContext('node');
		const batchSize = this.getNodeParameter('batchSize', 0) as number;
		const returnItems: INodeExecutionData[] = [];
		const options = this.getNodeParameter('options', 0, {}) as IDataObject;

		if (nodeContext.items === undefined || options.reset === true) {
			// This is the first time the node runs (or a reset was requested)
			nodeContext.currentRunIndex = 0;
			nodeContext.maxRunIndex = Math.ceil(items.length / batchSize);

			// Get the items which should be returned
			returnItems.push.apply(returnItems, items.splice(0, batchSize));

			// Set the other items to be saved in the context to return at later runs
			nodeContext.items = items;
		} else {
			// The node has been called before. So return the next batch of items.
			nodeContext.currentRunIndex += 1;
			returnItems.push.apply(returnItems, nodeContext.items.splice(0, batchSize));
		}
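
		// Flag in the node context whether any items are left to return in later runs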
		nodeContext.noItemsLeft = nodeContext.items.length === 0;

		if (returnItems.length === 0) {
			// No data left to return so stop execution of the branch
			return null;
		}
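
		// Link each returned item back to its input item so that downstream nodes can resolve paired items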
		returnItems.forEach((item, index) => {
			item.pairedItem = {
				item: index,
			};
		});
		return this.prepareOutputData(returnItems);
	}
}