Mirror of https://github.com/n8n-io/n8n.git, synced 2024-11-15 09:04:07 -08:00

Commit 87def60979

Github issue / Community forum post (link here to close automatically): https://community.n8n.io/t/langchain-memory-chat/23733

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Val <68596159+valya@users.noreply.github.com>
Co-authored-by: Alex Grozav <alex@grozav.com>
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
Co-authored-by: Deborah <deborah@starfallprojects.co.uk>
Co-authored-by: Jesper Bylund <mail@jesperbylund.com>
Co-authored-by: Jon <jonathan.bennetts@gmail.com>
Co-authored-by: Michael Kret <88898367+michael-radency@users.noreply.github.com>
Co-authored-by: Giulio Andreini <andreini@netseven.it>
Co-authored-by: Mason Geloso <Mason.geloso@gmail.com>
Co-authored-by: Mason Geloso <hone@Masons-Mac-mini.local>
Co-authored-by: Mutasem Aldmour <mutasem@n8n.io>
99 lines
2.8 KiB
TypeScript
import {
	type IExecuteFunctions,
	type INodeExecutionData,
	NodeConnectionType,
	NodeOperationError,
} from 'n8n-workflow';

import type { CharacterTextSplitter } from 'langchain/text_splitter';
import type { Document } from 'langchain/document';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { getMetadataFiltersValues } from './helpers';

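/**
 * Converts incoming n8n item JSON (or JSON resolved from an expression) into
 * LangChain Document objects, optionally splitting them with a connected text
 * splitter and merging metadata filter values into each document.
 */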
export class N8nJsonLoader {
	private context: IExecuteFunctions;

	private optionsPrefix: string;

	constructor(context: IExecuteFunctions, optionsPrefix = '') {
		this.context = context;
		this.optionsPrefix = optionsPrefix;
	}

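	/** Processes every input item and returns the resulting documents as a single flat array. */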
	async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
		const docs: Document[] = [];

		if (!items) return [];

		for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
			const processedDocuments = await this.processItem(items[itemIndex], itemIndex);

			docs.push(...processedDocuments);
		}

		return docs;
	}

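	/**
	 * Processes a single item: resolves the JSON source according to the `jsonMode`
	 * parameter, loads it with the appropriate document loader, optionally splits the
	 * text, and attaches metadata filter values to every resulting document.
	 */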
	async processItem(item: INodeExecutionData, itemIndex: number): Promise<Document[]> {
		const mode = this.context.getNodeParameter('jsonMode', itemIndex, 'allInputData') as
			| 'allInputData'
			| 'expressionData';

		const pointers = this.context.getNodeParameter(
			`${this.optionsPrefix}pointers`,
			itemIndex,
			'',
		) as string;
		const pointersArray = pointers.split(',').map((pointer) => pointer.trim());

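		// A text splitter is optional; when one is connected on the AiTextSplitter input it is used below.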
		const textSplitter = (await this.context.getInputConnectionData(
			NodeConnectionType.AiTextSplitter,
			0,
		)) as CharacterTextSplitter | undefined;
		const metadata = getMetadataFiltersValues(this.context, itemIndex) ?? [];

		if (!item) return [];

		let documentLoader: JSONLoader | TextLoader | null = null;

		if (mode === 'allInputData') {
			const itemString = JSON.stringify(item.json);
			const itemBlob = new Blob([itemString], { type: 'application/json' });
			documentLoader = new JSONLoader(itemBlob, pointersArray);
		}

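		// In 'expressionData' mode the `jsonData` parameter may resolve to an object (loaded
		// as JSON) or to a plain string (loaded as text).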
		if (mode === 'expressionData') {
			const dataString = this.context.getNodeParameter('jsonData', itemIndex) as string | object;
			if (typeof dataString === 'object') {
				const itemBlob = new Blob([JSON.stringify(dataString)], { type: 'application/json' });
				documentLoader = new JSONLoader(itemBlob, pointersArray);
			}

			if (typeof dataString === 'string') {
				const itemBlob = new Blob([dataString], { type: 'text/plain' });
				documentLoader = new TextLoader(itemBlob);
			}
		}

		if (documentLoader === null) {
			// This should never happen
			throw new NodeOperationError(this.context.getNode(), 'Document loader is not initialized');
		}

		const docs = textSplitter
			? await documentLoader.loadAndSplit(textSplitter)
			: await documentLoader.load();

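		// Copy metadata filter values onto each loaded document, keeping its existing metadata.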
		if (metadata) {
			docs.forEach((doc) => {
				doc.metadata = {
					...doc.metadata,
					...metadata,
				};
			});
		}
		return docs;
	}
}
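
/*
 * Usage sketch (an assumption, not part of this file): inside a node's `execute`
 * method, where `this` is an IExecuteFunctions context, the loader might be used
 * roughly like this. The 'options.' parameter prefix is hypothetical and depends
 * on how the consuming node names its parameters.
 *
 *   const items = this.getInputData();
 *   const loader = new N8nJsonLoader(this, 'options.');
 *   const documents = await loader.processAll(items);
 *   // documents: Document[] ready to hand to a vector store or chain
 */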