/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
	NodeConnectionType,
	type INodeType,
	type INodeTypeDescription,
	type ISupplyDataFunctions,
	type SupplyData,
} from 'n8n-workflow';

import type { ChatOllamaInput } from '@langchain/ollama';
import { ChatOllama } from '@langchain/ollama';

import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';
import { N8nLlmTracing } from '../N8nLlmTracing';

export class LmChatOllama implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Ollama Chat Model',
		// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
		name: 'lmChatOllama',
		icon: 'file:ollama.svg',
		group: ['transform'],
		version: 1,
		description: 'Language Model Ollama',
		defaults: {
			name: 'Ollama Chat Model',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Language Models', 'Root Nodes'],
				'Language Models': ['Chat Models (Recommended)'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatollama/',
					},
				],
			},
		},
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiLanguageModel],
		outputNames: ['Model'],
		...ollamaDescription,
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
			ollamaModel,
			ollamaOptions,
		],
	};

	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
		const credentials = await this.getCredentials('ollamaApi');

		const modelName = this.getNodeParameter('model', itemIndex) as string;
		const options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;

		const model = new ChatOllama({
			...options,
			baseUrl: credentials.baseUrl as string,
			model: modelName,
			format: options.format === 'default' ? undefined : options.format,
			callbacks: [new N8nLlmTracing(this)],
		});

		return {
			response: model,
		};
	}
}