feat(Anthropic Chat Model Node): Fetch models dynamically & support thinking (#13543)
This commit is contained in:
parent 615a42afd5
commit 461df371f7
Changes to the Anthropic Chat Model node (LmChatAnthropic):

@@ -14,6 +14,7 @@ import {
 
 import { getConnectionHintNoticeField } from '@utils/sharedFields';
 
+import { searchModels } from './methods/searchModels';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';
 
@@ -69,15 +70,23 @@ const modelField: INodeProperties = {
   default: 'claude-2',
 };
 
+const MIN_THINKING_BUDGET = 1024;
+const DEFAULT_MAX_TOKENS = 4096;
 export class LmChatAnthropic implements INodeType {
+  methods = {
+    listSearch: {
+      searchModels,
+    },
+  };
+
   description: INodeTypeDescription = {
     displayName: 'Anthropic Chat Model',
     // eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
     name: 'lmChatAnthropic',
     icon: 'file:anthropic.svg',
     group: ['transform'],
-    version: [1, 1.1, 1.2],
-    defaultVersion: 1.2,
+    version: [1, 1.1, 1.2, 1.3],
+    defaultVersion: 1.3,
     description: 'Language Model Anthropic',
     defaults: {
       name: 'Anthropic Chat Model',
@@ -135,7 +144,43 @@ export class LmChatAnthropic implements INodeType {
       ),
       displayOptions: {
         show: {
-          '@version': [{ _cnd: { gte: 1.2 } }],
+          '@version': [{ _cnd: { lte: 1.2 } }],
         },
       },
     },
+    {
+      displayName: 'Model',
+      name: 'model',
+      type: 'resourceLocator',
+      default: {
+        mode: 'list',
+        value: 'claude-3-7-sonnet-20250219',
+        cachedResultName: 'Claude 3.7 Sonnet',
+      },
+      required: true,
+      modes: [
+        {
+          displayName: 'From List',
+          name: 'list',
+          type: 'list',
+          placeholder: 'Select a model...',
+          typeOptions: {
+            searchListMethod: 'searchModels',
+            searchable: true,
+          },
+        },
+        {
+          displayName: 'ID',
+          name: 'id',
+          type: 'string',
+          placeholder: 'Claude Sonnet',
+        },
+      ],
+      description:
+        'The model. Choose from the list, or specify an ID. <a href="https://docs.anthropic.com/claude/docs/models-overview">Learn more</a>.',
+      displayOptions: {
+        show: {
+          '@version': [{ _cnd: { gte: 1.3 } }],
+        },
+      },
+    },
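For orientation (not part of the diff): a resourceLocator parameter stores an object rather than a plain string, which is why supplyData further down reads 'model.value' on node versions 1.3 and later. A minimal sketch of the stored value, using only the fields declared above; the ResourceLocatorValue name is illustrative, not an n8n type.

// Sketch of the `model` parameter value saved by the resource locator above.
interface ResourceLocatorValue {
  mode: 'list' | 'id';
  value: string;
  cachedResultName?: string;
}

const modelParameter: ResourceLocatorValue = {
  mode: 'list',
  value: 'claude-3-7-sonnet-20250219',
  cachedResultName: 'Claude 3.7 Sonnet',
};

// On node version 1.3+, supplyData reads the model ID via 'model.value':
const modelName = modelParameter.value; // 'claude-3-7-sonnet-20250219'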
@@ -150,7 +195,7 @@ export class LmChatAnthropic implements INodeType {
       {
         displayName: 'Maximum Number of Tokens',
         name: 'maxTokensToSample',
-        default: 4096,
+        default: DEFAULT_MAX_TOKENS,
         description: 'The maximum number of tokens to generate in the completion',
         type: 'number',
       },
@@ -162,6 +207,11 @@ export class LmChatAnthropic implements INodeType {
         description:
           'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
         type: 'number',
+        displayOptions: {
+          hide: {
+            thinking: [true],
+          },
+        },
       },
       {
         displayName: 'Top K',
@@ -171,6 +221,11 @@ export class LmChatAnthropic implements INodeType {
         description:
           'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.',
         type: 'number',
+        displayOptions: {
+          hide: {
+            thinking: [true],
+          },
+        },
       },
       {
         displayName: 'Top P',
@@ -180,6 +235,30 @@ export class LmChatAnthropic implements INodeType {
         description:
           'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
         type: 'number',
+        displayOptions: {
+          hide: {
+            thinking: [true],
+          },
+        },
       },
+      {
+        displayName: 'Enable Thinking',
+        name: 'thinking',
+        type: 'boolean',
+        default: false,
+        description: 'Whether to enable thinking mode for the model',
+      },
+      {
+        displayName: 'Thinking Budget (Tokens)',
+        name: 'thinkingBudget',
+        type: 'number',
+        default: MIN_THINKING_BUDGET,
+        description: 'The maximum number of tokens to use for thinking',
+        displayOptions: {
+          show: {
+            thinking: [true],
+          },
+        },
+      },
     ],
   },
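For orientation (not part of the diff): with the fields above, the options collection that supplyData reads might look like the sketch below when "Enable Thinking" is switched on. Because temperature, topK and topP are hidden by the displayOptions in that mode, they are simply absent; the values shown are illustrative.

// Illustrative shape of the node's `options` parameter with thinking enabled.
const options = {
  maxTokensToSample: 4096,
  thinking: true,
  thinkingBudget: 1024, // the minimum allowed budget (MIN_THINKING_BUDGET)
};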
@@ -189,13 +268,21 @@ export class LmChatAnthropic implements INodeType {
   async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
     const credentials = await this.getCredentials('anthropicApi');
 
-    const modelName = this.getNodeParameter('model', itemIndex) as string;
+    const version = this.getNode().typeVersion;
+    const modelName =
+      version >= 1.3
+        ? (this.getNodeParameter('model.value', itemIndex) as string)
+        : (this.getNodeParameter('model', itemIndex) as string);
+
     const options = this.getNodeParameter('options', itemIndex, {}) as {
       maxTokensToSample?: number;
       temperature: number;
-      topK: number;
-      topP: number;
+      topK?: number;
+      topP?: number;
+      thinking?: boolean;
+      thinkingBudget?: number;
     };
     let invocationKwargs = {};
 
     const tokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {
       const usage = (llmOutput?.usage as { input_tokens: number; output_tokens: number }) ?? {
@@ -208,6 +295,27 @@ export class LmChatAnthropic implements INodeType {
         totalTokens: usage.input_tokens + usage.output_tokens,
       };
     };
 
+    if (options.thinking) {
+      invocationKwargs = {
+        thinking: {
+          type: 'enabled',
+          // If thinking is enabled, we need to set a budget.
+          // We fallback to 1024 as that is the minimum
+          budget_tokens: options.thinkingBudget ?? MIN_THINKING_BUDGET,
+        },
+        // The default Langchain max_tokens is -1 (no limit) but Anthropic requires a number
+        // higher than budget_tokens
+        max_tokens: options.maxTokensToSample ?? DEFAULT_MAX_TOKENS,
+        // These need to be unset when thinking is enabled.
+        // Because the invocationKwargs will override the model options
+        // we can pass options to the model and then override them here
+        top_k: undefined,
+        top_p: undefined,
+        temperature: undefined,
+      };
+    }
+
     const model = new ChatAnthropic({
       anthropicApiKey: credentials.apiKey as string,
       modelName,
@@ -217,6 +325,7 @@ export class LmChatAnthropic implements INodeType {
       topP: options.topP,
       callbacks: [new N8nLlmTracing(this, { tokensUsageParser })],
       onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
+      invocationKwargs,
     });
 
     return {
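For orientation (not part of the diff): the invocationKwargs assembled above are passed through LangChain's ChatAnthropic to Anthropic's Messages API. A rough sketch of the request this corresponds to, based on Anthropic's documented API; the exact payload LangChain builds may differ in detail, and the message content is illustrative.

// Rough sketch of the Messages API call implied by the thinking kwargs above.
async function callWithThinking(apiKey: string) {
  const response = await fetch('https://api.anthropic.com/v1/messages', {
    method: 'POST',
    headers: {
      'x-api-key': apiKey,
      'anthropic-version': '2023-06-01',
      'content-type': 'application/json',
    },
    body: JSON.stringify({
      model: 'claude-3-7-sonnet-20250219',
      max_tokens: 4096, // must exceed thinking.budget_tokens
      thinking: { type: 'enabled', budget_tokens: 1024 },
      // temperature, top_k and top_p are omitted, matching the undefined overrides above
      messages: [{ role: 'user', content: 'Hello' }],
    }),
  });
  return await response.json();
}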
New unit tests for the searchModels method:

@@ -0,0 +1,105 @@
+import type { ILoadOptionsFunctions } from 'n8n-workflow';
+
+import { searchModels, type AnthropicModel } from '../searchModels';
+
+describe('searchModels', () => {
+  let mockContext: jest.Mocked<ILoadOptionsFunctions>;
+
+  const mockModels: AnthropicModel[] = [
+    {
+      id: 'claude-3-opus-20240229',
+      display_name: 'Claude 3 Opus',
+      type: 'model',
+      created_at: '2024-02-29T00:00:00Z',
+    },
+    {
+      id: 'claude-3-sonnet-20240229',
+      display_name: 'Claude 3 Sonnet',
+      type: 'model',
+      created_at: '2024-02-29T00:00:00Z',
+    },
+    {
+      id: 'claude-3-haiku-20240307',
+      display_name: 'Claude 3 Haiku',
+      type: 'model',
+      created_at: '2024-03-07T00:00:00Z',
+    },
+    {
+      id: 'claude-2.1',
+      display_name: 'Claude 2.1',
+      type: 'model',
+      created_at: '2023-11-21T00:00:00Z',
+    },
+    {
+      id: 'claude-2.0',
+      display_name: 'Claude 2.0',
+      type: 'model',
+      created_at: '2023-07-11T00:00:00Z',
+    },
+  ];
+
+  beforeEach(() => {
+    mockContext = {
+      helpers: {
+        httpRequestWithAuthentication: jest.fn().mockResolvedValue({
+          data: mockModels,
+        }),
+      },
+    } as unknown as jest.Mocked<ILoadOptionsFunctions>;
+  });
+
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should fetch models from Anthropic API', async () => {
+    const result = await searchModels.call(mockContext);
+
+    expect(mockContext.helpers.httpRequestWithAuthentication).toHaveBeenCalledWith('anthropicApi', {
+      url: 'https://api.anthropic.com/v1/models',
+      headers: {
+        'anthropic-version': '2023-06-01',
+      },
+    });
+    expect(result.results).toHaveLength(5);
+  });
+
+  it('should sort models by created_at date, most recent first', async () => {
+    const result = await searchModels.call(mockContext);
+    const sortedResults = result.results;
+
+    expect(sortedResults[0].value).toBe('claude-3-haiku-20240307');
+    expect(sortedResults[1].value).toBe('claude-3-opus-20240229');
+    expect(sortedResults[2].value).toBe('claude-3-sonnet-20240229');
+    expect(sortedResults[3].value).toBe('claude-2.1');
+    expect(sortedResults[4].value).toBe('claude-2.0');
+  });
+
+  it('should filter models based on search term', async () => {
+    const result = await searchModels.call(mockContext, 'claude-3');
+
+    expect(result.results).toHaveLength(3);
+    expect(result.results).toEqual([
+      { name: 'Claude 3 Haiku', value: 'claude-3-haiku-20240307' },
+      { name: 'Claude 3 Opus', value: 'claude-3-opus-20240229' },
+      { name: 'Claude 3 Sonnet', value: 'claude-3-sonnet-20240229' },
+    ]);
+  });
+
+  it('should handle case-insensitive search', async () => {
+    const result = await searchModels.call(mockContext, 'CLAUDE-3');
+
+    expect(result.results).toHaveLength(3);
+    expect(result.results).toEqual([
+      { name: 'Claude 3 Haiku', value: 'claude-3-haiku-20240307' },
+      { name: 'Claude 3 Opus', value: 'claude-3-opus-20240229' },
+      { name: 'Claude 3 Sonnet', value: 'claude-3-sonnet-20240229' },
+    ]);
+  });
+
+  it('should handle when no models match the filter', async () => {
+    const result = await searchModels.call(mockContext, 'nonexistent-model');
+
+    expect(result.results).toHaveLength(0);
+  });
+});
New file implementing the searchModels list-search method:

@@ -0,0 +1,60 @@
+import type {
+  ILoadOptionsFunctions,
+  INodeListSearchItems,
+  INodeListSearchResult,
+} from 'n8n-workflow';
+
+export interface AnthropicModel {
+  id: string;
+  display_name: string;
+  type: string;
+  created_at: string;
+}
+
+export async function searchModels(
+  this: ILoadOptionsFunctions,
+  filter?: string,
+): Promise<INodeListSearchResult> {
+  const response = (await this.helpers.httpRequestWithAuthentication.call(this, 'anthropicApi', {
+    url: 'https://api.anthropic.com/v1/models',
+    headers: {
+      'anthropic-version': '2023-06-01',
+    },
+  })) as { data: AnthropicModel[] };
+
+  const models = response.data || [];
+  let results: INodeListSearchItems[] = [];
+
+  if (filter) {
+    for (const model of models) {
+      if (model.id.toLowerCase().includes(filter.toLowerCase())) {
+        results.push({
+          name: model.display_name,
+          value: model.id,
+        });
+      }
+    }
+  } else {
+    results = models.map((model) => ({
+      name: model.display_name,
+      value: model.id,
+    }));
+  }
+
+  // Sort models with more recent ones first (claude-3 before claude-2)
+  results = results.sort((a, b) => {
+    const modelA = models.find((m) => m.id === a.value);
+    const modelB = models.find((m) => m.id === b.value);
+
+    if (!modelA || !modelB) return 0;
+
+    // Sort by created_at date, most recent first
+    const dateA = new Date(modelA.created_at);
+    const dateB = new Date(modelB.created_at);
+    return dateB.getTime() - dateA.getTime();
+  });
+
+  return {
+    results,
+  };
+}
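For orientation (not part of the diff): searchModels expects the models endpoint to return a data array of model descriptors. A small illustrative sample is sketched below, using the AnthropicModel interface defined in the file above; the IDs and dates are examples, not live API output.

// Illustrative response shape for GET https://api.anthropic.com/v1/models.
const sampleResponse: { data: AnthropicModel[] } = {
  data: [
    {
      id: 'claude-3-7-sonnet-20250219',
      display_name: 'Claude 3.7 Sonnet',
      type: 'model',
      created_at: '2025-02-19T00:00:00Z',
    },
    {
      id: 'claude-3-5-haiku-20241022',
      display_name: 'Claude 3.5 Haiku',
      type: 'model',
      created_at: '2024-10-22T00:00:00Z',
    },
  ],
};
// After sorting by created_at (newest first) and mapping to { name, value },
// the model dropdown would list Claude 3.7 Sonnet before Claude 3.5 Haiku.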
package.json dependency bump:

@@ -140,7 +140,7 @@
     "@google-cloud/resource-manager": "5.3.0",
     "@google/generative-ai": "0.21.0",
     "@huggingface/inference": "2.8.0",
-    "@langchain/anthropic": "0.3.11",
+    "@langchain/anthropic": "0.3.14",
     "@langchain/aws": "0.1.3",
     "@langchain/cohere": "0.3.2",
     "@langchain/community": "0.3.24",
pnpm-lock.yaml updates:

@@ -493,7 +493,7 @@ importers:
         version: 3.666.0(@aws-sdk/client-sts@3.666.0)
       '@getzep/zep-cloud':
         specifier: 1.0.12
-        version: 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(d2c70e6e899e44bb5377ca6198a97c6c))
+        version: 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(c10b80e38f5a8711ccad1e2174de91e6))
       '@getzep/zep-js':
         specifier: 0.9.0
         version: 0.9.0
@@ -510,8 +510,8 @@ importers:
         specifier: 2.8.0
         version: 2.8.0
       '@langchain/anthropic':
-        specifier: 0.3.11
-        version: 0.3.11(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
+        specifier: 0.3.14
+        version: 0.3.14(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
       '@langchain/aws':
         specifier: 0.1.3
         version: 0.1.3(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))
@@ -520,7 +520,7 @@ importers:
         version: 0.3.2(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
       '@langchain/community':
         specifier: 0.3.24
-        version: 0.3.24(8549324d4d39cb6e91ae9ff5fc6970d3)
+        version: 0.3.24(1ea346ff95b1be1e3f1f4333b25e2811)
       '@langchain/core':
         specifier: 'catalog:'
         version: 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
@@ -610,7 +610,7 @@ importers:
         version: 23.0.1
       langchain:
         specifier: 0.3.11
-        version: 0.3.11(d2c70e6e899e44bb5377ca6198a97c6c)
+        version: 0.3.11(c10b80e38f5a8711ccad1e2174de91e6)
       lodash:
         specifier: 'catalog:'
         version: 4.17.21
@@ -2295,8 +2295,8 @@ packages:
   '@anthropic-ai/sdk@0.27.3':
     resolution: {integrity: sha512-IjLt0gd3L4jlOfilxVXTifn42FnVffMgDC04RJK1KDZpmkBWLv0XC92MVVmkxrFZNS/7l3xWgP/I3nqtX1sQHw==}
 
-  '@anthropic-ai/sdk@0.32.1':
-    resolution: {integrity: sha512-U9JwTrDvdQ9iWuABVsMLj8nJVwAyQz6QXvgLsVhryhCEPkLsbcP/MXxm+jYcAwLoV8ESbaTTjnD4kuAFa+Hyjg==}
+  '@anthropic-ai/sdk@0.37.0':
+    resolution: {integrity: sha512-tHjX2YbkUBwEgg0JZU3EFSSAQPoK4qQR/NFYa8Vtzd5UAyXzZksCw2In69Rml4R/TyHPBfRYaLK35XiOe33pjw==}
 
   '@apidevtools/json-schema-ref-parser@11.7.0':
     resolution: {integrity: sha512-pRrmXMCwnmrkS3MLgAIW5dXRzeTv6GLjkjb4HmxNnvAKXN1Nfzp4KmGADBQvlVUcqi+a5D+hfGDLLnd5NnYxog==}
@@ -4006,8 +4006,8 @@ packages:
   '@kwsites/promise-deferred@1.1.1':
     resolution: {integrity: sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==}
 
-  '@langchain/anthropic@0.3.11':
-    resolution: {integrity: sha512-rYjDZjMwVQ+cYeJd9IoSESdkkG8fc0m3siGRYKNy6qgYMnqCz8sUPKBanXwbZAs6wvspPCGgNK9WONfaCeX97A==}
+  '@langchain/anthropic@0.3.14':
+    resolution: {integrity: sha512-zfix+qo/coIkgjTYpadp71IAWGXriIfImYLwMr1HnFsit4/RN9DU+aEOdm0nTwycbaneUpwWs5yfje8IKWHfsA==}
     engines: {node: '>=18'}
     peerDependencies:
       '@langchain/core': '>=0.2.21 <0.4.0'
@@ -13879,7 +13879,7 @@ snapshots:
       - encoding
       - supports-color
 
-  '@anthropic-ai/sdk@0.32.1(encoding@0.1.13)':
+  '@anthropic-ai/sdk@0.37.0(encoding@0.1.13)':
     dependencies:
       '@types/node': 18.16.16
       '@types/node-fetch': 2.6.4
@@ -16125,7 +16125,7 @@ snapshots:
   '@gar/promisify@1.1.3':
     optional: true
 
-  '@getzep/zep-cloud@1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(d2c70e6e899e44bb5377ca6198a97c6c))':
+  '@getzep/zep-cloud@1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(c10b80e38f5a8711ccad1e2174de91e6))':
     dependencies:
       form-data: 4.0.0
       node-fetch: 2.7.0(encoding@0.1.13)
@@ -16134,7 +16134,7 @@ snapshots:
       zod: 3.24.1
     optionalDependencies:
       '@langchain/core': 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
-      langchain: 0.3.11(d2c70e6e899e44bb5377ca6198a97c6c)
+      langchain: 0.3.11(c10b80e38f5a8711ccad1e2174de91e6)
     transitivePeerDependencies:
       - encoding
 
@@ -16616,9 +16616,9 @@ snapshots:
 
   '@kwsites/promise-deferred@1.1.1': {}
 
-  '@langchain/anthropic@0.3.11(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)':
+  '@langchain/anthropic@0.3.14(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)':
     dependencies:
-      '@anthropic-ai/sdk': 0.32.1(encoding@0.1.13)
+      '@anthropic-ai/sdk': 0.37.0(encoding@0.1.13)
       '@langchain/core': 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
       fast-xml-parser: 4.4.1
       zod: 3.24.1
@@ -16653,7 +16653,7 @@ snapshots:
       - aws-crt
       - encoding
 
-  '@langchain/community@0.3.24(8549324d4d39cb6e91ae9ff5fc6970d3)':
+  '@langchain/community@0.3.24(1ea346ff95b1be1e3f1f4333b25e2811)':
     dependencies:
       '@browserbasehq/stagehand': 1.9.0(@playwright/test@1.49.1)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))(zod@3.24.1)
       '@ibm-cloud/watsonx-ai': 1.1.2
@@ -16664,7 +16664,7 @@ snapshots:
       flat: 5.0.2
       ibm-cloud-sdk-core: 5.1.0
       js-yaml: 4.1.0
-      langchain: 0.3.11(d2c70e6e899e44bb5377ca6198a97c6c)
+      langchain: 0.3.11(c10b80e38f5a8711ccad1e2174de91e6)
       langsmith: 0.2.15(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
       openai: 4.78.1(encoding@0.1.13)(zod@3.24.1)
       uuid: 10.0.0
@@ -16679,7 +16679,7 @@ snapshots:
       '@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)
       '@azure/storage-blob': 12.18.0(encoding@0.1.13)
       '@browserbasehq/sdk': 2.0.0(encoding@0.1.13)
-      '@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(d2c70e6e899e44bb5377ca6198a97c6c))
+      '@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(c10b80e38f5a8711ccad1e2174de91e6))
       '@getzep/zep-js': 0.9.0
       '@google-ai/generativelanguage': 2.6.0(encoding@0.1.13)
       '@google-cloud/storage': 7.12.1(encoding@0.1.13)
@@ -19998,6 +19998,14 @@ snapshots:
     transitivePeerDependencies:
       - debug
 
+  axios@1.7.4(debug@4.4.0):
+    dependencies:
+      follow-redirects: 1.15.6(debug@4.4.0)
+      form-data: 4.0.0
+      proxy-from-env: 1.1.0
+    transitivePeerDependencies:
+      - debug
+
   axios@1.7.7:
     dependencies:
       follow-redirects: 1.15.6(debug@4.3.6)
@@ -22218,6 +22226,10 @@ snapshots:
     optionalDependencies:
       debug: 4.3.7
 
+  follow-redirects@1.15.6(debug@4.4.0):
+    optionalDependencies:
+      debug: 4.4.0
+
   for-each@0.3.3:
     dependencies:
       is-callable: 1.2.7
@@ -22809,7 +22821,7 @@ snapshots:
       '@types/debug': 4.1.12
       '@types/node': 18.16.16
       '@types/tough-cookie': 4.0.2
-      axios: 1.7.4
+      axios: 1.7.4(debug@4.4.0)
       camelcase: 6.3.0
       debug: 4.4.0
       dotenv: 16.4.5
@@ -22819,7 +22831,7 @@ snapshots:
       isstream: 0.1.2
       jsonwebtoken: 9.0.2
      mime-types: 2.1.35
-      retry-axios: 2.6.0(axios@1.7.4(debug@4.4.0))
+      retry-axios: 2.6.0(axios@1.7.4)
       tough-cookie: 4.1.3
     transitivePeerDependencies:
       - supports-color
@@ -23808,7 +23820,7 @@ snapshots:
 
   kuler@2.0.0: {}
 
-  langchain@0.3.11(d2c70e6e899e44bb5377ca6198a97c6c):
+  langchain@0.3.11(c10b80e38f5a8711ccad1e2174de91e6):
     dependencies:
       '@langchain/core': 0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))
       '@langchain/openai': 0.3.17(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
@@ -23824,7 +23836,7 @@ snapshots:
       zod: 3.24.1
       zod-to-json-schema: 3.23.3(zod@3.24.1)
     optionalDependencies:
-      '@langchain/anthropic': 0.3.11(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
+      '@langchain/anthropic': 0.3.14(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
       '@langchain/aws': 0.1.3(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))
       '@langchain/cohere': 0.3.2(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)
       '@langchain/google-genai': 0.1.6(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(zod@3.24.1)
@@ -26192,7 +26204,7 @@ snapshots:
 
   ret@0.1.15: {}
 
-  retry-axios@2.6.0(axios@1.7.4(debug@4.4.0)):
+  retry-axios@2.6.0(axios@1.7.4):
     dependencies:
       axios: 1.7.4