diff --git a/cypress/e2e/30-langchain.cy.ts b/cypress/e2e/30-langchain.cy.ts index e23b7e4da3..fb453816f6 100644 --- a/cypress/e2e/30-langchain.cy.ts +++ b/cypress/e2e/30-langchain.cy.ts @@ -44,6 +44,7 @@ import { openNode, getConnectionBySourceAndTarget, } from '../composables/workflow'; +import { NDV, WorkflowPage } from '../pages'; import { createMockNodeExecutionData, runMockWorkflowExecution } from '../utils'; describe('Langchain Integration', () => { @@ -232,95 +233,96 @@ describe('Langchain Integration', () => { const inputMessage = 'Hello!'; const outputMessage = 'Hi there! How can I assist you today?'; + const runData = [ + createMockNodeExecutionData(MANUAL_CHAT_TRIGGER_NODE_NAME, { + jsonData: { + main: { input: inputMessage }, + }, + }), + createMockNodeExecutionData(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, { + jsonData: { + ai_languageModel: { + response: { + generations: [ + { + text: `{ + "action": "Final Answer", + "action_input": "${outputMessage}" +}`, + message: { + lc: 1, + type: 'constructor', + id: ['langchain', 'schema', 'AIMessage'], + kwargs: { + content: `{ + "action": "Final Answer", + "action_input": "${outputMessage}" +}`, + additional_kwargs: {}, + }, + }, + generationInfo: { finish_reason: 'stop' }, + }, + ], + llmOutput: { + tokenUsage: { + completionTokens: 26, + promptTokens: 519, + totalTokens: 545, + }, + }, + }, + }, + }, + metadata: { + subRun: [{ node: AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, runIndex: 0 }], + }, + inputOverride: { + ai_languageModel: [ + [ + { + json: { + messages: [ + { + lc: 1, + type: 'constructor', + id: ['langchain', 'schema', 'SystemMessage'], + kwargs: { + content: + 'Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist. However, above all else, all responses must adhere to the format of RESPONSE FORMAT INSTRUCTIONS.', + additional_kwargs: {}, + }, + }, + { + lc: 1, + type: 'constructor', + id: ['langchain', 'schema', 'HumanMessage'], + kwargs: { + content: + 'TOOLS\n------\nAssistant can ask the user to use tools to look up information that may be helpful in answering the users original question. 
The tools the human can use are:\n\n\n\nRESPONSE FORMAT INSTRUCTIONS\n----------------------------\n\nOutput a JSON markdown code snippet containing a valid JSON object in one of two formats:\n\n**Option 1:**\nUse this if you want the human to use a tool.\nMarkdown code snippet formatted in the following schema:\n\n```json\n{\n "action": string, // The action to take. Must be one of []\n "action_input": string // The input to the action. May be a stringified object.\n}\n```\n\n**Option #2:**\nUse this if you want to respond directly and conversationally to the human. Markdown code snippet formatted in the following schema:\n\n```json\n{\n "action": "Final Answer",\n "action_input": string // You should put what you want to return to use here and make sure to use valid json newline characters.\n}\n```\n\nFor both options, remember to always include the surrounding markdown code snippet delimiters (begin with "```json" and end with "```")!\n\n\nUSER\'S INPUT\n--------------------\nHere is the user\'s input (remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else):\n\nHello!', + additional_kwargs: {}, + }, + }, + ], + options: { stop: ['Observation:'], promptIndex: 0 }, + }, + }, + ], + ], + }, + }), + createMockNodeExecutionData(AGENT_NODE_NAME, { + jsonData: { + main: { output: 'Hi there! How can I assist you today?' }, + }, + }), + ]; runMockWorkflowExecution({ trigger: () => { sendManualChatMessage(inputMessage); }, - runData: [ - createMockNodeExecutionData(MANUAL_CHAT_TRIGGER_NODE_NAME, { - jsonData: { - main: { input: inputMessage }, - }, - }), - createMockNodeExecutionData(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, { - jsonData: { - ai_languageModel: { - response: { - generations: [ - { - text: `{ - "action": "Final Answer", - "action_input": "${outputMessage}" -}`, - message: { - lc: 1, - type: 'constructor', - id: ['langchain', 'schema', 'AIMessage'], - kwargs: { - content: `{ - "action": "Final Answer", - "action_input": "${outputMessage}" -}`, - additional_kwargs: {}, - }, - }, - generationInfo: { finish_reason: 'stop' }, - }, - ], - llmOutput: { - tokenUsage: { - completionTokens: 26, - promptTokens: 519, - totalTokens: 545, - }, - }, - }, - }, - }, - metadata: { - subRun: [{ node: AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, runIndex: 0 }], - }, - inputOverride: { - ai_languageModel: [ - [ - { - json: { - messages: [ - { - lc: 1, - type: 'constructor', - id: ['langchain', 'schema', 'SystemMessage'], - kwargs: { - content: - 'Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. 
Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist. However, above all else, all responses must adhere to the format of RESPONSE FORMAT INSTRUCTIONS.', - additional_kwargs: {}, - }, - }, - { - lc: 1, - type: 'constructor', - id: ['langchain', 'schema', 'HumanMessage'], - kwargs: { - content: - 'TOOLS\n------\nAssistant can ask the user to use tools to look up information that may be helpful in answering the users original question. The tools the human can use are:\n\n\n\nRESPONSE FORMAT INSTRUCTIONS\n----------------------------\n\nOutput a JSON markdown code snippet containing a valid JSON object in one of two formats:\n\n**Option 1:**\nUse this if you want the human to use a tool.\nMarkdown code snippet formatted in the following schema:\n\n```json\n{\n "action": string, // The action to take. Must be one of []\n "action_input": string // The input to the action. May be a stringified object.\n}\n```\n\n**Option #2:**\nUse this if you want to respond directly and conversationally to the human. Markdown code snippet formatted in the following schema:\n\n```json\n{\n "action": "Final Answer",\n "action_input": string // You should put what you want to return to use here and make sure to use valid json newline characters.\n}\n```\n\nFor both options, remember to always include the surrounding markdown code snippet delimiters (begin with "```json" and end with "```")!\n\n\nUSER\'S INPUT\n--------------------\nHere is the user\'s input (remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else):\n\nHello!', - additional_kwargs: {}, - }, - }, - ], - options: { stop: ['Observation:'], promptIndex: 0 }, - }, - }, - ], - ], - }, - }), - createMockNodeExecutionData(AGENT_NODE_NAME, { - jsonData: { - main: { output: 'Hi there! How can I assist you today?' 
}, - }, - }), - ], + runData, lastNodeExecuted: AGENT_NODE_NAME, }); @@ -357,4 +359,56 @@ describe('Langchain Integration', () => { getConnectionBySourceAndTarget(CHAT_TRIGGER_NODE_DISPLAY_NAME, AGENT_NODE_NAME).should('exist'); getNodes().should('have.length', 3); }); + it('should render runItems for sub-nodes and allow switching between them', () => { + const workflowPage = new WorkflowPage(); + const ndv = new NDV(); + + cy.visit(workflowPage.url); + cy.createFixtureWorkflow('In_memory_vector_store_fake_embeddings.json'); + workflowPage.actions.zoomToFit(); + + workflowPage.actions.executeNode('Populate VS'); + cy.get('[data-label="25 items"]').should('exist'); + + const assertInputOutputText = (text: string, assertion: 'exist' | 'not.exist') => { + ndv.getters.outputPanel().contains(text).should(assertion); + ndv.getters.inputPanel().contains(text).should(assertion); + }; + + workflowPage.actions.openNode('Character Text Splitter'); + ndv.getters.outputRunSelector().should('exist'); + ndv.getters.inputRunSelector().should('exist'); + ndv.getters.inputRunSelector().find('input').should('include.value', '3 of 3'); + ndv.getters.outputRunSelector().find('input').should('include.value', '3 of 3'); + assertInputOutputText('Kyiv', 'exist'); + assertInputOutputText('Berlin', 'not.exist'); + assertInputOutputText('Prague', 'not.exist'); + + ndv.actions.changeOutputRunSelector('2 of 3'); + assertInputOutputText('Berlin', 'exist'); + assertInputOutputText('Kyiv', 'not.exist'); + assertInputOutputText('Prague', 'not.exist'); + + ndv.actions.changeOutputRunSelector('1 of 3'); + assertInputOutputText('Prague', 'exist'); + assertInputOutputText('Berlin', 'not.exist'); + assertInputOutputText('Kyiv', 'not.exist'); + + ndv.actions.toggleInputRunLinking(); + ndv.actions.changeOutputRunSelector('2 of 3'); + ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3'); + ndv.getters.outputRunSelector().find('input').should('include.value', '2 of 3'); + ndv.getters.inputPanel().contains('Prague').should('exist'); + ndv.getters.inputPanel().contains('Berlin').should('not.exist'); + + ndv.getters.outputPanel().contains('Berlin').should('exist'); + ndv.getters.outputPanel().contains('Prague').should('not.exist'); + + ndv.actions.toggleInputRunLinking(); + ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3'); + ndv.getters.outputRunSelector().find('input').should('include.value', '1 of 3'); + assertInputOutputText('Prague', 'exist'); + assertInputOutputText('Berlin', 'not.exist'); + assertInputOutputText('Kyiv', 'not.exist'); + }); }); diff --git a/cypress/fixtures/In_memory_vector_store_fake_embeddings.json b/cypress/fixtures/In_memory_vector_store_fake_embeddings.json new file mode 100644 index 0000000000..1f804bedb3 --- /dev/null +++ b/cypress/fixtures/In_memory_vector_store_fake_embeddings.json @@ -0,0 +1,347 @@ +{ + "name": "fake_embeddings", + "nodes": [ + { + "parameters": {}, + "id": "de3c1210-3be7-49a6-86ef-9435e661f23f", + "name": "When clicking ‘Test workflow’", + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [ + 480, + 760 + ] + }, + { + "parameters": { + "jsonMode": "expressionData", + "jsonData": "={{ $('Code').item.json.city }}", + "options": {} + }, + "id": "de3cb132-14ef-426b-ad33-8365a93dd11f", + "name": "Default Data Loader", + "type": "@n8n/n8n-nodes-langchain.documentDefaultDataLoader", + "typeVersion": 1, + "position": [ + 1100, + 900 + ] + }, + { + "parameters": { + "jsCode": "const kyiv = `Kyiv (also Kiev)[a] is 
the capital and most populous city of Ukraine. It is in north-central Ukraine along the Dnieper River. As of 1 January 2022, its population was 2,952,301,[2] making Kyiv the seventh-most populous city in Europe.[11] Kyiv is an important industrial, scientific, educational, and cultural center in Eastern Europe. It is home to many high-tech industries, higher education institutions, and historical landmarks. The city has an extensive system of public transport and infrastructure, including the Kyiv Metro.\n\nThe city's name is said to derive from the name of Kyi, one of its four legendary founders. During its history, Kyiv, one of the oldest cities in Eastern Europe, passed through several stages of prominence and obscurity. The city probably existed as a commercial center as early as the 5th century. A Slavic settlement on the great trade route between Scandinavia and Constantinople, Kyiv was a tributary of the Khazars,[12] until its capture by the Varangians (Vikings) in the mid-9th century. Under Varangian rule, the city became a capital of Kievan Rus', the first East Slavic state. Completely destroyed during the Mongol invasions in 1240, the city lost most of its influence for the centuries to come. Coming under Lithuania, then Poland and then Russia, the city would grow from a frontier market into an important centre of Orthodox learning in the sixteenth century, and later of industry, commerce, and administration by the nineteenth.[1]\n\nThe city prospered again during the Russian Empire's Industrial Revolution in the late 19th century. In 1918, when the Ukrainian People's Republic declared independence from the Russian Republic after the October Revolution there, Kyiv became its capital. From the end of the Ukrainian-Soviet and Polish-Soviet wars in 1921, Kyiv was a city of the Ukrainian SSR, and made its capital in 1934. The city suffered significant destruction during World War II but quickly recovered in the postwar years, remaining the Soviet Union's third-largest city.\n\nFollowing the collapse of the Soviet Union and Ukrainian independence in 1991, Kyiv remained Ukraine's capital and experienced a steady influx of ethnic Ukrainian migrants from other regions of the country.[13] During the country's transformation to a market economy and electoral democracy, Kyiv has continued to be Ukraine's largest and wealthiest city. Its armament-dependent industrial output fell after the Soviet collapse, adversely affecting science and technology, but new sectors of the economy such as services and finance facilitated Kyiv's growth in salaries and investment, as well as providing continuous funding for the development of housing and urban infrastructure. Kyiv emerged as the most pro-Western region of Ukraine; parties advocating tighter integration with the European Union dominate during elections.`\n\nconst berlin = `Berlin[a] is the capital and largest city of Germany, both by area and by population.[11] Its more than 3.85 million inhabitants[12] make it the European Union's most populous city, as measured by population within city limits.[13] The city is also one of the states of Germany, and is the third smallest state in the country in terms of area. Berlin is surrounded by the state of Brandenburg, and Brandenburg's capital Potsdam is nearby. 
The urban area of Berlin has a population of over 4.5 million and is therefore the most populous urban area in Germany.[5][14] The Berlin-Brandenburg capital region has around 6.2 million inhabitants and is Germany's second-largest metropolitan region after the Rhine-Ruhr region, and the sixth-biggest metropolitan region by GDP in the European Union.[15]\n\nBerlin was built along the banks of the Spree river, which flows into the Havel in the western borough of Spandau. The city incorporates lakes in the western and southeastern boroughs, the largest of which is Müggelsee. About one-third of the city's area is composed of forests, parks and gardens, rivers, canals, and lakes.[16]\n\nFirst documented in the 13th century[10] and at the crossing of two important historic trade routes,[17] Berlin was designated the capital of the Margraviate of Brandenburg (1417–1701), Kingdom of Prussia (1701–1918), German Empire (1871–1918), Weimar Republic (1919–1933), and Nazi Germany (1933–1945). Berlin has served as a scientific, artistic, and philosophical hub during the Age of Enlightenment, Neoclassicism, and the German revolutions of 1848–1849. During the Gründerzeit, an industrialization-induced economic boom triggered a rapid population increase in Berlin. 1920s Berlin was the third-largest city in the world by population.[18]\n\nAfter World War II and following Berlin's occupation, the city was split into West Berlin and East Berlin, divided by the Berlin Wall.[19] East Berlin was declared the capital of East Germany, while Bonn became the West German capital. Following German reunification in 1990, Berlin once again became the capital of all of Germany. Due to its geographic location and history, Berlin has been called \"the heart of Europe\".[20][21][22]`\n\nconst prague = `Prague (/ˈprɑːɡ/ PRAHG; Czech: Praha [ˈpraɦa] ⓘ)[a] is the capital and largest city of the Czech Republic[9] and the historical capital of Bohemia. Situated on the Vltava river, Prague is home to about 1.4 million people.\n\nPrague is a political, cultural, and economic hub of Central Europe, with a rich history and Romanesque, Gothic, Renaissance and Baroque architectures. It was the capital of the Kingdom of Bohemia and residence of several Holy Roman Emperors, most notably Charles IV (r. 1346–1378) and Rudolf II (r. 1575–1611).[9] It was an important city to the Habsburg monarchy and Austria-Hungary. The city played major roles in the Bohemian and the Protestant Reformations, the Thirty Years' War and in 20th-century history as the capital of Czechoslovakia between the World Wars and the post-war Communist era.[10]\n\nPrague is home to a number of cultural attractions including Prague Castle, Charles Bridge, Old Town Square with the Prague astronomical clock, the Jewish Quarter, Petřín hill and Vyšehrad. Since 1992, the historic center of Prague has been included in the UNESCO list of World Heritage Sites.\n\nThe city has more than ten major museums, along with numerous theatres, galleries, cinemas, and other historical exhibits. An extensive modern public transportation system connects the city. 
It is home to a wide range of public and private schools, including Charles University in Prague, the oldest university in Central Europe.\n\nPrague is classified as an \"Alpha-\" global city according to GaWC studies.[11] In 2019, the city was ranked as 69th most livable city in the world by Mercer.[12] In the same year, the PICSA Index ranked the city as 13th most livable city in the world.[13] Its rich history makes it a popular tourist destination and as of 2017, the city receives more than 8.5 million international visitors annually. In 2017, Prague was listed as the fifth most visited European city after London, Paris, Rome, and Istanbul.[14]`\n\nreturn [prague, berlin, kyiv].map(i => ({ city: i}))" + }, + "id": "ce9d517e-2dd9-45e4-a566-79bd79cd809b", + "name": "Code", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 740, + 760 + ] + }, + { + "parameters": { + "chunkSize": 300 + }, + "id": "ebe5f3a5-4d90-4a33-bf48-f160f0e83967", + "name": "Character Text Splitter", + "type": "@n8n/n8n-nodes-langchain.textSplitterCharacterTextSplitter", + "typeVersion": 1, + "position": [ + 1100, + 1060 + ] + }, + { + "parameters": { + "code": { + "supplyData": { + "code": "const { FakeEmbeddings } = require('@langchain/core/utils/testing');\n\nreturn new FakeEmbeddings();" + } + }, + "outputs": { + "output": [ + { + "type": "ai_embedding" + } + ] + } + }, + "id": "0eac6c5b-89a9-48a4-bd21-19f2b20c3424", + "name": "Fake Embeddings 3", + "type": "@n8n/n8n-nodes-langchain.code", + "typeVersion": 1, + "position": [ + 660, + 1220 + ] + }, + { + "parameters": { + "mode": "load", + "prompt": "Tester", + "topK": 3 + }, + "id": "8c9b39bf-59d6-4769-98e1-54988d9d6b53", + "name": "Get All VS", + "type": "@n8n/n8n-nodes-langchain.vectorStoreInMemory", + "typeVersion": 1, + "position": [ + 680, + 1080 + ] + }, + { + "parameters": { + "code": { + "supplyData": { + "code": "const { FakeEmbeddings } = require('@langchain/core/utils/testing');\n\nreturn new FakeEmbeddings();" + } + }, + "outputs": { + "output": [ + { + "type": "ai_embedding" + } + ] + } + }, + "id": "e46004ec-baf6-425c-9897-3faec9e29676", + "name": "Fake Embeddings", + "type": "@n8n/n8n-nodes-langchain.code", + "typeVersion": 1, + "position": [ + 920, + 900 + ] + }, + { + "parameters": { + "promptType": "define", + "text": "Just testing", + "options": {} + }, + "id": "b132b323-a813-469c-859b-f1b3ede743a3", + "name": "Question and Answer Chain", + "type": "@n8n/n8n-nodes-langchain.chainRetrievalQa", + "typeVersion": 1.3, + "position": [ + 1680, + 780 + ] + }, + { + "parameters": {}, + "id": "b9c412e5-d739-4c82-9a2e-6c0af0cae8f9", + "name": "Vector Store Retriever", + "type": "@n8n/n8n-nodes-langchain.retrieverVectorStore", + "typeVersion": 1, + "position": [ + 1760, + 920 + ] + }, + { + "parameters": { + "code": { + "supplyData": { + "code": "const { FakeChatModel } = require('@langchain/core/utils/testing');\n\nreturn new FakeChatModel({});" + } + }, + "outputs": { + "output": [ + { + "type": "ai_languageModel" + } + ] + } + }, + "id": "962b4b87-ffd6-4ab8-8776-6e9c0920930a", + "name": "Fake Language Model", + "type": "@n8n/n8n-nodes-langchain.code", + "typeVersion": 1, + "position": [ + 1620, + 920 + ] + }, + { + "parameters": { + "code": { + "supplyData": { + "code": "const { FakeEmbeddings } = require('@langchain/core/utils/testing');\n\nreturn new FakeEmbeddings();" + } + }, + "outputs": { + "output": [ + { + "type": "ai_embedding" + } + ] + } + }, + "id": "c78be34f-6459-4414-86bd-f2670ece129d", + "name": "Fake Embeddings 2", + 
"type": "@n8n/n8n-nodes-langchain.code", + "typeVersion": 1, + "position": [ + 1700, + 1200 + ] + }, + { + "parameters": {}, + "id": "3cee9727-6b97-477c-8277-e8883a98786d", + "name": "Retriever VS", + "type": "@n8n/n8n-nodes-langchain.vectorStoreInMemory", + "typeVersion": 1, + "position": [ + 1700, + 1060 + ] + }, + { + "parameters": { + "mode": "insert" + }, + "id": "5793ec6b-ac00-4a5d-a79c-ff557143e46b", + "name": "Populate VS", + "type": "@n8n/n8n-nodes-langchain.vectorStoreInMemory", + "typeVersion": 1, + "position": [ + 980, + 760 + ] + } + ], + "pinData": {}, + "connections": { + "When clicking ‘Test workflow’": { + "main": [ + [ + { + "node": "Code", + "type": "main", + "index": 0 + }, + { + "node": "Get All VS", + "type": "main", + "index": 0 + } + ] + ] + }, + "Default Data Loader": { + "ai_document": [ + [ + { + "node": "Populate VS", + "type": "ai_document", + "index": 0 + } + ] + ] + }, + "Code": { + "main": [ + [ + { + "node": "Populate VS", + "type": "main", + "index": 0 + } + ] + ] + }, + "Character Text Splitter": { + "ai_textSplitter": [ + [ + { + "node": "Default Data Loader", + "type": "ai_textSplitter", + "index": 0 + } + ] + ] + }, + "Fake Embeddings 3": { + "ai_embedding": [ + [ + { + "node": "Get All VS", + "type": "ai_embedding", + "index": 0 + } + ] + ] + }, + "Fake Embeddings": { + "ai_embedding": [ + [ + { + "node": "Populate VS", + "type": "ai_embedding", + "index": 0 + } + ] + ] + }, + "Vector Store Retriever": { + "ai_retriever": [ + [ + { + "node": "Question and Answer Chain", + "type": "ai_retriever", + "index": 0 + } + ] + ] + }, + "Fake Language Model": { + "ai_languageModel": [ + [ + { + "node": "Question and Answer Chain", + "type": "ai_languageModel", + "index": 0 + } + ] + ] + }, + "Fake Embeddings 2": { + "ai_embedding": [ + [ + { + "node": "Retriever VS", + "type": "ai_embedding", + "index": 0 + } + ] + ] + }, + "Retriever VS": { + "ai_vectorStore": [ + [ + { + "node": "Vector Store Retriever", + "type": "ai_vectorStore", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "4ad44cc6-d5f7-48af-8455-c3957baba04c", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "27cc9b56542ad45b38725555722c50a1c3fee1670bbb67980558314ee08517c4" + }, + "id": "ZjxsuN0rMHRVCb2c", + "tags": [] +} diff --git a/cypress/utils/executions.ts b/cypress/utils/executions.ts index 0b4814fdc9..0f42972856 100644 --- a/cypress/utils/executions.ts +++ b/cypress/utils/executions.ts @@ -16,7 +16,7 @@ export function createMockNodeExecutionData( return { [name]: { startTime: new Date().getTime(), - executionTime: 0, + executionTime: 1, executionStatus, data: jsonData ? Object.keys(jsonData).reduce((acc, key) => { @@ -33,6 +33,7 @@ export function createMockNodeExecutionData( }, {} as ITaskDataConnections) : data, source: [null], + inputOverride, ...rest, }, }; diff --git a/packages/editor-ui/src/components/InputPanel.vue b/packages/editor-ui/src/components/InputPanel.vue index a5f5dc96d8..36a191eaec 100644 --- a/packages/editor-ui/src/components/InputPanel.vue +++ b/packages/editor-ui/src/components/InputPanel.vue @@ -179,7 +179,7 @@ export default defineComponent({ rootNode(): string { const workflow = this.workflow; - const rootNodes = workflow.getChildNodes(this.activeNode?.name ?? '', 'ALL_NON_MAIN'); + const rootNodes = workflow.getChildNodes(this.activeNode?.name ?? '', 'ALL'); return rootNodes[0]; }, @@ -342,7 +342,7 @@ export default defineComponent({ :node="currentNode" :nodes="isMappingMode ? 
rootNodesParents : parentNodes" :workflow="workflow" - :run-index="runIndex" + :run-index="isMappingMode ? 0 : runIndex" :linked-runs="linkedRuns" :can-link-runs="!mappedNode && canLinkRuns" :too-much-data-title="$locale.baseText('ndv.input.tooMuchData.title')" diff --git a/packages/editor-ui/src/components/NodeDetailsView.vue b/packages/editor-ui/src/components/NodeDetailsView.vue index 8294ff2f24..8deda08ccf 100644 --- a/packages/editor-ui/src/components/NodeDetailsView.vue +++ b/packages/editor-ui/src/components/NodeDetailsView.vue @@ -154,6 +154,29 @@ const parentNode = computed(() => { }); const inputNodeName = computed(() => { + const nodeOutputs = + activeNode.value && activeNodeType.value + ? NodeHelpers.getNodeOutputs(props.workflowObject, activeNode.value, activeNodeType.value) + : []; + + const nonMainOutputs = nodeOutputs.filter((output) => { + if (typeof output === 'string') return output !== NodeConnectionType.Main; + + return output.type !== NodeConnectionType.Main; + }); + + const isSubNode = nonMainOutputs.length > 0; + + if (isSubNode && activeNode.value) { + // For sub-nodes, we need to get their connected output node to determine the input + // because sub-nodes use specialized outputs (e.g. NodeConnectionType.AiTool) + // instead of the standard Main output type + const connectedOutputNode = props.workflowObject.getChildNodes( + activeNode.value.name, + 'ALL_NON_MAIN', + )?.[0]; + return connectedOutputNode; + } return selectedInput.value || parentNode.value; }); diff --git a/packages/editor-ui/src/components/RunDataAi/RunDataAi.vue b/packages/editor-ui/src/components/RunDataAi/RunDataAi.vue index aa4cefd169..4092f09884 100644 --- a/packages/editor-ui/src/components/RunDataAi/RunDataAi.vue +++ b/packages/editor-ui/src/components/RunDataAi/RunDataAi.vue @@ -164,8 +164,9 @@ const aiData = computed(() => { const result: AIResult[] = []; const connectedSubNodes = props.workflow.getParentNodes(props.node.name, 'ALL_NON_MAIN'); const rootNodeResult = workflowsStore.getWorkflowResultDataByNodeName(props.node.name); - const rootNodeStartTime = rootNodeResult?.[0]?.startTime ?? 0; - const rootNodeEndTime = rootNodeStartTime + (rootNodeResult?.[0]?.executionTime ?? 0); + const rootNodeStartTime = rootNodeResult?.[props.runIndex ?? 0]?.startTime ?? 0; + const rootNodeEndTime = + rootNodeStartTime + (rootNodeResult?.[props.runIndex ?? 0]?.executionTime ?? 0); connectedSubNodes.forEach((nodeName) => { const nodeRunData = workflowsStore.getWorkflowResultDataByNodeName(nodeName) ?? []; @@ -193,7 +194,7 @@ const aiData = computed(() => { const currentNodeResult = result.filter((r) => { const startTime = r.data?.metadata?.startTime ?? 0; - return startTime >= rootNodeStartTime && startTime <= rootNodeEndTime; + return startTime >= rootNodeStartTime && startTime < rootNodeEndTime; }); return currentNodeResult;