Mirror of https://github.com/n8n-io/n8n.git (synced 2025-03-05 20:50:17 -08:00)
fix(core): Flush responses for ai streaming endpoints (#10633)
commit 6bb6a5c6cd
parent 14952eb83b
@@ -1,31 +1,40 @@
-import { Post, RestController } from '@/decorators';
-import { AiAssistantService } from '@/services/ai-assistant.service';
-import { AiAssistantRequest } from '@/requests';
-import { Response } from 'express';
+import type { Response } from 'express';
 import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';
-import { Readable, promises } from 'node:stream';
-import { InternalServerError } from 'express-openapi-validator/dist/openapi.validator';
+import { WritableStream } from 'node:stream/web';
 import { strict as assert } from 'node:assert';
 import { ErrorReporterProxy } from 'n8n-workflow';
 
+import { Post, RestController } from '@/decorators';
+import { InternalServerError } from '@/errors/response-errors/internal-server.error';
+import { AiAssistantRequest } from '@/requests';
+import { AiAssistantService } from '@/services/ai-assistant.service';
+
+type FlushableResponse = Response & { flush: () => void };
+
 @RestController('/ai-assistant')
 export class AiAssistantController {
   constructor(private readonly aiAssistantService: AiAssistantService) {}
 
   @Post('/chat', { rateLimit: { limit: 100 } })
-  async chat(req: AiAssistantRequest.Chat, res: Response) {
+  async chat(req: AiAssistantRequest.Chat, res: FlushableResponse) {
     try {
-      const stream = await this.aiAssistantService.chat(req.body, req.user);
-
-      if (stream.body) {
+      const aiResponse = await this.aiAssistantService.chat(req.body, req.user);
+      if (aiResponse.body) {
         res.header('Content-type', 'application/json-lines').flush();
         // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-        await promises.pipeline(Readable.fromWeb(stream.body), res);
+        await aiResponse.body.pipeTo(
+          new WritableStream({
+            write(chunk) {
+              res.write(chunk);
+              res.flush();
+            },
+          }),
+        );
+        res.end();
       }
     } catch (e) {
-      // todo add sentry reporting
       assert(e instanceof Error);
       ErrorReporterProxy.error(e);
-      throw new InternalServerError({ message: `Something went wrong: ${e.message}` });
+      throw new InternalServerError(`Something went wrong: ${e.message}`);
     }
   }
@@ -38,7 +47,7 @@ export class AiAssistantController {
     } catch (e) {
       assert(e instanceof Error);
       ErrorReporterProxy.error(e);
-      throw new InternalServerError({ message: `Something went wrong: ${e.message}` });
+      throw new InternalServerError(`Something went wrong: ${e.message}`);
     }
   }
 }
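In effect, the handler now forwards each chunk of the assistant's web ReadableStream to the client as soon as it arrives: instead of piping through stream.pipeline, the body is piped into a WritableStream whose write() callback calls res.write() followed by res.flush(), so buffering middleware (typically compression, which is also what exposes flush() on the response) can no longer hold the streamed reply back until the request finishes. Below is a minimal sketch of the same pattern in isolation; streamToResponse is a hypothetical helper written for illustration, not code from this commit.

import type { Response } from 'express';
import { ReadableStream, WritableStream } from 'node:stream/web';

// Express Response plus the flush() added by buffering middleware such as
// compression (mirrors the FlushableResponse type introduced in the diff).
type FlushableResponse = Response & { flush: () => void };

// Hypothetical helper sketching the pattern used in AiAssistantController.chat().
async function streamToResponse(body: ReadableStream<Uint8Array>, res: FlushableResponse) {
  res.header('Content-type', 'application/json-lines').flush();
  await body.pipeTo(
    new WritableStream({
      write(chunk) {
        res.write(chunk); // forward the chunk to the client
        res.flush(); // push it past the middleware buffer immediately
      },
    }),
  );
  res.end();
}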
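For completeness, a hypothetical consumer (not part of this commit) shows what the flushing buys: each chunk of the application/json-lines stream can be read as soon as the server flushes it, rather than after the whole reply has been buffered. The sketch assumes a runtime with the Fetch API and web streams (a browser or Node 18+) and omits authentication.

// Hypothetical consumer, not part of this commit: POSTs a chat payload and logs
// each flushed chunk as it arrives instead of waiting for the complete body.
async function readChatStream(url: string, payload: unknown): Promise<void> {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });
  if (!response.body) return;

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // application/json-lines: every line of the stream is a standalone JSON document
    console.log(decoder.decode(value, { stream: true }));
  }
}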