Mirror of https://github.com/n8n-io/n8n.git (synced 2024-11-09)
fix(Code Node): Install python modules always in a user-writable folder (#6568)
* upgrade pyodide
* install pyodide modules to a custom user-writable path
* in `augmentObject`, `newData` is never undefined
Parent: 071e56f7fd
Commit: bf351243df
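In short, the Code node now resolves a per-node, user-writable folder under the user's .n8n directory and passes it to Pyodide as its package cache, so Python packages are downloaded and cached somewhere the process can always write to. A minimal sketch of that flow, with illustrative paths (the helper names and the storage/<node-type> layout are taken from the diff below; this is not the shipped code):

import path from 'path';
import { loadPyodide } from 'pyodide';

// Illustrative stand-ins; n8n derives these from getUserN8nFolderPath() and the node type.
const userFolder = '/home/node/.n8n';
const nodeType = 'n8n-nodes-base.code';

async function loadPyodideWithUserCache() {
  // e.g. /home/node/.n8n/storage/n8n-nodes-base.code, writable even when the
  // n8n installation directory is not.
  const packageCacheDir = path.join(userFolder, `storage/${nodeType}`);

  // packageCacheDir is the loadPyodide option whose type declaration the
  // pyodide@0.23.4 patch in this commit adds.
  return loadPyodide({ packageCacheDir });
}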
@@ -93,7 +93,8 @@
       "element-ui@2.15.12": "patches/element-ui@2.15.12.patch",
       "typedi@0.10.0": "patches/typedi@0.10.0.patch",
       "@sentry/cli@2.17.0": "patches/@sentry__cli@2.17.0.patch",
-      "pkce-challenge@3.0.0": "patches/pkce-challenge@3.0.0.patch"
+      "pkce-challenge@3.0.0": "patches/pkce-challenge@3.0.0.patch",
+      "pyodide@0.23.4": "patches/pyodide@0.23.4.patch"
     }
   }
 }
@@ -128,6 +128,7 @@ import {
   setAllWorkflowExecutionMetadata,
   setWorkflowExecutionMetadata,
 } from './WorkflowExecutionMetadata';
+import { getUserN8nFolderPath } from './UserSettings';
 
 axios.defaults.timeout = 300000;
 // Prevent axios from adding x-form-www-urlencoded headers by default
@@ -2245,6 +2246,9 @@ const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions =>
     }
     return createReadStream(filePath);
   },
+
+  getStoragePath() {
+    return path.join(getUserN8nFolderPath(), `storage/${node.type}`);
+  },
 });
 
 const getNodeHelperFunctions = ({
@@ -108,10 +108,8 @@ export class Code implements INodeType {
     }
 
     if (language === 'python') {
-      const modules = this.getNodeParameter('modules', index) as string;
-      const moduleImports: string[] = modules ? modules.split(',').map((m) => m.trim()) : [];
       context.printOverwrite = workflowMode === 'manual' ? this.sendMessageToUI : null;
-      return new PythonSandbox(context, code, moduleImports, index, this.helpers);
+      return new PythonSandbox(context, code, index, this.helpers);
     } else {
       const sandbox = new JavaScriptSandbox(context, code, index, workflowMode, this.helpers);
       if (workflowMode === 'manual') {
@@ -2,26 +2,15 @@ import type { PyodideInterface } from 'pyodide';
 
 let pyodideInstance: PyodideInterface | undefined;
 
-export async function LoadPyodide(): Promise<PyodideInterface> {
+export async function LoadPyodide(packageCacheDir: string): Promise<PyodideInterface> {
   if (pyodideInstance === undefined) {
-    // TODO: Find better way to suppress warnings
-    //@ts-ignore
-    globalThis.Blob = (await import('node:buffer')).Blob;
-
-    // From: https://github.com/nodejs/node/issues/30810
-    const { emitWarning } = process;
-    process.emitWarning = (warning, ...args) => {
-      if (args[0] === 'ExperimentalWarning') {
-        return;
-      }
-      if (args[0] && typeof args[0] === 'object' && args[0].type === 'ExperimentalWarning') {
-        return;
-      }
-      return emitWarning(warning, ...(args as string[]));
-    };
-
     const { loadPyodide } = await import('pyodide');
-    pyodideInstance = await loadPyodide();
+    pyodideInstance = await loadPyodide({ packageCacheDir });
+
+    await pyodideInstance.runPythonAsync(`
+from _pyodide_core import jsproxy_typedict
+from js import Object
+`);
   }
 
   return pyodideInstance;
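One property of the loader above worth keeping in mind: the Pyodide instance is memoized at module scope, so only the first caller's packageCacheDir takes effect. A small illustration, assuming the './Pyodide' module from this diff:

import { LoadPyodide } from './Pyodide';

async function demo() {
  const first = await LoadPyodide('/home/node/.n8n/storage/n8n-nodes-base.code');
  // pyodideInstance is already set, so the second path is ignored and the same
  // interpreter (with the original cache directory) is returned.
  const second = await LoadPyodide('/tmp/some-other-dir');
  console.log(first === second); // true
}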
@@ -1,5 +1,5 @@
 import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
-import type { PyProxyDict } from 'pyodide';
+import type { PyDict } from 'pyodide/ffi';
 import { LoadPyodide } from './Pyodide';
 import type { SandboxContext } from './Sandbox';
 import { Sandbox } from './Sandbox';
@@ -18,7 +18,6 @@ export class PythonSandbox extends Sandbox {
   constructor(
     context: SandboxContext,
     private pythonCode: string,
-    private moduleImports: string[],
     itemIndex: number | undefined,
     helpers: IExecuteFunctions['helpers'],
   ) {
@@ -51,47 +50,35 @@ export class PythonSandbox extends Sandbox {
   }
 
   private async runCodeInPython<T>() {
-    // Below workaround from here:
-    // https://github.com/pyodide/pyodide/discussions/3537#discussioncomment-4864345
-    const runCode = `
-from _pyodide_core import jsproxy_typedict
-from js import Object
-jsproxy_typedict[0] = type(Object.new().as_object_map())
-
-if printOverwrite:
-  print = printOverwrite
-
-async def __main():
-${this.pythonCode
-  .split('\n')
-  .map((line) => ' ' + line)
-  .join('\n')}
-await __main()
-`;
-    const pyodide = await LoadPyodide();
-
-    const moduleImportsFiltered = this.moduleImports.filter(
-      (importModule) => !['asyncio', 'pyodide', 'math'].includes(importModule),
-    );
-
-    if (moduleImportsFiltered.length) {
-      await pyodide.loadPackage('micropip');
-      const micropip = pyodide.pyimport('micropip');
-      await Promise.all(
-        moduleImportsFiltered.map((importModule) => micropip.install(importModule)),
-      );
-    }
+    const packageCacheDir = this.helpers.getStoragePath();
+    const pyodide = await LoadPyodide(packageCacheDir);
 
     let executionResult;
     try {
+      await pyodide.runPythonAsync('jsproxy_typedict[0] = type(Object.new().as_object_map())');
+
+      await pyodide.loadPackagesFromImports(this.pythonCode);
+
       const dict = pyodide.globals.get('dict');
-      const globalsDict: PyProxyDict = dict();
+      const globalsDict: PyDict = dict();
       for (const key of Object.keys(this.context)) {
         if ((key === '_env' && envAccessBlocked) || key === '_node') continue;
         const value = this.context[key];
         globalsDict.set(key, value);
       }
 
+      await pyodide.runPythonAsync(`
+if 'printOverwrite' in globals():
+  print = printOverwrite
+`);
+
+      const runCode = `
+async def __main():
+${this.pythonCode
+  .split('\n')
+  .map((line) => ' ' + line)
+  .join('\n')}
+await __main()`;
       executionResult = await pyodide.runPythonAsync(runCode, { globals: globalsDict });
       globalsDict.destroy();
     } catch (error) {
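The comma-separated "Python Modules" parameter and the explicit micropip loop are gone; pyodide.loadPackagesFromImports scans the user's Python source for import statements and loads the matching packages from the Pyodide distribution, caching them under the storage path. A rough before/after sketch (illustrative path; numpy chosen only as an example of a Pyodide-distributed package):

import { loadPyodide } from 'pyodide';

async function runSnippet() {
  const pyodide = await loadPyodide({
    packageCacheDir: '/home/node/.n8n/storage/n8n-nodes-base.code',
  });

  const userCode = ['import numpy', 'numpy.zeros(3).tolist()'].join('\n');

  // Before: users listed modules in a node parameter and they were installed via micropip.
  // After: the import statements in the code itself decide what gets loaded and cached.
  await pyodide.loadPackagesFromImports(userCode);

  return pyodide.runPythonAsync(userCode);
}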
@@ -35,7 +35,7 @@ export abstract class Sandbox {
   constructor(
     private textKeys: SandboxTextKeys,
     protected itemIndex: number | undefined,
-    private helpers: IExecuteFunctions['helpers'],
+    protected helpers: IExecuteFunctions['helpers'],
   ) {}
 
   abstract runCodeAllItems(): Promise<INodeExecutionData[]>;
@@ -45,19 +45,4 @@ export const pythonCodeDescription: INodeProperties[] = [
     },
     default: '',
   },
-  {
-    displayName: 'Python Modules',
-    name: 'modules',
-    displayOptions: {
-      show: {
-        language: ['python'],
-      },
-    },
-    type: 'string',
-    default: '',
-    placeholder: 'opencv-python',
-    description:
-      'Comma-separated list of Python modules to load. They have to be installed to be able to be loaded and imported.',
-    noDataExpression: true,
-  },
 ];
@@ -831,7 +831,7 @@
     "pg-promise": "^10.5.8",
     "pretty-bytes": "^5.6.0",
     "promise-ftp": "^1.3.5",
-    "pyodide": "^0.22.1",
+    "pyodide": "^0.23.4",
     "redis": "^3.1.1",
     "rhea": "^1.0.11",
     "rss-parser": "^3.7.0",
@@ -133,7 +133,11 @@ export function augmentObject<T extends object>(data: T): T {
 
       return true;
     },
+    has(target, key) {
+      if (deletedProperties.indexOf(key) !== -1) return false;
+      const newKeys = Object.keys(newData);
+      return Reflect.has(newKeys.length ? newData : target, key);
+    },
     ownKeys(target) {
       const originalKeys = Reflect.ownKeys(target);
       const newKeys = Object.keys(newData);
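The new has trap keeps the in operator consistent with writes and deletes that augmentObject tracks in newData and deletedProperties instead of applying them to the original target; the test added at the end of this diff exercises exactly that. A minimal reminder of the underlying Proxy mechanics (plain JavaScript semantics, not the n8n implementation):

const target: Record<string, unknown> = { y: 1 };
const added: Record<string, unknown> = {};

const proxied = new Proxy(target, {
  set(_t, key, value) {
    added[key as string] = value; // writes are diverted away from the original object
    return true;
  },
  has(t, key) {
    // Without this trap, 'z' in proxied would report false for keys that only exist in `added`.
    return Reflect.has(added, key) || Reflect.has(t, key);
  },
});

proxied.z = 5;
console.log('z' in proxied); // true
console.log('z' in target); // false, the original object is untouched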
@@ -671,6 +671,7 @@ interface JsonHelperFunctions {
 
 export interface FileSystemHelperFunctions {
   createReadStream(path: PathLike): Promise<Readable>;
+  getStoragePath(): string;
 }
 
 export interface BinaryHelperFunctions {
@@ -51,6 +51,9 @@ export function create(
     get(target, name, receiver) {
       return Reflect.get(target, name, receiver);
     },
+    has(target, key) {
+      return Reflect.has(target, key);
+    },
     set(target, name, value) {
       if (parent === undefined) {
         // If no parent is given mark current data as changed
@@ -146,6 +146,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         ownKeys(target) {
           if (Reflect.ownKeys(target).length === 0) {
             // Target object did not get set yet
@@ -178,6 +179,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         ownKeys(target) {
           return Reflect.ownKeys(target);
         },
@@ -202,6 +204,7 @@ export class WorkflowDataProxy {
     const node = this.workflow.nodes[nodeName];
 
     return new Proxy(node.parameters, {
+      has: () => true,
       ownKeys(target) {
         return Reflect.ownKeys(target);
       },
@@ -384,6 +387,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       { binary: undefined, data: undefined, json: undefined },
       {
+        has: () => true,
         get(target, name, receiver) {
           if (name === 'isProxy') return true;
           name = name.toString();
@@ -461,6 +465,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         get(target, name, receiver) {
           if (name === 'isProxy') return true;
 
@@ -491,6 +496,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         ownKeys(target) {
           return allowedValues;
         },
@@ -538,6 +544,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         ownKeys(target) {
           return allowedValues;
         },
@@ -580,6 +587,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         get(target, name, receiver) {
           if (name === 'isProxy') return true;
 
@@ -950,6 +958,7 @@ export class WorkflowDataProxy {
     return new Proxy(
       {},
       {
+        has: () => true,
         ownKeys(target) {
           return [
             'pairedItem',
@@ -1073,6 +1082,7 @@ export class WorkflowDataProxy {
       },
 
       $input: new Proxy({} as ProxyInput, {
+        has: () => true,
         ownKeys(target) {
           return ['all', 'context', 'first', 'item', 'last', 'params'];
         },
@@ -1238,6 +1248,7 @@ export class WorkflowDataProxy {
     };
 
     return new Proxy(base, {
+      has: () => true,
       get(target, name, receiver) {
         if (name === 'isProxy') return true;
 
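All of the has: () => true additions above follow one pattern: these proxies build their values lazily in get, against mostly empty target objects, so an ordinary has check would report keys as missing even though reading them works. Answering true unconditionally makes membership checks (for instance from Python code that treats a proxied object as a mapping) line up with property access. A generic illustration, not the n8n classes:

const lazy = new Proxy({} as Record<string, string>, {
  has: () => true,
  get: (_target, name) => `computed value for ${String(name)}`,
});

console.log('item' in lazy); // true, even though the target object is empty
console.log(lazy.item); // "computed value for item"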
@@ -557,5 +557,19 @@ describe('AugmentObject', () => {
         writable: true,
       });
     });
+
+    test('should return valid values on `has` calls', () => {
+      const originalObject = {
+        x: {
+          y: {},
+        },
+      };
+      const augmentedObject = augmentObject(originalObject);
+      expect('y' in augmentedObject.x).toBe(true);
+      expect('z' in augmentedObject.x).toBe(false);
+
+      augmentedObject.x.z = 5;
+      expect('z' in augmentedObject.x).toBe(true);
+    });
   });
 });
patches/pyodide@0.23.4.patch (new file, 20 lines)
@@ -0,0 +1,20 @@
+diff --git a/pyodide.d.ts b/pyodide.d.ts
+index d5ed46f6345855a75ec6f2b7ef73237a0af64a7e..e0087c83792558ca30713e9d07a9a37625f68d8d 100644
+--- a/pyodide.d.ts
++++ b/pyodide.d.ts
+@@ -1118,6 +1118,15 @@ export declare function loadPyodide(options?: {
+     * (``pyodide.js`` or ``pyodide.mjs``) removed.
+     */
+    indexURL?: string;
++   /**
++    * The file path where packages will be cached in `node.js`. If a package
++    * exists in `packageCacheDir` it is loaded from there, otherwise it is
++    * downloaded from the JsDelivr CDN and then cached into `packageCacheDir`.
++    * Only applies when running in node.js. Ignored in browsers.
++    *
++    * Default: same as indexURL
++    */
++   packageCacheDir?: string;
+    /**
+     * file. You can produce custom lock files with :py:func:`micropip.freeze`.
+     * Default: ```${indexURL}/repodata.json```
@@ -35,6 +35,9 @@ patchedDependencies:
   pkce-challenge@3.0.0:
     hash: dypouzb3lve7vncq25i5fuanki
     path: patches/pkce-challenge@3.0.0.patch
+  pyodide@0.23.4:
+    hash: kzcwsjcayy5m6iezu7r4tdimjq
+    path: patches/pyodide@0.23.4.patch
   typedi@0.10.0:
     hash: 62r6bc2crgimafeyruodhqlgo4
     path: patches/typedi@0.10.0.patch
@@ -1142,8 +1145,8 @@ importers:
         specifier: ^1.3.5
         version: 1.3.5(promise-ftp-common@1.1.5)
       pyodide:
-        specifier: ^0.22.1
-        version: 0.22.1
+        specifier: ^0.23.4
+        version: 0.23.4(patch_hash=kzcwsjcayy5m6iezu7r4tdimjq)
       redis:
         specifier: ^3.1.1
         version: 3.1.2
@@ -18656,8 +18659,8 @@ packages:
     resolution: {integrity: sha512-t+x1zEHDjBwkDGY5v5ApnZ/utcd4XYDiJsaQQoptTXgUXX95sDg1elCdJghzicm7n2mbCBJ3uYWr6M22SO19rg==}
     dev: true
 
-  /pyodide@0.22.1:
-    resolution: {integrity: sha512-6+PkFLTC+kcBKtFQxYBxR44J5IBxLm8UGkobLgZv1SxzV9qOU2rb0YYf0qDtlnfDiN/IQd2uckf+D8Zwe88Mqg==}
+  /pyodide@0.23.4(patch_hash=kzcwsjcayy5m6iezu7r4tdimjq):
+    resolution: {integrity: sha512-WpQUHaIXQ1xede5BMqPAjBcmopxN22s5hEsYOR8T7/UW/fkNLFUn07SaemUgthbtvedD5JGymMMj4VpD9sGMTg==}
     dependencies:
       base-64: 1.0.0
       node-fetch: 2.6.8
@@ -18667,6 +18670,7 @@ packages:
       - encoding
       - utf-8-validate
     dev: false
+    patched: true
 
   /python-struct@1.1.3:
     resolution: {integrity: sha512-UsI/mNvk25jRpGKYI38Nfbv84z48oiIWwG67DLVvjRhy8B/0aIK+5Ju5WOHgw/o9rnEmbAS00v4rgKFQeC332Q==}