mirror of
https://github.com/prometheus/prometheus.git
synced 2024-12-24 05:04:05 -08:00
Add PromQL logic code and labels explorer from PromLens, add testing deps
Some checks failed
CI / Go tests (push) Has been cancelled
CI / More Go tests (push) Has been cancelled
CI / Go tests with previous Go version (push) Has been cancelled
CI / UI tests (push) Has been cancelled
CI / Go tests on Windows (push) Has been cancelled
CI / Mixins tests (push) Has been cancelled
CI / Build Prometheus for common architectures (0) (push) Has been cancelled
CI / Build Prometheus for common architectures (1) (push) Has been cancelled
CI / Build Prometheus for common architectures (2) (push) Has been cancelled
CI / Build Prometheus for all architectures (0) (push) Has been cancelled
CI / Build Prometheus for all architectures (1) (push) Has been cancelled
CI / Build Prometheus for all architectures (10) (push) Has been cancelled
CI / Build Prometheus for all architectures (11) (push) Has been cancelled
CI / Build Prometheus for all architectures (2) (push) Has been cancelled
CI / Build Prometheus for all architectures (3) (push) Has been cancelled
CI / Build Prometheus for all architectures (4) (push) Has been cancelled
CI / Build Prometheus for all architectures (5) (push) Has been cancelled
CI / Build Prometheus for all architectures (6) (push) Has been cancelled
CI / Build Prometheus for all architectures (7) (push) Has been cancelled
CI / Build Prometheus for all architectures (8) (push) Has been cancelled
CI / Build Prometheus for all architectures (9) (push) Has been cancelled
CI / Check generated parser (push) Has been cancelled
CI / golangci-lint (push) Has been cancelled
CI / fuzzing (push) Has been cancelled
CI / codeql (push) Has been cancelled
CI / Report status of build Prometheus for all architectures (push) Has been cancelled
CI / Publish main branch artifacts (push) Has been cancelled
CI / Publish release artefacts (push) Has been cancelled
CI / Publish UI on npm Registry (push) Has been cancelled
Some checks failed
CI / Go tests (push) Has been cancelled
CI / More Go tests (push) Has been cancelled
CI / Go tests with previous Go version (push) Has been cancelled
CI / UI tests (push) Has been cancelled
CI / Go tests on Windows (push) Has been cancelled
CI / Mixins tests (push) Has been cancelled
CI / Build Prometheus for common architectures (0) (push) Has been cancelled
CI / Build Prometheus for common architectures (1) (push) Has been cancelled
CI / Build Prometheus for common architectures (2) (push) Has been cancelled
CI / Build Prometheus for all architectures (0) (push) Has been cancelled
CI / Build Prometheus for all architectures (1) (push) Has been cancelled
CI / Build Prometheus for all architectures (10) (push) Has been cancelled
CI / Build Prometheus for all architectures (11) (push) Has been cancelled
CI / Build Prometheus for all architectures (2) (push) Has been cancelled
CI / Build Prometheus for all architectures (3) (push) Has been cancelled
CI / Build Prometheus for all architectures (4) (push) Has been cancelled
CI / Build Prometheus for all architectures (5) (push) Has been cancelled
CI / Build Prometheus for all architectures (6) (push) Has been cancelled
CI / Build Prometheus for all architectures (7) (push) Has been cancelled
CI / Build Prometheus for all architectures (8) (push) Has been cancelled
CI / Build Prometheus for all architectures (9) (push) Has been cancelled
CI / Check generated parser (push) Has been cancelled
CI / golangci-lint (push) Has been cancelled
CI / fuzzing (push) Has been cancelled
CI / codeql (push) Has been cancelled
CI / Report status of build Prometheus for all architectures (push) Has been cancelled
CI / Publish main branch artifacts (push) Has been cancelled
CI / Publish release artefacts (push) Has been cancelled
CI / Publish UI on npm Registry (push) Has been cancelled
Signed-off-by: Julius Volz <julius.volz@gmail.com>
This commit is contained in:
parent
6999e8063f
commit
87a22500e1
6
web/ui/mantine-ui/src/api/responseTypes/series.ts
Normal file
6
web/ui/mantine-ui/src/api/responseTypes/series.ts
Normal file
|
@ -0,0 +1,6 @@
|
|||
// Result type for /api/v1/series endpoint.
|
||||
|
||||
import { Metric } from "./query";
|
||||
|
||||
// See: https://prometheus.io/docs/prometheus/latest/querying/api/#finding-series-by-label-matchers
|
||||
export type SeriesResult = Metric[];
|
21
web/ui/mantine-ui/src/lib/formatFloatValue.ts
Normal file
21
web/ui/mantine-ui/src/lib/formatFloatValue.ts
Normal file
|
@ -0,0 +1,21 @@
|
|||
export const parsePrometheusFloat = (str: string): number => {
|
||||
switch (str) {
|
||||
case "+Inf":
|
||||
return Infinity;
|
||||
case "-Inf":
|
||||
return -Infinity;
|
||||
default:
|
||||
return parseFloat(str);
|
||||
}
|
||||
};
|
||||
|
||||
export const formatPrometheusFloat = (num: number): string => {
|
||||
switch (num) {
|
||||
case Infinity:
|
||||
return "+Inf";
|
||||
case -Infinity:
|
||||
return "-Inf";
|
||||
default:
|
||||
return num.toString();
|
||||
}
|
||||
};
|
|
@ -4,6 +4,7 @@ import App from "./App.tsx";
|
|||
import store from "./state/store.ts";
|
||||
import { Provider } from "react-redux";
|
||||
import "./fonts/codicon.ttf";
|
||||
import "./promql.css";
|
||||
|
||||
ReactDOM.createRoot(document.getElementById("root")!).render(
|
||||
<React.StrictMode>
|
||||
|
|
|
@ -11,7 +11,6 @@ import {
|
|||
Alert,
|
||||
TextInput,
|
||||
Anchor,
|
||||
Divider,
|
||||
} from "@mantine/core";
|
||||
import { useSuspenseAPIQuery } from "../api/api";
|
||||
import { AlertingRule, AlertingRulesResult } from "../api/responseTypes/rules";
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import {
|
||||
ActionIcon,
|
||||
Box,
|
||||
Button,
|
||||
Group,
|
||||
InputBase,
|
||||
|
@ -7,6 +8,7 @@ import {
|
|||
Menu,
|
||||
Modal,
|
||||
rem,
|
||||
Skeleton,
|
||||
useComputedColorScheme,
|
||||
} from "@mantine/core";
|
||||
import {
|
||||
|
@ -198,8 +200,6 @@ const ExpressionInput: FC<ExpressionInputProps> = ({
|
|||
|
||||
return (
|
||||
<Group align="flex-start" wrap="nowrap" gap="xs">
|
||||
{/* TODO: For wrapped long lines, the input grows in width more and more, the
|
||||
longer the line is. Figure out why and fix it. */}
|
||||
{/* eslint-disable-next-line @typescript-eslint/no-explicit-any */}
|
||||
<InputBase<any>
|
||||
leftSection={
|
||||
|
@ -313,7 +313,13 @@ const ExpressionInput: FC<ExpressionInputProps> = ({
|
|||
multiline
|
||||
/>
|
||||
|
||||
<Button variant="primary" onClick={() => executeQuery(expr)}>
|
||||
<Button
|
||||
variant="primary"
|
||||
onClick={() => executeQuery(expr)}
|
||||
// Without this, the button can be squeezed to a width
|
||||
// that doesn't fit its text when the window is too narrow.
|
||||
style={{ flexShrink: 0 }}
|
||||
>
|
||||
Execute
|
||||
</Button>
|
||||
<Modal
|
||||
|
@ -323,7 +329,15 @@ const ExpressionInput: FC<ExpressionInputProps> = ({
|
|||
title="Explore metrics"
|
||||
>
|
||||
<ErrorBoundary key={location.pathname} title="Error showing metrics">
|
||||
<Suspense fallback={<Loader />}>
|
||||
<Suspense
|
||||
fallback={
|
||||
<Box mt="lg">
|
||||
{Array.from(Array(20), (_, i) => (
|
||||
<Skeleton key={i} height={30} mb={15} width="100%" />
|
||||
))}
|
||||
</Box>
|
||||
}
|
||||
>
|
||||
<MetricsExplorer
|
||||
metricNames={metricNames}
|
||||
insertText={(text: string) => {
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
.labelValue {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.labelValue:hover {
|
||||
background-color: light-dark(
|
||||
var(--mantine-color-gray-2),
|
||||
var(--mantine-color-gray-8)
|
||||
);
|
||||
border-radius: var(--mantine-radius-sm);
|
||||
}
|
||||
|
||||
.promqlPill {
|
||||
background-color: light-dark(
|
||||
var(--mantine-color-gray-1),
|
||||
var(--mantine-color-dark-5)
|
||||
);
|
||||
}
|
|
@ -0,0 +1,415 @@
|
|||
import { FC, useMemo, useState } from "react";
|
||||
import {
|
||||
LabelMatcher,
|
||||
matchType,
|
||||
nodeType,
|
||||
VectorSelector,
|
||||
} from "../../../promql/ast";
|
||||
import {
|
||||
Alert,
|
||||
Anchor,
|
||||
Autocomplete,
|
||||
Box,
|
||||
Button,
|
||||
CopyButton,
|
||||
Group,
|
||||
List,
|
||||
Pill,
|
||||
Text,
|
||||
SegmentedControl,
|
||||
Select,
|
||||
Skeleton,
|
||||
Stack,
|
||||
Table,
|
||||
} from "@mantine/core";
|
||||
import { escapeString } from "../../../lib/escapeString";
|
||||
import serializeNode from "../../../promql/serialize";
|
||||
import { SeriesResult } from "../../../api/responseTypes/series";
|
||||
import { useAPIQuery } from "../../../api/api";
|
||||
import { Metric } from "../../../api/responseTypes/query";
|
||||
import {
|
||||
IconAlertTriangle,
|
||||
IconArrowLeft,
|
||||
IconCheck,
|
||||
IconCodePlus,
|
||||
IconCopy,
|
||||
IconX,
|
||||
} from "@tabler/icons-react";
|
||||
import { formatNode } from "../../../promql/format";
|
||||
import classes from "./LabelsExplorer.module.css";
|
||||
|
||||
type LabelsExplorerProps = {
|
||||
metricName: string;
|
||||
insertText: (_text: string) => void;
|
||||
hideLabelsExplorer: () => void;
|
||||
};
|
||||
|
||||
const LabelsExplorer: FC<LabelsExplorerProps> = ({
|
||||
metricName,
|
||||
insertText,
|
||||
hideLabelsExplorer,
|
||||
}) => {
|
||||
const [expandedLabels, setExpandedLabels] = useState<string[]>([]);
|
||||
const [matchers, setMatchers] = useState<LabelMatcher[]>([]);
|
||||
const [newMatcher, setNewMatcher] = useState<LabelMatcher | null>(null);
|
||||
const [sortByCard, setSortByCard] = useState<boolean>(true);
|
||||
|
||||
const removeMatcher = (name: string) => {
|
||||
setMatchers(matchers.filter((m) => m.name !== name));
|
||||
};
|
||||
|
||||
const addMatcher = () => {
|
||||
if (newMatcher === null) {
|
||||
throw new Error("tried to add null label matcher");
|
||||
}
|
||||
|
||||
setMatchers([...matchers, newMatcher]);
|
||||
setNewMatcher(null);
|
||||
};
|
||||
|
||||
const matcherBadge = (m: LabelMatcher) => (
|
||||
<Pill
|
||||
key={m.name}
|
||||
size="md"
|
||||
withRemoveButton
|
||||
onRemove={() => {
|
||||
removeMatcher(m.name);
|
||||
}}
|
||||
className={classes.promqlPill}
|
||||
>
|
||||
<span className="promql-code">
|
||||
<span className="promql-label-name">{m.name}</span>
|
||||
{m.type}
|
||||
<span className="promql-string">"{escapeString(m.value)}"</span>
|
||||
</span>
|
||||
</Pill>
|
||||
);
|
||||
|
||||
const selector: VectorSelector = {
|
||||
type: nodeType.vectorSelector,
|
||||
name: metricName,
|
||||
matchers,
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
};
|
||||
|
||||
// Based on the selected pool (if any), load the list of targets.
|
||||
const { data, error, isLoading } = useAPIQuery<SeriesResult>({
|
||||
path: `/series`,
|
||||
params: {
|
||||
"match[]": serializeNode(selector),
|
||||
},
|
||||
});
|
||||
|
||||
// When new series data is loaded, update the corresponding label cardinality and example data.
|
||||
const [numSeries, sortedLabelCards, labelExamples] = useMemo(() => {
|
||||
const labelCardinalities: Record<string, number> = {};
|
||||
const labelExamples: Record<string, { value: string; count: number }[]> =
|
||||
{};
|
||||
|
||||
const labelValuesByName: Record<string, Record<string, number>> = {};
|
||||
|
||||
if (data !== undefined) {
|
||||
data.data.forEach((series: Metric) => {
|
||||
Object.entries(series).forEach(([ln, lv]) => {
|
||||
if (ln !== "__name__") {
|
||||
if (!(ln in labelValuesByName)) {
|
||||
labelValuesByName[ln] = { [lv]: 1 };
|
||||
} else {
|
||||
if (!(lv in labelValuesByName[ln])) {
|
||||
labelValuesByName[ln][lv] = 1;
|
||||
} else {
|
||||
labelValuesByName[ln][lv]++;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
Object.entries(labelValuesByName).forEach(([ln, lvs]) => {
|
||||
labelCardinalities[ln] = Object.keys(lvs).length;
|
||||
// labelExamples[ln] = Array.from({ length: Math.min(5, lvs.size) }, (i => () => i.next().value)(lvs.keys()));
|
||||
// Sort label values by their number of occurrences within this label name.
|
||||
labelExamples[ln] = Object.entries(lvs)
|
||||
.sort(([, aCnt], [, bCnt]) => bCnt - aCnt)
|
||||
.map(([lv, cnt]) => ({ value: lv, count: cnt }));
|
||||
});
|
||||
}
|
||||
|
||||
// Sort labels by cardinality if desired, so the labels with the most values are at the top.
|
||||
const sortedLabelCards = Object.entries(labelCardinalities).sort((a, b) =>
|
||||
sortByCard ? b[1] - a[1] : 0
|
||||
);
|
||||
|
||||
return [data?.data.length, sortedLabelCards, labelExamples];
|
||||
}, [data, sortByCard]);
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<Alert
|
||||
color="red"
|
||||
title="Error querying series"
|
||||
icon={<IconAlertTriangle size={14} />}
|
||||
>
|
||||
<strong>Error:</strong> {error.message}
|
||||
</Alert>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Stack fz="sm">
|
||||
<Stack style={{ overflow: "auto" }}>
|
||||
{/* Selector */}
|
||||
<Group align="center" mt="lg" wrap="nowrap">
|
||||
<Box w={70} fw={700} style={{ flexShrink: 0 }}>
|
||||
Selector:
|
||||
</Box>
|
||||
<Pill.Group>
|
||||
<Pill size="md" className={classes.promqlPill}>
|
||||
<span style={{ wordBreak: "break-word", whiteSpace: "pre" }}>
|
||||
{formatNode(selector, false)}
|
||||
</span>
|
||||
</Pill>
|
||||
</Pill.Group>
|
||||
<Group wrap="nowrap">
|
||||
<Button
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={() => insertText(serializeNode(selector))}
|
||||
leftSection={<IconCodePlus size={18} />}
|
||||
title="Insert selector at cursor and close explorer"
|
||||
>
|
||||
Insert
|
||||
</Button>
|
||||
<CopyButton value={serializeNode(selector)}>
|
||||
{({ copied, copy }) => (
|
||||
<Button
|
||||
variant="light"
|
||||
size="xs"
|
||||
leftSection={
|
||||
copied ? <IconCheck size={18} /> : <IconCopy size={18} />
|
||||
}
|
||||
onClick={copy}
|
||||
title="Copy selector to clipboard"
|
||||
>
|
||||
Copy
|
||||
</Button>
|
||||
)}
|
||||
</CopyButton>
|
||||
</Group>
|
||||
</Group>
|
||||
{/* Filters */}
|
||||
<Group align="center">
|
||||
<Box w={70} fw={700} style={{ flexShrink: 0 }}>
|
||||
Filters:
|
||||
</Box>
|
||||
|
||||
{matchers.length > 0 ? (
|
||||
<Pill.Group>{matchers.map((m) => matcherBadge(m))}</Pill.Group>
|
||||
) : (
|
||||
<>No label filters</>
|
||||
)}
|
||||
</Group>
|
||||
{/* Number of series */}
|
||||
<Group
|
||||
style={{ display: "flex", alignItems: "center", marginBottom: 25 }}
|
||||
>
|
||||
<Box w={70} fw={700} style={{ flexShrink: 0 }}>
|
||||
Results:
|
||||
</Box>
|
||||
<>{numSeries !== undefined ? `${numSeries} series` : "loading..."}</>
|
||||
</Group>
|
||||
</Stack>
|
||||
{/* Sort order */}
|
||||
<Group justify="space-between">
|
||||
<Box>
|
||||
<Button
|
||||
variant="light"
|
||||
size="xs"
|
||||
onClick={hideLabelsExplorer}
|
||||
leftSection={<IconArrowLeft size={18} />}
|
||||
>
|
||||
Back to all metrics
|
||||
</Button>
|
||||
</Box>
|
||||
<SegmentedControl
|
||||
w="fit-content"
|
||||
size="xs"
|
||||
value={sortByCard ? "cardinality" : "alphabetic"}
|
||||
onChange={(value) => setSortByCard(value === "cardinality")}
|
||||
data={[
|
||||
{ label: "By cardinality", value: "cardinality" },
|
||||
{ label: "Alphabetic", value: "alphabetic" },
|
||||
]}
|
||||
/>
|
||||
</Group>
|
||||
|
||||
{/* Labels and their values */}
|
||||
{isLoading ? (
|
||||
<Box mt="lg">
|
||||
{Array.from(Array(10), (_, i) => (
|
||||
<Skeleton key={i} height={40} mb={15} width="100%" />
|
||||
))}
|
||||
</Box>
|
||||
) : (
|
||||
<Table fz="sm">
|
||||
<Table.Thead>
|
||||
<Table.Tr>
|
||||
<Table.Th>Label</Table.Th>
|
||||
<Table.Th>Values</Table.Th>
|
||||
</Table.Tr>
|
||||
</Table.Thead>
|
||||
<Table.Tbody>
|
||||
{sortedLabelCards.map(([ln, card]) => (
|
||||
<Table.Tr key={ln}>
|
||||
<Table.Td w="50%">
|
||||
<form
|
||||
onSubmit={(e: React.FormEvent) => {
|
||||
// Without this, the page gets reloaded for forms that only have a single input field, see
|
||||
// https://stackoverflow.com/questions/1370021/why-does-forms-with-single-input-field-submit-upon-pressing-enter-key-in-input.
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<Group justify="space-between" align="baseline">
|
||||
<span className="promql-code promql-label-name">
|
||||
{ln}
|
||||
</span>
|
||||
{matchers.some((m) => m.name === ln) ? (
|
||||
matcherBadge(matchers.find((m) => m.name === ln)!)
|
||||
) : newMatcher?.name === ln ? (
|
||||
<Group wrap="nowrap" gap="xs">
|
||||
<Select
|
||||
size="xs"
|
||||
w={50}
|
||||
style={{ width: "auto" }}
|
||||
value={newMatcher.type}
|
||||
data={Object.values(matchType).map((mt) => ({
|
||||
value: mt,
|
||||
label: mt,
|
||||
}))}
|
||||
onChange={(_value, option) =>
|
||||
setNewMatcher({
|
||||
...newMatcher,
|
||||
type: option.value as matchType,
|
||||
})
|
||||
}
|
||||
/>
|
||||
<Autocomplete
|
||||
value={newMatcher.value}
|
||||
size="xs"
|
||||
placeholder="label value"
|
||||
onChange={(value) =>
|
||||
setNewMatcher({ ...newMatcher, value: value })
|
||||
}
|
||||
data={labelExamples[ln].map((ex) => ex.value)}
|
||||
autoFocus
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="xs"
|
||||
onClick={() => addMatcher()}
|
||||
style={{ flexShrink: 0 }}
|
||||
>
|
||||
Apply
|
||||
</Button>
|
||||
<Button
|
||||
variant="light"
|
||||
w={40}
|
||||
size="xs"
|
||||
onClick={() => setNewMatcher(null)}
|
||||
title="Cancel"
|
||||
style={{ flexShrink: 0 }}
|
||||
>
|
||||
<IconX size={18} />
|
||||
</Button>
|
||||
</Group>
|
||||
) : (
|
||||
<Button
|
||||
variant="light"
|
||||
size="xs"
|
||||
mr="xs"
|
||||
onClick={() =>
|
||||
setNewMatcher({
|
||||
name: ln,
|
||||
type: matchType.equal,
|
||||
value: "",
|
||||
})
|
||||
}
|
||||
>
|
||||
Filter...
|
||||
</Button>
|
||||
)}
|
||||
</Group>
|
||||
</form>
|
||||
</Table.Td>
|
||||
<Table.Td w="50%">
|
||||
<Text fw={700} fz="sm" my="xs">
|
||||
{card} value{card > 1 && "s"}
|
||||
</Text>
|
||||
<List size="sm" listStyleType="none">
|
||||
{(expandedLabels.includes(ln)
|
||||
? labelExamples[ln]
|
||||
: labelExamples[ln].slice(0, 5)
|
||||
).map(({ value, count }) => (
|
||||
<List.Item key={value}>
|
||||
<span
|
||||
className={`${classes.labelValue} promql-code promql-string`}
|
||||
onClick={() => {
|
||||
setMatchers([
|
||||
...matchers.filter((m) => m.name !== ln),
|
||||
{ name: ln, type: matchType.equal, value: value },
|
||||
]);
|
||||
setNewMatcher(null);
|
||||
}}
|
||||
title="Click to filter by value"
|
||||
>
|
||||
"{escapeString(value)}"
|
||||
</span>{" "}
|
||||
({count} series)
|
||||
</List.Item>
|
||||
))}
|
||||
|
||||
{expandedLabels.includes(ln) ? (
|
||||
<List.Item my="xs">
|
||||
<Anchor
|
||||
size="sm"
|
||||
href="#"
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
setExpandedLabels(
|
||||
expandedLabels.filter((l) => l != ln)
|
||||
);
|
||||
}}
|
||||
>
|
||||
Hide full values
|
||||
</Anchor>
|
||||
</List.Item>
|
||||
) : (
|
||||
labelExamples[ln].length > 5 && (
|
||||
<List.Item my="xs">
|
||||
<Anchor
|
||||
size="sm"
|
||||
href="#"
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
setExpandedLabels([...expandedLabels, ln]);
|
||||
}}
|
||||
>
|
||||
Show {labelExamples[ln].length - 5} more values...
|
||||
</Anchor>
|
||||
</List.Item>
|
||||
)
|
||||
)}
|
||||
</List>
|
||||
</Table.Td>
|
||||
</Table.Tr>
|
||||
))}
|
||||
</Table.Tbody>
|
||||
</Table>
|
||||
)}
|
||||
</Stack>
|
||||
);
|
||||
};
|
||||
|
||||
export default LabelsExplorer;
|
|
@ -0,0 +1,7 @@
|
|||
.typeLabel {
|
||||
color: light-dark(#008080, #14bfad);
|
||||
}
|
||||
|
||||
.helpLabel {
|
||||
color: light-dark(#800000, #ff8585);
|
||||
}
|
|
@ -1,19 +1,14 @@
|
|||
import { FC, useState } from "react";
|
||||
import { FC, useMemo, useState } from "react";
|
||||
import { useSuspenseAPIQuery } from "../../../api/api";
|
||||
import { MetadataResult } from "../../../api/responseTypes/metadata";
|
||||
import {
|
||||
ActionIcon,
|
||||
Alert,
|
||||
Anchor,
|
||||
Group,
|
||||
Stack,
|
||||
Table,
|
||||
TextInput,
|
||||
} from "@mantine/core";
|
||||
import { ActionIcon, Group, Stack, Table, TextInput } from "@mantine/core";
|
||||
import React from "react";
|
||||
import { Fuzzy } from "@nexucis/fuzzy";
|
||||
import sanitizeHTML from "sanitize-html";
|
||||
import { IconCopy, IconTerminal, IconZoomIn } from "@tabler/icons-react";
|
||||
import { IconCodePlus, IconCopy, IconZoomCode } from "@tabler/icons-react";
|
||||
import LabelsExplorer from "./LabelsExplorer";
|
||||
import { useDebouncedValue } from "@mantine/hooks";
|
||||
import classes from "./MetricsExplorer.module.css";
|
||||
|
||||
const fuz = new Fuzzy({
|
||||
pre: '<b style="color: rgb(0, 102, 191)">',
|
||||
|
@ -43,14 +38,22 @@ const MetricsExplorer: FC<MetricsExplorerProps> = ({
|
|||
insertText,
|
||||
close,
|
||||
}) => {
|
||||
// const metricMeta = promAPI.useFetchAPI<MetricMetadata>(`/api/v1/metadata`);
|
||||
console.log("metricNames");
|
||||
// Fetch the alerting rules data.
|
||||
const { data } = useSuspenseAPIQuery<MetadataResult>({
|
||||
path: `/metadata`,
|
||||
});
|
||||
const [selectedMetric, setSelectedMetric] = useState<string | null>(null);
|
||||
|
||||
const [filterText, setFilterText] = useState<string>("");
|
||||
const [filterText, setFilterText] = useState("");
|
||||
const [debouncedFilterText] = useDebouncedValue(filterText, 250);
|
||||
|
||||
const searchMatches = useMemo(() => {
|
||||
if (debouncedFilterText === "") {
|
||||
return metricNames.map((m) => ({ original: m, rendered: m }));
|
||||
}
|
||||
return getSearchMatches(debouncedFilterText, metricNames);
|
||||
}, [debouncedFilterText, metricNames]);
|
||||
|
||||
const getMeta = (m: string) =>
|
||||
data.data[m.replace(/(_count|_sum|_bucket)$/, "")] || [
|
||||
|
@ -59,14 +62,14 @@ const MetricsExplorer: FC<MetricsExplorerProps> = ({
|
|||
|
||||
if (selectedMetric !== null) {
|
||||
return (
|
||||
<Alert>
|
||||
TODO: The labels explorer for a metric still needs to be implemented.
|
||||
<br />
|
||||
<br />
|
||||
<Anchor fz="1em" onClick={() => setSelectedMetric(null)}>
|
||||
Back to metrics list
|
||||
</Anchor>
|
||||
</Alert>
|
||||
<LabelsExplorer
|
||||
metricName={selectedMetric}
|
||||
insertText={(text: string) => {
|
||||
insertText(text);
|
||||
close();
|
||||
}}
|
||||
hideLabelsExplorer={() => setSelectedMetric(null)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -90,18 +93,19 @@ const MetricsExplorer: FC<MetricsExplorerProps> = ({
|
|||
</Table.Tr>
|
||||
</Table.Thead>
|
||||
<Table.Tbody>
|
||||
{(filterText === ""
|
||||
? metricNames.map((m) => ({ original: m, rendered: m }))
|
||||
: getSearchMatches(filterText, metricNames)
|
||||
).map((m) => (
|
||||
{searchMatches.map((m) => (
|
||||
<Table.Tr key={m.original}>
|
||||
<Table.Td>
|
||||
<Group justify="space-between">
|
||||
{debouncedFilterText === "" ? (
|
||||
m.original
|
||||
) : (
|
||||
<div
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: sanitizeHTML(m.rendered, sanitizeOpts),
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
<Group gap="xs">
|
||||
<ActionIcon
|
||||
size="sm"
|
||||
|
@ -112,7 +116,7 @@ const MetricsExplorer: FC<MetricsExplorerProps> = ({
|
|||
setSelectedMetric(m.original);
|
||||
}}
|
||||
>
|
||||
<IconZoomIn
|
||||
<IconZoomCode
|
||||
style={{ width: "70%", height: "70%" }}
|
||||
stroke={1.5}
|
||||
/>
|
||||
|
@ -121,13 +125,13 @@ const MetricsExplorer: FC<MetricsExplorerProps> = ({
|
|||
size="sm"
|
||||
color="gray"
|
||||
variant="light"
|
||||
title="Insert at cursor"
|
||||
title="Insert at cursor and close explorer"
|
||||
onClick={() => {
|
||||
insertText(m.original);
|
||||
close();
|
||||
}}
|
||||
>
|
||||
<IconTerminal
|
||||
<IconCodePlus
|
||||
style={{ width: "70%", height: "70%" }}
|
||||
stroke={1.5}
|
||||
/>
|
||||
|
@ -149,18 +153,18 @@ const MetricsExplorer: FC<MetricsExplorerProps> = ({
|
|||
</Group>
|
||||
</Group>
|
||||
</Table.Td>
|
||||
<Table.Td c="cyan.9" fs="italic" px="lg">
|
||||
<Table.Td px="lg">
|
||||
{getMeta(m.original).map((meta, idx) => (
|
||||
<React.Fragment key={idx}>
|
||||
{meta.type}
|
||||
<span className={classes.typeLabel}>{meta.type}</span>
|
||||
<br />
|
||||
</React.Fragment>
|
||||
))}
|
||||
</Table.Td>
|
||||
<Table.Td c="pink.9">
|
||||
<Table.Td>
|
||||
{getMeta(m.original).map((meta, idx) => (
|
||||
<React.Fragment key={idx}>
|
||||
{meta.help}
|
||||
<span className={classes.helpLabel}>{meta.help}</span>
|
||||
<br />
|
||||
</React.Fragment>
|
||||
))}
|
||||
|
|
35
web/ui/mantine-ui/src/promql.css
Normal file
35
web/ui/mantine-ui/src/promql.css
Normal file
|
@ -0,0 +1,35 @@
|
|||
.promql-code {
|
||||
font-family: "DejaVu Sans Mono", monospace;
|
||||
}
|
||||
|
||||
.promql-keyword {
|
||||
color: light-dark(#008080, #14bfad);
|
||||
}
|
||||
|
||||
.promql-metric-name {
|
||||
color: light-dark(#000, #fff);
|
||||
}
|
||||
|
||||
.promql-label-name {
|
||||
color: light-dark(#800000, #ff8585);
|
||||
}
|
||||
|
||||
.promql-string {
|
||||
color: light-dark(#a31515, #fca5a5);
|
||||
}
|
||||
|
||||
.promql-paren,
|
||||
.promql-brace {
|
||||
}
|
||||
|
||||
.promql-ellipsis {
|
||||
color: light-dark(rgb(170, 170, 170), rgb(170, 170, 170));
|
||||
}
|
||||
|
||||
.promql-duration {
|
||||
color: light-dark(#09885a, #22c55e);
|
||||
}
|
||||
|
||||
.promql-number {
|
||||
color: light-dark(#09885a, #22c55e);
|
||||
}
|
210
web/ui/mantine-ui/src/promql/ast.ts
Normal file
210
web/ui/mantine-ui/src/promql/ast.ts
Normal file
|
@ -0,0 +1,210 @@
|
|||
export enum nodeType {
|
||||
aggregation = 'aggregation',
|
||||
binaryExpr = 'binaryExpr',
|
||||
call = 'call',
|
||||
matrixSelector = 'matrixSelector',
|
||||
subquery = 'subquery',
|
||||
numberLiteral = 'numberLiteral',
|
||||
parenExpr = 'parenExpr',
|
||||
stringLiteral = 'stringLiteral',
|
||||
unaryExpr = 'unaryExpr',
|
||||
vectorSelector = 'vectorSelector',
|
||||
placeholder = 'placeholder',
|
||||
}
|
||||
|
||||
export enum aggregationType {
|
||||
sum = 'sum',
|
||||
min = 'min',
|
||||
max = 'max',
|
||||
avg = 'avg',
|
||||
stddev = 'stddev',
|
||||
stdvar = 'stdvar',
|
||||
count = 'count',
|
||||
group = 'group',
|
||||
countValues = 'count_values',
|
||||
bottomk = 'bottomk',
|
||||
topk = 'topk',
|
||||
quantile = 'quantile',
|
||||
limitK = 'limitk',
|
||||
limitRatio = 'limit_ratio',
|
||||
}
|
||||
|
||||
export enum binaryOperatorType {
|
||||
add = '+',
|
||||
sub = '-',
|
||||
mul = '*',
|
||||
div = '/',
|
||||
mod = '%',
|
||||
pow = '^',
|
||||
eql = '==',
|
||||
neq = '!=',
|
||||
gtr = '>',
|
||||
lss = '<',
|
||||
gte = '>=',
|
||||
lte = '<=',
|
||||
and = 'and',
|
||||
or = 'or',
|
||||
unless = 'unless',
|
||||
atan2 = 'atan2',
|
||||
}
|
||||
|
||||
export const compOperatorTypes: binaryOperatorType[] = [
|
||||
binaryOperatorType.eql,
|
||||
binaryOperatorType.neq,
|
||||
binaryOperatorType.gtr,
|
||||
binaryOperatorType.lss,
|
||||
binaryOperatorType.gte,
|
||||
binaryOperatorType.lte,
|
||||
];
|
||||
|
||||
export const setOperatorTypes: binaryOperatorType[] = [
|
||||
binaryOperatorType.and,
|
||||
binaryOperatorType.or,
|
||||
binaryOperatorType.unless,
|
||||
];
|
||||
|
||||
export enum unaryOperatorType {
|
||||
plus = '+',
|
||||
minus = '-',
|
||||
}
|
||||
|
||||
export enum vectorMatchCardinality {
|
||||
oneToOne = 'one-to-one',
|
||||
manyToOne = 'many-to-one',
|
||||
oneToMany = 'one-to-many',
|
||||
manyToMany = 'many-to-many',
|
||||
}
|
||||
|
||||
export enum valueType {
|
||||
// TODO: 'none' should never make it out of Prometheus. Do we need this here?
|
||||
none = 'none',
|
||||
vector = 'vector',
|
||||
scalar = 'scalar',
|
||||
matrix = 'matrix',
|
||||
string = 'string',
|
||||
}
|
||||
|
||||
export enum matchType {
|
||||
equal = '=',
|
||||
notEqual = '!=',
|
||||
matchRegexp = '=~',
|
||||
matchNotRegexp = '!~',
|
||||
}
|
||||
|
||||
export interface Func {
|
||||
name: string;
|
||||
argTypes: valueType[];
|
||||
variadic: number;
|
||||
returnType: valueType;
|
||||
}
|
||||
|
||||
export interface LabelMatcher {
|
||||
type: matchType;
|
||||
name: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface VectorMatching {
|
||||
card: vectorMatchCardinality;
|
||||
labels: string[];
|
||||
on: boolean;
|
||||
include: string[];
|
||||
}
|
||||
|
||||
export type StartOrEnd = 'start' | 'end' | null;
|
||||
|
||||
// AST Node Types.
|
||||
|
||||
export interface Aggregation {
|
||||
type: nodeType.aggregation;
|
||||
expr: ASTNode;
|
||||
op: aggregationType;
|
||||
param: ASTNode | null;
|
||||
grouping: string[];
|
||||
without: boolean;
|
||||
}
|
||||
|
||||
export interface BinaryExpr {
|
||||
type: nodeType.binaryExpr;
|
||||
op: binaryOperatorType;
|
||||
lhs: ASTNode;
|
||||
rhs: ASTNode;
|
||||
matching: VectorMatching | null;
|
||||
bool: boolean;
|
||||
}
|
||||
|
||||
export interface Call {
|
||||
type: nodeType.call;
|
||||
func: Func;
|
||||
args: ASTNode[];
|
||||
}
|
||||
|
||||
export interface MatrixSelector {
|
||||
type: nodeType.matrixSelector;
|
||||
name: string;
|
||||
matchers: LabelMatcher[];
|
||||
range: number;
|
||||
offset: number;
|
||||
timestamp: number | null;
|
||||
startOrEnd: StartOrEnd;
|
||||
}
|
||||
|
||||
export interface Subquery {
|
||||
type: nodeType.subquery;
|
||||
expr: ASTNode;
|
||||
range: number;
|
||||
offset: number;
|
||||
step: number;
|
||||
timestamp: number | null;
|
||||
startOrEnd: StartOrEnd;
|
||||
}
|
||||
|
||||
export interface NumberLiteral {
|
||||
type: nodeType.numberLiteral;
|
||||
val: string; // Can't be 'number' because JS doesn't support NaN/Inf/-Inf etc.
|
||||
}
|
||||
|
||||
export interface ParenExpr {
|
||||
type: nodeType.parenExpr;
|
||||
expr: ASTNode;
|
||||
}
|
||||
|
||||
export interface StringLiteral {
|
||||
type: nodeType.stringLiteral;
|
||||
val: string;
|
||||
}
|
||||
|
||||
export interface UnaryExpr {
|
||||
type: nodeType.unaryExpr;
|
||||
op: unaryOperatorType;
|
||||
expr: ASTNode;
|
||||
}
|
||||
|
||||
export interface VectorSelector {
|
||||
type: nodeType.vectorSelector;
|
||||
name: string;
|
||||
matchers: LabelMatcher[];
|
||||
offset: number;
|
||||
timestamp: number | null;
|
||||
startOrEnd: StartOrEnd;
|
||||
}
|
||||
|
||||
export interface Placeholder {
|
||||
type: nodeType.placeholder;
|
||||
children: ASTNode[];
|
||||
}
|
||||
|
||||
type ASTNode =
|
||||
| Aggregation
|
||||
| BinaryExpr
|
||||
| Call
|
||||
| MatrixSelector
|
||||
| Subquery
|
||||
| NumberLiteral
|
||||
| ParenExpr
|
||||
| StringLiteral
|
||||
| UnaryExpr
|
||||
| VectorSelector
|
||||
| Placeholder;
|
||||
|
||||
export default ASTNode;
|
2219
web/ui/mantine-ui/src/promql/binOp.test.ts
Normal file
2219
web/ui/mantine-ui/src/promql/binOp.test.ts
Normal file
File diff suppressed because it is too large
Load diff
419
web/ui/mantine-ui/src/promql/binOp.ts
Normal file
419
web/ui/mantine-ui/src/promql/binOp.ts
Normal file
|
@ -0,0 +1,419 @@
|
|||
import { InstantSample, Metric } from "../api/responseTypes/query";
|
||||
import {
|
||||
formatPrometheusFloat,
|
||||
parsePrometheusFloat,
|
||||
} from "../lib/formatFloatValue";
|
||||
import {
|
||||
binaryOperatorType,
|
||||
vectorMatchCardinality,
|
||||
VectorMatching,
|
||||
} from "./ast";
|
||||
import { isComparisonOperator } from "./utils";
|
||||
|
||||
// We use a special (otherwise invalid) sample value to indicate that
// a sample has been filtered away by a comparison operator.
export const filteredSampleValue = "filtered";

// The different ways in which the series of a match group can violate the
// cardinality constraints of a vector matching.
export enum MatchErrorType {
  multipleMatchesForOneToOneMatching = "multipleMatchesForOneToOneMatching",
  multipleMatchesOnBothSides = "multipleMatchesOnBothSides",
  multipleMatchesOnOneSide = "multipleMatchesOnOneSide",
}

// There's no group_x() modifier, but one of the sides has multiple matches.
export interface MultipleMatchesForOneToOneMatchingError {
  type: MatchErrorType.multipleMatchesForOneToOneMatching;
  // The side ("left" or "right") on which the duplicate matches occur.
  dupeSide: "left" | "right";
}

// There's no group_x() modifier and there are multiple matches on both sides.
// This is good to keep as a separate error from MultipleMatchesForOneToOneMatchingError
// because it can't be fixed by adding group_x() but rather by expanding the set of
// matching labels.
export interface MultipleMatchesOnBothSidesError {
  type: MatchErrorType.multipleMatchesOnBothSides;
}

// There's a group_x() modifier, but the "one" side has multiple matches. This could mean
// that either the matching labels are not sufficient or that group_x() is the wrong way around.
export interface MultipleMatchesOnOneSideError {
  type: MatchErrorType.multipleMatchesOnOneSide;
}

// Union of all vector matching errors, discriminated by the "type" field.
export type VectorMatchError =
  | MultipleMatchesForOneToOneMatchingError
  | MultipleMatchesOnBothSidesError
  | MultipleMatchesOnOneSideError;

// A single match group as produced by a vector-to-vector binary operation, with all of its
// left-hand side and right-hand side series, as well as a result and error, if applicable.
export type BinOpMatchGroup = {
  groupLabels: Metric;
  rhs: InstantSample[];
  rhsCount: number; // Number of samples before applying limits.
  lhs: InstantSample[];
  lhsCount: number; // Number of samples before applying limits.
  result: {
    sample: InstantSample;
    // Which "many"-side sample did this sample come from? This is needed for use cases where
    // we want to style the corresponding "many" side input sample and the result sample in
    // a similar way (e.g. shading them in the same color) to be able to trace which "many"
    // side sample a result sample came from.
    manySideIdx: number;
  }[];
  error: VectorMatchError | null;
};

// The result of computeVectorVectorBinOp(), modeling the match groups produced by a
// vector-to-vector binary operation, keyed by the group's label signature.
export type BinOpMatchGroups = {
  [sig: string]: BinOpMatchGroup;
};

// Overall outcome of a vector-to-vector binary operation.
export type BinOpResult = {
  groups: BinOpMatchGroups;
  // Can differ from the number of returned groups if a limit was applied.
  numGroups: number;
};
|
||||
|
||||
// FNV-1a hash parameters.
|
||||
const FNV_PRIME = 0x01000193;
|
||||
const OFFSET_BASIS = 0x811c9dc5;
|
||||
const SEP = "\uD800".charCodeAt(0); // Using a Unicode "high surrogate" code point as a separator. These should not appear by themselves (without a low surrogate pairing) in a valid Unicode string.
|
||||
|
||||
// Compute an FNV-1a hash over a given set of values in order to
|
||||
// produce a signature for a match group.
|
||||
export const fnv1a = (values: string[]): string => {
|
||||
let h = OFFSET_BASIS;
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
// Skip labels that are not set on the metric.
|
||||
if (values[i] !== undefined) {
|
||||
for (let c = 0; c < values[i].length; c++) {
|
||||
h ^= values[i].charCodeAt(c);
|
||||
h *= FNV_PRIME;
|
||||
}
|
||||
}
|
||||
|
||||
if (i < values.length - 1) {
|
||||
h ^= SEP;
|
||||
h *= FNV_PRIME;
|
||||
}
|
||||
}
|
||||
return h.toString();
|
||||
};
|
||||
|
||||
// Return a function that generates the match group signature for a given label set.
|
||||
const signatureFunc = (on: boolean, names: string[]) => {
|
||||
names.sort();
|
||||
|
||||
if (on) {
|
||||
return (lset: Metric): string => {
|
||||
return fnv1a(names.map((ln: string) => lset[ln]));
|
||||
};
|
||||
}
|
||||
|
||||
return (lset: Metric): string =>
|
||||
fnv1a(
|
||||
Object.keys(lset)
|
||||
.filter((ln) => !names.includes(ln) && ln !== "__name__")
|
||||
.map((ln) => lset[ln])
|
||||
);
|
||||
};
|
||||
|
||||
// For a given metric, return only the labels used for matching.
|
||||
const matchLabels = (metric: Metric, on: boolean, labels: string[]): Metric => {
|
||||
const result: Metric = {};
|
||||
for (const name in metric) {
|
||||
if (labels.includes(name) === on && (on || name !== "__name__")) {
|
||||
result[name] = metric[name];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
export const scalarBinOp = (
|
||||
op: binaryOperatorType,
|
||||
lhs: number,
|
||||
rhs: number
|
||||
): number => {
|
||||
const { value, keep } = vectorElemBinop(op, lhs, rhs);
|
||||
if (isComparisonOperator(op)) {
|
||||
return Number(keep);
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
export const vectorElemBinop = (
|
||||
op: binaryOperatorType,
|
||||
lhs: number,
|
||||
rhs: number
|
||||
): { value: number; keep: boolean } => {
|
||||
switch (op) {
|
||||
case binaryOperatorType.add:
|
||||
return { value: lhs + rhs, keep: true };
|
||||
case binaryOperatorType.sub:
|
||||
return { value: lhs - rhs, keep: true };
|
||||
case binaryOperatorType.mul:
|
||||
return { value: lhs * rhs, keep: true };
|
||||
case binaryOperatorType.div:
|
||||
return { value: lhs / rhs, keep: true };
|
||||
case binaryOperatorType.pow:
|
||||
return { value: Math.pow(lhs, rhs), keep: true };
|
||||
case binaryOperatorType.mod:
|
||||
return { value: lhs % rhs, keep: true };
|
||||
case binaryOperatorType.eql:
|
||||
return { value: lhs, keep: lhs === rhs };
|
||||
case binaryOperatorType.neq:
|
||||
return { value: lhs, keep: lhs !== rhs };
|
||||
case binaryOperatorType.gtr:
|
||||
return { value: lhs, keep: lhs > rhs };
|
||||
case binaryOperatorType.lss:
|
||||
return { value: lhs, keep: lhs < rhs };
|
||||
case binaryOperatorType.gte:
|
||||
return { value: lhs, keep: lhs >= rhs };
|
||||
case binaryOperatorType.lte:
|
||||
return { value: lhs, keep: lhs <= rhs };
|
||||
case binaryOperatorType.atan2:
|
||||
return { value: Math.atan2(lhs, rhs), keep: true };
|
||||
default:
|
||||
throw new Error("invalid binop");
|
||||
}
|
||||
};
|
||||
|
||||
// Operations that change the metric's original meaning should drop the metric name from the result.
|
||||
const shouldDropMetricName = (op: binaryOperatorType): boolean =>
|
||||
[
|
||||
binaryOperatorType.add,
|
||||
binaryOperatorType.sub,
|
||||
binaryOperatorType.mul,
|
||||
binaryOperatorType.div,
|
||||
binaryOperatorType.pow,
|
||||
binaryOperatorType.mod,
|
||||
binaryOperatorType.atan2,
|
||||
].includes(op);
|
||||
|
||||
// Compute the time series labels for the result metric.
|
||||
export const resultMetric = (
|
||||
lhs: Metric,
|
||||
rhs: Metric,
|
||||
op: binaryOperatorType,
|
||||
matching: VectorMatching
|
||||
): Metric => {
|
||||
const result: Metric = {};
|
||||
|
||||
// Start out with all labels from the LHS.
|
||||
for (const name in lhs) {
|
||||
result[name] = lhs[name];
|
||||
}
|
||||
|
||||
// Drop metric name for operations that change the metric's meaning.
|
||||
if (shouldDropMetricName(op)) {
|
||||
delete result.__name__;
|
||||
}
|
||||
|
||||
// Keep only match group labels for 1:1 matches.
|
||||
if (matching.card === vectorMatchCardinality.oneToOne) {
|
||||
if (matching.on) {
|
||||
// Drop all labels that are not in the "on" clause.
|
||||
for (const name in result) {
|
||||
if (!matching.labels.includes(name)) {
|
||||
delete result[name];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Drop all labels that are in the "ignoring" clause.
|
||||
for (const name of matching.labels) {
|
||||
delete result[name];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Include extra labels from the RHS that were mentioned in a group_x(...) modifier.
|
||||
matching.include.forEach((name) => {
|
||||
if (name in rhs) {
|
||||
result[name] = rhs[name];
|
||||
} else {
|
||||
// If we are trying to include a label from the "one" side that is not actually set there,
|
||||
// we need to make sure that we don't accidentally take its value from the "many" side
|
||||
// if it exists there.
|
||||
//
|
||||
// Example to provoke this case:
|
||||
//
|
||||
// up == on(job, instance) group_left(__name__) node_exporter_build_info*1
|
||||
delete result[name];
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
// Compute the match groups and results for each match group for a binary operator between two vectors.
|
||||
// In the error case, the match groups are still populated and returned, but the error field is set for
|
||||
// the respective group. Results are not populated for error cases, since especially in the case of a
|
||||
// many-to-many matching, the cross-product output can become prohibitively expensive.
|
||||
export const computeVectorVectorBinOp = (
|
||||
op: binaryOperatorType,
|
||||
matching: VectorMatching,
|
||||
bool: boolean,
|
||||
lhs: InstantSample[],
|
||||
rhs: InstantSample[],
|
||||
limits?: {
|
||||
maxGroups?: number;
|
||||
maxSeriesPerGroup?: number;
|
||||
}
|
||||
): BinOpResult => {
|
||||
// For the simplification of further calculations, we assume that the "one" side of a one-to-many match
|
||||
// is always the right-hand side of the binop and swap otherwise to ensure this. We swap back in the end.
|
||||
[lhs, rhs] =
|
||||
matching.card === vectorMatchCardinality.oneToMany
|
||||
? [rhs, lhs]
|
||||
: [lhs, rhs];
|
||||
|
||||
const groups: BinOpMatchGroups = {};
|
||||
const sigf = signatureFunc(matching.on, matching.labels);
|
||||
|
||||
// While we only use this set to compute a count of limited groups in the end, we can encounter each
|
||||
// group multiple times (since multiple series can map to the same group). So we need to use a set
|
||||
// to track which groups we've already counted.
|
||||
const outOfLimitGroups = new Set<string>();
|
||||
|
||||
// Add all RHS samples to the grouping map.
|
||||
rhs.forEach((rs) => {
|
||||
const sig = sigf(rs.metric);
|
||||
|
||||
if (!(sig in groups)) {
|
||||
if (limits?.maxGroups && Object.keys(groups).length >= limits.maxGroups) {
|
||||
outOfLimitGroups.add(sig);
|
||||
return;
|
||||
}
|
||||
|
||||
groups[sig] = {
|
||||
groupLabels: matchLabels(rs.metric, matching.on, matching.labels),
|
||||
lhs: [],
|
||||
lhsCount: 0,
|
||||
rhs: [],
|
||||
rhsCount: 0,
|
||||
result: [],
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
!limits?.maxSeriesPerGroup ||
|
||||
groups[sig].rhsCount < limits.maxSeriesPerGroup
|
||||
) {
|
||||
groups[sig].rhs.push(rs);
|
||||
}
|
||||
groups[sig].rhsCount++;
|
||||
});
|
||||
|
||||
// Add all LHS samples to the grouping map.
|
||||
lhs.forEach((ls) => {
|
||||
const sig = sigf(ls.metric);
|
||||
|
||||
if (!(sig in groups)) {
|
||||
if (limits?.maxGroups && Object.keys(groups).length >= limits.maxGroups) {
|
||||
outOfLimitGroups.add(sig);
|
||||
return;
|
||||
}
|
||||
|
||||
groups[sig] = {
|
||||
groupLabels: matchLabels(ls.metric, matching.on, matching.labels),
|
||||
lhs: [],
|
||||
lhsCount: 0,
|
||||
rhs: [],
|
||||
rhsCount: 0,
|
||||
result: [],
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
!limits?.maxSeriesPerGroup ||
|
||||
groups[sig].lhsCount < limits.maxSeriesPerGroup
|
||||
) {
|
||||
groups[sig].lhs.push(ls);
|
||||
}
|
||||
groups[sig].lhsCount++;
|
||||
});
|
||||
|
||||
// Annotate the match groups with errors (if any) and populate the results.
|
||||
Object.values(groups).forEach((mg) => {
|
||||
if (matching.card === vectorMatchCardinality.oneToOne) {
|
||||
if (mg.lhs.length > 1 && mg.rhs.length > 1) {
|
||||
mg.error = { type: MatchErrorType.multipleMatchesOnBothSides };
|
||||
} else if (mg.lhs.length > 1 || mg.rhs.length > 1) {
|
||||
mg.error = {
|
||||
type: MatchErrorType.multipleMatchesForOneToOneMatching,
|
||||
dupeSide: mg.lhs.length > 1 ? "left" : "right",
|
||||
};
|
||||
}
|
||||
} else if (mg.rhs.length > 1) {
|
||||
// Check for dupes on the "one" side in one-to-many or many-to-one matching.
|
||||
mg.error = {
|
||||
type: MatchErrorType.multipleMatchesOnOneSide,
|
||||
};
|
||||
}
|
||||
|
||||
if (mg.error) {
|
||||
// We don't populate results for error cases, as especially in the case of a
|
||||
// many-to-many matching, the cross-product output can become expensive,
|
||||
// and the LHS/RHS are sufficient to diagnose the matching problem.
|
||||
return;
|
||||
}
|
||||
|
||||
// Calculate the results for this match group.
|
||||
mg.rhs.forEach((rs) => {
|
||||
mg.lhs.forEach((ls, lIdx) => {
|
||||
if (!ls.value || !rs.value) {
|
||||
// TODO: Implement native histogram support.
|
||||
throw new Error("native histogram support not implemented yet");
|
||||
}
|
||||
|
||||
const [vl, vr] =
|
||||
matching.card !== vectorMatchCardinality.oneToMany
|
||||
? [ls.value[1], rs.value[1]]
|
||||
: [rs.value[1], ls.value[1]];
|
||||
let { value, keep } = vectorElemBinop(
|
||||
op,
|
||||
parsePrometheusFloat(vl),
|
||||
parsePrometheusFloat(vr)
|
||||
);
|
||||
|
||||
const metric = resultMetric(ls.metric, rs.metric, op, matching);
|
||||
if (bool) {
|
||||
value = keep ? 1.0 : 0.0;
|
||||
delete metric.__name__;
|
||||
}
|
||||
|
||||
mg.result.push({
|
||||
sample: {
|
||||
metric: metric,
|
||||
value: [
|
||||
ls.value[0],
|
||||
keep || bool ? formatPrometheusFloat(value) : filteredSampleValue,
|
||||
],
|
||||
},
|
||||
manySideIdx: lIdx,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// If we originally swapped the LHS and RHS, swap them back to the original order.
|
||||
if (matching.card === vectorMatchCardinality.oneToMany) {
|
||||
Object.keys(groups).forEach((sig) => {
|
||||
[groups[sig].lhs, groups[sig].rhs] = [groups[sig].rhs, groups[sig].lhs];
|
||||
[groups[sig].lhsCount, groups[sig].rhsCount] = [
|
||||
groups[sig].rhsCount,
|
||||
groups[sig].lhsCount,
|
||||
];
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
groups,
|
||||
numGroups: Object.keys(groups).length + outOfLimitGroups.size,
|
||||
};
|
||||
};
|
140
web/ui/mantine-ui/src/promql/cmd/gen_functions_docs/main.go
Normal file
140
web/ui/mantine-ui/src/promql/cmd/gen_functions_docs/main.go
Normal file
|
@ -0,0 +1,140 @@
|
|||
// Copyright 2022 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/regexp"
|
||||
"github.com/russross/blackfriday/v2"
|
||||
)
|
||||
|
||||
// funcDocsRe matches the start of a per-function section in functions.md:
// either a "## `name()`" heading (function name captured in group 1) or the
// special "## Trigonometric Functions" umbrella heading (captured in group 2).
var funcDocsRe = regexp.MustCompile("^## `(.+)\\(\\)`\n$|^## (Trigonometric Functions)\n$")
|
||||
|
||||
func main() {
|
||||
resp, err := http.Get("https://raw.githubusercontent.com/prometheus/prometheus/master/docs/querying/functions.md")
|
||||
if err != nil {
|
||||
log.Fatalln("Failed to fetch function docs:", err)
|
||||
}
|
||||
if resp.StatusCode != 200 {
|
||||
log.Fatalln("Bad status code while fetching function docs:", resp.Status)
|
||||
}
|
||||
|
||||
funcDocs := map[string]string{}
|
||||
|
||||
r := bufio.NewReader(resp.Body)
|
||||
currentFunc := ""
|
||||
currentDocs := ""
|
||||
|
||||
saveCurrent := func() {
|
||||
switch currentFunc {
|
||||
case "<aggregation>_over_time":
|
||||
for _, fn := range []string{
|
||||
"avg_over_time",
|
||||
"min_over_time",
|
||||
"max_over_time",
|
||||
"sum_over_time",
|
||||
"count_over_time",
|
||||
"quantile_over_time",
|
||||
"stddev_over_time",
|
||||
"stdvar_over_time",
|
||||
"last_over_time",
|
||||
"present_over_time",
|
||||
"mad_over_time",
|
||||
} {
|
||||
funcDocs[fn] = currentDocs
|
||||
}
|
||||
case "Trigonometric Functions":
|
||||
for _, fn := range []string{
|
||||
"acos",
|
||||
"acosh",
|
||||
"asin",
|
||||
"asinh",
|
||||
"atan",
|
||||
"atanh",
|
||||
"cos",
|
||||
"cosh",
|
||||
"sin",
|
||||
"sinh",
|
||||
"tan",
|
||||
"tanh",
|
||||
"deg",
|
||||
"pi",
|
||||
"rad",
|
||||
} {
|
||||
funcDocs[fn] = currentDocs
|
||||
}
|
||||
default:
|
||||
funcDocs[currentFunc] = currentDocs
|
||||
}
|
||||
}
|
||||
|
||||
for {
|
||||
line, err := r.ReadString('\n')
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
saveCurrent()
|
||||
break
|
||||
}
|
||||
log.Fatalln("Error reading response body:", err)
|
||||
}
|
||||
|
||||
matches := funcDocsRe.FindStringSubmatch(line)
|
||||
if len(matches) > 0 {
|
||||
if currentFunc != "" {
|
||||
saveCurrent()
|
||||
}
|
||||
currentDocs = ""
|
||||
|
||||
currentFunc = string(matches[1])
|
||||
if matches[2] != "" {
|
||||
// This is the case for "## Trigonometric Functions"
|
||||
currentFunc = matches[2]
|
||||
}
|
||||
} else {
|
||||
currentDocs += line
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("import React from 'react';")
|
||||
fmt.Println("")
|
||||
fmt.Println("const funcDocs: Record<string, React.ReactNode> = {")
|
||||
|
||||
funcNames := make([]string, 0, len(funcDocs))
|
||||
for k := range funcDocs {
|
||||
funcNames = append(funcNames, k)
|
||||
}
|
||||
sort.Strings(funcNames)
|
||||
for _, fn := range funcNames {
|
||||
// Translate:
|
||||
// { ===> {'{'}
|
||||
// } ===> {'}'}
|
||||
//
|
||||
// TODO: Make this set of conflicting string replacements less hacky.
|
||||
jsxEscapedDocs := strings.ReplaceAll(funcDocs[fn], "{", `__LEFT_BRACE__'{'__RIGHT_BRACE__`)
|
||||
jsxEscapedDocs = strings.ReplaceAll(jsxEscapedDocs, "}", `__LEFT_BRACE__'}'__RIGHT_BRACE__`)
|
||||
jsxEscapedDocs = strings.ReplaceAll(jsxEscapedDocs, "__LEFT_BRACE__", "{")
|
||||
jsxEscapedDocs = strings.ReplaceAll(jsxEscapedDocs, "__RIGHT_BRACE__", "}")
|
||||
fmt.Printf(" '%s': <>%s</>,\n", fn, string(blackfriday.Run([]byte(jsxEscapedDocs))))
|
||||
}
|
||||
fmt.Println("};")
|
||||
fmt.Println("")
|
||||
fmt.Println("export default funcDocs;")
|
||||
}
|
50
web/ui/mantine-ui/src/promql/cmd/gen_functions_list/main.go
Normal file
50
web/ui/mantine-ui/src/promql/cmd/gen_functions_list/main.go
Normal file
|
@ -0,0 +1,50 @@
|
|||
// Copyright 2022 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
|
||||
func formatValueType(vt parser.ValueType) string {
|
||||
return "valueType." + string(vt)
|
||||
}
|
||||
|
||||
func formatValueTypes(vts []parser.ValueType) string {
|
||||
fmtVts := make([]string, 0, len(vts))
|
||||
for _, vt := range vts {
|
||||
fmtVts = append(fmtVts, formatValueType(vt))
|
||||
}
|
||||
return strings.Join(fmtVts, ", ")
|
||||
}
|
||||
|
||||
// main prints a generated TypeScript module to stdout that maps every PromQL
// function name to its signature (argument types, variadic flag, return type),
// derived from the Prometheus parser's function table.
func main() {
	// Collect and sort the function names for deterministic output.
	fnNames := make([]string, 0, len(parser.Functions))
	for name := range parser.Functions {
		fnNames = append(fnNames, name)
	}
	sort.Strings(fnNames)
	fmt.Println(`import { valueType, Func } from './ast';

export const functionSignatures: Record<string, Func> = {`)
	for _, fnName := range fnNames {
		fn := parser.Functions[fnName]
		fmt.Printf(" %s: { name: '%s', argTypes: [%s], variadic: %d, returnType: %s },\n", fn.Name, fn.Name, formatValueTypes(fn.ArgTypes), fn.Variadic, formatValueType(fn.ReturnType))
	}
	fmt.Println("}")
}
|
322
web/ui/mantine-ui/src/promql/format.tsx
Normal file
322
web/ui/mantine-ui/src/promql/format.tsx
Normal file
|
@ -0,0 +1,322 @@
|
|||
import React, { ReactElement, ReactNode } from "react";
|
||||
import ASTNode, {
|
||||
VectorSelector,
|
||||
matchType,
|
||||
vectorMatchCardinality,
|
||||
nodeType,
|
||||
StartOrEnd,
|
||||
MatrixSelector,
|
||||
} from "./ast";
|
||||
import { formatPrometheusDuration } from "../lib/formatTime";
|
||||
import { maybeParenthesizeBinopChild, escapeString } from "./utils";
|
||||
|
||||
export const labelNameList = (labels: string[]): React.ReactNode[] => {
|
||||
return labels.map((l, i) => {
|
||||
return (
|
||||
<span key={i}>
|
||||
{i !== 0 && ", "}
|
||||
<span className="promql-code promql-label-name">{l}</span>
|
||||
</span>
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
// Render the "@ <timestamp>" / "@ start()" / "@ end()" modifier (if any)
// followed by the "offset <duration>" modifier (if non-zero) of a selector
// or subquery. A timestamp takes precedence over startOrEnd; a negative
// offset is rendered with an explicit leading minus sign.
const formatAtAndOffset = (
  timestamp: number | null,
  startOrEnd: StartOrEnd,
  offset: number
): ReactNode => (
  <>
    {timestamp !== null ? (
      <>
        {" "}
        <span className="promql-operator">@</span>{" "}
        <span className="promql-number">{(timestamp / 1000).toFixed(3)}</span>
      </>
    ) : startOrEnd !== null ? (
      <>
        {" "}
        <span className="promql-operator">@</span>{" "}
        <span className="promql-keyword">{startOrEnd}</span>
        <span className="promql-paren">(</span>
        <span className="promql-paren">)</span>
      </>
    ) : (
      <></>
    )}
    {offset === 0 ? (
      <></>
    ) : offset > 0 ? (
      <>
        {" "}
        <span className="promql-keyword">offset</span>{" "}
        <span className="promql-duration">
          {formatPrometheusDuration(offset)}
        </span>
      </>
    ) : (
      <>
        {" "}
        <span className="promql-keyword">offset</span>{" "}
        <span className="promql-duration">
          -{formatPrometheusDuration(-offset)}
        </span>
      </>
    )}
  </>
);
|
||||
|
||||
// Render a vector or matrix selector: metric name, label matchers, the
// [range] for matrix selectors, and any @/offset modifiers.
const formatSelector = (
  node: VectorSelector | MatrixSelector
): ReactElement => {
  // Hide a "__name__" equality matcher that just repeats the metric name.
  const matchLabels = node.matchers
    .filter(
      (m) =>
        !(
          m.name === "__name__" &&
          m.type === matchType.equal &&
          m.value === node.name
        )
    )
    .map((m, i) => (
      <span key={i}>
        {i !== 0 && ","}
        <span className="promql-label-name">{m.name}</span>
        {m.type}
        <span className="promql-string">"{escapeString(m.value)}"</span>
      </span>
    ));

  return (
    <>
      <span className="promql-metric-name">{node.name}</span>
      {matchLabels.length > 0 && (
        <>
          {"{"}
          <span className="promql-metric-name">{matchLabels}</span>
          {"}"}
        </>
      )}
      {node.type === nodeType.matrixSelector && (
        <>
          [
          <span className="promql-duration">
            {formatPrometheusDuration(node.range)}
          </span>
          ]
        </>
      )}
      {formatAtAndOffset(node.timestamp, node.startOrEnd, node.offset)}
    </>
  );
};
|
||||
|
||||
// Shared placeholder element shown where rendering is truncated.
const ellipsis = <span className="promql-ellipsis">…</span>;

// Render a single AST node (and, if showChildren is set, its children) as
// syntax-highlighted JSX. maxDepth, if given, limits recursion: at depth 0
// an ellipsis is rendered instead of the node.
const formatNodeInternal = (
  node: ASTNode,
  showChildren: boolean,
  maxDepth?: number
): React.ReactNode => {
  if (maxDepth === 0) {
    return ellipsis;
  }

  // undefined means "no depth limit"; otherwise children get one level less.
  const childMaxDepth = maxDepth === undefined ? undefined : maxDepth - 1;

  switch (node.type) {
    case nodeType.aggregation:
      // e.g. sum by(job) (<expr>) or quantile (0.9, <expr>).
      return (
        <>
          <span className="promql-keyword">{node.op}</span>
          {node.without ? (
            <>
              {" "}
              <span className="promql-keyword">without</span>
              <span className="promql-paren">(</span>
              {labelNameList(node.grouping)}
              <span className="promql-paren">)</span>{" "}
            </>
          ) : (
            node.grouping.length > 0 && (
              <>
                {" "}
                <span className="promql-keyword">by</span>
                <span className="promql-paren">(</span>
                {labelNameList(node.grouping)}
                <span className="promql-paren">)</span>{" "}
              </>
            )
          )}
          {showChildren && (
            <>
              <span className="promql-paren">(</span>
              {node.param !== null && (
                <>{formatNode(node.param, showChildren, childMaxDepth)}, </>
              )}
              {formatNode(node.expr, showChildren, childMaxDepth)}
              <span className="promql-paren">)</span>
            </>
          )}
        </>
      );
    case nodeType.subquery:
      // e.g. <expr>[5m:1m] plus optional @/offset modifiers.
      return (
        <>
          {showChildren && formatNode(node.expr, showChildren, childMaxDepth)}[
          <span className="promql-duration">
            {formatPrometheusDuration(node.range)}
          </span>
          :
          {node.step !== 0 && (
            <span className="promql-duration">
              {formatPrometheusDuration(node.step)}
            </span>
          )}
          ]{formatAtAndOffset(node.timestamp, node.startOrEnd, node.offset)}
        </>
      );
    case nodeType.parenExpr:
      return (
        <>
          <span className="promql-paren">(</span>
          {showChildren && formatNode(node.expr, showChildren, childMaxDepth)}
          <span className="promql-paren">)</span>
        </>
      );
    case nodeType.call: {
      // Function call: render arguments only while within the depth limit;
      // otherwise collapse a non-empty argument list to an ellipsis.
      const children =
        childMaxDepth === undefined || childMaxDepth > 0
          ? node.args.map((arg, i) => (
              <span key={i}>
                {i !== 0 && ", "}
                {formatNode(arg, showChildren)}
              </span>
            ))
          : node.args.length > 0
            ? ellipsis
            : "";

      return (
        <>
          <span className="promql-keyword">{node.func.name}</span>
          {showChildren && (
            <>
              <span className="promql-paren">(</span>
              {children}
              <span className="promql-paren">)</span>
            </>
          )}
        </>
      );
    }
    case nodeType.matrixSelector:
      return formatSelector(node);
    case nodeType.vectorSelector:
      return formatSelector(node);
    case nodeType.numberLiteral:
      return <span className="promql-number">{node.val}</span>;
    case nodeType.stringLiteral:
      return <span className="promql-string">"{escapeString(node.val)}"</span>;
    case nodeType.unaryExpr:
      return (
        <>
          <span className="promql-operator">{node.op}</span>
          {showChildren && formatNode(node.expr, showChildren, childMaxDepth)}
        </>
      );
    case nodeType.binaryExpr: {
      // Optional on(...)/ignoring(...) matching and group_left/group_right
      // grouping modifiers between the two operands.
      let matching = <></>;
      let grouping = <></>;
      const vm = node.matching;
      if (vm !== null && (vm.labels.length > 0 || vm.on)) {
        if (vm.on) {
          matching = (
            <>
              {" "}
              <span className="promql-keyword">on</span>
              <span className="promql-paren">(</span>
              {labelNameList(vm.labels)}
              <span className="promql-paren">)</span>
            </>
          );
        } else {
          matching = (
            <>
              {" "}
              <span className="promql-keyword">ignoring</span>
              <span className="promql-paren">(</span>
              {labelNameList(vm.labels)}
              <span className="promql-paren">)</span>
            </>
          );
        }

        if (
          vm.card === vectorMatchCardinality.manyToOne ||
          vm.card === vectorMatchCardinality.oneToMany
        ) {
          grouping = (
            <>
              <span className="promql-keyword">
                {" "}
                group_
                {vm.card === vectorMatchCardinality.manyToOne
                  ? "left"
                  : "right"}
              </span>
              <span className="promql-paren">(</span>
              {labelNameList(vm.include)}
              <span className="promql-paren">)</span>
            </>
          );
        }
      }

      return (
        <>
          {showChildren &&
            formatNode(
              maybeParenthesizeBinopChild(node.op, node.lhs),
              showChildren,
              childMaxDepth
            )}{" "}
          {["atan2", "and", "or", "unless"].includes(node.op) ? (
            <span className="promql-keyword">{node.op}</span>
          ) : (
            <span className="promql-operator">{node.op}</span>
          )}
          {node.bool && (
            <>
              {" "}
              <span className="promql-keyword">bool</span>
            </>
          )}
          {matching}
          {grouping}{" "}
          {showChildren &&
            formatNode(
              maybeParenthesizeBinopChild(node.op, node.rhs),
              showChildren,
              childMaxDepth
            )}
        </>
      );
    }
    case nodeType.placeholder:
      // TODO: Include possible children of placeholders somehow?
      return ellipsis;
    default:
      throw new Error("unsupported node type");
  }
};
|
||||
|
||||
// Public entry point: render a PromQL AST node as syntax-highlighted JSX.
// showChildren controls whether child expressions are rendered; maxDepth
// (if given) truncates deeper levels with an ellipsis.
export const formatNode = (
  node: ASTNode,
  showChildren: boolean,
  maxDepth?: number
): React.ReactElement => (
  <span className="promql-code">
    {formatNodeInternal(node, showChildren, maxDepth)}
  </span>
);
|
2736
web/ui/mantine-ui/src/promql/functionDocs.tsx
Normal file
2736
web/ui/mantine-ui/src/promql/functionDocs.tsx
Normal file
File diff suppressed because it is too large
Load diff
103
web/ui/mantine-ui/src/promql/functionMeta.ts
Normal file
103
web/ui/mantine-ui/src/promql/functionMeta.ts
Normal file
|
@ -0,0 +1,103 @@
|
|||
// Human-readable argument names for PromQL functions, used for display
// purposes (e.g. argument hints). Functions whose entries are commented out
// currently have no custom argument names.
export const functionArgNames: Record<string, string[]> = {
  // abs: ['value'],
  // absent: [],
  // absent_over_time: [],
  // avg_over_time: [],
  // ceil: [],
  // changes: [],
  clamp: ['input series', 'min', 'max'],
  // clamp_max: [],
  // clamp_min: [],
  // count_over_time: [],
  day_of_month: ['timestamp (default = vector(time()))'],
  day_of_week: ['timestamp (default = vector(time()))'],
  days_in_month: ['timestamp (default = vector(time()))'],
  // delta: [],
  // deriv: [],
  // exp: [],
  // floor: [],
  histogram_quantile: ['target quantile', 'histogram'],
  holt_winters: ['input series', 'smoothing factor', 'trend factor'],
  hour: ['timestamp (default = vector(time()))'],
  // idelta: [],
  // increase: [],
  // irate: [],
  label_join: ['series', 'destination label', 'separator', 'source label'],
  label_replace: ['input series', 'destination label', 'replacement', 'source label', 'regex'],
  // ln: [],
  // log10: [],
  // log2: [],
  // max_over_time: [],
  // min_over_time: [],
  minute: ['timestamp (default = vector(time()))'],
  month: ['timestamp (default = vector(time()))'],
  predict_linear: ['input series', 'duration from now [s]'],
  quantile_over_time: ['target quantile', 'input series'],
  // rate: [],
  // resets: [],
  round: ['input series', 'to nearest (default = 1)'],
  // scalar: [],
  // sort: [],
  // sort_desc: [],
  // sqrt: [],
  // stddev_over_time: [],
  // stdvar_over_time: [],
  // sum_over_time: [],
  // time: [],
  // timestamp: [],
  // vector: [],
  year: ['timestamp (default = vector(time()))'],
};
|
||||
|
||||
export const functionDescriptions: Record<string, string> = {
|
||||
abs: 'return absolute values of input series',
|
||||
absent: 'determine whether input vector is empty',
|
||||
absent_over_time: 'determine whether input range vector is empty',
|
||||
avg_over_time: 'average series values over time',
|
||||
ceil: 'round up values of input series to nearest integer',
|
||||
changes: 'return number of value changes in input series over time',
|
||||
clamp: 'limit the value of input series to a certain range',
|
||||
clamp_max: 'limit the value of input series to a maximum',
|
||||
clamp_min: 'limit the value of input series to a minimum',
|
||||
count_over_time: 'count the number of values for each input series',
|
||||
day_of_month: 'return the day of the month for provided timestamps',
|
||||
day_of_week: 'return the day of the week for provided timestamps',
|
||||
days_in_month: 'return the number of days in current month for provided timestamps',
|
||||
delta: 'calculate the difference between beginning and end of a range vector (for gauges)',
|
||||
deriv: 'calculate the per-second derivative over series in a range vector (for gauges)',
|
||||
exp: 'calculate exponential function for input vector values',
|
||||
floor: 'round down values of input series to nearest integer',
|
||||
histogram_quantile: 'calculate quantiles from histogram buckets',
|
||||
holt_winters: 'calculate smoothed value of input series',
|
||||
hour: 'return the hour of the day for provided timestamps',
|
||||
idelta: 'calculate the difference between the last two samples of a range vector (for counters)',
|
||||
increase: 'calculate the increase in value over a range of time (for counters)',
|
||||
irate: 'calculate the per-second increase over the last two samples of a range vector (for counters)',
|
||||
label_join: 'join together label values into new label',
|
||||
label_replace: 'set or replace label values',
|
||||
last_over_time: 'get the last sample value from a time range',
|
||||
ln: 'calculate natural logarithm of input series',
|
||||
log10: 'calulcate base-10 logarithm of input series',
|
||||
log2: 'calculate base-2 logarithm of input series',
|
||||
max_over_time: 'return the maximum value over time for input series',
|
||||
min_over_time: 'return the minimum value over time for input series',
|
||||
minute: 'return the minute of the hour for provided timestamps',
|
||||
month: 'return the month for provided timestamps',
|
||||
predict_linear: 'predict the value of a gauge into the future',
|
||||
quantile_over_time: 'calculate value quantiles over time for input series',
|
||||
rate: 'calculate per-second increase over a range vector (for counters)',
|
||||
resets: 'return number of value decreases (resets) in input series of time',
|
||||
round: 'round values of input series to nearest integer',
|
||||
scalar: 'convert single-element series vector into scalar value',
|
||||
sgn: 'return the sign of the input value (-1, 0, or 1)',
|
||||
sort: 'sort input series ascendingly by value',
|
||||
sort_desc: 'sort input series descendingly by value',
|
||||
sqrt: 'return the square root for input series',
|
||||
stddev_over_time: 'calculate the standard deviation within input series over time',
|
||||
stdvar_over_time: 'calculate the standard variation within input series over time',
|
||||
sum_over_time: 'calculate the sum over the values of input series over time',
|
||||
time: 'return the Unix timestamp at the current evaluation time',
|
||||
timestamp: 'return the Unix timestamp for the samples in the input vector',
|
||||
vector: 'convert a scalar value into a single-element series vector',
|
||||
year: 'return the year for provided timestamps',
|
||||
};
|
140
web/ui/mantine-ui/src/promql/functionSignatures.ts
Normal file
140
web/ui/mantine-ui/src/promql/functionSignatures.ts
Normal file
|
@ -0,0 +1,140 @@
|
|||
import { valueType, Func } from './ast';
|
||||
|
||||
// Type signatures for all supported PromQL functions, keyed by function name.
//
// `variadic` convention as used throughout this table:
//    0 -> fixed arity: exactly argTypes.length arguments.
//    1 -> the final argument is optional (e.g. the timestamp vector of `hour`).
//   -1 -> the final argument type may be repeated any number of times
//         (e.g. the source labels of `label_join` / `sort_by_label`).
// NOTE(review): variadic semantics inferred from usage in this table; confirm
// against the Prometheus function definitions.
export const functionSignatures: Record<string, Func> = {
  abs: { name: 'abs', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  absent: { name: 'absent', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  absent_over_time: { name: 'absent_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  acos: { name: 'acos', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  acosh: { name: 'acosh', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  asin: { name: 'asin', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  asinh: { name: 'asinh', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  atan: { name: 'atan', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  atanh: { name: 'atanh', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  avg_over_time: { name: 'avg_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  ceil: { name: 'ceil', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  changes: { name: 'changes', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  clamp: {
    name: 'clamp',
    argTypes: [valueType.vector, valueType.scalar, valueType.scalar],
    variadic: 0,
    returnType: valueType.vector,
  },
  clamp_max: {
    name: 'clamp_max',
    argTypes: [valueType.vector, valueType.scalar],
    variadic: 0,
    returnType: valueType.vector,
  },
  clamp_min: {
    name: 'clamp_min',
    argTypes: [valueType.vector, valueType.scalar],
    variadic: 0,
    returnType: valueType.vector,
  },
  cos: { name: 'cos', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  cosh: { name: 'cosh', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  count_over_time: { name: 'count_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  day_of_month: { name: 'day_of_month', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  day_of_week: { name: 'day_of_week', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  day_of_year: { name: 'day_of_year', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  days_in_month: { name: 'days_in_month', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  deg: { name: 'deg', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  delta: { name: 'delta', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  deriv: { name: 'deriv', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  exp: { name: 'exp', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  floor: { name: 'floor', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  histogram_avg: { name: 'histogram_avg', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  histogram_count: { name: 'histogram_count', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  histogram_fraction: {
    name: 'histogram_fraction',
    argTypes: [valueType.scalar, valueType.scalar, valueType.vector],
    variadic: 0,
    returnType: valueType.vector,
  },
  histogram_quantile: {
    name: 'histogram_quantile',
    argTypes: [valueType.scalar, valueType.vector],
    variadic: 0,
    returnType: valueType.vector,
  },
  histogram_stddev: { name: 'histogram_stddev', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  histogram_stdvar: { name: 'histogram_stdvar', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  histogram_sum: { name: 'histogram_sum', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  holt_winters: {
    name: 'holt_winters',
    argTypes: [valueType.matrix, valueType.scalar, valueType.scalar],
    variadic: 0,
    returnType: valueType.vector,
  },
  hour: { name: 'hour', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  idelta: { name: 'idelta', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  increase: { name: 'increase', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  irate: { name: 'irate', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  label_join: {
    name: 'label_join',
    argTypes: [valueType.vector, valueType.string, valueType.string, valueType.string],
    variadic: -1,
    returnType: valueType.vector,
  },
  label_replace: {
    name: 'label_replace',
    argTypes: [valueType.vector, valueType.string, valueType.string, valueType.string, valueType.string],
    variadic: 0,
    returnType: valueType.vector,
  },
  last_over_time: { name: 'last_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  ln: { name: 'ln', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  log10: { name: 'log10', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  log2: { name: 'log2', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  mad_over_time: { name: 'mad_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  max_over_time: { name: 'max_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  min_over_time: { name: 'min_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  minute: { name: 'minute', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  month: { name: 'month', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
  pi: { name: 'pi', argTypes: [], variadic: 0, returnType: valueType.scalar },
  predict_linear: {
    name: 'predict_linear',
    argTypes: [valueType.matrix, valueType.scalar],
    variadic: 0,
    returnType: valueType.vector,
  },
  present_over_time: { name: 'present_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  quantile_over_time: {
    name: 'quantile_over_time',
    argTypes: [valueType.scalar, valueType.matrix],
    variadic: 0,
    returnType: valueType.vector,
  },
  rad: { name: 'rad', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  rate: { name: 'rate', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  resets: { name: 'resets', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  round: { name: 'round', argTypes: [valueType.vector, valueType.scalar], variadic: 1, returnType: valueType.vector },
  scalar: { name: 'scalar', argTypes: [valueType.vector], variadic: 0, returnType: valueType.scalar },
  sgn: { name: 'sgn', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  sin: { name: 'sin', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  sinh: { name: 'sinh', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  sort: { name: 'sort', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  sort_by_label: {
    name: 'sort_by_label',
    argTypes: [valueType.vector, valueType.string],
    variadic: -1,
    returnType: valueType.vector,
  },
  sort_by_label_desc: {
    name: 'sort_by_label_desc',
    argTypes: [valueType.vector, valueType.string],
    variadic: -1,
    returnType: valueType.vector,
  },
  sort_desc: { name: 'sort_desc', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  sqrt: { name: 'sqrt', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  stddev_over_time: { name: 'stddev_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  stdvar_over_time: { name: 'stdvar_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  sum_over_time: { name: 'sum_over_time', argTypes: [valueType.matrix], variadic: 0, returnType: valueType.vector },
  tan: { name: 'tan', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  tanh: { name: 'tanh', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  time: { name: 'time', argTypes: [], variadic: 0, returnType: valueType.scalar },
  timestamp: { name: 'timestamp', argTypes: [valueType.vector], variadic: 0, returnType: valueType.vector },
  vector: { name: 'vector', argTypes: [valueType.scalar], variadic: 0, returnType: valueType.vector },
  year: { name: 'year', argTypes: [valueType.vector], variadic: 1, returnType: valueType.vector },
};
|
160
web/ui/mantine-ui/src/promql/serialize.ts
Normal file
160
web/ui/mantine-ui/src/promql/serialize.ts
Normal file
|
@ -0,0 +1,160 @@
|
|||
import { formatPrometheusDuration } from "../lib/formatTime";
|
||||
import ASTNode, {
|
||||
VectorSelector,
|
||||
matchType,
|
||||
vectorMatchCardinality,
|
||||
nodeType,
|
||||
StartOrEnd,
|
||||
MatrixSelector,
|
||||
} from "./ast";
|
||||
import {
|
||||
aggregatorsWithParam,
|
||||
maybeParenthesizeBinopChild,
|
||||
escapeString,
|
||||
} from "./utils";
|
||||
|
||||
const serializeAtAndOffset = (
|
||||
timestamp: number | null,
|
||||
startOrEnd: StartOrEnd,
|
||||
offset: number
|
||||
): string =>
|
||||
`${timestamp !== null ? ` @ ${(timestamp / 1000).toFixed(3)}` : startOrEnd !== null ? ` @ ${startOrEnd}()` : ""}${
|
||||
offset === 0
|
||||
? ""
|
||||
: offset > 0
|
||||
? ` offset ${formatPrometheusDuration(offset)}`
|
||||
: ` offset -${formatPrometheusDuration(-offset)}`
|
||||
}`;
|
||||
|
||||
const serializeSelector = (node: VectorSelector | MatrixSelector): string => {
|
||||
const matchers = node.matchers
|
||||
.filter(
|
||||
(m) =>
|
||||
!(
|
||||
m.name === "__name__" &&
|
||||
m.type === matchType.equal &&
|
||||
m.value === node.name
|
||||
)
|
||||
)
|
||||
.map((m) => `${m.name}${m.type}"${escapeString(m.value)}"`);
|
||||
|
||||
const range =
|
||||
node.type === nodeType.matrixSelector
|
||||
? `[${formatPrometheusDuration(node.range)}]`
|
||||
: "";
|
||||
const atAndOffset = serializeAtAndOffset(
|
||||
node.timestamp,
|
||||
node.startOrEnd,
|
||||
node.offset
|
||||
);
|
||||
|
||||
return `${node.name}${matchers.length > 0 ? `{${matchers.join(",")}}` : ""}${range}${atAndOffset}`;
|
||||
};
|
||||
|
||||
// Serialize an AST node back into PromQL expression text.
//
// - indent: current indentation depth in spaces (pretty mode only).
// - pretty: when true, emit a multi-line, indented representation; when false,
//   emit a compact single-line expression.
// - initialIndent: whether to prefix this node's own output with indentation.
//   Disabled for the operand of a unary operator so "-expr" stays glued
//   together instead of breaking across the indent.
const serializeNode = (
  node: ASTNode,
  indent = 0,
  pretty = false,
  initialIndent = true
): string => {
  // In compact mode these separators collapse to ""/" " so the same template
  // strings below produce both layouts.
  const childListSeparator = pretty ? "\n" : "";
  const childSeparator = pretty ? "\n" : " ";
  const childIndent = indent + 2;
  const ind = pretty ? " ".repeat(indent) : "";
  // Needed for unary operators.
  const initialInd = initialIndent ? ind : "";

  switch (node.type) {
    case nodeType.aggregation:
      // "without()" is emitted even with an empty grouping list (it is
      // semantically meaningful), while "by()" is only emitted when labels
      // are present.
      return `${initialInd}${node.op}${
        node.without
          ? ` without(${node.grouping.join(", ")}) `
          : node.grouping.length > 0
            ? ` by(${node.grouping.join(", ")}) `
            : ""
      }(${childListSeparator}${
        aggregatorsWithParam.includes(node.op) && node.param !== null
          ? `${serializeNode(node.param, childIndent, pretty)},${childSeparator}`
          : ""
      }${serializeNode(node.expr, childIndent, pretty)}${childListSeparator}${ind})`;

    case nodeType.subquery:
      // A zero step serializes as "[range:]" (default resolution).
      return `${initialInd}${serializeNode(node.expr, indent, pretty)}[${formatPrometheusDuration(node.range)}:${
        node.step !== 0 ? formatPrometheusDuration(node.step) : ""
      }]${serializeAtAndOffset(node.timestamp, node.startOrEnd, node.offset)}`;

    case nodeType.parenExpr:
      return `${initialInd}(${childListSeparator}${serializeNode(
        node.expr,
        childIndent,
        pretty
      )}${childListSeparator}${ind})`;

    case nodeType.call: {
      // Zero-argument calls (e.g. time()) stay on one line even in pretty
      // mode, hence the conditional separator/indent.
      const sep = node.args.length > 0 ? childListSeparator : "";

      return `${initialInd}${node.func.name}(${sep}${node.args
        .map((arg) => serializeNode(arg, childIndent, pretty))
        .join("," + childSeparator)}${sep}${node.args.length > 0 ? ind : ""})`;
    }

    case nodeType.matrixSelector:
      return `${initialInd}${serializeSelector(node)}`;

    case nodeType.vectorSelector:
      return `${initialInd}${serializeSelector(node)}`;

    case nodeType.numberLiteral:
      return `${initialInd}${node.val}`;

    case nodeType.stringLiteral:
      return `${initialInd}"${escapeString(node.val)}"`;

    case nodeType.unaryExpr:
      // Pass initialIndent=false so the operand is not indented away from
      // its unary operator.
      return `${initialInd}${node.op}${serializeNode(node.expr, indent, pretty, false)}`;

    case nodeType.binaryExpr: {
      let matching = "";
      let grouping = "";
      const vm = node.matching;
      // "on()" with no labels is meaningful (match on nothing), while
      // "ignoring()" with no labels is a no-op and therefore omitted.
      if (vm !== null && (vm.labels.length > 0 || vm.on)) {
        if (vm.on) {
          matching = ` on(${vm.labels.join(", ")})`;
        } else {
          matching = ` ignoring(${vm.labels.join(", ")})`;
        }

        if (
          vm.card === vectorMatchCardinality.manyToOne ||
          vm.card === vectorMatchCardinality.oneToMany
        ) {
          grouping = ` group_${vm.card === vectorMatchCardinality.manyToOne ? "left" : "right"}(${vm.include.join(",")})`;
        }
      }

      // Operands are parenthesized only when needed for precedence.
      return `${serializeNode(maybeParenthesizeBinopChild(node.op, node.lhs), childIndent, pretty)}${childSeparator}${ind}${
        node.op
      }${node.bool ? " bool" : ""}${matching}${grouping}${childSeparator}${serializeNode(
        maybeParenthesizeBinopChild(node.op, node.rhs),
        childIndent,
        pretty
      )}`;
    }

    case nodeType.placeholder:
      // TODO: Should we just throw an error when trying to serialize an AST containing a placeholder node?
      // (that would currently break editing-as-text of ASTs that contain placeholders)
      return `${initialInd}…${
        node.children.length > 0
          ? `(${childListSeparator}${node.children
              .map((child) => serializeNode(child, childIndent, pretty))
              .join("," + childSeparator)}${childListSeparator}${ind})`
          : ""
      }`;

    default:
      throw new Error("unsupported node type");
  }
};
|
||||
|
||||
export default serializeNode;
|
657
web/ui/mantine-ui/src/promql/serializeAndFormat.test.ts
Normal file
657
web/ui/mantine-ui/src/promql/serializeAndFormat.test.ts
Normal file
|
@ -0,0 +1,657 @@
|
|||
import { describe, expect, it } from "vitest";
|
||||
import serializeNode from "./serialize";
|
||||
import ASTNode, {
|
||||
nodeType,
|
||||
matchType,
|
||||
aggregationType,
|
||||
unaryOperatorType,
|
||||
binaryOperatorType,
|
||||
vectorMatchCardinality,
|
||||
} from "./ast";
|
||||
import { functionSignatures } from "./functionSignatures";
|
||||
import { formatNode } from "./format";
|
||||
import { render } from "@testing-library/react";
|
||||
|
||||
describe("serializeNode and formatNode", () => {
|
||||
it("should serialize correctly", () => {
|
||||
const tests: { node: ASTNode; output: string; prettyOutput?: string }[] = [
|
||||
// Vector selectors.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "metric_name",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [
|
||||
{ type: matchType.equal, name: "label1", value: "value1" },
|
||||
{ type: matchType.notEqual, name: "label2", value: "value2" },
|
||||
{ type: matchType.matchRegexp, name: "label3", value: "value3" },
|
||||
{ type: matchType.matchNotRegexp, name: "label4", value: "value4" },
|
||||
],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output:
|
||||
'metric_name{label1="value1",label2!="value2",label3=~"value3",label4!~"value4"}',
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: 60000,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "metric_name offset 1m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: -60000,
|
||||
timestamp: null,
|
||||
startOrEnd: "start",
|
||||
},
|
||||
output: "metric_name @ start() offset -1m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: -60000,
|
||||
timestamp: null,
|
||||
startOrEnd: "end",
|
||||
},
|
||||
output: "metric_name @ end() offset -1m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: -60000,
|
||||
timestamp: 123000,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "metric_name @ 123.000 offset -1m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "",
|
||||
matchers: [
|
||||
{ type: matchType.equal, name: "__name__", value: "metric_name" },
|
||||
],
|
||||
offset: 60000,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: '{__name__="metric_name"} offset 1m',
|
||||
},
|
||||
{
|
||||
// Escaping in label values.
|
||||
node: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [{ type: matchType.equal, name: "label1", value: '"""' }],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: 'metric_name{label1="\\"\\"\\""}',
|
||||
},
|
||||
|
||||
// Matrix selectors.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.matrixSelector,
|
||||
name: "metric_name",
|
||||
matchers: [
|
||||
{ type: matchType.equal, name: "label1", value: "value1" },
|
||||
{ type: matchType.notEqual, name: "label2", value: "value2" },
|
||||
{ type: matchType.matchRegexp, name: "label3", value: "value3" },
|
||||
{ type: matchType.matchNotRegexp, name: "label4", value: "value4" },
|
||||
],
|
||||
range: 300000,
|
||||
offset: 600000,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output:
|
||||
'metric_name{label1="value1",label2!="value2",label3=~"value3",label4!~"value4"}[5m] offset 10m',
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.matrixSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
range: 300000,
|
||||
offset: -600000,
|
||||
timestamp: 123000,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "metric_name[5m] @ 123.000 offset -10m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.matrixSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
range: 300000,
|
||||
offset: -600000,
|
||||
timestamp: null,
|
||||
startOrEnd: "start",
|
||||
},
|
||||
output: "metric_name[5m] @ start() offset -10m",
|
||||
},
|
||||
|
||||
// Aggregations.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.aggregation,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: aggregationType.sum,
|
||||
param: null,
|
||||
grouping: [],
|
||||
without: false,
|
||||
},
|
||||
output: "sum(…)",
|
||||
prettyOutput: `sum(
|
||||
…
|
||||
)`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.aggregation,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: aggregationType.topk,
|
||||
param: { type: nodeType.numberLiteral, val: "3" },
|
||||
grouping: [],
|
||||
without: false,
|
||||
},
|
||||
output: "topk(3, …)",
|
||||
prettyOutput: `topk(
|
||||
3,
|
||||
…
|
||||
)`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.aggregation,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: aggregationType.sum,
|
||||
param: null,
|
||||
grouping: [],
|
||||
without: true,
|
||||
},
|
||||
output: "sum without() (…)",
|
||||
prettyOutput: `sum without() (
|
||||
…
|
||||
)`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.aggregation,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: aggregationType.sum,
|
||||
param: null,
|
||||
grouping: ["label1", "label2"],
|
||||
without: false,
|
||||
},
|
||||
output: "sum by(label1, label2) (…)",
|
||||
prettyOutput: `sum by(label1, label2) (
|
||||
…
|
||||
)`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.aggregation,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: aggregationType.sum,
|
||||
param: null,
|
||||
grouping: ["label1", "label2"],
|
||||
without: true,
|
||||
},
|
||||
output: "sum without(label1, label2) (…)",
|
||||
prettyOutput: `sum without(label1, label2) (
|
||||
…
|
||||
)`,
|
||||
},
|
||||
|
||||
// Subqueries.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.subquery,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
range: 300000,
|
||||
offset: 0,
|
||||
step: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "…[5m:]",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.subquery,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
range: 300000,
|
||||
offset: 600000,
|
||||
step: 60000,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "…[5m:1m] offset 10m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.subquery,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
range: 300000,
|
||||
offset: -600000,
|
||||
step: 60000,
|
||||
timestamp: 123000,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "…[5m:1m] @ 123.000 offset -10m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.subquery,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
range: 300000,
|
||||
offset: -600000,
|
||||
step: 60000,
|
||||
timestamp: null,
|
||||
startOrEnd: "end",
|
||||
},
|
||||
output: "…[5m:1m] @ end() offset -10m",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.subquery,
|
||||
expr: {
|
||||
type: nodeType.call,
|
||||
func: functionSignatures["rate"],
|
||||
args: [
|
||||
{
|
||||
type: nodeType.matrixSelector,
|
||||
range: 600000,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
range: 300000,
|
||||
offset: 0,
|
||||
step: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
output: "rate(metric_name[10m])[5m:]",
|
||||
prettyOutput: `rate(
|
||||
metric_name[10m]
|
||||
)[5m:]`,
|
||||
},
|
||||
|
||||
// Parentheses.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.parenExpr,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
},
|
||||
output: "(…)",
|
||||
prettyOutput: `(
|
||||
…
|
||||
)`,
|
||||
},
|
||||
|
||||
// Call.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.call,
|
||||
func: functionSignatures["time"],
|
||||
args: [],
|
||||
},
|
||||
output: "time()",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.call,
|
||||
func: functionSignatures["rate"],
|
||||
args: [{ type: nodeType.placeholder, children: [] }],
|
||||
},
|
||||
output: "rate(…)",
|
||||
prettyOutput: `rate(
|
||||
…
|
||||
)`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.call,
|
||||
func: functionSignatures["label_join"],
|
||||
args: [
|
||||
{ type: nodeType.placeholder, children: [] },
|
||||
{ type: nodeType.stringLiteral, val: "foo" },
|
||||
{ type: nodeType.stringLiteral, val: "bar" },
|
||||
{ type: nodeType.stringLiteral, val: "baz" },
|
||||
],
|
||||
},
|
||||
output: 'label_join(…, "foo", "bar", "baz")',
|
||||
prettyOutput: `label_join(
|
||||
…,
|
||||
"foo",
|
||||
"bar",
|
||||
"baz"
|
||||
)`,
|
||||
},
|
||||
|
||||
// Number literals.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.numberLiteral,
|
||||
val: "1.2345",
|
||||
},
|
||||
output: "1.2345",
|
||||
},
|
||||
|
||||
// String literals.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.stringLiteral,
|
||||
val: 'hello, " world',
|
||||
},
|
||||
output: '"hello, \\" world"',
|
||||
},
|
||||
|
||||
// Unary expressions.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.unaryExpr,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: unaryOperatorType.minus,
|
||||
},
|
||||
output: "-…",
|
||||
prettyOutput: "-…",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.unaryExpr,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
op: unaryOperatorType.plus,
|
||||
},
|
||||
output: "+…",
|
||||
prettyOutput: "+…",
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.unaryExpr,
|
||||
expr: {
|
||||
type: nodeType.parenExpr,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
},
|
||||
op: unaryOperatorType.minus,
|
||||
},
|
||||
output: "-(…)",
|
||||
prettyOutput: `-(
|
||||
…
|
||||
)`,
|
||||
},
|
||||
{
|
||||
// Nested indentation.
|
||||
node: {
|
||||
type: nodeType.unaryExpr,
|
||||
expr: {
|
||||
type: nodeType.aggregation,
|
||||
op: aggregationType.sum,
|
||||
expr: {
|
||||
type: nodeType.unaryExpr,
|
||||
expr: {
|
||||
type: nodeType.parenExpr,
|
||||
expr: { type: nodeType.placeholder, children: [] },
|
||||
},
|
||||
op: unaryOperatorType.minus,
|
||||
},
|
||||
grouping: [],
|
||||
param: null,
|
||||
without: false,
|
||||
},
|
||||
op: unaryOperatorType.minus,
|
||||
},
|
||||
output: "-sum(-(…))",
|
||||
prettyOutput: `-sum(
|
||||
-(
|
||||
…
|
||||
)
|
||||
)`,
|
||||
},
|
||||
|
||||
// Binary expressions.
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: null,
|
||||
bool: false,
|
||||
},
|
||||
output: "… + …",
|
||||
prettyOutput: ` …
|
||||
+
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToOne,
|
||||
labels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + …",
|
||||
prettyOutput: ` …
|
||||
+
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToOne,
|
||||
labels: [],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + on() …",
|
||||
prettyOutput: ` …
|
||||
+ on()
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToOne,
|
||||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + on(label1, label2) …",
|
||||
prettyOutput: ` …
|
||||
+ on(label1, label2)
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToOne,
|
||||
labels: ["label1", "label2"],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + ignoring(label1, label2) …",
|
||||
prettyOutput: ` …
|
||||
+ ignoring(label1, label2)
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToMany,
|
||||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + on(label1, label2) group_right() …",
|
||||
prettyOutput: ` …
|
||||
+ on(label1, label2) group_right()
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToMany,
|
||||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: ["label3"],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + on(label1, label2) group_right(label3) …",
|
||||
prettyOutput: ` …
|
||||
+ on(label1, label2) group_right(label3)
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.manyToOne,
|
||||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + on(label1, label2) group_left() …",
|
||||
prettyOutput: ` …
|
||||
+ on(label1, label2) group_left()
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.manyToOne,
|
||||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: ["label3"],
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
output: "… + on(label1, label2) group_left(label3) …",
|
||||
prettyOutput: ` …
|
||||
+ on(label1, label2) group_left(label3)
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.eql,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: null,
|
||||
bool: true,
|
||||
},
|
||||
output: "… == bool …",
|
||||
prettyOutput: ` …
|
||||
== bool
|
||||
…`,
|
||||
},
|
||||
{
|
||||
node: {
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.eql,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToMany,
|
||||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: ["label3"],
|
||||
},
|
||||
bool: true,
|
||||
},
|
||||
output: "… == bool on(label1, label2) group_right(label3) …",
|
||||
prettyOutput: ` …
|
||||
== bool on(label1, label2) group_right(label3)
|
||||
…`,
|
||||
},
|
||||
];
|
||||
|
||||
tests.forEach((t) => {
|
||||
expect(serializeNode(t.node)).toBe(t.output);
|
||||
expect(serializeNode(t.node, 0, true)).toBe(
|
||||
t.prettyOutput !== undefined ? t.prettyOutput : t.output
|
||||
);
|
||||
|
||||
const { container } = render(formatNode(t.node, true));
|
||||
expect(container.textContent).toBe(t.output);
|
||||
});
|
||||
});
|
||||
});
|
155
web/ui/mantine-ui/src/promql/utils.test.ts
Normal file
155
web/ui/mantine-ui/src/promql/utils.test.ts
Normal file
|
@ -0,0 +1,155 @@
|
|||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
getNonParenNodeType,
|
||||
containsPlaceholders,
|
||||
nodeValueType,
|
||||
} from "./utils";
|
||||
import { nodeType, valueType, binaryOperatorType } from "./ast";
|
||||
|
||||
describe("getNonParenNodeType", () => {
|
||||
it("works for non-paren type", () => {
|
||||
expect(
|
||||
getNonParenNodeType({ type: nodeType.numberLiteral, val: "1" })
|
||||
).toBe(nodeType.numberLiteral);
|
||||
});
|
||||
|
||||
it("works for single parentheses wrapper", () => {
|
||||
expect(
|
||||
getNonParenNodeType({
|
||||
type: nodeType.parenExpr,
|
||||
expr: {
|
||||
type: nodeType.numberLiteral,
|
||||
val: "1",
|
||||
},
|
||||
})
|
||||
).toBe(nodeType.numberLiteral);
|
||||
});
|
||||
|
||||
it("works for multiple parentheses wrappers", () => {
|
||||
expect(
|
||||
getNonParenNodeType({
|
||||
type: nodeType.parenExpr,
|
||||
expr: {
|
||||
type: nodeType.parenExpr,
|
||||
expr: {
|
||||
type: nodeType.parenExpr,
|
||||
expr: {
|
||||
type: nodeType.numberLiteral,
|
||||
val: "1",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
).toBe(nodeType.numberLiteral);
|
||||
});
|
||||
});
|
||||
|
||||
describe("containsPlaceholders", () => {
|
||||
it("does not find placeholders in complete expressions", () => {
|
||||
expect(
|
||||
containsPlaceholders({
|
||||
type: nodeType.parenExpr,
|
||||
expr: {
|
||||
type: nodeType.numberLiteral,
|
||||
val: "1",
|
||||
},
|
||||
})
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("finds placeholders at the root", () => {
|
||||
expect(
|
||||
containsPlaceholders({
|
||||
type: nodeType.placeholder,
|
||||
children: [],
|
||||
})
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it("finds placeholders in nested expressions with placeholders", () => {
|
||||
expect(
|
||||
containsPlaceholders({
|
||||
type: nodeType.parenExpr,
|
||||
expr: {
|
||||
type: nodeType.placeholder,
|
||||
children: [],
|
||||
},
|
||||
})
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("nodeValueType", () => {
|
||||
it("works for binary expressions with placeholders", () => {
|
||||
expect(
|
||||
nodeValueType({
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.placeholder, children: [] },
|
||||
rhs: { type: nodeType.placeholder, children: [] },
|
||||
matching: null,
|
||||
bool: false,
|
||||
})
|
||||
).toBeNull();
|
||||
});
|
||||
|
||||
it("works for scalar-scalar binops", () => {
|
||||
expect(
|
||||
nodeValueType({
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: { type: nodeType.numberLiteral, val: "1" },
|
||||
rhs: { type: nodeType.numberLiteral, val: "1" },
|
||||
matching: null,
|
||||
bool: false,
|
||||
})
|
||||
).toBe(valueType.scalar);
|
||||
});
|
||||
|
||||
it("works for scalar-vector binops", () => {
|
||||
expect(
|
||||
nodeValueType({
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
rhs: { type: nodeType.numberLiteral, val: "1" },
|
||||
matching: null,
|
||||
bool: false,
|
||||
})
|
||||
).toBe(valueType.vector);
|
||||
});
|
||||
|
||||
it("works for vector-vector binops", () => {
|
||||
expect(
|
||||
nodeValueType({
|
||||
type: nodeType.binaryExpr,
|
||||
op: binaryOperatorType.add,
|
||||
lhs: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
rhs: {
|
||||
type: nodeType.vectorSelector,
|
||||
name: "metric_name",
|
||||
matchers: [],
|
||||
offset: 0,
|
||||
timestamp: null,
|
||||
startOrEnd: null,
|
||||
},
|
||||
matching: null,
|
||||
bool: false,
|
||||
})
|
||||
).toBe(valueType.vector);
|
||||
});
|
||||
});
|
241
web/ui/mantine-ui/src/promql/utils.ts
Normal file
241
web/ui/mantine-ui/src/promql/utils.ts
Normal file
|
@ -0,0 +1,241 @@
|
|||
import ASTNode, { binaryOperatorType, nodeType, valueType, Call, compOperatorTypes, setOperatorTypes } from './ast';
|
||||
import { functionArgNames } from './functionMeta';
|
||||
|
||||
export const getNonParenNodeType = (n: ASTNode) => {
|
||||
let cur: ASTNode;
|
||||
for (cur = n; cur.type === 'parenExpr'; cur = cur.expr) {}
|
||||
return cur.type;
|
||||
};
|
||||
|
||||
export const isComparisonOperator = (op: binaryOperatorType) => {
|
||||
return compOperatorTypes.includes(op);
|
||||
};
|
||||
|
||||
export const isSetOperator = (op: binaryOperatorType) => {
|
||||
return setOperatorTypes.includes(op);
|
||||
};
|
||||
|
||||
// PromQL binary operator precedence table. A LOWER number means HIGHER
// (tighter-binding) precedence — e.g. pow (1) binds tighter than mul (2),
// which binds tighter than add (3). Used by maybeParenthesizeBinopChild to
// decide whether a child binary expression needs parentheses when serialized.
const binOpPrecedence = {
  [binaryOperatorType.add]: 3,
  [binaryOperatorType.sub]: 3,
  [binaryOperatorType.mul]: 2,
  [binaryOperatorType.div]: 2,
  [binaryOperatorType.mod]: 2,
  [binaryOperatorType.pow]: 1,
  [binaryOperatorType.eql]: 4,
  [binaryOperatorType.neq]: 4,
  [binaryOperatorType.gtr]: 4,
  [binaryOperatorType.lss]: 4,
  [binaryOperatorType.gte]: 4,
  [binaryOperatorType.lte]: 4,
  [binaryOperatorType.and]: 5,
  [binaryOperatorType.or]: 6,
  [binaryOperatorType.unless]: 5,
  [binaryOperatorType.atan2]: 2,
};
|
||||
|
||||
export const maybeParenthesizeBinopChild = (op: binaryOperatorType, child: ASTNode): ASTNode => {
|
||||
if (child.type !== nodeType.binaryExpr) {
|
||||
return child;
|
||||
}
|
||||
|
||||
if (binOpPrecedence[op] > binOpPrecedence[child.op]) {
|
||||
return child;
|
||||
}
|
||||
|
||||
// TODO: Parens aren't necessary for left-associativity within same precedence,
|
||||
// or right-associativity between two power operators.
|
||||
return {
|
||||
type: nodeType.parenExpr,
|
||||
expr: child,
|
||||
};
|
||||
};
|
||||
|
||||
export const getNodeChildren = (node: ASTNode): ASTNode[] => {
|
||||
switch (node.type) {
|
||||
case nodeType.aggregation:
|
||||
return node.param === null ? [node.expr] : [node.param, node.expr];
|
||||
case nodeType.subquery:
|
||||
return [node.expr];
|
||||
case nodeType.parenExpr:
|
||||
return [node.expr];
|
||||
case nodeType.call:
|
||||
return node.args;
|
||||
case nodeType.matrixSelector:
|
||||
case nodeType.vectorSelector:
|
||||
case nodeType.numberLiteral:
|
||||
case nodeType.stringLiteral:
|
||||
return [];
|
||||
case nodeType.placeholder:
|
||||
return node.children;
|
||||
case nodeType.unaryExpr:
|
||||
return [node.expr];
|
||||
case nodeType.binaryExpr:
|
||||
return [node.lhs, node.rhs];
|
||||
default:
|
||||
throw new Error('unsupported node type');
|
||||
}
|
||||
};
|
||||
|
||||
export const getNodeChild = (node: ASTNode, idx: number) => {
|
||||
switch (node.type) {
|
||||
case nodeType.aggregation:
|
||||
return node.param === null || idx === 1 ? node.expr : node.param;
|
||||
case nodeType.subquery:
|
||||
return node.expr;
|
||||
case nodeType.parenExpr:
|
||||
return node.expr;
|
||||
case nodeType.call:
|
||||
return node.args[idx];
|
||||
case nodeType.unaryExpr:
|
||||
return node.expr;
|
||||
case nodeType.binaryExpr:
|
||||
return idx === 0 ? node.lhs : node.rhs;
|
||||
default:
|
||||
throw new Error('unsupported node type');
|
||||
}
|
||||
};
|
||||
|
||||
export const containsPlaceholders = (node: ASTNode): boolean =>
|
||||
node.type === nodeType.placeholder || getNodeChildren(node).some((n) => containsPlaceholders(n));
|
||||
|
||||
export const nodeValueType = (node: ASTNode): valueType | null => {
|
||||
switch (node.type) {
|
||||
case nodeType.aggregation:
|
||||
return valueType.vector;
|
||||
case nodeType.binaryExpr:
|
||||
const childTypes = [nodeValueType(node.lhs), nodeValueType(node.rhs)];
|
||||
|
||||
if (childTypes.includes(null)) {
|
||||
// One of the children is or a has a placeholder and thus an undefined type.
|
||||
return null;
|
||||
}
|
||||
|
||||
if (childTypes.includes(valueType.vector)) {
|
||||
return valueType.vector;
|
||||
}
|
||||
|
||||
return valueType.scalar;
|
||||
case nodeType.call:
|
||||
return node.func.returnType;
|
||||
case nodeType.matrixSelector:
|
||||
return valueType.matrix;
|
||||
case nodeType.numberLiteral:
|
||||
return valueType.scalar;
|
||||
case nodeType.parenExpr:
|
||||
return nodeValueType(node.expr);
|
||||
case nodeType.placeholder:
|
||||
return null;
|
||||
case nodeType.stringLiteral:
|
||||
return valueType.string;
|
||||
case nodeType.subquery:
|
||||
return valueType.matrix;
|
||||
case nodeType.unaryExpr:
|
||||
return nodeValueType(node.expr);
|
||||
case nodeType.vectorSelector:
|
||||
return valueType.vector;
|
||||
default:
|
||||
throw new Error('invalid node type');
|
||||
}
|
||||
};
|
||||
|
||||
export const childDescription = (node: ASTNode, idx: number): string => {
|
||||
switch (node.type) {
|
||||
case nodeType.aggregation:
|
||||
if (aggregatorsWithParam.includes(node.op) && idx === 0) {
|
||||
switch (node.op) {
|
||||
case 'topk':
|
||||
case 'bottomk':
|
||||
case 'limitk':
|
||||
return 'k';
|
||||
case 'quantile':
|
||||
return 'quantile';
|
||||
case 'count_values':
|
||||
return 'target label name';
|
||||
case 'limit_ratio':
|
||||
return 'ratio';
|
||||
}
|
||||
}
|
||||
|
||||
return 'vector to aggregate';
|
||||
case nodeType.binaryExpr:
|
||||
return idx === 0 ? 'left-hand side' : 'right-hand side';
|
||||
case nodeType.call:
|
||||
if (functionArgNames.hasOwnProperty(node.func.name)) {
|
||||
const argNames = functionArgNames[node.func.name];
|
||||
return argNames[Math.min(functionArgNames[node.func.name].length - 1, idx)];
|
||||
}
|
||||
return 'argument';
|
||||
case nodeType.parenExpr:
|
||||
return 'expression';
|
||||
case nodeType.placeholder:
|
||||
return 'argument';
|
||||
case nodeType.subquery:
|
||||
return 'subquery to execute';
|
||||
case nodeType.unaryExpr:
|
||||
return 'expression';
|
||||
default:
|
||||
throw new Error('invalid node type');
|
||||
}
|
||||
};
|
||||
|
||||
// Aggregation operators that take an extra parameter (k / quantile / target
// label name / ratio) in addition to the vector expression.
export const aggregatorsWithParam = ['topk', 'bottomk', 'quantile', 'count_values', 'limitk', 'limit_ratio'];

// All possible PromQL expression value types (used where any type is allowed).
export const anyValueType = [valueType.scalar, valueType.string, valueType.matrix, valueType.vector];
|
||||
|
||||
export const allowedChildValueTypes = (node: ASTNode, idx: number): valueType[] => {
|
||||
switch (node.type) {
|
||||
case nodeType.aggregation:
|
||||
if (aggregatorsWithParam.includes(node.op) && idx === 0) {
|
||||
if (node.op === 'count_values') {
|
||||
return [valueType.string];
|
||||
}
|
||||
return [valueType.scalar];
|
||||
}
|
||||
|
||||
return [valueType.vector];
|
||||
case nodeType.binaryExpr:
|
||||
// TODO: Do deeper constraint checking here.
|
||||
// - Set ops only between vectors.
|
||||
// - Bools only for filter ops.
|
||||
// - Advanced: check cardinality.
|
||||
return [valueType.scalar, valueType.vector];
|
||||
case nodeType.call:
|
||||
return [node.func.argTypes[Math.min(idx, node.func.argTypes.length - 1)]];
|
||||
case nodeType.parenExpr:
|
||||
return anyValueType;
|
||||
case nodeType.placeholder:
|
||||
return anyValueType;
|
||||
case nodeType.subquery:
|
||||
return [valueType.vector];
|
||||
case nodeType.unaryExpr:
|
||||
return anyValueType;
|
||||
default:
|
||||
throw new Error('invalid node type');
|
||||
}
|
||||
};
|
||||
|
||||
export const canAddVarArg = (node: Call): boolean => {
|
||||
if (node.func.variadic === -1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// TODO: Only works for 1 vararg, but PromQL only has functions with either 1 (not 2, 3, ...) or unlimited (-1) varargs in practice, so this is fine for now.
|
||||
return node.args.length < node.func.argTypes.length;
|
||||
};
|
||||
|
||||
export const canRemoveVarArg = (node: Call): boolean => {
|
||||
return node.func.variadic !== 0 && node.args.length >= node.func.argTypes.length;
|
||||
};
|
||||
|
||||
// Human-readable display names for each PromQL value type, for use in UI
// labels and error messages.
export const humanizedValueType: Record<valueType, string> = {
  [valueType.none]: 'none',
  [valueType.string]: 'string',
  [valueType.scalar]: 'number (scalar)',
  [valueType.vector]: 'instant vector',
  [valueType.matrix]: 'range vector',
};
|
||||
|
||||
export const escapeString = (str: string) => {
|
||||
return str.replace(/([\\"])/g, '\\$1');
|
||||
};
|
1
web/ui/mantine-ui/src/setupTests.ts
Normal file
1
web/ui/mantine-ui/src/setupTests.ts
Normal file
|
@ -0,0 +1 @@
|
|||
import "@testing-library/jest-dom";
|
11
web/ui/mantine-ui/vitest.config.ts
Normal file
11
web/ui/mantine-ui/vitest.config.ts
Normal file
|
@ -0,0 +1,11 @@
|
|||
import { defineConfig } from "vitest/config";
|
||||
import react from "@vitejs/plugin-react";
|
||||
|
||||
// Vitest configuration: run tests in a browser-like jsdom environment with
// global test APIs (describe/it/expect) enabled and jest-dom matchers loaded
// via the setupTests entry point.
export default defineConfig({
  plugins: [react()],
  test: {
    globals: true,
    environment: "jsdom",
    setupFiles: "./src/setupTests.ts",
  },
});
|
4676
web/ui/package-lock.json
generated
4676
web/ui/package-lock.json
generated
File diff suppressed because it is too large
Load diff
Loading…
Reference in a new issue