From 43ef27e17e73a3c950f1980be3756ccfce34f9be Mon Sep 17 00:00:00 2001 From: nbav12 Date: Tue, 25 Feb 2025 10:10:51 +0200 Subject: [PATCH 01/33] Update dashboard.md (#101190) Typo --- docs/sources/developers/http_api/dashboard.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/developers/http_api/dashboard.md b/docs/sources/developers/http_api/dashboard.md index fe08dc99f150a..54bfaf631568f 100644 --- a/docs/sources/developers/http_api/dashboard.md +++ b/docs/sources/developers/http_api/dashboard.md @@ -355,7 +355,7 @@ Content-Type: application/json Status Codes: -- **200** – Deleted +- **200** – Restored - **401** – Unauthorized - **403** – Access denied - **404** – Not found From 047136c830f25592c19c22ebda583ece6a5779c1 Mon Sep 17 00:00:00 2001 From: nbav12 Date: Tue, 25 Feb 2025 10:11:00 +0200 Subject: [PATCH 02/33] Docs: Change link title (#101043) Update _index.md Proposal to change the link title (also the relref behind, respectively). At the sidebar, under: Set up > Install Grafana - there is a link with the title "RHEL or Fedora". Just to be uniform... --- docs/sources/setup-grafana/installation/_index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/setup-grafana/installation/_index.md b/docs/sources/setup-grafana/installation/_index.md index 940a80aa91cf7..44c69f8ab4769 100644 --- a/docs/sources/setup-grafana/installation/_index.md +++ b/docs/sources/setup-grafana/installation/_index.md @@ -32,7 +32,7 @@ Grafana relies on other open source software to operate. For a list of open sour Grafana supports the following operating systems: - [Debian or Ubuntu]({{< relref "./debian" >}}) -- [Red Hat, RHEL, or Fedora]({{< relref "./redhat-rhel-fedora" >}}) +- [RHEL or Fedora]({{< relref "./rhel-fedora" >}}) - [SUSE or openSUSE]({{< relref "./suse-opensuse" >}}) - [macOS]({{< relref "./mac" >}}) - [Windows]({{< relref "./windows" >}}) From eca045bc119ab1ab97ff02e3aa7134a03cb32620 Mon Sep 17 00:00:00 2001 From: Jack Westbrook Date: Tue, 25 Feb 2025 09:11:09 +0100 Subject: [PATCH 03/33] Build: Reduce repetition in rollup configs (#93916) * build(packages): add extendable pieces of rollup config * build(packages): rewrite rollup configs to make use of extendable parts --- packages/grafana-data/rollup.config.ts | 50 ++-------------- .../grafana-e2e-selectors/rollup.config.ts | 50 ++-------------- packages/grafana-flamegraph/rollup.config.ts | 50 ++-------------- packages/grafana-icons/package.json | 1 + packages/grafana-icons/rollup.config.ts | 42 ++------------ packages/grafana-prometheus/rollup.config.ts | 51 ++-------------- packages/grafana-runtime/rollup.config.ts | 50 ++-------------- packages/grafana-schema/rollup.config.ts | 57 +++--------------- packages/grafana-ui/rollup.config.ts | 48 ++------------- packages/rollup.config.parts.ts | 58 +++++++++++++++++++ 10 files changed, 109 insertions(+), 348 deletions(-) create mode 100644 packages/rollup.config.parts.ts diff --git a/packages/grafana-data/rollup.config.ts b/packages/grafana-data/rollup.config.ts index 9894024f99c2e..5a913449466c5 100644 --- a/packages/grafana-data/rollup.config.ts +++ b/packages/grafana-data/rollup.config.ts @@ -1,53 +1,15 @@ -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { entryPoint, plugins, esmOutput, cjsOutput, 
tsDeclarationOutput } from '../rollup.config.parts'; const rq = createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-data/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './compiled/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins, + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-data')], }, + tsDeclarationOutput(pkg), ]; diff --git a/packages/grafana-e2e-selectors/rollup.config.ts b/packages/grafana-e2e-selectors/rollup.config.ts index b70662c33a853..338dae23ac957 100644 --- a/packages/grafana-e2e-selectors/rollup.config.ts +++ b/packages/grafana-e2e-selectors/rollup.config.ts @@ -1,53 +1,15 @@ -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { cjsOutput, entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; const rq = createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-e2e-selectors/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './compiled/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins, + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-e2e-selectors')], }, + tsDeclarationOutput(pkg), ]; diff --git a/packages/grafana-flamegraph/rollup.config.ts b/packages/grafana-flamegraph/rollup.config.ts index 86d0ad86cfa24..dbd64b4abdb29 100644 --- a/packages/grafana-flamegraph/rollup.config.ts +++ b/packages/grafana-flamegraph/rollup.config.ts @@ -1,53 +1,15 @@ -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { cjsOutput, entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; const rq = 
createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-ui/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './compiled/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins, + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-flamegraph')], }, + tsDeclarationOutput(pkg), ]; diff --git a/packages/grafana-icons/package.json b/packages/grafana-icons/package.json index dec83cf36f88b..c08c70e14edaf 100644 --- a/packages/grafana-icons/package.json +++ b/packages/grafana-icons/package.json @@ -16,6 +16,7 @@ "types": "src/index.ts", "publishConfig": { "main": "dist/index.js", + "module": "dist/index.js", "types": "dist/index.d.ts", "access": "public" }, diff --git a/packages/grafana-icons/rollup.config.ts b/packages/grafana-icons/rollup.config.ts index 6bf3c3b536efa..fde6ca115b0fa 100644 --- a/packages/grafana-icons/rollup.config.ts +++ b/packages/grafana-icons/rollup.config.ts @@ -1,45 +1,15 @@ -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; const rq = createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], - output: [ - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - preserveModules: true, - ...legacyOutputDefaults, - }, - ], - }, - { - input: 'src/index.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins, + output: esmOutput(pkg, 'grafana-icons'), }, + tsDeclarationOutput(pkg, { input: 'src/index.ts' }), ]; diff --git a/packages/grafana-prometheus/rollup.config.ts b/packages/grafana-prometheus/rollup.config.ts index 7e630257c3665..5c7d9dd3c0461 100644 --- a/packages/grafana-prometheus/rollup.config.ts +++ b/packages/grafana-prometheus/rollup.config.ts @@ -1,55 +1,16 @@ import image from '@rollup/plugin-image'; -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { cjsOutput, entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; const rq = 
createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - image(), - ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-prometheus/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './compiled/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins: [...plugins, image()], + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-prometheus')], }, + tsDeclarationOutput(pkg), ]; diff --git a/packages/grafana-runtime/rollup.config.ts b/packages/grafana-runtime/rollup.config.ts index 25992a934418e..02f523deee1f6 100644 --- a/packages/grafana-runtime/rollup.config.ts +++ b/packages/grafana-runtime/rollup.config.ts @@ -1,53 +1,15 @@ -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { cjsOutput, entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; const rq = createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-runtime/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './compiled/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins, + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-runtime')], }, + tsDeclarationOutput(pkg), ]; diff --git a/packages/grafana-schema/rollup.config.ts b/packages/grafana-schema/rollup.config.ts index 6a2f736c9445b..aa0c96aff066c 100644 --- a/packages/grafana-schema/rollup.config.ts +++ b/packages/grafana-schema/rollup.config.ts @@ -1,57 +1,22 @@ -import resolve from '@rollup/plugin-node-resolve'; import { glob } from 'glob'; import { createRequire } from 'node:module'; import { fileURLToPath } from 'node:url'; import path from 'path'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; + +import { cjsOutput, entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; const rq = 
createRequire(import.meta.url); const pkg = rq('./package.json'); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; +const [_, noderesolve, esbuild] = plugins; export default [ { - input: 'src/index.ts', - plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-schema/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './dist/esm/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + input: entryPoint, + plugins, + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-schema')], }, + tsDeclarationOutput(pkg, { input: './dist/esm/index.d.ts' }), { input: Object.fromEntries( glob @@ -61,13 +26,7 @@ export default [ fileURLToPath(new URL(file, import.meta.url)), ]) ), - plugins: [ - resolve(), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), - ], + plugins: [noderesolve, esbuild], output: { format: 'esm', dir: path.dirname(pkg.publishConfig.module), diff --git a/packages/grafana-ui/rollup.config.ts b/packages/grafana-ui/rollup.config.ts index b18618dd7eaf7..d3cb5c2d8b86e 100644 --- a/packages/grafana-ui/rollup.config.ts +++ b/packages/grafana-ui/rollup.config.ts @@ -1,12 +1,9 @@ -import resolve from '@rollup/plugin-node-resolve'; import { createRequire } from 'node:module'; -import path from 'path'; import copy from 'rollup-plugin-copy'; -import dts from 'rollup-plugin-dts'; -import esbuild from 'rollup-plugin-esbuild'; -import { nodeExternals } from 'rollup-plugin-node-externals'; import svg from 'rollup-plugin-svg-import'; +import { cjsOutput, entryPoint, esmOutput, plugins, tsDeclarationOutput } from '../rollup.config.parts'; + const rq = createRequire(import.meta.url); const icons = rq('../../public/app/core/icons/cached.json'); const pkg = rq('./package.json'); @@ -15,51 +12,18 @@ const iconSrcPaths = icons.map((iconSubPath) => { return `../../public/img/icons/${iconSubPath}.svg`; }); -const legacyOutputDefaults = { - esModule: true, - interop: 'compat', -}; - export default [ { - input: 'src/index.ts', + input: entryPoint, plugins: [ - nodeExternals({ deps: true, packagePath: './package.json' }), + ...plugins, svg({ stringify: true }), - resolve(), copy({ targets: [{ src: iconSrcPaths, dest: './dist/public/' }], flatten: false, }), - esbuild({ - target: 'es2018', - tsconfig: 'tsconfig.build.json', - }), ], - output: [ - { - format: 'cjs', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.main), - ...legacyOutputDefaults, - }, - { - format: 'esm', - sourcemap: true, - dir: path.dirname(pkg.publishConfig.module), - preserveModules: true, - // @ts-expect-error (TS cannot assure that `process.env.PROJECT_CWD` is a string) - preserveModulesRoot: path.join(process.env.PROJECT_CWD, `packages/grafana-ui/src`), - ...legacyOutputDefaults, - }, - ], - }, - { - input: './compiled/index.d.ts', - plugins: [dts()], - output: { - file: pkg.publishConfig.types, - format: 'es', - }, + output: [cjsOutput(pkg), esmOutput(pkg, 'grafana-ui')], }, + tsDeclarationOutput(pkg), ]; diff --git 
a/packages/rollup.config.parts.ts b/packages/rollup.config.parts.ts new file mode 100644 index 0000000000000..ca805b07ed9eb --- /dev/null +++ b/packages/rollup.config.parts.ts @@ -0,0 +1,58 @@ +// This file contains the common parts of the rollup configuration that are shared across multiple packages. +import nodeResolve from '@rollup/plugin-node-resolve'; +import { dirname, resolve } from 'node:path'; +import dts from 'rollup-plugin-dts'; +import esbuild from 'rollup-plugin-esbuild'; +import { nodeExternals } from 'rollup-plugin-node-externals'; + +// This is the path to the root of the grafana project +// Prefer PROJECT_CWD env var set by yarn berry +const projectCwd = process.env.PROJECT_CWD ?? '../../'; + +export const entryPoint = 'src/index.ts'; + +// Plugins that are shared across all rollup configurations. Their order can affect build output. +// Externalising and resolving modules should happen before transformation. +export const plugins = [ + nodeExternals({ deps: true, packagePath: './package.json' }), + nodeResolve(), + esbuild({ + target: 'es2018', + tsconfig: 'tsconfig.build.json', + }), +]; + +// Generates a rollup configuration for commonjs output. +export function cjsOutput(pkg) { + return { + format: 'cjs', + sourcemap: true, + dir: dirname(pkg.publishConfig.main), + esModule: true, + interop: 'compat', + }; +} + +// Generate a rollup configuration for es module output. +export function esmOutput(pkg, pkgName) { + return { + format: 'esm', + sourcemap: true, + dir: dirname(pkg.publishConfig.module), + preserveModules: true, + preserveModulesRoot: resolve(projectCwd, `packages/${pkgName}/src`), + }; +} + +// Generate a rollup configuration for rolling up typescript declaration files into a single file. +export function tsDeclarationOutput(pkg, overrides = {}) { + return { + input: './compiled/index.d.ts', + plugins: [dts()], + output: { + file: pkg.publishConfig.types, + format: 'es', + }, + ...overrides, + }; +} From 7773c658bb3280f0432fc9742109f8eb324c83a3 Mon Sep 17 00:00:00 2001 From: Marcus Andersson Date: Tue, 25 Feb 2025 09:17:17 +0100 Subject: [PATCH 04/33] PluginExtensions: Start using new APIs for datasource config extensions point (#101139) --- .../components/EditDataSource.test.tsx | 73 ++++++++++--------- .../datasources/components/EditDataSource.tsx | 46 +++++++----- .../extensions/usePluginComponents.tsx | 3 +- 3 files changed, 71 insertions(+), 51 deletions(-) diff --git a/public/app/features/datasources/components/EditDataSource.test.tsx b/public/app/features/datasources/components/EditDataSource.test.tsx index 378695c0506e9..be1736f77eefb 100644 --- a/public/app/features/datasources/components/EditDataSource.test.tsx +++ b/public/app/features/datasources/components/EditDataSource.test.tsx @@ -1,8 +1,9 @@ import { screen, render } from '@testing-library/react'; import { Provider } from 'react-redux'; -import { PluginExtensionTypes, PluginState } from '@grafana/data'; -import { setAngularLoader, setPluginExtensionsHook } from '@grafana/runtime'; +import { PluginState } from '@grafana/data'; +import { setAngularLoader, setPluginComponentsHook } from '@grafana/runtime'; +import { createComponentWithMeta } from 'app/features/plugins/extensions/usePluginComponents'; import { configureStore } from 'app/store/configureStore'; import { getMockDataSource, getMockDataSourceMeta, getMockDataSourceSettingsState } from '../__mocks__'; @@ -58,7 +59,7 @@ describe('', () => { }); beforeEach(() => { - setPluginExtensionsHook(jest.fn().mockReturnValue({ extensions: [] })); 
+ setPluginComponentsHook(jest.fn().mockReturnValue({ isLoading: false, components: [] })); }); describe('On loading errors', () => { @@ -268,17 +269,19 @@ describe('', () => { it('should be possible to extend the form with a "component" extension in case the plugin ID is whitelisted', () => { const message = "I'm a UI extension component!"; - setPluginExtensionsHook( + setPluginComponentsHook( jest.fn().mockReturnValue({ - extensions: [ - { - id: '1', - pluginId: 'grafana-pdc-app', - type: PluginExtensionTypes.component, - title: 'Example component', - description: 'Example description', - component: () =>
<div>{message}</div>
, - }, + isLoading: false, + components: [ + createComponentWithMeta( + { + pluginId: 'grafana-pdc-app', + title: 'Example component', + description: 'Example description', + component: () =>
<div>{message}</div>
, + }, + '1' + ), ], }) ); @@ -297,17 +300,19 @@ describe('', () => { it('should NOT be possible to extend the form with a "component" extension in case the plugin ID is NOT whitelisted', () => { const message = "I'm a UI extension component!"; - setPluginExtensionsHook( + setPluginComponentsHook( jest.fn().mockReturnValue({ - extensions: [ - { - id: '1', - pluginId: 'myorg-basic-app', - type: PluginExtensionTypes.component, - title: 'Example component', - description: 'Example description', - component: () =>
<div>{message}</div>
, - }, + isLoading: false, + components: [ + createComponentWithMeta( + { + pluginId: 'myorg-basic-app', + title: 'Example component', + description: 'Example description', + component: () =>
<div>{message}</div>
, + }, + '1' + ), ], }) ); @@ -327,17 +332,19 @@ describe('', () => { const message = "I'm a UI extension component!"; const component = jest.fn().mockReturnValue(
<div>{message}</div>
); - setPluginExtensionsHook( + setPluginComponentsHook( jest.fn().mockReturnValue({ - extensions: [ - { - id: '1', - pluginId: 'grafana-pdc-app', - type: PluginExtensionTypes.component, - title: 'Example component', - description: 'Example description', - component, - }, + isLoading: false, + components: [ + createComponentWithMeta( + { + pluginId: 'grafana-pdc-app', + title: 'Example component', + description: 'Example description', + component, + }, + '1' + ), ], }) ); diff --git a/public/app/features/datasources/components/EditDataSource.tsx b/public/app/features/datasources/components/EditDataSource.tsx index e948bad2013db..7fe90eba06347 100644 --- a/public/app/features/datasources/components/EditDataSource.tsx +++ b/public/app/features/datasources/components/EditDataSource.tsx @@ -9,10 +9,9 @@ import { DataSourceSettings as DataSourceSettingsType, PluginExtensionPoints, PluginExtensionDataSourceConfigContext, - DataSourceJsonData, DataSourceUpdatedSuccessfully, } from '@grafana/data'; -import { getDataSourceSrv, usePluginComponentExtensions } from '@grafana/runtime'; +import { getDataSourceSrv, usePluginComponents, UsePluginComponentsResult } from '@grafana/runtime'; import appEvents from 'app/core/app_events'; import PageLoader from 'app/core/components/PageLoader/PageLoader'; import { DataSourceSettingsState, useDispatch } from 'app/types'; @@ -118,6 +117,7 @@ export function EditDataSourceView({ const { plugin, loadError, testingStatus, loading } = dataSourceSettings; const { readOnly, hasWriteRights, hasDeleteRights } = dataSourceRights; const hasDataSource = dataSource.id > 0; + const { components, isLoading } = useDataSourceConfigPluginExtensions(); const dsi = getDataSourceSrv()?.getInstanceSettings(dataSource.uid); @@ -137,16 +137,6 @@ export function EditDataSourceView({ onTest(); }; - const extensionPointId = PluginExtensionPoints.DataSourceConfig; - const { extensions } = usePluginComponentExtensions<{ - context: PluginExtensionDataSourceConfigContext; - }>({ extensionPointId }); - - const allowedExtensions = useMemo(() => { - const allowedPluginIds = ['grafana-pdc-app', 'grafana-auth-app']; - return extensions.filter((e) => allowedPluginIds.includes(e.pluginId)); - }, [extensions]); - if (loadError) { return ( ; } @@ -204,11 +194,9 @@ export function EditDataSourceView({ )} {/* Extension point */} - {allowedExtensions.map((extension) => { - const Component = extension.component; - + {components.map((Component) => { return ( -
+
); } + +type DataSourceConfigPluginExtensionProps = { + context: PluginExtensionDataSourceConfigContext; +}; + +function useDataSourceConfigPluginExtensions(): UsePluginComponentsResult { + const { components, isLoading } = usePluginComponents({ + extensionPointId: PluginExtensionPoints.DataSourceConfig, + }); + + return useMemo(() => { + const allowedComponents = components.filter((component) => { + switch (component.meta.pluginId) { + case 'grafana-pdc-app': + case 'grafana-auth-app': + return true; + default: + return false; + } + }); + + return { components: allowedComponents, isLoading }; + }, [components, isLoading]); +} diff --git a/public/app/features/plugins/extensions/usePluginComponents.tsx b/public/app/features/plugins/extensions/usePluginComponents.tsx index d5259a85c4100..c3e406d5d2352 100644 --- a/public/app/features/plugins/extensions/usePluginComponents.tsx +++ b/public/app/features/plugins/extensions/usePluginComponents.tsx @@ -80,7 +80,8 @@ export function usePluginComponents({ }, [extensionPointId, limitPerPlugin, pluginContext, registryState, isLoadingAppPlugins]); } -function createComponentWithMeta( +// exported so it can be used in tests +export function createComponentWithMeta( registryItem: AddedComponentRegistryItem, extensionPointId: string ): React.ComponentType & { meta: PluginExtensionComponentMeta } { From 00f51f8b7685a49e333a5b333121e4707998a43e Mon Sep 17 00:00:00 2001 From: "grafana-pr-automation[bot]" <140550294+grafana-pr-automation[bot]@users.noreply.github.com> Date: Tue, 25 Feb 2025 11:34:26 +0200 Subject: [PATCH 05/33] I18n: Download translations from Crowdin (#101260) New Crowdin translations by GitHub Action Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- public/locales/de-DE/grafana.json | 7 ++++++- public/locales/es-ES/grafana.json | 7 ++++++- public/locales/fr-FR/grafana.json | 7 ++++++- public/locales/pt-BR/grafana.json | 7 ++++++- public/locales/zh-Hans/grafana.json | 7 ++++++- 5 files changed, 30 insertions(+), 5 deletions(-) diff --git a/public/locales/de-DE/grafana.json b/public/locales/de-DE/grafana.json index 93ff994ba795d..1222cec202292 100644 --- a/public/locales/de-DE/grafana.json +++ b/public/locales/de-DE/grafana.json @@ -1750,6 +1750,12 @@ "drawer": { "close": "Schließen" }, + "feature-badge": { + "experimental": "", + "new": "", + "preview": "", + "private-preview": "" + }, "field-link-list": { "external-links-heading": "" }, @@ -2756,7 +2762,6 @@ "close": "Menü schließen", "dock": "Menü andocken", "list-label": "Navigation", - "new": "", "open": "", "undock": "Menü abdocken" }, diff --git a/public/locales/es-ES/grafana.json b/public/locales/es-ES/grafana.json index a7f1f3424f46e..e6d9efb5f9b53 100644 --- a/public/locales/es-ES/grafana.json +++ b/public/locales/es-ES/grafana.json @@ -1750,6 +1750,12 @@ "drawer": { "close": "Cerrar" }, + "feature-badge": { + "experimental": "", + "new": "", + "preview": "", + "private-preview": "" + }, "field-link-list": { "external-links-heading": "" }, @@ -2756,7 +2762,6 @@ "close": "Cerrar menú", "dock": "Anclar el menú", "list-label": "Navegación", - "new": "", "open": "", "undock": "Desanclar el menú" }, diff --git a/public/locales/fr-FR/grafana.json b/public/locales/fr-FR/grafana.json index b7d309f71b54c..f6ab1a8be742f 100644 --- a/public/locales/fr-FR/grafana.json +++ b/public/locales/fr-FR/grafana.json @@ -1750,6 +1750,12 @@ "drawer": { "close": "Fermer" }, + "feature-badge": { + "experimental": "", + "new": "", + "preview": "", + 
"private-preview": "" + }, "field-link-list": { "external-links-heading": "" }, @@ -2756,7 +2762,6 @@ "close": "Fermer le menu", "dock": "Ancrer le menu", "list-label": "Navigation", - "new": "", "open": "", "undock": "Ancrer le menu" }, diff --git a/public/locales/pt-BR/grafana.json b/public/locales/pt-BR/grafana.json index 32cddf19af35b..c10c3c0eaa40c 100644 --- a/public/locales/pt-BR/grafana.json +++ b/public/locales/pt-BR/grafana.json @@ -1750,6 +1750,12 @@ "drawer": { "close": "Fechar" }, + "feature-badge": { + "experimental": "", + "new": "", + "preview": "", + "private-preview": "" + }, "field-link-list": { "external-links-heading": "" }, @@ -2756,7 +2762,6 @@ "close": "Fechar menu", "dock": "Menu da dock", "list-label": "Navegação", - "new": "", "open": "", "undock": "Desacoplar menu" }, diff --git a/public/locales/zh-Hans/grafana.json b/public/locales/zh-Hans/grafana.json index f51ef70420514..eb52a53913b5c 100644 --- a/public/locales/zh-Hans/grafana.json +++ b/public/locales/zh-Hans/grafana.json @@ -1741,6 +1741,12 @@ "drawer": { "close": "关闭" }, + "feature-badge": { + "experimental": "", + "new": "", + "preview": "", + "private-preview": "" + }, "field-link-list": { "external-links-heading": "" }, @@ -2746,7 +2752,6 @@ "close": "关闭菜单", "dock": "停靠菜单", "list-label": "导航", - "new": "", "open": "", "undock": "取消停靠菜单" }, From bceddd78a8c6dae77b76562175851ad0e7ffab62 Mon Sep 17 00:00:00 2001 From: Josh Hunt Date: Tue, 25 Feb 2025 09:59:13 +0000 Subject: [PATCH 06/33] Navigation: Revert new items auto-expanding (#101230) --- public/app/core/components/AppChrome/MegaMenu/MegaMenuItem.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/core/components/AppChrome/MegaMenu/MegaMenuItem.tsx b/public/app/core/components/AppChrome/MegaMenu/MegaMenuItem.tsx index 23ed7d4fc84a5..f643f7199d5e2 100644 --- a/public/app/core/components/AppChrome/MegaMenu/MegaMenuItem.tsx +++ b/public/app/core/components/AppChrome/MegaMenu/MegaMenuItem.tsx @@ -35,7 +35,7 @@ export function MegaMenuItem({ link, activeItem, level = 0, onClick, onPin, isPi const isActive = link === activeItem || (level === MAX_DEPTH && hasActiveChild); const [sectionExpanded, setSectionExpanded] = useLocalStorage( `grafana.navigation.expanded[${link.text}]`, - Boolean(hasActiveChild || link.isNew) + Boolean(hasActiveChild) ); const showExpandButton = level < MAX_DEPTH && Boolean(linkHasChildren(link) || link.emptyMessage); const item = useRef(null); From 3aedb9159e319bf05df7ce1baf3a0f68480b5ad4 Mon Sep 17 00:00:00 2001 From: Matheus Macabu Date: Tue, 25 Feb 2025 11:15:41 +0100 Subject: [PATCH 07/33] Chore: Update golang.org/x/crypto and golang.org/x/ouath2 to address security issues (#101268) - CVE-2025-22869 and Go issue https://go.dev/issue/71931. - CVE-2025-22868 and Go issue https://go.dev/issue/71490. 
--- apps/alerting/notifications/go.mod | 10 +++++----- apps/alerting/notifications/go.sum | 20 ++++++++++---------- apps/investigations/go.mod | 8 ++++---- apps/investigations/go.sum | 16 ++++++++-------- apps/playlist/go.mod | 8 ++++---- apps/playlist/go.sum | 16 ++++++++-------- go.mod | 10 +++++----- go.sum | 20 ++++++++++---------- go.work.sum | 6 ++++++ pkg/aggregator/go.mod | 10 +++++----- pkg/aggregator/go.sum | 20 ++++++++++---------- pkg/apimachinery/go.mod | 6 +++--- pkg/apimachinery/go.sum | 12 ++++++------ pkg/apiserver/go.mod | 9 +++++---- pkg/apiserver/go.sum | 20 ++++++++++---------- pkg/build/go.mod | 8 ++++---- pkg/build/go.sum | 16 ++++++++-------- pkg/codegen/go.mod | 4 ++-- pkg/codegen/go.sum | 8 ++++---- pkg/plugins/codegen/go.mod | 4 ++-- pkg/plugins/codegen/go.sum | 12 ++++++------ pkg/promlib/go.mod | 6 ++++-- pkg/promlib/go.sum | 16 ++++++++-------- pkg/storage/unified/apistore/go.mod | 10 +++++----- pkg/storage/unified/apistore/go.sum | 20 ++++++++++---------- pkg/storage/unified/resource/go.mod | 10 +++++----- pkg/storage/unified/resource/go.sum | 20 ++++++++++---------- 27 files changed, 167 insertions(+), 158 deletions(-) diff --git a/apps/alerting/notifications/go.mod b/apps/alerting/notifications/go.mod index ebbf0284681b3..74dc1dfe0286e 100644 --- a/apps/alerting/notifications/go.mod +++ b/apps/alerting/notifications/go.mod @@ -72,13 +72,13 @@ require ( go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect - golang.org/x/crypto v0.32.0 // indirect + golang.org/x/crypto v0.35.0 // indirect golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250115164207-1a7da9e5054f // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect diff --git a/apps/alerting/notifications/go.sum b/apps/alerting/notifications/go.sum index 34db4a1a6920d..cae5be008792c 100644 --- a/apps/alerting/notifications/go.sum +++ b/apps/alerting/notifications/go.sum @@ -207,8 +207,8 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -223,11 +223,11 @@ 
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -239,12 +239,12 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/apps/investigations/go.mod b/apps/investigations/go.mod index 498b2e5eee74e..12da166af2750 100644 --- a/apps/investigations/go.mod +++ b/apps/investigations/go.mod @@ -62,12 +62,12 @@ require ( go.opentelemetry.io/otel/sdk v1.34.0 // indirect go.opentelemetry.io/otel/trace v1.34.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect 
+ golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250115164207-1a7da9e5054f // indirect diff --git a/apps/investigations/go.sum b/apps/investigations/go.sum index f6f6a0e3981c5..d3caa3d08e33e 100644 --- a/apps/investigations/go.sum +++ b/apps/investigations/go.sum @@ -155,10 +155,10 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -169,12 +169,12 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/apps/playlist/go.mod b/apps/playlist/go.mod index ccc6715d0f5ec..cd07f8c199e81 100644 --- a/apps/playlist/go.mod +++ b/apps/playlist/go.mod @@ -63,12 +63,12 @@ require ( go.opentelemetry.io/otel/sdk v1.34.0 // indirect go.opentelemetry.io/otel/trace v1.34.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect 
- golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250115164207-1a7da9e5054f // indirect diff --git a/apps/playlist/go.sum b/apps/playlist/go.sum index f6f6a0e3981c5..d3caa3d08e33e 100644 --- a/apps/playlist/go.sum +++ b/apps/playlist/go.sum @@ -155,10 +155,10 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -169,12 +169,12 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/go.mod b/go.mod index 
d512579c14981..7eef812cbf673 100644 --- a/go.mod +++ b/go.mod @@ -167,13 +167,13 @@ require ( go.uber.org/goleak v1.3.0 // @grafana/grafana-search-and-storage go.uber.org/zap v1.27.0 // @grafana/identity-access-team gocloud.dev v0.40.0 // @grafana/grafana-app-platform-squad - golang.org/x/crypto v0.32.0 // @grafana/grafana-backend-group + golang.org/x/crypto v0.35.0 // @grafana/grafana-backend-group golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // @grafana/alerting-backend golang.org/x/mod v0.22.0 // indirect; @grafana/grafana-backend-group - golang.org/x/net v0.34.0 // @grafana/oss-big-tent @grafana/partner-datasources - golang.org/x/oauth2 v0.26.0 // @grafana/identity-access-team + golang.org/x/net v0.35.0 // @grafana/oss-big-tent @grafana/partner-datasources + golang.org/x/oauth2 v0.27.0 // @grafana/identity-access-team golang.org/x/sync v0.11.0 // @grafana/alerting-backend - golang.org/x/text v0.21.0 // @grafana/grafana-backend-group + golang.org/x/text v0.22.0 // @grafana/grafana-backend-group golang.org/x/time v0.9.0 // @grafana/grafana-backend-group golang.org/x/tools v0.29.0 // indirect; @grafana/grafana-as-code gonum.org/v1/gonum v0.15.1 // @grafana/oss-big-tent @@ -520,7 +520,7 @@ require ( go.uber.org/multierr v1.11.0 // indirect go4.org/netipx v0.0.0-20230125063823-8449b0a6169f // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect + golang.org/x/term v0.29.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect google.golang.org/genproto v0.0.0-20241021214115-324edc3d5d38 // indirect diff --git a/go.sum b/go.sum index 2b409ed61b537..ce99251124110 100644 --- a/go.sum +++ b/go.sum @@ -2552,8 +2552,8 @@ golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -2699,8 +2699,8 @@ golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2736,8 +2736,8 @@ golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4 golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE= golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4= golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -2904,8 +2904,8 @@ golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2925,8 +2925,8 @@ golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/go.work.sum b/go.work.sum index 6d37067853e97..0e79edfd1d2e9 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1112,6 +1112,8 @@ golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5D golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= golang.org/x/crypto v0.31.0/go.mod 
h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M= golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= @@ -1135,8 +1137,12 @@ golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/pkg/aggregator/go.mod b/pkg/aggregator/go.mod index 2f254da37556b..edfe54e00b2df 100644 --- a/pkg/aggregator/go.mod +++ b/pkg/aggregator/go.mod @@ -134,15 +134,15 @@ require ( go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect - golang.org/x/crypto v0.32.0 // indirect + golang.org/x/crypto v0.35.0 // indirect golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect golang.org/x/tools v0.29.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect diff --git a/pkg/aggregator/go.sum b/pkg/aggregator/go.sum index dcacd54e2da5f..eb715898bf0dd 100644 --- a/pkg/aggregator/go.sum +++ b/pkg/aggregator/go.sum @@ -396,8 +396,8 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod 
h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= @@ -418,11 +418,11 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -447,12 +447,12 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/pkg/apimachinery/go.mod b/pkg/apimachinery/go.mod index 6608d7f91b377..36b8d4b827e66 100644 --- a/pkg/apimachinery/go.mod +++ b/pkg/apimachinery/go.mod @@ -36,11 +36,11 @@ require ( go.opentelemetry.io/otel v1.34.0 // indirect go.opentelemetry.io/otel/sdk v1.34.0 // indirect 
go.opentelemetry.io/otel/trace v1.34.0 // indirect - golang.org/x/crypto v0.32.0 // indirect - golang.org/x/net v0.34.0 // indirect + golang.org/x/crypto v0.35.0 // indirect + golang.org/x/net v0.35.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/text v0.22.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect google.golang.org/grpc v1.70.0 // indirect google.golang.org/protobuf v1.36.4 // indirect diff --git a/pkg/apimachinery/go.sum b/pkg/apimachinery/go.sum index 864e82c4e316f..5b3d03a464a6c 100644 --- a/pkg/apimachinery/go.sum +++ b/pkg/apimachinery/go.sum @@ -89,8 +89,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= @@ -103,8 +103,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -135,8 +135,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= diff --git a/pkg/apiserver/go.mod b/pkg/apiserver/go.mod index 1c208fbbb0cee..83a53b3151a04 100644 --- a/pkg/apiserver/go.mod +++ b/pkg/apiserver/go.mod @@ -81,11 +81,12 @@ require ( go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/crypto v0.35.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect golang.org/x/tools v0.29.0 // indirect google.golang.org/genproto v0.0.0-20241021214115-324edc3d5d38 // indirect diff --git a/pkg/apiserver/go.sum b/pkg/apiserver/go.sum index 921a0ec5c7068..fddf55e1b9341 100644 --- a/pkg/apiserver/go.sum +++ b/pkg/apiserver/go.sum @@ -233,8 +233,8 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -251,11 +251,11 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -269,12 +269,12 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/pkg/build/go.mod b/pkg/build/go.mod index d404a6ce87e2f..1d5f29a7bed31 100644 --- a/pkg/build/go.mod +++ b/pkg/build/go.mod @@ -26,12 +26,12 @@ require ( go.opentelemetry.io/otel v1.34.0 // indirect; @grafana/grafana-backend-group go.opentelemetry.io/otel/sdk v1.34.0 // indirect; @grafana/grafana-backend-group go.opentelemetry.io/otel/trace v1.34.0 // indirect; @grafana/grafana-backend-group - golang.org/x/crypto v0.32.0 // indirect; @grafana/grafana-backend-group + golang.org/x/crypto v0.35.0 // indirect; @grafana/grafana-backend-group golang.org/x/mod v0.22.0 // @grafana/grafana-backend-group - golang.org/x/net v0.34.0 // indirect; @grafana/oss-big-tent @grafana/partner-datasources - golang.org/x/oauth2 v0.26.0 // @grafana/identity-access-team + golang.org/x/net v0.35.0 // indirect; @grafana/oss-big-tent @grafana/partner-datasources + golang.org/x/oauth2 v0.27.0 // @grafana/identity-access-team golang.org/x/sync v0.11.0 // indirect; @grafana/alerting-backend - golang.org/x/text v0.21.0 // indirect; @grafana/grafana-backend-group + golang.org/x/text v0.22.0 // indirect; @grafana/grafana-backend-group golang.org/x/time v0.9.0 // indirect; @grafana/grafana-backend-group google.golang.org/api v0.216.0 // @grafana/grafana-backend-group google.golang.org/grpc v1.70.0 // indirect; @grafana/plugins-platform-backend diff --git a/pkg/build/go.sum b/pkg/build/go.sum index e1c3f80a9a7b0..7c868c04a4cbf 100644 --- a/pkg/build/go.sum +++ b/pkg/build/go.sum @@ -267,8 +267,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.32.0 
h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= @@ -288,11 +288,11 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -315,8 +315,8 @@ golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= diff --git a/pkg/codegen/go.mod b/pkg/codegen/go.mod index acc83fd50d802..3c3b0bfa94b37 100644 --- a/pkg/codegen/go.mod +++ b/pkg/codegen/go.mod @@ -44,9 +44,9 @@ require ( github.com/xlab/treeprint v1.2.0 // indirect github.com/yalue/merged_fs v1.3.0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.34.0 // indirect + golang.org/x/net v0.35.0 // indirect golang.org/x/sync v0.11.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/text v0.22.0 // indirect 
golang.org/x/tools v0.29.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/pkg/codegen/go.sum b/pkg/codegen/go.sum index 445e9e138eb1f..25214c4b99eaf 100644 --- a/pkg/codegen/go.sum +++ b/pkg/codegen/go.sum @@ -100,12 +100,12 @@ github.com/yalue/merged_fs v1.3.0 h1:qCeh9tMPNy/i8cwDsQTJ5bLr6IRxbs6meakNE5O+wyY github.com/yalue/merged_fs v1.3.0/go.mod h1:WqqchfVYQyclV2tnR7wtRhBddzBvLVR83Cjw9BKQw0M= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/pkg/plugins/codegen/go.mod b/pkg/plugins/codegen/go.mod index 1aa5a6488608c..5d415a7f052a4 100644 --- a/pkg/plugins/codegen/go.mod +++ b/pkg/plugins/codegen/go.mod @@ -43,10 +43,10 @@ require ( github.com/xlab/treeprint v1.2.0 // indirect github.com/yalue/merged_fs v1.3.0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.34.0 // indirect + golang.org/x/net v0.35.0 // indirect golang.org/x/oauth2 v0.24.0 // indirect golang.org/x/sync v0.11.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/tools v0.29.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/pkg/plugins/codegen/go.sum b/pkg/plugins/codegen/go.sum index d8efc875fc982..a940ced9650cb 100644 --- a/pkg/plugins/codegen/go.sum +++ b/pkg/plugins/codegen/go.sum @@ -94,16 +94,16 @@ github.com/yalue/merged_fs v1.3.0 h1:qCeh9tMPNy/i8cwDsQTJ5bLr6IRxbs6meakNE5O+wyY github.com/yalue/merged_fs v1.3.0/go.mod h1:WqqchfVYQyclV2tnR7wtRhBddzBvLVR83Cjw9BKQw0M= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE= golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= -golang.org/x/sys v0.29.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= +golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/pkg/promlib/go.mod b/pkg/promlib/go.mod index 3d743ca89bd9e..95380de2939e1 100644 --- a/pkg/promlib/go.mod +++ b/pkg/promlib/go.mod @@ -107,12 +107,14 @@ require ( go.opentelemetry.io/otel/sdk v1.34.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/atomic v1.11.0 // indirect + golang.org/x/crypto v0.35.0 // indirect golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.34.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/tools v0.29.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect google.golang.org/api v0.216.0 // indirect diff --git a/pkg/promlib/go.sum b/pkg/promlib/go.sum index 9f9b18c82b057..66a0da7e79027 100644 --- a/pkg/promlib/go.sum +++ b/pkg/promlib/go.sum @@ -329,8 +329,8 @@ go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -342,10 +342,10 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod 
h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -367,8 +367,8 @@ golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/pkg/storage/unified/apistore/go.mod b/pkg/storage/unified/apistore/go.mod index b7abdb3bb91c4..529d564795df6 100644 --- a/pkg/storage/unified/apistore/go.mod +++ b/pkg/storage/unified/apistore/go.mod @@ -366,14 +366,14 @@ require ( go.uber.org/mock v0.5.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect - golang.org/x/crypto v0.32.0 // indirect + golang.org/x/crypto v0.35.0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sync v0.11.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect golang.org/x/tools v0.29.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect diff --git a/pkg/storage/unified/apistore/go.sum b/pkg/storage/unified/apistore/go.sum index 87e7727af96c7..1f2097a2cdf9a 100644 --- a/pkg/storage/unified/apistore/go.sum +++ b/pkg/storage/unified/apistore/go.sum @@ -1174,8 +1174,8 @@ golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp 
v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1263,8 +1263,8 @@ golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1273,8 +1273,8 @@ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4Iltr golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1370,8 +1370,8 @@ golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1385,8 +1385,8 @@ golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod 
h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/pkg/storage/unified/resource/go.mod b/pkg/storage/unified/resource/go.mod index 833dd96a890b5..ca61c948c7f84 100644 --- a/pkg/storage/unified/resource/go.mod +++ b/pkg/storage/unified/resource/go.mod @@ -217,14 +217,14 @@ require ( go.opentelemetry.io/otel/sdk v1.34.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/atomic v1.11.0 // indirect - golang.org/x/crypto v0.32.0 // indirect + golang.org/x/crypto v0.35.0 // indirect golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect golang.org/x/mod v0.22.0 // indirect - golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.26.0 // indirect + golang.org/x/net v0.35.0 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sys v0.30.0 // indirect - golang.org/x/term v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect + golang.org/x/term v0.29.0 // indirect + golang.org/x/text v0.22.0 // indirect golang.org/x/time v0.9.0 // indirect golang.org/x/tools v0.29.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect diff --git a/pkg/storage/unified/resource/go.sum b/pkg/storage/unified/resource/go.sum index 7d566e2e658cc..53cd5a812ae21 100644 --- a/pkg/storage/unified/resource/go.sum +++ b/pkg/storage/unified/resource/go.sum @@ -824,8 +824,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= -golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= +golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= +golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= @@ -866,14 +866,14 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= -golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= +golang.org/x/net 
v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE= -golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -931,8 +931,8 @@ golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= -golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= +golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= +golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= @@ -942,8 +942,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= From e78136c568a4deded4d954dfa04121e434e6b372 Mon Sep 17 00:00:00 2001 From: Josh Hunt Date: Tue, 25 Feb 2025 10:17:02 +0000 Subject: [PATCH 08/33] Navigation: Send isNew flag in grafana_navigation_item_clicked events (#101209) --- .../components/AppChrome/MegaMenu/utils.ts | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/public/app/core/components/AppChrome/MegaMenu/utils.ts 
b/public/app/core/components/AppChrome/MegaMenu/utils.ts index abbcbc043c17f..2afb61f8cc4c8 100644 --- a/public/app/core/components/AppChrome/MegaMenu/utils.ts +++ b/public/app/core/components/AppChrome/MegaMenu/utils.ts @@ -35,21 +35,36 @@ export const enrichHelpItem = (helpItem: NavModelItem) => { return helpItem; }; -export const enrichWithInteractionTracking = (item: NavModelItem, megaMenuDockedState: boolean) => { +export const enrichWithInteractionTracking = ( + item: NavModelItem, + megaMenuDockedState: boolean, + ancestorIsNew = false +) => { // creating a new object here to not mutate the original item object const newItem = { ...item }; const onClick = newItem.onClick; + + let isNew: 'item' | 'ancestor' | undefined = undefined; + if (newItem.isNew) { + isNew = 'item'; + } else if (ancestorIsNew) { + isNew = 'ancestor'; + } + newItem.onClick = () => { reportInteraction('grafana_navigation_item_clicked', { path: newItem.url ?? newItem.id, menuIsDocked: megaMenuDockedState, itemIsBookmarked: Boolean(config.featureToggles.pinNavItems && newItem?.parentItem?.id === 'bookmarks'), bookmarkToggleOn: Boolean(config.featureToggles.pinNavItems), + isNew, }); onClick?.(); }; if (newItem.children) { - newItem.children = newItem.children.map((item) => enrichWithInteractionTracking(item, megaMenuDockedState)); + newItem.children = newItem.children.map((item) => + enrichWithInteractionTracking(item, megaMenuDockedState, isNew !== undefined) + ); } return newItem; }; From b641fd64f923fdff443059788e71139c1b50be35 Mon Sep 17 00:00:00 2001 From: Alexander Akhmetov Date: Tue, 25 Feb 2025 11:26:36 +0100 Subject: [PATCH 09/33] Alerting: API to create rule groups using mimirtool (#100558) What is this feature? Adds an API endpoint to create alert rules with mimirtool: - POST /convert/prometheus/config/v1/rules/{NamespaceTitle} - Accepts a single rule group in a Prometheus YAML format and creates or updates a Grafana rule group from it. The endpoint uses the conversion package from #100224. Key parts The API works similarly to the provisioning API. If the rule does not exist, it will be created, otherwise updated. Any rules not present in the new group will be deleted, ensuring the group is fully synchronized with the provided configuration. Since the API works with namespace titles (folders), the handler automatically creates a folder in the root based on the provided title if it does not exist. It also requires a special header, X-Grafana-Alerting-Datasource-UID. This header specifies which datasource to use for the new rules. If the rule group's evaluation interval is not specified, it uses the DefaultRuleEvaluationInterval from settings. 
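Example usage

As a rough illustration only (not part of this change), something like the following could exercise the new endpoint. The Grafana URL, folder title, datasource UID and credentials are placeholders, and the exact authentication and content-type handling depend on the deployment; the rule group body is plain Prometheus rule-group YAML as described above.

```go
// Hypothetical client for the new endpoint; all values below are placeholders.
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// A single Prometheus rule group in the usual YAML format.
	ruleGroup := `name: example-group
interval: 1m
rules:
  - alert: HighErrorRate
    expr: rate(http_errors_total[5m]) > 0.1
    for: 5m
    labels:
      severity: critical
`

	// POST the group into the folder (namespace) "example-folder".
	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:3000/api/convert/prometheus/config/v1/rules/example-folder",
		strings.NewReader(ruleGroup))
	if err != nil {
		panic(err)
	}
	// Required: the datasource the converted rules will query.
	req.Header.Set("X-Grafana-Alerting-Datasource-UID", "<prometheus-datasource-uid>")
	// Optional: pause the imported alert/recording rules ("true" or "false").
	req.Header.Set("X-Grafana-Alerting-Alert-Rules-Paused", "false")
	req.SetBasicAuth("admin", "admin") // placeholder credentials

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect "202 Accepted" on success
}
```

On success the handler responds with HTTP 202 and {"status": "success"}; a missing X-Grafana-Alerting-Datasource-UID header or an invalid value in one of the pause headers results in a 400 validation error.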
--- pkg/services/ngalert/api/api.go | 6 +- .../ngalert/api/api_convert_prometheus.go | 176 +++++++++++++- .../api/api_convert_prometheus_test.go | 212 +++++++++++++++++ pkg/services/ngalert/api/authorization.go | 21 +- pkg/services/ngalert/api/persist.go | 2 + pkg/services/ngalert/api/tooling/api.json | 5 +- .../definitions/convert_prometheus_api.go | 6 +- pkg/services/ngalert/api/tooling/post.json | 7 +- pkg/services/ngalert/api/tooling/spec.json | 7 +- pkg/services/ngalert/models/provisioning.go | 4 +- pkg/services/ngalert/prom/convert.go | 28 +-- pkg/services/ngalert/prom/convert_test.go | 46 ++-- pkg/services/ngalert/prom/models.go | 14 ++ pkg/services/ngalert/store/alert_rule.go | 30 --- pkg/services/ngalert/store/alert_rule_test.go | 52 ----- pkg/services/ngalert/store/namespace.go | 97 ++++++++ pkg/services/ngalert/store/namespace_test.go | 220 ++++++++++++++++++ pkg/services/ngalert/tests/fakes/rules.go | 34 +++ .../alerting/api_convert_prometheus_test.go | 196 ++++++++++++++++ pkg/tests/api/alerting/testing.go | 28 ++- public/api-merged.json | 2 - public/openapi3.json | 2 - 22 files changed, 1049 insertions(+), 146 deletions(-) create mode 100644 pkg/services/ngalert/api/api_convert_prometheus_test.go create mode 100644 pkg/services/ngalert/store/namespace.go create mode 100644 pkg/services/ngalert/store/namespace_test.go create mode 100644 pkg/tests/api/alerting/api_convert_prometheus_test.go diff --git a/pkg/services/ngalert/api/api.go b/pkg/services/ngalert/api/api.go index 8973ca6cce74a..10d50a6b0b522 100644 --- a/pkg/services/ngalert/api/api.go +++ b/pkg/services/ngalert/api/api.go @@ -187,8 +187,8 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) { }), m) if api.FeatureManager.IsEnabledGlobally(featuremgmt.FlagAlertingConversionAPI) { - api.RegisterConvertPrometheusApiEndpoints(NewConvertPrometheusApi(&ConvertPrometheusSrv{ - logger: logger, - }), m) + api.RegisterConvertPrometheusApiEndpoints(NewConvertPrometheusApi( + NewConvertPrometheusSrv(&api.Cfg.UnifiedAlerting, logger, api.RuleStore, api.DatasourceCache, api.AlertRules), + ), m) } } diff --git a/pkg/services/ngalert/api/api_convert_prometheus.go b/pkg/services/ngalert/api/api_convert_prometheus.go index babe4f1bca4f9..c27bf9b118af2 100644 --- a/pkg/services/ngalert/api/api_convert_prometheus.go +++ b/pkg/services/ngalert/api/api_convert_prometheus.go @@ -1,14 +1,60 @@ package api import ( + "fmt" + "net/http" + "strconv" + "strings" + "github.com/grafana/grafana/pkg/api/response" + "github.com/grafana/grafana/pkg/apimachinery/errutil" "github.com/grafana/grafana/pkg/infra/log" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" + "github.com/grafana/grafana/pkg/services/datasources" + "github.com/grafana/grafana/pkg/services/folder" apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" + "github.com/grafana/grafana/pkg/services/ngalert/models" + "github.com/grafana/grafana/pkg/services/ngalert/prom" + "github.com/grafana/grafana/pkg/services/ngalert/provisioning" + "github.com/grafana/grafana/pkg/setting" +) + +const ( + datasourceUIDHeader = "X-Grafana-Alerting-Datasource-UID" + recordingRulesPausedHeader = "X-Grafana-Alerting-Recording-Rules-Paused" + alertRulesPausedHeader = "X-Grafana-Alerting-Alert-Rules-Paused" ) +var ( + errDatasourceUIDHeaderMissing = errutil.ValidationFailed( + "alerting.datasourceUIDHeaderMissing", + errutil.WithPublicMessage(fmt.Sprintf("Missing datasource UID header: %s", datasourceUIDHeader)), + ).Errorf("missing datasource UID 
header") + + errInvalidHeaderValueMsg = "Invalid value for header {{.Public.Header}}: must be 'true' or 'false'" + errInvalidHeaderValueBase = errutil.ValidationFailed("aleting.invalidHeaderValue").MustTemplate(errInvalidHeaderValueMsg, errutil.WithPublic(errInvalidHeaderValueMsg)) +) + +func errInvalidHeaderValue(header string) error { + return errInvalidHeaderValueBase.Build(errutil.TemplateData{Public: map[string]any{"Header": header}}) +} + type ConvertPrometheusSrv struct { - logger log.Logger + cfg *setting.UnifiedAlertingSettings + logger log.Logger + ruleStore RuleStore + datasourceCache datasources.CacheService + alertRuleService *provisioning.AlertRuleService +} + +func NewConvertPrometheusSrv(cfg *setting.UnifiedAlertingSettings, logger log.Logger, ruleStore RuleStore, datasourceCache datasources.CacheService, alertRuleService *provisioning.AlertRuleService) *ConvertPrometheusSrv { + return &ConvertPrometheusSrv{ + cfg: cfg, + logger: logger, + ruleStore: ruleStore, + datasourceCache: datasourceCache, + alertRuleService: alertRuleService, + } } func (srv *ConvertPrometheusSrv) RouteConvertPrometheusGetRules(c *contextmodel.ReqContext) response.Response { @@ -28,9 +74,131 @@ func (srv *ConvertPrometheusSrv) RouteConvertPrometheusGetNamespace(c *contextmo } func (srv *ConvertPrometheusSrv) RouteConvertPrometheusGetRuleGroup(c *contextmodel.ReqContext, namespaceTitle string, group string) response.Response { - return response.Error(501, "Not implemented", nil) + // Just to make the mimirtool rules load work. It first checks if the group exists, and if the endpoint returns 501 it fails. + return response.YAML(http.StatusOK, apimodels.PrometheusRuleGroup{}) } -func (srv *ConvertPrometheusSrv) RouteConvertPrometheusPostRuleGroup(c *contextmodel.ReqContext, namespaceTitle string, prometheusGroup apimodels.PrometheusRuleGroup) response.Response { - return response.Error(501, "Not implemented", nil) +func (srv *ConvertPrometheusSrv) RouteConvertPrometheusPostRuleGroup(c *contextmodel.ReqContext, namespaceTitle string, promGroup apimodels.PrometheusRuleGroup) response.Response { + logger := srv.logger.FromContext(c.Req.Context()) + logger = logger.New("folder_title", namespaceTitle, "group", promGroup.Name) + + logger.Info("Converting Prometheus rule group", "rules", len(promGroup.Rules)) + + ns, errResp := srv.getOrCreateNamespace(c, namespaceTitle, logger) + if errResp != nil { + return errResp + } + + datasourceUID := strings.TrimSpace(c.Req.Header.Get(datasourceUIDHeader)) + if datasourceUID == "" { + return response.Err(errDatasourceUIDHeaderMissing) + } + ds, err := srv.datasourceCache.GetDatasourceByUID(c.Req.Context(), datasourceUID, c.SignedInUser, c.SkipDSCache) + if err != nil { + logger.Error("Failed to get datasource", "datasource_uid", datasourceUID, "error", err) + return errorToResponse(err) + } + + group, err := srv.convertToGrafanaRuleGroup(c, ds, ns.UID, promGroup, logger) + if err != nil { + return errorToResponse(err) + } + + err = srv.alertRuleService.ReplaceRuleGroup(c.Req.Context(), c.SignedInUser, *group, models.ProvenanceConvertedPrometheus) + if err != nil { + logger.Error("Failed to replace rule group", "error", err) + return errorToResponse(err) + } + + return response.JSON(http.StatusAccepted, map[string]string{"status": "success"}) +} + +func (srv *ConvertPrometheusSrv) getOrCreateNamespace(c *contextmodel.ReqContext, title string, logger log.Logger) (*folder.Folder, response.Response) { + logger.Debug("Getting or creating a new folder") + + ns, err := 
srv.ruleStore.GetOrCreateNamespaceInRootByTitle( + c.Req.Context(), + title, + c.SignedInUser.GetOrgID(), + c.SignedInUser, + ) + if err != nil { + logger.Error("Failed to get or create a new folder", "error", err) + return nil, toNamespaceErrorResponse(err) + } + + logger.Debug("Using folder for the converted rules", "folder_uid", ns.UID) + + return ns, nil +} + +func (srv *ConvertPrometheusSrv) convertToGrafanaRuleGroup(c *contextmodel.ReqContext, ds *datasources.DataSource, namespaceUID string, promGroup apimodels.PrometheusRuleGroup, logger log.Logger) (*models.AlertRuleGroup, error) { + logger.Info("Converting Prometheus rules to Grafana rules", "rules", len(promGroup.Rules), "folder_uid", namespaceUID, "datasource_uid", ds.UID, "datasource_type", ds.Type) + + rules := make([]prom.PrometheusRule, len(promGroup.Rules)) + for i, r := range promGroup.Rules { + rules[i] = prom.PrometheusRule{ + Alert: r.Alert, + Expr: r.Expr, + For: r.For, + KeepFiringFor: r.KeepFiringFor, + Labels: r.Labels, + Annotations: r.Annotations, + Record: r.Record, + } + } + group := prom.PrometheusRuleGroup{ + Name: promGroup.Name, + Interval: promGroup.Interval, + Rules: rules, + } + + pauseRecordingRules, err := parseBooleanHeader(c.Req.Header.Get(recordingRulesPausedHeader), recordingRulesPausedHeader) + if err != nil { + return nil, err + } + + pauseAlertRules, err := parseBooleanHeader(c.Req.Header.Get(alertRulesPausedHeader), alertRulesPausedHeader) + if err != nil { + return nil, err + } + + converter, err := prom.NewConverter( + prom.Config{ + DatasourceUID: ds.UID, + DatasourceType: ds.Type, + DefaultInterval: srv.cfg.DefaultRuleEvaluationInterval, + RecordingRules: prom.RulesConfig{ + IsPaused: pauseRecordingRules, + }, + AlertRules: prom.RulesConfig{ + IsPaused: pauseAlertRules, + }, + }, + ) + if err != nil { + logger.Error("Failed to create Prometheus converter", "datasource_uid", ds.UID, "datasource_type", ds.Type, "error", err) + return nil, err + } + + grafanaGroup, err := converter.PrometheusRulesToGrafana(c.SignedInUser.GetOrgID(), namespaceUID, group) + if err != nil { + logger.Error("Failed to convert Prometheus rules to Grafana rules", "error", err) + return nil, err + } + + return grafanaGroup, nil +} + +// parseBooleanHeader parses a boolean header value, returning an error if the header +// is present but invalid. If the header is not present, returns (false, nil). 
+func parseBooleanHeader(header string, headerName string) (bool, error) { + if header == "" { + return false, nil + } + val, err := strconv.ParseBool(header) + if err != nil { + return false, errInvalidHeaderValue(headerName) + } + return val, nil } diff --git a/pkg/services/ngalert/api/api_convert_prometheus_test.go b/pkg/services/ngalert/api/api_convert_prometheus_test.go new file mode 100644 index 0000000000000..8dea6f3f17816 --- /dev/null +++ b/pkg/services/ngalert/api/api_convert_prometheus_test.go @@ -0,0 +1,212 @@ +package api + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" + + prommodel "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/infra/log" + contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" + "github.com/grafana/grafana/pkg/services/datasources" + dsfakes "github.com/grafana/grafana/pkg/services/datasources/fakes" + "github.com/grafana/grafana/pkg/services/folder/foldertest" + acfakes "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes" + apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" + "github.com/grafana/grafana/pkg/services/ngalert/provisioning" + "github.com/grafana/grafana/pkg/services/ngalert/tests/fakes" + "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + "github.com/grafana/grafana/pkg/web" +) + +const ( + existingDSUID = "test-ds" +) + +func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { + simpleGroup := apimodels.PrometheusRuleGroup{ + Name: "Test Group", + Interval: prommodel.Duration(1 * time.Minute), + Rules: []apimodels.PrometheusRule{ + { + Alert: "TestAlert", + Expr: "up == 0", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "critical", + }, + }, + }, + } + + t.Run("without datasource UID header should return 400", func(t *testing.T) { + srv, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + rc.Req.Header.Set(datasourceUIDHeader, "") + + response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", apimodels.PrometheusRuleGroup{}) + + require.Equal(t, http.StatusBadRequest, response.Status()) + require.Contains(t, string(response.Body()), "Missing datasource UID header") + }) + + t.Run("with invalid datasource should return error", func(t *testing.T) { + srv, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + rc.Req.Header.Set(datasourceUIDHeader, "non-existing-ds") + + response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", apimodels.PrometheusRuleGroup{}) + + require.Equal(t, http.StatusNotFound, response.Status()) + }) + + t.Run("with rule group without evaluation interval should return 202", func(t *testing.T) { + srv, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", simpleGroup) + require.Equal(t, http.StatusAccepted, response.Status()) + }) + + t.Run("with valid pause header values should return 202", func(t *testing.T) { + testCases := []struct { + name string + headerName string + headerValue string + }{ + { + name: "true recording rules pause value", + headerName: recordingRulesPausedHeader, + headerValue: "true", + }, + { + name: "false recording rules pause value", + headerName: recordingRulesPausedHeader, + headerValue: "false", + }, + { + name: "true alert rules pause value", + headerName: alertRulesPausedHeader, + headerValue: 
"true", + }, + { + name: "false alert rules pause value", + headerName: alertRulesPausedHeader, + headerValue: "false", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + srv, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + rc.Req.Header.Set(tc.headerName, tc.headerValue) + + response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", simpleGroup) + require.Equal(t, http.StatusAccepted, response.Status()) + }) + } + }) + + t.Run("with invalid pause header values should return 400", func(t *testing.T) { + testCases := []struct { + name string + headerName string + headerValue string + expectedError string + }{ + { + name: "invalid recording rules pause value", + headerName: recordingRulesPausedHeader, + headerValue: "invalid", + expectedError: "Invalid value for header X-Grafana-Alerting-Recording-Rules-Paused: must be 'true' or 'false'", + }, + { + name: "invalid alert rules pause value", + headerName: alertRulesPausedHeader, + headerValue: "invalid", + expectedError: "Invalid value for header X-Grafana-Alerting-Alert-Rules-Paused: must be 'true' or 'false'", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + srv, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + rc.Req.Header.Set(tc.headerName, tc.headerValue) + + response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", simpleGroup) + require.Equal(t, http.StatusBadRequest, response.Status()) + require.Contains(t, string(response.Body()), tc.expectedError) + }) + } + }) + + t.Run("with valid request should return 202", func(t *testing.T) { + srv, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", simpleGroup) + require.Equal(t, http.StatusAccepted, response.Status()) + }) +} + +func createConvertPrometheusSrv(t *testing.T) (*ConvertPrometheusSrv, datasources.CacheService) { + t.Helper() + + ruleStore := fakes.NewRuleStore(t) + folder := randFolder() + ruleStore.Folders[1] = append(ruleStore.Folders[1], folder) + + dsCache := &dsfakes.FakeCacheService{} + ds := &datasources.DataSource{ + UID: existingDSUID, + Type: datasources.DS_PROMETHEUS, + } + dsCache.DataSources = append(dsCache.DataSources, ds) + + quotas := &provisioning.MockQuotaChecker{} + quotas.EXPECT().LimitOK() + + folderService := foldertest.NewFakeService() + + alertRuleService := provisioning.NewAlertRuleService( + ruleStore, + fakes.NewFakeProvisioningStore(), + folderService, + quotas, + &provisioning.NopTransactionManager{}, + 60, + 10, + 100, + log.New("test"), + &provisioning.NotificationSettingsValidatorProviderFake{}, + &acfakes.FakeRuleService{}, + ) + + cfg := &setting.UnifiedAlertingSettings{ + DefaultRuleEvaluationInterval: 1 * time.Minute, + } + + srv := NewConvertPrometheusSrv(cfg, log.NewNopLogger(), ruleStore, dsCache, alertRuleService) + + return srv, dsCache +} + +func createRequestCtx() *contextmodel.ReqContext { + req := httptest.NewRequest("GET", "http://localhost", nil) + req.Header.Set(datasourceUIDHeader, existingDSUID) + + return &contextmodel.ReqContext{ + Context: &web.Context{ + Req: req, + Resp: web.NewResponseWriter("GET", httptest.NewRecorder()), + }, + SignedInUser: &user.SignedInUser{OrgID: 1}, + } +} diff --git a/pkg/services/ngalert/api/authorization.go b/pkg/services/ngalert/api/authorization.go index 155f3e0131e1a..a1cedae4b735b 100644 --- a/pkg/services/ngalert/api/authorization.go +++ b/pkg/services/ngalert/api/authorization.go @@ -132,23 +132,26 @@ func 
(api *API) authorize(method, path string) web.Handler { ) case http.MethodGet + "/api/convert/prometheus/config/v1/rules": - eval = ac.EvalPermission(ac.ActionAlertingRuleRead) + eval = ac.EvalAll( + ac.EvalPermission(ac.ActionAlertingRuleRead), + ac.EvalPermission(dashboards.ActionFoldersRead), + ) case http.MethodPost + "/api/convert/prometheus/config/v1/rules/{NamespaceTitle}": eval = ac.EvalAll( - ac.EvalPermission(dashboards.ActionFoldersWrite), - ac.EvalPermission(ac.ActionAlertingRuleRead), - ac.EvalPermission(ac.ActionAlertingRuleUpdate), ac.EvalPermission(ac.ActionAlertingRuleCreate), - ac.EvalPermission(ac.ActionAlertingRuleDelete), + ac.EvalPermission(ac.ActionAlertingProvisioningSetStatus), ) case http.MethodDelete + "/api/convert/prometheus/config/v1/rules/{NamespaceTitle}/{Group}", http.MethodDelete + "/api/convert/prometheus/config/v1/rules/{NamespaceTitle}": - eval = ac.EvalAll( - ac.EvalPermission(ac.ActionAlertingRuleDelete), - ac.EvalPermission(ac.ActionAlertingRuleRead), - ac.EvalPermission(dashboards.ActionFoldersRead), + eval = ac.EvalAny( + ac.EvalAll( + ac.EvalPermission(ac.ActionAlertingRuleRead), + ac.EvalPermission(dashboards.ActionFoldersRead), + ac.EvalPermission(ac.ActionAlertingRuleDelete), + ac.EvalPermission(ac.ActionAlertingProvisioningSetStatus), + ), ) // Alert Instances and Silences diff --git a/pkg/services/ngalert/api/persist.go b/pkg/services/ngalert/api/persist.go index 952db94f1f56c..57eda01631d10 100644 --- a/pkg/services/ngalert/api/persist.go +++ b/pkg/services/ngalert/api/persist.go @@ -15,6 +15,8 @@ type RuleStore interface { // by returning map[string]struct{} instead of map[string]*folder.Folder GetUserVisibleNamespaces(context.Context, int64, identity.Requester) (map[string]*folder.Folder, error) GetNamespaceByUID(ctx context.Context, uid string, orgID int64, user identity.Requester) (*folder.Folder, error) + GetNamespaceInRootByTitle(ctx context.Context, fullpath string, orgID int64, user identity.Requester) (*folder.Folder, error) + GetOrCreateNamespaceInRootByTitle(ctx context.Context, title string, orgID int64, user identity.Requester) (*folder.Folder, error) GetAlertRuleByUID(ctx context.Context, query *ngmodels.GetAlertRuleByUIDQuery) (*ngmodels.AlertRule, error) GetAlertRulesGroupByRuleUID(ctx context.Context, query *ngmodels.GetAlertRulesGroupByRuleUIDQuery) ([]*ngmodels.AlertRule, error) diff --git a/pkg/services/ngalert/api/tooling/api.json b/pkg/services/ngalert/api/tooling/api.json index 7ed5ae04ba070..0a2fb0af0d4d0 100644 --- a/pkg/services/ngalert/api/tooling/api.json +++ b/pkg/services/ngalert/api/tooling/api.json @@ -4493,6 +4493,7 @@ "type": "object" }, "URL": { + "description": "The general form represented is:\n\n[scheme:][//[userinfo@]host][/]path[?query][#fragment]\n\nURLs that do not start with a slash after the scheme are interpreted as:\n\nscheme:opaque[?query][#fragment]\n\nThe Host field contains the host and port subcomponents of the URL.\nWhen the port is present, it is separated from the host with a colon.\nWhen the host is an IPv6 address, it must be enclosed in square brackets:\n\"[fe80::1]:80\". The [net.JoinHostPort] function combines a host and port\ninto a string suitable for the Host field, adding square brackets to\nthe host when necessary.\n\nNote that the Path field is stored in decoded form: /%47%6f%2f becomes /Go/.\nA consequence is that it is impossible to tell which slashes in the Path were\nslashes in the raw URL and which were %2f. 
This distinction is rarely important,\nbut when it is, the code should use the [URL.EscapedPath] method, which preserves\nthe original encoding of Path.\n\nThe RawPath field is an optional field which is only set when the default\nencoding of Path is different from the escaped path. See the EscapedPath method\nfor more details.\n\nURL's String method uses the EscapedPath method to obtain the path.", "properties": { "ForceQuery": { "type": "boolean" @@ -4528,7 +4529,7 @@ "$ref": "#/definitions/Userinfo" } }, - "title": "URL is a custom URL type that allows validation at configuration load time.", + "title": "A URL represents a parsed URL (technically, a URI reference).", "type": "object" }, "UpdateRuleGroupResponse": { @@ -4931,7 +4932,6 @@ "type": "object" }, "gettableAlerts": { - "description": "GettableAlerts gettable alerts", "items": { "$ref": "#/definitions/gettableAlert", "type": "object" @@ -5056,7 +5056,6 @@ "type": "object" }, "gettableSilences": { - "description": "GettableSilences gettable silences", "items": { "$ref": "#/definitions/gettableSilence", "type": "object" diff --git a/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go b/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go index b2bae42eacb93..5b6454d1d8477 100644 --- a/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go +++ b/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go @@ -84,11 +84,11 @@ type RouteConvertPrometheusPostRuleGroupParams struct { // in: path NamespaceTitle string // in: header - DatasourceUID string `json:"x-datasource-uid"` + DatasourceUID string `json:"x-grafana-alerting-datasource-uid"` // in: header - RecordingRulesPaused bool `json:"x-recording-rules-paused"` + RecordingRulesPaused bool `json:"x-grafana-alerting-recording-rules-paused"` // in: header - AlertRulesPaused bool `json:"x-alert-rules-paused"` + AlertRulesPaused bool `json:"x-grafana-alerting-alert-rules-paused"` // in:body Body PrometheusRuleGroup } diff --git a/pkg/services/ngalert/api/tooling/post.json b/pkg/services/ngalert/api/tooling/post.json index 26e062391cfed..825af6c52afae 100644 --- a/pkg/services/ngalert/api/tooling/post.json +++ b/pkg/services/ngalert/api/tooling/post.json @@ -4932,6 +4932,7 @@ "type": "object" }, "gettableAlerts": { + "description": "GettableAlerts gettable alerts", "items": { "$ref": "#/definitions/gettableAlert", "type": "object" @@ -6515,17 +6516,17 @@ }, { "in": "header", - "name": "x-datasource-uid", + "name": "x-grafana-alerting-datasource-uid", "type": "string" }, { "in": "header", - "name": "x-recording-rules-paused", + "name": "x-grafana-alerting-recording-rules-paused", "type": "boolean" }, { "in": "header", - "name": "x-alert-rules-paused", + "name": "x-grafana-alerting-alert-rules-paused", "type": "boolean" }, { diff --git a/pkg/services/ngalert/api/tooling/spec.json b/pkg/services/ngalert/api/tooling/spec.json index f42d1f7c408d5..e8f3f798fdae2 100644 --- a/pkg/services/ngalert/api/tooling/spec.json +++ b/pkg/services/ngalert/api/tooling/spec.json @@ -1194,17 +1194,17 @@ }, { "type": "string", - "name": "x-datasource-uid", + "name": "x-grafana-alerting-datasource-uid", "in": "header" }, { "type": "boolean", - "name": "x-recording-rules-paused", + "name": "x-grafana-alerting-recording-rules-paused", "in": "header" }, { "type": "boolean", - "name": "x-alert-rules-paused", + "name": "x-grafana-alerting-alert-rules-paused", "in": "header" }, { @@ -8872,6 +8872,7 @@ } }, "gettableAlerts": { + "description": 
"GettableAlerts gettable alerts", "type": "array", "items": { "type": "object", diff --git a/pkg/services/ngalert/models/provisioning.go b/pkg/services/ngalert/models/provisioning.go index 7933830588ba5..d792e821939ff 100644 --- a/pkg/services/ngalert/models/provisioning.go +++ b/pkg/services/ngalert/models/provisioning.go @@ -8,10 +8,12 @@ const ( ProvenanceNone Provenance = "" ProvenanceAPI Provenance = "api" ProvenanceFile Provenance = "file" + // ProvenanceConvertedPrometheus is used for objects converted from Prometheus definitions. + ProvenanceConvertedPrometheus Provenance = "converted_prometheus" ) var ( - KnownProvenances = []Provenance{ProvenanceNone, ProvenanceAPI, ProvenanceFile} + KnownProvenances = []Provenance{ProvenanceNone, ProvenanceAPI, ProvenanceFile, ProvenanceConvertedPrometheus} ) // Provisionable represents a resource that can be created through a provisioning mechanism, such as Terraform or config file. diff --git a/pkg/services/ngalert/prom/convert.go b/pkg/services/ngalert/prom/convert.go index bb9d4c3d61664..e2f46b7434163 100644 --- a/pkg/services/ngalert/prom/convert.go +++ b/pkg/services/ngalert/prom/convert.go @@ -27,8 +27,11 @@ const ( // Config defines the configuration options for the Prometheus to Grafana rules converter. type Config struct { - DatasourceUID string - DatasourceType string + DatasourceUID string + DatasourceType string + // DefaultInterval is the default interval for rules in the groups that + // don't have Interval set. + DefaultInterval time.Duration FromTimeRange *time.Duration EvaluationOffset *time.Duration ExecErrState models.ExecutionErrorState @@ -68,6 +71,9 @@ func NewConverter(cfg Config) (*Converter, error) { if cfg.DatasourceType == "" { return nil, fmt.Errorf("datasource type is required") } + if cfg.DefaultInterval == 0 { + return nil, fmt.Errorf("default evaluation interval is required") + } if cfg.FromTimeRange == nil { cfg.FromTimeRange = defaultConfig.FromTimeRange } @@ -93,9 +99,8 @@ func NewConverter(cfg Config) (*Converter, error) { // PrometheusRulesToGrafana converts a Prometheus rule group into Grafana Alerting rule group. 
func (p *Converter) PrometheusRulesToGrafana(orgID int64, namespaceUID string, group PrometheusRuleGroup) (*models.AlertRuleGroup, error) { for _, rule := range group.Rules { - err := validatePrometheusRule(rule) - if err != nil { - return nil, fmt.Errorf("invalid Prometheus rule '%s': %w", rule.Alert, err) + if err := rule.Validate(); err != nil { + return nil, err } } @@ -107,18 +112,15 @@ func (p *Converter) PrometheusRulesToGrafana(orgID int64, namespaceUID string, g return grafanaGroup, nil } -func validatePrometheusRule(rule PrometheusRule) error { - if rule.KeepFiringFor != nil { - return fmt.Errorf("keep_firing_for is not supported") - } - - return nil -} - func (p *Converter) convertRuleGroup(orgID int64, namespaceUID string, promGroup PrometheusRuleGroup) (*models.AlertRuleGroup, error) { uniqueNames := map[string]int{} rules := make([]models.AlertRule, 0, len(promGroup.Rules)) + interval := time.Duration(promGroup.Interval) + if interval == 0 { + interval = p.cfg.DefaultInterval + } + for i, rule := range promGroup.Rules { gr, err := p.convertRule(orgID, namespaceUID, promGroup.Name, rule) if err != nil { diff --git a/pkg/services/ngalert/prom/convert_test.go b/pkg/services/ngalert/prom/convert_test.go index 3bad4795fcbd5..332cb356804f1 100644 --- a/pkg/services/ngalert/prom/convert_test.go +++ b/pkg/services/ngalert/prom/convert_test.go @@ -19,7 +19,7 @@ import ( ) func TestPrometheusRulesToGrafana(t *testing.T) { - fiveMin := prommodel.Duration(5 * time.Minute) + defaultInterval := 2 * time.Minute testCases := []struct { name string @@ -40,7 +40,7 @@ func TestPrometheusRulesToGrafana(t *testing.T) { { Alert: "alert-1", Expr: "cpu_usage > 80", - For: &fiveMin, + For: util.Pointer(prommodel.Duration(5 * time.Minute)), Labels: map[string]string{ "severity": "critical", }, @@ -63,14 +63,14 @@ func TestPrometheusRulesToGrafana(t *testing.T) { { Alert: "alert-1", Expr: "up == 0", - KeepFiringFor: &fiveMin, + KeepFiringFor: util.Pointer(prommodel.Duration(5 * time.Minute)), }, }, }, expectError: true, }, { - name: "rule with empty interval", + name: "rule group with empty interval", orgID: 1, namespace: "namespaceUID", promGroup: PrometheusRuleGroup{ @@ -89,7 +89,8 @@ func TestPrometheusRulesToGrafana(t *testing.T) { orgID: 1, namespace: "namespaceUID", promGroup: PrometheusRuleGroup{ - Name: "test-group-1", + Name: "test-group-1", + Interval: prommodel.Duration(10 * time.Second), Rules: []PrometheusRule{ { Record: "some_metric", @@ -105,6 +106,7 @@ func TestPrometheusRulesToGrafana(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tc.config.DatasourceUID = "datasource-uid" tc.config.DatasourceType = datasources.DS_PROMETHEUS + tc.config.DefaultInterval = defaultInterval converter, err := NewConverter(tc.config) require.NoError(t, err) @@ -117,7 +119,11 @@ func TestPrometheusRulesToGrafana(t *testing.T) { require.NoError(t, err, tc.name) require.Equal(t, tc.promGroup.Name, grafanaGroup.Title, tc.name) + expectedInterval := int64(time.Duration(tc.promGroup.Interval).Seconds()) + if expectedInterval == 0 { + expectedInterval = int64(defaultInterval.Seconds()) + } require.Equal(t, expectedInterval, grafanaGroup.Interval, tc.name) require.Equal(t, len(tc.promGroup.Rules), len(grafanaGroup.Rules), tc.name) @@ -164,8 +170,9 @@ func TestPrometheusRulesToGrafana(t *testing.T) { func TestPrometheusRulesToGrafanaWithDuplicateRuleNames(t *testing.T) { cfg := Config{ - DatasourceUID: "datasource-uid", - DatasourceType: datasources.DS_PROMETHEUS, + DatasourceUID: "datasource-uid", + 
DatasourceType: datasources.DS_PROMETHEUS, + DefaultInterval: 2 * time.Minute, } converter, err := NewConverter(cfg) require.NoError(t, err) @@ -257,8 +264,9 @@ func TestCreateThresholdNode(t *testing.T) { func TestPrometheusRulesToGrafana_NodesInRules(t *testing.T) { cfg := Config{ - DatasourceUID: "datasource-uid", - DatasourceType: datasources.DS_PROMETHEUS, + DatasourceUID: "datasource-uid", + DatasourceType: datasources.DS_PROMETHEUS, + DefaultInterval: 2 * time.Minute, } converter, err := NewConverter(cfg) require.NoError(t, err) @@ -344,8 +352,9 @@ func TestPrometheusRulesToGrafana_UID(t *testing.T) { } converter, err := NewConverter(Config{ - DatasourceUID: "datasource-uid", - DatasourceType: datasources.DS_PROMETHEUS, + DatasourceUID: "datasource-uid", + DatasourceType: datasources.DS_PROMETHEUS, + DefaultInterval: 2 * time.Minute, }) require.NoError(t, err) @@ -372,8 +381,9 @@ func TestPrometheusRulesToGrafana_UID(t *testing.T) { namespace := "some-namespace" converter, err := NewConverter(Config{ - DatasourceUID: "datasource-uid", - DatasourceType: datasources.DS_PROMETHEUS, + DatasourceUID: "datasource-uid", + DatasourceType: datasources.DS_PROMETHEUS, + DefaultInterval: 2 * time.Minute, }) require.NoError(t, err) @@ -390,8 +400,9 @@ func TestPrometheusRulesToGrafana_UID(t *testing.T) { namespace := "some-namespace" converter, err := NewConverter(Config{ - DatasourceUID: "datasource-uid", - DatasourceType: datasources.DS_PROMETHEUS, + DatasourceUID: "datasource-uid", + DatasourceType: datasources.DS_PROMETHEUS, + DefaultInterval: 2 * time.Minute, }) require.NoError(t, err) @@ -408,8 +419,9 @@ func TestPrometheusRulesToGrafana_UID(t *testing.T) { namespace := "some-namespace" converter, err := NewConverter(Config{ - DatasourceUID: "datasource-uid", - DatasourceType: datasources.DS_PROMETHEUS, + DatasourceUID: "datasource-uid", + DatasourceType: datasources.DS_PROMETHEUS, + DefaultInterval: 2 * time.Minute, }) require.NoError(t, err) diff --git a/pkg/services/ngalert/prom/models.go b/pkg/services/ngalert/prom/models.go index f7e8bbfc95b1d..cbf57b2015c64 100644 --- a/pkg/services/ngalert/prom/models.go +++ b/pkg/services/ngalert/prom/models.go @@ -2,6 +2,12 @@ package prom import ( prommodel "github.com/prometheus/common/model" + + "github.com/grafana/grafana/pkg/apimachinery/errutil" +) + +var ( + ErrPrometheusRuleValidationFailed = errutil.ValidationFailed("alerting.prometheusRuleInvalid") ) type PrometheusRulesFile struct { @@ -23,3 +29,11 @@ type PrometheusRule struct { Annotations map[string]string `yaml:"annotations,omitempty"` Record string `yaml:"record,omitempty"` } + +func (r *PrometheusRule) Validate() error { + if r.KeepFiringFor != nil { + return ErrPrometheusRuleValidationFailed.Errorf("keep_firing_for is not supported") + } + + return nil +} diff --git a/pkg/services/ngalert/store/alert_rule.go b/pkg/services/ngalert/store/alert_rule.go index d688d6bb53d1c..0d11d9eaaaa8c 100644 --- a/pkg/services/ngalert/store/alert_rule.go +++ b/pkg/services/ngalert/store/alert_rule.go @@ -648,36 +648,6 @@ func (st DBstore) GetRuleGroupInterval(ctx context.Context, orgID int64, namespa }) } -// GetUserVisibleNamespaces returns the folders that are visible to the user -func (st DBstore) GetUserVisibleNamespaces(ctx context.Context, orgID int64, user identity.Requester) (map[string]*folder.Folder, error) { - folders, err := st.FolderService.GetFolders(ctx, folder.GetFoldersQuery{ - OrgID: orgID, - WithFullpath: true, - SignedInUser: user, - }) - if err != nil { - return nil, err - } 
- - namespaceMap := make(map[string]*folder.Folder) - for _, f := range folders { - namespaceMap[f.UID] = f - } - return namespaceMap, nil -} - -// GetNamespaceByUID is a handler for retrieving a namespace by its UID. Alerting rules follow a Grafana folder-like structure which we call namespaces. -func (st DBstore) GetNamespaceByUID(ctx context.Context, uid string, orgID int64, user identity.Requester) (*folder.Folder, error) { - f, err := st.FolderService.GetFolders(ctx, folder.GetFoldersQuery{OrgID: orgID, UIDs: []string{uid}, WithFullpath: true, SignedInUser: user}) - if err != nil { - return nil, err - } - if len(f) == 0 { - return nil, dashboards.ErrFolderAccessDenied - } - return f[0], nil -} - func (st DBstore) GetAlertRulesKeysForScheduling(ctx context.Context) ([]ngmodels.AlertRuleKeyWithVersion, error) { var result []ngmodels.AlertRuleKeyWithVersion err := st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error { diff --git a/pkg/services/ngalert/store/alert_rule_test.go b/pkg/services/ngalert/store/alert_rule_test.go index 79b8f70468190..6f6f9453e1848 100644 --- a/pkg/services/ngalert/store/alert_rule_test.go +++ b/pkg/services/ngalert/store/alert_rule_test.go @@ -782,58 +782,6 @@ func TestIntegration_DeleteAlertRulesByUID(t *testing.T) { }) } -func TestIntegration_GetNamespaceByUID(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test") - } - - sqlStore := db.InitTestDB(t) - cfg := setting.NewCfg() - folderService := setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()) - b := &fakeBus{} - logger := log.New("test-dbstore") - store := createTestStore(sqlStore, folderService, logger, cfg.UnifiedAlerting, b) - - u := &user.SignedInUser{ - UserID: 1, - OrgID: 1, - OrgRole: org.RoleAdmin, - IsGrafanaAdmin: true, - } - - uid := uuid.NewString() - parentUid := uuid.NewString() - title := "folder/title" - parentTitle := "parent-title" - createFolder(t, store, parentUid, parentTitle, 1, "") - createFolder(t, store, uid, title, 1, parentUid) - - actual, err := store.GetNamespaceByUID(context.Background(), uid, 1, u) - require.NoError(t, err) - require.Equal(t, title, actual.Title) - require.Equal(t, uid, actual.UID) - require.Equal(t, title, actual.Fullpath) - - t.Run("error when user does not have permissions", func(t *testing.T) { - someUser := &user.SignedInUser{ - UserID: 2, - OrgID: 1, - OrgRole: org.RoleViewer, - } - _, err = store.GetNamespaceByUID(context.Background(), uid, 1, someUser) - require.ErrorIs(t, err, dashboards.ErrFolderAccessDenied) - }) - - t.Run("when nested folders are enabled full path should be populated with correct value", func(t *testing.T) { - store.FolderService = setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures(featuremgmt.FlagNestedFolders)) - actual, err := store.GetNamespaceByUID(context.Background(), uid, 1, u) - require.NoError(t, err) - require.Equal(t, title, actual.Title) - require.Equal(t, uid, actual.UID) - require.Equal(t, "parent-title/folder\\/title", actual.Fullpath) - }) -} - func TestIntegrationInsertAlertRules(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") diff --git a/pkg/services/ngalert/store/namespace.go b/pkg/services/ngalert/store/namespace.go new file mode 100644 index 0000000000000..9279ff840f929 --- /dev/null +++ b/pkg/services/ngalert/store/namespace.go @@ -0,0 +1,97 @@ +package store + +import ( + "context" + "errors" + "sort" + + "github.com/grafana/grafana/pkg/apimachinery/identity" + "github.com/grafana/grafana/pkg/services/dashboards" + 
"github.com/grafana/grafana/pkg/services/folder" +) + +// GetUserVisibleNamespaces returns the folders that are visible to the user +func (st DBstore) GetUserVisibleNamespaces(ctx context.Context, orgID int64, user identity.Requester) (map[string]*folder.Folder, error) { + folders, err := st.FolderService.GetFolders(ctx, folder.GetFoldersQuery{ + OrgID: orgID, + WithFullpath: true, + SignedInUser: user, + }) + if err != nil { + return nil, err + } + + namespaceMap := make(map[string]*folder.Folder) + for _, f := range folders { + namespaceMap[f.UID] = f + } + return namespaceMap, nil +} + +// GetNamespaceByUID is a handler for retrieving a namespace by its UID. Alerting rules follow a Grafana folder-like structure which we call namespaces. +func (st DBstore) GetNamespaceByUID(ctx context.Context, uid string, orgID int64, user identity.Requester) (*folder.Folder, error) { + f, err := st.FolderService.GetFolders(ctx, folder.GetFoldersQuery{OrgID: orgID, UIDs: []string{uid}, WithFullpath: true, SignedInUser: user}) + if err != nil { + return nil, err + } + if len(f) == 0 { + return nil, dashboards.ErrFolderAccessDenied + } + return f[0], nil +} + +// GetNamespaceInRootByTitle gets namespace by its title in the root folder. +func (st DBstore) GetNamespaceInRootByTitle(ctx context.Context, title string, orgID int64, user identity.Requester) (*folder.Folder, error) { + q := &folder.GetChildrenQuery{ + UID: folder.RootFolderUID, + OrgID: orgID, + SignedInUser: user, + } + folders, err := st.FolderService.GetChildren(ctx, q) + if err != nil { + return nil, err + } + + foundByTitle := []*folder.Folder{} + for _, f := range folders { + if f.Title == title && f.ParentUID == folder.RootFolderUID { + foundByTitle = append(foundByTitle, f) + } + } + + if len(foundByTitle) == 0 { + return nil, dashboards.ErrFolderAccessDenied + } + + // Sort by UID to return the first folder in case of multiple folders with the same title + sort.Slice(foundByTitle, func(i, j int) bool { + return foundByTitle[i].UID < foundByTitle[j].UID + }) + + return foundByTitle[0], nil +} + +// GetOrCreateNamespaceInRootByTitle gets or creates a namespace by title in the _root_ folder. 
+func (st DBstore) GetOrCreateNamespaceInRootByTitle(ctx context.Context, title string, orgID int64, user identity.Requester) (*folder.Folder, error) { + var f *folder.Folder + var err error + + f, err = st.GetNamespaceInRootByTitle(ctx, title, orgID, user) + if err != nil && !errors.Is(err, dashboards.ErrFolderAccessDenied) { + return nil, err + } + + if f == nil { + cmd := &folder.CreateFolderCommand{ + OrgID: orgID, + Title: title, + SignedInUser: user, + } + f, err = st.FolderService.Create(ctx, cmd) + if err != nil { + return nil, err + } + } + + return f, nil +} diff --git a/pkg/services/ngalert/store/namespace_test.go b/pkg/services/ngalert/store/namespace_test.go new file mode 100644 index 0000000000000..8ba163a1080a6 --- /dev/null +++ b/pkg/services/ngalert/store/namespace_test.go @@ -0,0 +1,220 @@ +package store + +import ( + "context" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/services/featuremgmt" + "github.com/grafana/grafana/pkg/services/folder" + "github.com/grafana/grafana/pkg/services/org" + "github.com/grafana/grafana/pkg/services/user" + + "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/setting" +) + +func TestIntegration_GetUserVisibleNamespaces(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + sqlStore := db.InitTestDB(t) + cfg := setting.NewCfg() + folderService := setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()) + b := &fakeBus{} + logger := log.New("test-dbstore") + store := createTestStore(sqlStore, folderService, logger, cfg.UnifiedAlerting, b) + + admin := &user.SignedInUser{ + UserID: 1, + OrgID: 1, + OrgRole: org.RoleAdmin, + IsGrafanaAdmin: true, + } + + folders := []struct { + uid string + title string + parentUid string + }{ + {uid: uuid.NewString(), title: "folder1", parentUid: ""}, + {uid: uuid.NewString(), title: "folder2", parentUid: ""}, + {uid: uuid.NewString(), title: "nested/folder", parentUid: ""}, + } + + for _, f := range folders { + createFolder(t, store, f.uid, f.title, 1, f.parentUid) + } + + t.Run("returns all folders", func(t *testing.T) { + namespaces, err := store.GetUserVisibleNamespaces(context.Background(), 1, admin) + require.NoError(t, err) + require.Len(t, namespaces, len(folders)) + }) + + t.Run("returns empty list for a non existing org", func(t *testing.T) { + emptyOrgID := int64(999) + namespaces, err := store.GetUserVisibleNamespaces(context.Background(), emptyOrgID, admin) + require.NoError(t, err) + require.Empty(t, namespaces) + }) +} + +func TestIntegration_GetNamespaceByUID(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + sqlStore := db.InitTestDB(t) + cfg := setting.NewCfg() + folderService := setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()) + b := &fakeBus{} + logger := log.New("test-dbstore") + store := createTestStore(sqlStore, folderService, logger, cfg.UnifiedAlerting, b) + + u := &user.SignedInUser{ + UserID: 1, + OrgID: 1, + OrgRole: org.RoleAdmin, + IsGrafanaAdmin: true, + } + + uid := uuid.NewString() + parentUid := uuid.NewString() + title := "folder/title" + parentTitle := "parent-title" + createFolder(t, store, parentUid, parentTitle, 1, "") + createFolder(t, store, uid, title, 1, parentUid) + + actual, err := store.GetNamespaceByUID(context.Background(), uid, 1, u) + require.NoError(t, err) + require.Equal(t, 
title, actual.Title) + require.Equal(t, uid, actual.UID) + require.Equal(t, title, actual.Fullpath) + + t.Run("error when user does not have permissions", func(t *testing.T) { + someUser := &user.SignedInUser{ + UserID: 2, + OrgID: 1, + OrgRole: org.RoleViewer, + } + _, err = store.GetNamespaceByUID(context.Background(), uid, 1, someUser) + require.ErrorIs(t, err, dashboards.ErrFolderAccessDenied) + }) + + t.Run("error when folder does not exist", func(t *testing.T) { + nonExistentUID := uuid.NewString() + _, err := store.GetNamespaceByUID(context.Background(), nonExistentUID, 1, u) + require.ErrorIs(t, err, dashboards.ErrFolderAccessDenied) + }) + + t.Run("when nested folders are enabled full path should be populated with correct value", func(t *testing.T) { + store.FolderService = setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures(featuremgmt.FlagNestedFolders)) + actual, err := store.GetNamespaceByUID(context.Background(), uid, 1, u) + require.NoError(t, err) + require.Equal(t, title, actual.Title) + require.Equal(t, uid, actual.UID) + require.Equal(t, "parent-title/folder\\/title", actual.Fullpath) + }) +} + +func TestIntegration_GetNamespaceInRootByTitle(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + sqlStore := db.InitTestDB(t) + cfg := setting.NewCfg() + folderService := setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()) + b := &fakeBus{} + logger := log.New("test-dbstore") + store := createTestStore(sqlStore, folderService, logger, cfg.UnifiedAlerting, b) + store.FolderService = setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures(featuremgmt.FlagNestedFolders)) + + u := &user.SignedInUser{ + UserID: 1, + OrgID: 1, + OrgRole: org.RoleAdmin, + IsGrafanaAdmin: true, + } + + uid := uuid.NewString() + title := "folder-title" + createFolder(t, store, uid, title, 1, "") + + actual, err := store.GetNamespaceInRootByTitle(context.Background(), title, 1, u) + require.NoError(t, err) + require.Equal(t, title, actual.Title) + require.Equal(t, uid, actual.UID) +} + +func TestIntegration_GetOrCreateNamespaceInRootByTitle(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + u := &user.SignedInUser{ + UserID: 1, + OrgID: 1, + OrgRole: org.RoleAdmin, + IsGrafanaAdmin: true, + } + + setupStore := func(t *testing.T) *DBstore { + sqlStore := db.InitTestDB(t) + cfg := setting.NewCfg() + folderService := setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()) + b := &fakeBus{} + logger := log.New("test-dbstore") + store := createTestStore(sqlStore, folderService, logger, cfg.UnifiedAlerting, b) + store.FolderService = setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures(featuremgmt.FlagNestedFolders)) + + return store + } + + t.Run("should create folder when it does not exist", func(t *testing.T) { + store := setupStore(t) + + f, err := store.GetOrCreateNamespaceInRootByTitle(context.Background(), "new folder", 1, u) + require.NoError(t, err) + require.Equal(t, "new folder", f.Title) + require.NotEmpty(t, f.UID) + + folders, err := store.FolderService.GetFolders( + context.Background(), + folder.GetFoldersQuery{ + OrgID: 1, + WithFullpath: true, + SignedInUser: u, + }, + ) + require.NoError(t, err) + require.Len(t, folders, 1) + }) + + t.Run("should return existing folder when it exists", func(t *testing.T) { + store := setupStore(t) + + title := "existing folder" + createFolder(t, store, "", title, 1, "") + f, err := store.GetOrCreateNamespaceInRootByTitle(context.Background(), 
title, 1, u) + require.NoError(t, err) + require.Equal(t, title, f.Title) + + folders, err := store.FolderService.GetFolders( + context.Background(), + folder.GetFoldersQuery{ + OrgID: 1, + WithFullpath: true, + SignedInUser: u, + }, + ) + require.NoError(t, err) + require.Len(t, folders, 1) + }) +} diff --git a/pkg/services/ngalert/tests/fakes/rules.go b/pkg/services/ngalert/tests/fakes/rules.go index 5cdabe62857f9..15ebf90f40cf3 100644 --- a/pkg/services/ngalert/tests/fakes/rules.go +++ b/pkg/services/ngalert/tests/fakes/rules.go @@ -258,6 +258,40 @@ func (f *RuleStore) GetNamespaceByUID(_ context.Context, uid string, orgID int64 return nil, fmt.Errorf("not found") } +func (f *RuleStore) GetOrCreateNamespaceInRootByTitle(ctx context.Context, title string, orgID int64, user identity.Requester) (*folder.Folder, error) { + f.mtx.Lock() + defer f.mtx.Unlock() + + for _, folder := range f.Folders[orgID] { + if folder.Title == title { + return folder, nil + } + } + + newFolder := &folder.Folder{ + ID: rand.Int63(), // nolint:staticcheck + UID: util.GenerateShortUID(), + Title: title, + Fullpath: "fullpath_" + title, + } + + f.Folders[orgID] = append(f.Folders[orgID], newFolder) + return newFolder, nil +} + +func (f *RuleStore) GetNamespaceInRootByTitle(ctx context.Context, title string, orgID int64, user identity.Requester) (*folder.Folder, error) { + f.mtx.Lock() + defer f.mtx.Unlock() + + for _, folder := range f.Folders[orgID] { + if folder.Title == title && folder.ParentUID == "" { + return folder, nil + } + } + + return nil, fmt.Errorf("namespace with title '%s' not found", title) +} + func (f *RuleStore) UpdateAlertRules(_ context.Context, _ *models.UserUID, q []models.UpdateRule) error { f.mtx.Lock() defer f.mtx.Unlock() diff --git a/pkg/tests/api/alerting/api_convert_prometheus_test.go b/pkg/tests/api/alerting/api_convert_prometheus_test.go new file mode 100644 index 0000000000000..326063a15ce86 --- /dev/null +++ b/pkg/tests/api/alerting/api_convert_prometheus_test.go @@ -0,0 +1,196 @@ +package alerting + +import ( + "testing" + "time" + + prommodel "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/services/datasources" + apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" + "github.com/grafana/grafana/pkg/services/org" + "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/tests/testinfra" + "github.com/grafana/grafana/pkg/util" +) + +func TestIntegrationConvertPrometheusEndpoints(t *testing.T) { + testinfra.SQLiteIntegrationTest(t) + + // Setup Grafana and its Database + dir, path := testinfra.CreateGrafDir(t, testinfra.GrafanaOpts{ + DisableLegacyAlerting: true, + EnableUnifiedAlerting: true, + DisableAnonymous: true, + AppModeProduction: true, + EnableFeatureToggles: []string{"alertingConversionAPI"}, + }) + + grafanaListedAddr, env := testinfra.StartGrafanaEnv(t, dir, path) + + // Create a user to make authenticated requests + createUser(t, env.SQLStore, env.Cfg, user.CreateUserCommand{ + DefaultOrgRole: string(org.RoleAdmin), + Password: "password", + Login: "admin", + }) + + apiClient := newAlertingApiClient(grafanaListedAddr, "admin", "password") + namespace := "test-namespace" + + promGroup1 := apimodels.PrometheusRuleGroup{ + Name: "test-group-1", + Interval: prommodel.Duration(60 * time.Second), + Rules: []apimodels.PrometheusRule{ + // Recording rule + { + Record: "test:requests:rate5m", + Expr: "sum(rate(test_requests_total[5m])) by (job)", + Labels: 
map[string]string{ + "env": "prod", + "team": "infra", + }, + }, + // Two alerting rules + { + Alert: "HighMemoryUsage", + Expr: "process_memory_usage > 80", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "warning", + "team": "alerting", + }, + Annotations: map[string]string{ + "annotation-1": "value-1", + "annotation-2": "value-2", + }, + }, + { + Alert: "ServiceDown", + Expr: "up == 0", + For: util.Pointer(prommodel.Duration(2 * time.Minute)), + Labels: map[string]string{ + "severity": "critical", + }, + Annotations: map[string]string{ + "annotation-1": "value-1", + }, + }, + }, + } + + promGroup2 := apimodels.PrometheusRuleGroup{ + Name: "test-group-2", + Interval: prommodel.Duration(60 * time.Second), + Rules: []apimodels.PrometheusRule{ + { + Alert: "HighDiskUsage", + Expr: "disk_usage > 80", + For: util.Pointer(prommodel.Duration(1 * time.Minute)), + Labels: map[string]string{ + "severity": "low", + "team": "alerting", + }, + Annotations: map[string]string{ + "annotation-5": "value-5", + }, + }, + }, + } + + ds := apiClient.CreateDatasource(t, datasources.DS_PROMETHEUS) + + t.Run("create two rule groups and get them back", func(t *testing.T) { + apiClient.ConvertPrometheusPostRuleGroup(t, namespace, ds.Body.Datasource.UID, promGroup1, nil) + apiClient.ConvertPrometheusPostRuleGroup(t, namespace, ds.Body.Datasource.UID, promGroup2, nil) + + ns, _, _ := apiClient.GetAllRulesWithStatus(t) + + require.Len(t, ns[namespace], 2) + + rulesByGroupName := map[string][]apimodels.GettableExtendedRuleNode{} + for _, group := range ns[namespace] { + rulesByGroupName[group.Name] = append(rulesByGroupName[group.Name], group.Rules...) + } + + require.Len(t, rulesByGroupName[promGroup1.Name], 3) + require.Len(t, rulesByGroupName[promGroup2.Name], 1) + }) + + t.Run("when pausing header is set, rules should be paused", func(t *testing.T) { + tests := []struct { + name string + recordingPaused bool + alertPaused bool + }{ + { + name: "do not pause rules", + recordingPaused: false, + alertPaused: false, + }, + { + name: "pause recording rules", + recordingPaused: true, + alertPaused: false, + }, + { + name: "pause alert rules", + recordingPaused: false, + alertPaused: true, + }, + { + name: "pause both recording and alert rules", + recordingPaused: true, + alertPaused: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + headers := map[string]string{} + if tc.recordingPaused { + headers["X-Grafana-Alerting-Recording-Rules-Paused"] = "true" + } + if tc.alertPaused { + headers["X-Grafana-Alerting-Alert-Rules-Paused"] = "true" + } + apiClient.ConvertPrometheusPostRuleGroup(t, namespace, ds.Body.Datasource.UID, promGroup1, headers) + + ns, _, _ := apiClient.GetAllRulesWithStatus(t) + + rulesByGroupName := map[string][]apimodels.GettableExtendedRuleNode{} + for _, group := range ns[namespace] { + rulesByGroupName[group.Name] = append(rulesByGroupName[group.Name], group.Rules...) 
+ } + + require.Len(t, rulesByGroupName[promGroup1.Name], 3) + + pausedRecordingRules := 0 + pausedAlertRules := 0 + + for _, rule := range rulesByGroupName[promGroup1.Name] { + if rule.GrafanaManagedAlert.IsPaused { + if rule.GrafanaManagedAlert.Record != nil { + pausedRecordingRules++ + } else { + pausedAlertRules++ + } + } + } + + if tc.recordingPaused { + require.Equal(t, 1, pausedRecordingRules) + } else { + require.Equal(t, 0, pausedRecordingRules) + } + + if tc.alertPaused { + require.Equal(t, 2, pausedAlertRules) + } else { + require.Equal(t, 0, pausedAlertRules) + } + }) + } + }) +} diff --git a/pkg/tests/api/alerting/testing.go b/pkg/tests/api/alerting/testing.go index 5272883af9e18..de927dc890ccc 100644 --- a/pkg/tests/api/alerting/testing.go +++ b/pkg/tests/api/alerting/testing.go @@ -18,6 +18,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" "github.com/grafana/grafana/pkg/api" "github.com/grafana/grafana/pkg/expr" @@ -752,7 +753,13 @@ func (a apiClient) SubmitRuleForTesting(t *testing.T, config apimodels.PostableE func (a apiClient) CreateTestDatasource(t *testing.T) (result api.CreateOrUpdateDatasourceResponse) { t.Helper() - payload := fmt.Sprintf(`{"name":"TestData-%s","type":"testdata","access":"proxy","isDefault":false}`, uuid.NewString()) + return a.CreateDatasource(t, "testdata") +} + +func (a apiClient) CreateDatasource(t *testing.T, dsType string) (result api.CreateOrUpdateDatasourceResponse) { + t.Helper() + + payload := fmt.Sprintf(`{"name":"TestDatasource-%s","type":"%s","access":"proxy","isDefault":false}`, uuid.NewString(), dsType) buf := bytes.Buffer{} buf.Write([]byte(payload)) @@ -1094,6 +1101,25 @@ func (a apiClient) GetRuleByUID(t *testing.T, ruleUID string) apimodels.Gettable return rule } +func (a apiClient) ConvertPrometheusPostRuleGroup(t *testing.T, namespaceTitle, datasourceUID string, promGroup apimodels.PrometheusRuleGroup, headers map[string]string) { + t.Helper() + + data, err := yaml.Marshal(promGroup) + require.NoError(t, err) + buf := bytes.NewReader(data) + + req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/api/convert/prometheus/config/v1/rules/%s", a.url, namespaceTitle), buf) + require.NoError(t, err) + req.Header.Add("X-Grafana-Alerting-Datasource-UID", datasourceUID) + + for key, value := range headers { + req.Header.Add(key, value) + } + + _, status, raw := sendRequest[apimodels.ConvertPrometheusResponse](t, req, http.StatusAccepted) + requireStatusCode(t, http.StatusAccepted, status, raw) +} + func sendRequest[T any](t *testing.T, req *http.Request, successStatusCode int) (T, int, string) { t.Helper() client := &http.Client{} diff --git a/public/api-merged.json b/public/api-merged.json index 09bd1962ae9ff..dc455b6a36f8b 100644 --- a/public/api-merged.json +++ b/public/api-merged.json @@ -22771,7 +22771,6 @@ } }, "gettableAlerts": { - "description": "GettableAlerts gettable alerts", "type": "array", "items": { "type": "object", @@ -22896,7 +22895,6 @@ } }, "gettableSilences": { - "description": "GettableSilences gettable silences", "type": "array", "items": { "type": "object", diff --git a/public/openapi3.json b/public/openapi3.json index e5be6cd60ce83..fabc90f92cf41 100644 --- a/public/openapi3.json +++ b/public/openapi3.json @@ -12838,7 +12838,6 @@ "type": "object" }, "gettableAlerts": { - "description": "GettableAlerts gettable alerts", "items": { "$ref": "#/components/schemas/gettableAlert" }, @@ -12962,7 +12961,6 @@ "type": 
"object" }, "gettableSilences": { - "description": "GettableSilences gettable silences", "items": { "$ref": "#/components/schemas/gettableSilence" }, From 03e94e7a3ea2a2e5cc5078b26b751781bf601f48 Mon Sep 17 00:00:00 2001 From: Alexander Akhmetov Date: Tue, 25 Feb 2025 11:32:28 +0100 Subject: [PATCH 10/33] Alerting: Update grafana/alerting (#101215) * Update grafana/alerting from 9d7e00921e44 to 2acbeef29642 * Change the package for the TLSClient * Fix TestContactPointFromContactPointExports test --- go.mod | 2 +- go.sum | 4 +-- .../api/tooling/definitions/contact_points.go | 29 +++++++++++++------ pkg/services/notifications/webhook.go | 4 +-- pkg/storage/unified/apistore/go.mod | 2 +- pkg/storage/unified/apistore/go.sum | 4 +-- pkg/storage/unified/resource/go.mod | 2 +- pkg/storage/unified/resource/go.sum | 4 +-- 8 files changed, 31 insertions(+), 20 deletions(-) diff --git a/go.mod b/go.mod index 7eef812cbf673..ae2caa2aaef15 100644 --- a/go.mod +++ b/go.mod @@ -71,7 +71,7 @@ require ( github.com/googleapis/gax-go/v2 v2.14.1 // @grafana/grafana-backend-group github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group github.com/gorilla/websocket v1.5.3 // @grafana/grafana-app-platform-squad - github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44 // @grafana/alerting-backend + github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642 // @grafana/alerting-backend github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7 // @grafana/identity-access-team github.com/grafana/authlib/types v0.0.0-20250219092154-21ce22b49f31 // @grafana/identity-access-team github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics diff --git a/go.sum b/go.sum index ce99251124110..edbbd7eb15907 100644 --- a/go.sum +++ b/go.sum @@ -1511,8 +1511,8 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44 h1:vboqvbAO0s0CTALHnqfmNvhCP1ziBcZNpYDbORqvOgg= -github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44/go.mod h1:hdGB3dSl8Ma9Rjo2YiAEAjMkZ5HiNJbNDqRKDefRZrM= +github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642 h1:iQ0h/h+QoguSZDF+ZpPxcM/C+m1kjh+aXjMpxywowPA= +github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642/go.mod h1:hdGB3dSl8Ma9Rjo2YiAEAjMkZ5HiNJbNDqRKDefRZrM= github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7 h1:NTMmow+74I3Jb033xhbRgWQS7A//5TDhiM4tl7bsVP4= github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7/go.mod h1:T3X4z0ejGfJOiOmZLFeKCRT/yxWJq/RtclAc/PHj/w4= github.com/grafana/authlib/types v0.0.0-20250219092154-21ce22b49f31 h1:EokLC5grHwLPs4tXW8T6E8187H1e5G9AP0QQ5B60HbA= diff --git a/pkg/services/ngalert/api/tooling/definitions/contact_points.go b/pkg/services/ngalert/api/tooling/definitions/contact_points.go index 498ac6f0d371d..597694e6d6063 100644 --- a/pkg/services/ngalert/api/tooling/definitions/contact_points.go +++ b/pkg/services/ngalert/api/tooling/definitions/contact_points.go @@ -313,15 +313,26 @@ type WebhookIntegration struct { URL string `json:"url" yaml:"url" hcl:"url"` - HTTPMethod *string `json:"httpMethod,omitempty" yaml:"httpMethod,omitempty" hcl:"http_method"` - MaxAlerts *int64 `json:"maxAlerts,omitempty" yaml:"maxAlerts,omitempty" 
hcl:"max_alerts"` - AuthorizationScheme *string `json:"authorization_scheme,omitempty" yaml:"authorization_scheme,omitempty" hcl:"authorization_scheme"` - AuthorizationCredentials *Secret `json:"authorization_credentials,omitempty" yaml:"authorization_credentials,omitempty" hcl:"authorization_credentials"` - User *string `json:"username,omitempty" yaml:"username,omitempty" hcl:"basic_auth_user"` - Password *Secret `json:"password,omitempty" yaml:"password,omitempty" hcl:"basic_auth_password"` - Title *string `json:"title,omitempty" yaml:"title,omitempty" hcl:"title"` - Message *string `json:"message,omitempty" yaml:"message,omitempty" hcl:"message"` - TLSConfig *TLSConfig `json:"tlsConfig,omitempty" yaml:"tlsConfig,omitempty" hcl:"tlsConfig,block"` + HTTPMethod *string `json:"httpMethod,omitempty" yaml:"httpMethod,omitempty" hcl:"http_method"` + MaxAlerts *int64 `json:"maxAlerts,omitempty" yaml:"maxAlerts,omitempty" hcl:"max_alerts"` + AuthorizationScheme *string `json:"authorization_scheme,omitempty" yaml:"authorization_scheme,omitempty" hcl:"authorization_scheme"` + AuthorizationCredentials *Secret `json:"authorization_credentials,omitempty" yaml:"authorization_credentials,omitempty" hcl:"authorization_credentials"` + User *string `json:"username,omitempty" yaml:"username,omitempty" hcl:"basic_auth_user"` + Password *Secret `json:"password,omitempty" yaml:"password,omitempty" hcl:"basic_auth_password"` + Title *string `json:"title,omitempty" yaml:"title,omitempty" hcl:"title"` + Message *string `json:"message,omitempty" yaml:"message,omitempty" hcl:"message"` + TLSConfig *TLSConfig `json:"tlsConfig,omitempty" yaml:"tlsConfig,omitempty" hcl:"tlsConfig,block"` + HMACConfig *HMACConfig `json:"hmacConfig,omitempty" yaml:"hmacConfig,omitempty" hcl:"hmacConfig,block"` +} + +type HMACConfig struct { + // Secret to use for HMAC signing. + Secret *Secret `json:"secret,omitempty" yaml:"secret,omitempty" hcl:"secret"` + // Header is the name of the header containing the HMAC signature. + Header string `json:"header,omitempty" yaml:"header,omitempty" hcl:"header"` + // TimestampHeader is the name of the header containing the timestamp + // used to generate the HMAC signature. If empty, timestamp is not included. 
+ TimestampHeader string `yaml:"timestampHeader,omitempty" json:"timestampHeader,omitempty" hcl:"timestamp_header"` } type WecomIntegration struct { diff --git a/pkg/services/notifications/webhook.go b/pkg/services/notifications/webhook.go index be92ff76b4065..c912645bd555c 100644 --- a/pkg/services/notifications/webhook.go +++ b/pkg/services/notifications/webhook.go @@ -10,7 +10,7 @@ import ( "net/http" "net/url" - alertingReceivers "github.com/grafana/alerting/receivers" + alertingHTTP "github.com/grafana/alerting/http" "github.com/grafana/grafana/pkg/util" ) @@ -71,7 +71,7 @@ func (ns *NotificationService) sendWebRequestSync(ctx context.Context, webhook * request.Header.Set(k, v) } - resp, err := alertingReceivers.NewTLSClient(webhook.TLSConfig).Do(request) + resp, err := alertingHTTP.NewTLSClient(webhook.TLSConfig).Do(request) if err != nil { return redactURL(err) } diff --git a/pkg/storage/unified/apistore/go.mod b/pkg/storage/unified/apistore/go.mod index 529d564795df6..16a36800ba0c2 100644 --- a/pkg/storage/unified/apistore/go.mod +++ b/pkg/storage/unified/apistore/go.mod @@ -192,7 +192,7 @@ require ( github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/gorilla/mux v1.8.1 // indirect github.com/gorilla/websocket v1.5.3 // indirect - github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44 // indirect + github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642 // indirect github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7 // indirect github.com/grafana/dataplane/sdata v0.0.9 // indirect github.com/grafana/dskit v0.0.0-20241105154643-a6b453a88040 // indirect diff --git a/pkg/storage/unified/apistore/go.sum b/pkg/storage/unified/apistore/go.sum index 1f2097a2cdf9a..27c6a2641e932 100644 --- a/pkg/storage/unified/apistore/go.sum +++ b/pkg/storage/unified/apistore/go.sum @@ -566,8 +566,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44 h1:vboqvbAO0s0CTALHnqfmNvhCP1ziBcZNpYDbORqvOgg= -github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44/go.mod h1:hdGB3dSl8Ma9Rjo2YiAEAjMkZ5HiNJbNDqRKDefRZrM= +github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642 h1:iQ0h/h+QoguSZDF+ZpPxcM/C+m1kjh+aXjMpxywowPA= +github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642/go.mod h1:hdGB3dSl8Ma9Rjo2YiAEAjMkZ5HiNJbNDqRKDefRZrM= github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7 h1:NTMmow+74I3Jb033xhbRgWQS7A//5TDhiM4tl7bsVP4= github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7/go.mod h1:T3X4z0ejGfJOiOmZLFeKCRT/yxWJq/RtclAc/PHj/w4= github.com/grafana/authlib/types v0.0.0-20250219092154-21ce22b49f31 h1:EokLC5grHwLPs4tXW8T6E8187H1e5G9AP0QQ5B60HbA= diff --git a/pkg/storage/unified/resource/go.mod b/pkg/storage/unified/resource/go.mod index ca61c948c7f84..76a89ac0d4f2c 100644 --- a/pkg/storage/unified/resource/go.mod +++ b/pkg/storage/unified/resource/go.mod @@ -117,7 +117,7 @@ require ( github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/gorilla/mux v1.8.1 // indirect - github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44 // indirect + github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642 // 
indirect github.com/grafana/dataplane/sdata v0.0.9 // indirect github.com/grafana/grafana-app-sdk/logging v0.30.0 // indirect github.com/grafana/grafana-aws-sdk v0.31.5 // indirect diff --git a/pkg/storage/unified/resource/go.sum b/pkg/storage/unified/resource/go.sum index 53cd5a812ae21..a912c869d286e 100644 --- a/pkg/storage/unified/resource/go.sum +++ b/pkg/storage/unified/resource/go.sum @@ -397,8 +397,8 @@ github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z github.com/gorilla/mux v1.7.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= -github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44 h1:vboqvbAO0s0CTALHnqfmNvhCP1ziBcZNpYDbORqvOgg= -github.com/grafana/alerting v0.0.0-20250221202230-9d7e00921e44/go.mod h1:hdGB3dSl8Ma9Rjo2YiAEAjMkZ5HiNJbNDqRKDefRZrM= +github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642 h1:iQ0h/h+QoguSZDF+ZpPxcM/C+m1kjh+aXjMpxywowPA= +github.com/grafana/alerting v0.0.0-20250224133628-2acbeef29642/go.mod h1:hdGB3dSl8Ma9Rjo2YiAEAjMkZ5HiNJbNDqRKDefRZrM= github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7 h1:NTMmow+74I3Jb033xhbRgWQS7A//5TDhiM4tl7bsVP4= github.com/grafana/authlib v0.0.0-20250219100139-6a3b1bbb50e7/go.mod h1:T3X4z0ejGfJOiOmZLFeKCRT/yxWJq/RtclAc/PHj/w4= github.com/grafana/authlib/types v0.0.0-20250219092154-21ce22b49f31 h1:EokLC5grHwLPs4tXW8T6E8187H1e5G9AP0QQ5B60HbA= From 120b2776644059cb3187280772cce22a33665f58 Mon Sep 17 00:00:00 2001 From: Andres Martinez Gotor Date: Tue, 25 Feb 2025 11:41:44 +0100 Subject: [PATCH 11/33] Advisor: Cloud fixes (#101136) --- apps/advisor/pkg/app/authorizer.go | 2 +- apps/advisor/pkg/app/authorizer_test.go | 4 ++ .../pkg/app/checkregistry/checkregistry.go | 1 + .../pkg/app/checks/datasourcecheck/check.go | 18 +++++++- .../app/checks/datasourcecheck/check_test.go | 41 ++++++++++++++++- apps/advisor/pkg/app/checks/utils.go | 16 +++++++ apps/advisor/pkg/app/checks/utils_test.go | 46 +++++++++++++++++++ .../pkg/app/checkscheduler/checkscheduler.go | 12 +++-- .../checktyperegisterer.go | 9 +++- .../checktyperegisterer_test.go | 20 ++++++++ pkg/registry/apps/advisor/register.go | 1 + 11 files changed, 162 insertions(+), 8 deletions(-) create mode 100644 apps/advisor/pkg/app/checks/utils_test.go diff --git a/apps/advisor/pkg/app/authorizer.go b/apps/advisor/pkg/app/authorizer.go index 67defbc85d931..4d216ee9edd16 100644 --- a/apps/advisor/pkg/app/authorizer.go +++ b/apps/advisor/pkg/app/authorizer.go @@ -22,7 +22,7 @@ func GetAuthorizer() authorizer.Authorizer { } // check if is admin - if u.GetIsGrafanaAdmin() { + if u.HasRole(identity.RoleAdmin) { return authorizer.DecisionAllow, "", nil } diff --git a/apps/advisor/pkg/app/authorizer_test.go b/apps/advisor/pkg/app/authorizer_test.go index 84414362fbd01..1384176969e0b 100644 --- a/apps/advisor/pkg/app/authorizer_test.go +++ b/apps/advisor/pkg/app/authorizer_test.go @@ -75,4 +75,8 @@ func (m *mockUser) GetIsGrafanaAdmin() bool { return m.isGrafanaAdmin } +func (m *mockUser) HasRole(role identity.RoleType) bool { + return role == identity.RoleAdmin && m.isGrafanaAdmin +} + // Implement other methods of identity.Requester as needed diff --git a/apps/advisor/pkg/app/checkregistry/checkregistry.go b/apps/advisor/pkg/app/checkregistry/checkregistry.go index 1b690c70892e3..335b9cc7567c0 100644 --- a/apps/advisor/pkg/app/checkregistry/checkregistry.go +++ 
b/apps/advisor/pkg/app/checkregistry/checkregistry.go @@ -62,4 +62,5 @@ func (s *Service) Checks() []checks.Check { type AdvisorAppConfig struct { CheckRegistry CheckService PluginConfig map[string]string + StackID string } diff --git a/apps/advisor/pkg/app/checks/datasourcecheck/check.go b/apps/advisor/pkg/app/checks/datasourcecheck/check.go index 2cab8e3f903a2..0cdac81e4b83f 100644 --- a/apps/advisor/pkg/app/checks/datasourcecheck/check.go +++ b/apps/advisor/pkg/app/checks/datasourcecheck/check.go @@ -2,17 +2,18 @@ package datasourcecheck import ( "context" + "errors" "fmt" "github.com/grafana/grafana-plugin-sdk-go/backend" advisor "github.com/grafana/grafana/apps/advisor/pkg/apis/advisor/v0alpha1" "github.com/grafana/grafana/apps/advisor/pkg/app/checks" "github.com/grafana/grafana/pkg/apimachinery/identity" + "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/datasources" "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore" "github.com/grafana/grafana/pkg/util" - "k8s.io/klog/v2" ) type check struct { @@ -20,6 +21,7 @@ type check struct { PluginStore pluginstore.Store PluginContextProvider pluginContextProvider PluginClient plugins.Client + log log.Logger } func New( @@ -33,6 +35,7 @@ func New( PluginStore: pluginStore, PluginContextProvider: pluginContextProvider, PluginClient: pluginClient, + log: log.New("advisor.datasourcecheck"), } } @@ -58,6 +61,7 @@ func (c *check) Steps() []checks.Step { &healthCheckStep{ PluginContextProvider: c.PluginContextProvider, PluginClient: c.PluginClient, + log: c.log, }, } } @@ -102,6 +106,7 @@ func (s *uidValidationStep) Run(ctx context.Context, obj *advisor.CheckSpec, i a type healthCheckStep struct { PluginContextProvider pluginContextProvider PluginClient plugins.Client + log log.Logger } func (s *healthCheckStep) Title() string { @@ -134,7 +139,7 @@ func (s *healthCheckStep) Run(ctx context.Context, obj *advisor.CheckSpec, i any pCtx, err := s.PluginContextProvider.GetWithDataSource(ctx, ds.Type, requester, ds) if err != nil { // Unable to check health check - klog.Error("Failed to get plugin context", "datasource_uid", ds.UID, "error", err) + s.log.Error("Failed to get plugin context", "datasource_uid", ds.UID, "error", err) return nil, nil } req := &backend.CheckHealthRequest{ @@ -143,6 +148,15 @@ func (s *healthCheckStep) Run(ctx context.Context, obj *advisor.CheckSpec, i any } resp, err := s.PluginClient.CheckHealth(ctx, req) if err != nil || resp.Status != backend.HealthStatusOk { + if err != nil { + s.log.Debug("Failed to check health", "datasource_uid", ds.UID, "error", err) + if errors.Is(err, plugins.ErrMethodNotImplemented) || errors.Is(err, plugins.ErrPluginUnavailable) { + // The plugin does not support backend health checks + return nil, nil + } + } else { + s.log.Debug("Failed to check health", "datasource_uid", ds.UID, "status", resp.Status, "message", resp.Message) + } return checks.NewCheckReportFailure( advisor.CheckReportFailureSeverityHigh, s.ID(), diff --git a/apps/advisor/pkg/app/checks/datasourcecheck/check_test.go b/apps/advisor/pkg/app/checks/datasourcecheck/check_test.go index 813a6434ad950..ee8a991555d99 100644 --- a/apps/advisor/pkg/app/checks/datasourcecheck/check_test.go +++ b/apps/advisor/pkg/app/checks/datasourcecheck/check_test.go @@ -7,6 +7,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" advisor "github.com/grafana/grafana/apps/advisor/pkg/apis/advisor/v0alpha1" 
"github.com/grafana/grafana/pkg/apimachinery/identity" + "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/datasources" "github.com/grafana/grafana/pkg/services/user" @@ -28,6 +29,7 @@ func TestCheck_Run(t *testing.T) { DatasourceSvc: mockDatasourceSvc, PluginContextProvider: mockPluginContextProvider, PluginClient: mockPluginClient, + log: log.New("advisor.datasourcecheck"), } ctx := identity.WithRequester(context.Background(), &user.SignedInUser{}) @@ -62,6 +64,7 @@ func TestCheck_Run(t *testing.T) { DatasourceSvc: mockDatasourceSvc, PluginContextProvider: mockPluginContextProvider, PluginClient: mockPluginClient, + log: log.New("advisor.datasourcecheck"), } ctx := identity.WithRequester(context.Background(), &user.SignedInUser{}) @@ -97,6 +100,7 @@ func TestCheck_Run(t *testing.T) { DatasourceSvc: mockDatasourceSvc, PluginContextProvider: mockPluginContextProvider, PluginClient: mockPluginClient, + log: log.New("advisor.datasourcecheck"), } ctx := identity.WithRequester(context.Background(), &user.SignedInUser{}) @@ -118,6 +122,40 @@ func TestCheck_Run(t *testing.T) { assert.Len(t, failures, 1) assert.Equal(t, "health-check", failures[0].StepID) }) + + t.Run("should skip health check when plugin does not support backend health checks", func(t *testing.T) { + datasources := []*datasources.DataSource{ + {UID: "valid-uid-1", Type: "prometheus", Name: "Prometheus"}, + } + mockDatasourceSvc := &MockDatasourceSvc{dss: datasources} + mockPluginContextProvider := &MockPluginContextProvider{pCtx: backend.PluginContext{}} + mockPluginClient := &MockPluginClient{err: plugins.ErrMethodNotImplemented} + + check := &check{ + DatasourceSvc: mockDatasourceSvc, + PluginContextProvider: mockPluginContextProvider, + PluginClient: mockPluginClient, + log: log.New("advisor.datasourcecheck"), + } + + ctx := identity.WithRequester(context.Background(), &user.SignedInUser{}) + items, err := check.Items(ctx) + assert.NoError(t, err) + failures := []advisor.CheckReportFailure{} + for _, step := range check.Steps() { + for _, item := range items { + stepFailures, err := step.Run(ctx, &advisor.CheckSpec{}, item) + assert.NoError(t, err) + if stepFailures != nil { + failures = append(failures, *stepFailures) + } + } + } + + assert.NoError(t, err) + assert.Equal(t, 1, len(items)) + assert.Len(t, failures, 0) + }) } type MockDatasourceSvc struct { @@ -142,8 +180,9 @@ type MockPluginClient struct { plugins.Client res *backend.CheckHealthResult + err error } func (m *MockPluginClient) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { - return m.res, nil + return m.res, m.err } diff --git a/apps/advisor/pkg/app/checks/utils.go b/apps/advisor/pkg/app/checks/utils.go index b9b609b10a48a..145c98d09612c 100644 --- a/apps/advisor/pkg/app/checks/utils.go +++ b/apps/advisor/pkg/app/checks/utils.go @@ -1,7 +1,12 @@ package checks import ( + "fmt" + "strconv" + + "github.com/grafana/authlib/types" advisor "github.com/grafana/grafana/apps/advisor/pkg/apis/advisor/v0alpha1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) const ( @@ -22,3 +27,14 @@ func NewCheckReportFailure( Links: links, } } + +func GetNamespace(stackID string) (string, error) { + if stackID == "" { + return metav1.NamespaceDefault, nil + } + stackId, err := strconv.ParseInt(stackID, 10, 64) + if err != nil { + return "", fmt.Errorf("invalid stack id: %s", stackID) + } + return types.CloudNamespaceFormatter(stackId), nil +} diff --git 
a/apps/advisor/pkg/app/checks/utils_test.go b/apps/advisor/pkg/app/checks/utils_test.go new file mode 100644 index 0000000000000..ad0fda89de877 --- /dev/null +++ b/apps/advisor/pkg/app/checks/utils_test.go @@ -0,0 +1,46 @@ +package checks + +import ( + "testing" + + "github.com/stretchr/testify/assert" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +func TestGetNamespace(t *testing.T) { + tests := []struct { + name string + input string + expected string + expectedErr string + }{ + { + name: "empty stack ID", + input: "", + expected: metav1.NamespaceDefault, + }, + { + name: "valid stack ID", + input: "1234567890", + expected: "stacks-1234567890", + }, + { + name: "invalid stack ID", + input: "invalid", + expected: "", + expectedErr: "invalid stack id: invalid", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := GetNamespace(tt.input) + if tt.expectedErr != "" { + assert.EqualError(t, err, tt.expectedErr) + } else { + assert.NoError(t, err) + } + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/apps/advisor/pkg/app/checkscheduler/checkscheduler.go b/apps/advisor/pkg/app/checkscheduler/checkscheduler.go index 0a2d2c4e0bf88..2bcb8d43f19ff 100644 --- a/apps/advisor/pkg/app/checkscheduler/checkscheduler.go +++ b/apps/advisor/pkg/app/checkscheduler/checkscheduler.go @@ -29,6 +29,7 @@ type Runner struct { client resource.Client evaluationInterval time.Duration maxHistory int + namespace string } // NewRunner creates a new Runner. @@ -47,6 +48,10 @@ func New(cfg app.Config) (app.Runnable, error) { if err != nil { return nil, err } + namespace, err := checks.GetNamespace(specificConfig.StackID) + if err != nil { + return nil, err + } // Prepare storage client clientGenerator := k8s.NewClientRegistry(cfg.KubeConfig, k8s.ClientConfig{}) @@ -60,6 +65,7 @@ func New(cfg app.Config) (app.Runnable, error) { client: client, evaluationInterval: evalInterval, maxHistory: maxHistory, + namespace: namespace, }, nil } @@ -114,7 +120,7 @@ func (r *Runner) Run(ctx context.Context) error { // regardless of its ID. This assumes that the checks are created in batches // so a batch will have a similar creation time. func (r *Runner) checkLastCreated(ctx context.Context) (time.Time, error) { - list, err := r.client.List(ctx, metav1.NamespaceDefault, resource.ListOptions{}) + list, err := r.client.List(ctx, r.namespace, resource.ListOptions{}) if err != nil { return time.Time{}, err } @@ -134,7 +140,7 @@ func (r *Runner) createChecks(ctx context.Context) error { obj := &advisorv0alpha1.Check{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "check-", - Namespace: metav1.NamespaceDefault, + Namespace: r.namespace, Labels: map[string]string{ checks.TypeLabel: check.ID(), }, @@ -152,7 +158,7 @@ func (r *Runner) createChecks(ctx context.Context) error { // cleanupChecks deletes the olders checks if the number of checks exceeds the limit. 
func (r *Runner) cleanupChecks(ctx context.Context) error { - list, err := r.client.List(ctx, metav1.NamespaceDefault, resource.ListOptions{Limit: -1}) + list, err := r.client.List(ctx, r.namespace, resource.ListOptions{Limit: -1}) if err != nil { return err } diff --git a/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer.go b/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer.go index 50f2a09b6d2e5..2ed38e51ecb9c 100644 --- a/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer.go +++ b/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer.go @@ -9,6 +9,7 @@ import ( "github.com/grafana/grafana-app-sdk/resource" advisorv0alpha1 "github.com/grafana/grafana/apps/advisor/pkg/apis/advisor/v0alpha1" "github.com/grafana/grafana/apps/advisor/pkg/app/checkregistry" + "github.com/grafana/grafana/apps/advisor/pkg/app/checks" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -19,6 +20,7 @@ import ( type Runner struct { checkRegistry checkregistry.CheckService client resource.Client + namespace string } // NewRunner creates a new Runner. @@ -29,6 +31,10 @@ func New(cfg app.Config) (app.Runnable, error) { return nil, fmt.Errorf("invalid config type") } checkRegistry := specificConfig.CheckRegistry + namespace, err := checks.GetNamespace(specificConfig.StackID) + if err != nil { + return nil, err + } // Prepare storage client clientGenerator := k8s.NewClientRegistry(cfg.KubeConfig, k8s.ClientConfig{}) @@ -40,6 +46,7 @@ func New(cfg app.Config) (app.Runnable, error) { return &Runner{ checkRegistry: checkRegistry, client: client, + namespace: namespace, }, nil } @@ -58,7 +65,7 @@ func (r *Runner) Run(ctx context.Context) error { obj := &advisorv0alpha1.CheckType{ ObjectMeta: metav1.ObjectMeta{ Name: t.ID(), - Namespace: metav1.NamespaceDefault, + Namespace: r.namespace, }, Spec: advisorv0alpha1.CheckTypeSpec{ Name: t.ID(), diff --git a/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer_test.go b/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer_test.go index f1623a6534b08..97fc77425047e 100644 --- a/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer_test.go +++ b/apps/advisor/pkg/app/checktyperegisterer/checktyperegisterer_test.go @@ -3,6 +3,7 @@ package checktyperegisterer import ( "context" "errors" + "fmt" "testing" "github.com/grafana/grafana-app-sdk/resource" @@ -88,6 +89,24 @@ func TestCheckTypesRegisterer_Run(t *testing.T) { }, expectedErr: errors.New("update error"), }, + { + name: "custom namespace", + checks: []checks.Check{ + &mockCheck{ + id: "check1", + steps: []checks.Step{ + &mockStep{id: "step1", title: "Step 1", description: "Description 1"}, + }, + }, + }, + createFunc: func(ctx context.Context, id resource.Identifier, obj resource.Object, opts resource.CreateOptions) (resource.Object, error) { + if obj.GetNamespace() != "custom-namespace" { + return nil, fmt.Errorf("expected namespace %s, got %s", "custom-namespace", obj.GetNamespace()) + } + return obj, nil + }, + expectedErr: nil, + }, } for _, tt := range tests { @@ -98,6 +117,7 @@ func TestCheckTypesRegisterer_Run(t *testing.T) { createFunc: tt.createFunc, updateFunc: tt.updateFunc, }, + namespace: "custom-namespace", } err := r.Run(context.Background()) if err != nil { diff --git a/pkg/registry/apps/advisor/register.go b/pkg/registry/apps/advisor/register.go index 352e364eff0de..e825f392265f0 100644 --- a/pkg/registry/apps/advisor/register.go +++ b/pkg/registry/apps/advisor/register.go @@ -24,6 +24,7 @@ func RegisterApp( 
specificConfig := checkregistry.AdvisorAppConfig{ CheckRegistry: checkRegistry, PluginConfig: pluginConfig, + StackID: cfg.StackID, } appCfg := &runner.AppBuilderConfig{ OpenAPIDefGetter: advisorv0alpha1.GetOpenAPIDefinitions, From 305f05d85281fcfd91d23aa7aab30f666d5cc48f Mon Sep 17 00:00:00 2001 From: Jack Baldry Date: Tue, 25 Feb 2025 10:47:46 +0000 Subject: [PATCH 12/33] Use version 2 of the deploy preview workflow (#101266) * Use v2 deploy preview Supports building with multiple documentation sources together. Signed-off-by: Jack Baldry * Lint a file Signed-off-by: Jack Baldry --------- Signed-off-by: Jack Baldry --- .github/workflows/deploy-pr-preview.yml | 19 ++++--- docs/sources/fundamentals/exemplars/index.md | 57 ++++++++++++++------ 2 files changed, 54 insertions(+), 22 deletions(-) diff --git a/.github/workflows/deploy-pr-preview.yml b/.github/workflows/deploy-pr-preview.yml index dcb8fcd1c2aea..bfad92ebf73d7 100644 --- a/.github/workflows/deploy-pr-preview.yml +++ b/.github/workflows/deploy-pr-preview.yml @@ -11,14 +11,21 @@ on: jobs: deploy-pr-preview: - if: ${{ ! github.event.pull_request.head.repo.fork }} + if: "!github.event.pull_request.head.repo.fork" uses: grafana/writers-toolkit/.github/workflows/deploy-preview.yml@main with: - sha: ${{ github.event.pull_request.head.sha }} branch: ${{ github.head_ref }} event_number: ${{ github.event.number }} - title: ${{ github.event.pull_request.title }} repo: grafana - website_directory: content/docs/grafana/latest - relative_prefix: /docs/grafana/latest/ - index_file: true + sha: ${{ github.event.pull_request.head.sha }} + sources: | + [ + { + "index_file": "content/docs/grafana/_index.md", + "relative_prefix": "/docs/grafana/latest/", + "repo": "grafana", + "source_directory": "docs/sources", + "website_directory": "content/docs/grafana/latest" + } + ] + title: ${{ github.event.pull_request.title }} diff --git a/docs/sources/fundamentals/exemplars/index.md b/docs/sources/fundamentals/exemplars/index.md index 363bc122b475d..4181c50cac584 100644 --- a/docs/sources/fundamentals/exemplars/index.md +++ b/docs/sources/fundamentals/exemplars/index.md @@ -20,21 +20,31 @@ weight: 800 # Introduction to exemplars -An exemplar is a specific trace representative of measurement taken in a given time interval. While metrics excel at giving you an aggregated view of your system, traces give you a fine grained view of a single request; exemplars are a way to link the two. +An exemplar is a specific trace representative of measurement taken in a given time interval. +While metrics excel at giving you an aggregated view of your system, traces give you a fine grained view of a single request; exemplars are a way to link the two. -Suppose your company website is experiencing a surge in traffic volumes. While more than eighty percent of the users are able to access the website in under two seconds, some users are experiencing a higher than normal response time resulting in bad user experience. +Suppose your company website is experiencing a surge in traffic volumes. +While more than eighty percent of the users are able to access the website in under two seconds, some users are experiencing a higher than normal response time resulting in bad user experience. -To identify the factors that are contributing to the latency, you must compare a trace for a fast response against a trace for a slow response. Given the vast amount of data in a typical production environment, it will be extremely laborious and time-consuming effort. 
+To identify the factors that are contributing to the latency, you must compare a trace for a fast response against a trace for a slow response. +Given the vast amount of data in a typical production environment, it's an extremely laborious and time-consuming effort. -Use exemplars to help isolate problems within your data distribution by pinpointing query traces exhibiting high latency within a time interval. Once you localize the latency problem to a few exemplar traces, you can combine it with additional system based information or location properties to perform a root cause analysis faster, leading to quick resolutions to performance issues. +Use exemplars to help isolate problems within your data distribution by pinpointing query traces exhibiting high latency within a time interval. +After you localize the latency problem to a few exemplar traces, you can combine it with additional system based information or location properties to perform a root cause analysis faster, leading to quick resolutions to performance issues. -Support for exemplars is available for the Prometheus data source only. Once you enable the functionality, exemplar data is available by default. For more information on exemplar configuration and how to enable exemplars, refer to [configuring exemplars in the Prometheus data source]({{< relref "../../datasources/prometheus/configure-prometheus-data-source#exemplars" >}}). +Support for exemplars is available for the Prometheus data source only. +After you enable the functionality, exemplar data is available by default. +For more information on exemplar configuration and how to enable exemplars, refer to [configuring exemplars in the Prometheus data source](../../datasources/prometheus/configure-prometheus-data-source/#exemplars). -Grafana shows exemplars alongside a metric in the Explore view and in dashboards. Each exemplar displays as a highlighted star. You can hover your cursor over an exemplar to view the unique trace ID, which is a combination of a key value pair. To investigate further, click the blue button next to the `traceID` property. +Grafana shows exemplars alongside a metric in the Explore view and in dashboards. +Each exemplar displays as a highlighted star. +You can hover your cursor over an exemplar to view the unique trace ID, which is a combination of a key value pair. +To investigate further, click the blue button next to the `traceID` property. {{< figure src="/media/docs/grafana/exemplars/screenshot-exemplars.png" class="docs-image--no-shadow" max-width= "750px" caption="Screenshot showing the detail window of an exemplar" >}} -Refer to [View exemplar data]({{< relref "#view-exemplar-data" >}}) for instructions on how to drill down and view exemplar trace details from metrics and logs. To know more about exemplars, refer to the blogpost [Intro to exemplars, which enable Grafana Tempo’s distributed tracing at massive scale](/blog/2021/03/31/intro-to-exemplars-which-enable-grafana-tempos-distributed-tracing-at-massive-scale/). +Refer to [View exemplar data](#view-exemplar-data) for instructions on how to drill down and view exemplar trace details from metrics and logs. +To know more about exemplars, refer to the blog post [Intro to exemplars, which enable Grafana Tempo’s distributed tracing at massive scale](/blog/2021/03/31/intro-to-exemplars-which-enable-grafana-tempos-distributed-tracing-at-massive-scale/). 
## View exemplar data @@ -42,15 +52,19 @@ When support for exemplar support is enabled for a Prometheus data source, you c ### In Explore -Explore visualizes exemplar traces as highlighted stars alongside metrics data. For more information on how Explore visualizes trace data, refer to [Tracing in Explore]({{< relref "../../explore/trace-integration" >}}). +Explore visualizes exemplar traces as highlighted stars alongside metrics data. +For more information on how Explore visualizes trace data, refer to [Tracing in Explore](../../explore/trace-integration/). To examine the details of an exemplar trace: -1. Place your cursor over an exemplar (highlighted star). Depending on the trace data source you are using, you will see a blue button with the label `Query with `. In the following example, the tracing data source is Tempo. +1. Place your cursor over an exemplar (highlighted star). + Depending on the trace data source you are using, you may see a blue button with the label `Query with `. + In the following example, the tracing data source is Tempo. {{< figure src="/media/docs/grafana/exemplars/screenshot-exemplar-details.png" class="docs-image--no-shadow" max-width= "350px" caption="Screenshot showing exemplar details" >}} -1. Click the **Query with Tempo** option next to the `traceID` property. The trace details, including the spans within the trace are listed in a separate panel on the right. +1. Click the **Query with Tempo** option next to the `traceID` property. + The trace details, including the spans within the trace are listed in a separate panel on the right. {{< figure src="/media/docs/grafana/exemplars/screenshot-exemplar-explore-view.png" class="docs-image--no-shadow" max-width= "900px" caption="Explorer view with panel showing trace details" >}} @@ -58,13 +72,20 @@ For more information on how to drill down and analyze the trace and span details ### In logs -You can also view exemplar trace details from the Loki logs in Explore. Use regex within the Derived fields links for Loki to extract the `traceID` information. Now when you expand Loki logs, you can see a `traceID` property under the **Detected fields** section. To learn more about how to extract a part of a log message into an internal or external link, refer to [using derived fields in Loki]({{< relref "../../explore/logs-integration" >}}). +You can also view exemplar trace details from the Loki logs in Explore. +Use regular expressions within the Derived fields links for Loki to extract the `traceID` information. +Now when you expand Loki logs, you can see a `traceID` property under the **Detected fields** section. +To learn more about how to extract a part of a log message into an internal or external link, refer to [using derived fields in Loki](../../explore/logs-integration/). To view the details of an exemplar trace: -1. Expand a log line and scroll down to the `Fields` section. Depending on your backend trace data source, you will see a blue button with the label ``. +1. Expand a log line and scroll down to the `Fields` section. + Depending on your backend trace data source, you may see a blue button with the label ``. -1. Click the blue button next to the `traceID` property. Typically, it will have the name of the backend data source. In the following example, the tracing data source is Tempo. The trace details, including the spans within the trace are listed in a separate panel on the right. +1. Click the blue button next to the `traceID` property. + Typically, it has the name of the backend data source. 
+ In the following example, the tracing data source is Tempo. + The trace details, including the spans within the trace are listed in a separate panel on the right. {{< figure src="/media/docs/grafana/exemplars/screenshot-exemplar-loki-logs.png" class="docs-image--no-shadow" max-width= "750px" caption="Explorer view with panel showing trace details" >}} @@ -78,16 +99,20 @@ This panel shows the details of the trace in different segments. You can add more traces to the results using the `Add query` button. -- The next segment shows the entire span for the specific trace as a narrow strip. All levels of the trace from the client all the way down to database query is displayed, which provides a bird's eye view of the time distribution across all layers over which the HTTP request was processed. +- The next segment shows the entire span for the specific trace as a narrow strip. + All levels of the trace from the client all the way down to database query is displayed, which provides a bird's eye view of the time distribution across all layers over which the HTTP request was processed. 1. You can click within this strip view to display a magnified view of a smaller time segment within the span. This magnified view shows up in the bottom segment of the panel. 1. In the magnified view, you can expand or collapse the various levels of the trace to drill down to the specific span of interest. - For example, if the strip view shows that most of the latency was within the app layer, you can expand the trace down the app layer to investigate the problem further. To expand a particular layer of span, click the left icon. The same button can collapse an expanded span. + For example, if the strip view shows that most of the latency was within the app layer, you can expand the trace down the app layer to investigate the problem further. + To expand a particular layer of span, click the left icon. + The same button can collapse an expanded span. - To see the details of the span at any level, click the span itself. - This displays additional metadata associated with the span. The metadata itself is initially shown in a narrow strip but you can see more details by clicking the metadata strip. + This displays additional metadata associated with the span. + The metadata itself is initially shown in a narrow strip but you can see more details by clicking the metadata strip. 
{{< figure src="/media/docs/grafana/exemplars/screenshot-exemplar-span-details.png" class="docs-image--no-shadow" max-width= "600px" caption="Span details" >}} From 2585fec99e225eba2c57be81409a3e369590229e Mon Sep 17 00:00:00 2001 From: Pepe Cano <825430+ppcano@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:06:38 +0100 Subject: [PATCH 13/33] Alerting: Clarify that the AWS SNS subject field cannot be empty (#100780) * Alerting: Clarify that the AWS SNS subject field cannot be empty * minor copy change --- .../ngalert/notifier/channels_config/available_channels.go | 4 ++-- public/app/features/alerting/unified/mockGrafanaNotifiers.ts | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pkg/services/ngalert/notifier/channels_config/available_channels.go b/pkg/services/ngalert/notifier/channels_config/available_channels.go index a64c641a40eec..2e2ce80d24d31 100644 --- a/pkg/services/ngalert/notifier/channels_config/available_channels.go +++ b/pkg/services/ngalert/notifier/channels_config/available_channels.go @@ -1689,13 +1689,13 @@ func GetAvailableNotifiers() []*NotifierPlugin { Label: "Subject", Element: ElementTypeTextArea, InputType: InputTypeText, - Description: "Optional subject. You can use templates to customize this field", + Description: "Optional subject. By default, this field uses the default title template and can be customized with templates and custom messages. It cannot be an empty string", PropertyName: "subject", Placeholder: alertingTemplates.DefaultMessageTitleEmbed, }, { Label: "Message", - Description: "Optional message. You can use templates to customize this field. Using a custom message will replace the default message", + Description: "Optional message. By default, this field uses the default message template and can be customized with templates and custom messages", Element: ElementTypeTextArea, PropertyName: "message", Placeholder: alertingTemplates.DefaultMessageEmbed, diff --git a/public/app/features/alerting/unified/mockGrafanaNotifiers.ts b/public/app/features/alerting/unified/mockGrafanaNotifiers.ts index a15904148ea54..984a9eb1a1466 100644 --- a/public/app/features/alerting/unified/mockGrafanaNotifiers.ts +++ b/public/app/features/alerting/unified/mockGrafanaNotifiers.ts @@ -3146,7 +3146,8 @@ export const grafanaAlertNotifiers: Record = { element: 'input', inputType: 'text', label: 'Subject', - description: 'Optional subject. You can use templates to customize this field', + description: + 'Optional subject. By default, this field uses the default title template and can be customized using templates. It cannot be an empty string', placeholder: '{{ template "default.title" . }}', propertyName: 'subject', selectOptions: null, @@ -3165,7 +3166,7 @@ export const grafanaAlertNotifiers: Record = { inputType: '', label: 'Message', description: - 'Optional message. You can use templates to customize this field. Using a custom message will replace the default message', + 'Optional message. By default, this field uses the default message template and can be customized with templates and custom messages', placeholder: '{{ template "default.message" . 
}}', propertyName: 'message', selectOptions: null, From bfabe2ce82e0e89c1c37ca6b3ff40d24e98017b1 Mon Sep 17 00:00:00 2001 From: Ieva Date: Tue, 25 Feb 2025 11:07:27 +0000 Subject: [PATCH 14/33] Ks8/Folders: Fix status codes returned on GET (#101237) return the correct status code for folder fetching failures --- pkg/registry/apis/folders/legacy_storage.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/registry/apis/folders/legacy_storage.go b/pkg/registry/apis/folders/legacy_storage.go index f85b282979b37..0c96769a942d9 100644 --- a/pkg/registry/apis/folders/legacy_storage.go +++ b/pkg/registry/apis/folders/legacy_storage.go @@ -142,7 +142,8 @@ func (s *legacyStorage) Get(ctx context.Context, name string, options *metav1.Ge if errors.Is(err, dashboards.ErrFolderNotFound) || err == nil { err = resourceInfo.NewNotFound(name) } - return nil, err + statusErr := apierrors.ToFolderStatusError(err) + return nil, &statusErr } r, err := convertToK8sResource(dto, s.namespacer) From 87a40343321455e1b12622460de98331f66af73c Mon Sep 17 00:00:00 2001 From: antonio <45235678+tonypowa@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:13:16 +0100 Subject: [PATCH 15/33] docs: link to tutorial from docs (#101277) * docs: link to tutorial from docs * missing part of url --- .../alerting/alerting-rules/link-alert-rules-to-panels.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/sources/alerting/alerting-rules/link-alert-rules-to-panels.md b/docs/sources/alerting/alerting-rules/link-alert-rules-to-panels.md index aeaddf1e4057d..746cb77805219 100644 --- a/docs/sources/alerting/alerting-rules/link-alert-rules-to-panels.md +++ b/docs/sources/alerting/alerting-rules/link-alert-rules-to-panels.md @@ -44,6 +44,10 @@ Grafana allows you to link an alert rule to a dashboard panel. This can help you An alert rule is linked to a panel by setting the [`dashboardUId` and `panelId` annotations](ref:annotations). Both annotations must be set together. +{{% admonition type="tutorial" %}} +For a hands-on example of integrating alert rules with dashboards, check out [Part 5 of our Get Started with Grafana Alerting tutorial](http://www.grafana.com/tutorials/alerting-get-started-pt5/). +{{% /admonition %}} + ## Link alert rules to panels When configuring the alert rule, you can set the dashboard and panel annotations as shown in this [video](https://youtu.be/ClLp-iSoaSY?si=qKWnvSVaQuvYcuw9&t=170). From df99c928232fefc463ec9ce9847af18ed86b868b Mon Sep 17 00:00:00 2001 From: Oscar Kilhed Date: Tue, 25 Feb 2025 12:44:11 +0100 Subject: [PATCH 16/33] Dashboards: Disable flaky import dashboard test in old architecture. 
(#101282) disable flaky test --- e2e/old-arch/dashboards-suite/import-dashboard.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e/old-arch/dashboards-suite/import-dashboard.spec.ts b/e2e/old-arch/dashboards-suite/import-dashboard.spec.ts index 6d4bfed5e6887..55d58705111e0 100644 --- a/e2e/old-arch/dashboards-suite/import-dashboard.spec.ts +++ b/e2e/old-arch/dashboards-suite/import-dashboard.spec.ts @@ -6,7 +6,7 @@ describe('Import Dashboards Test', () => { e2e.flows.login(Cypress.env('USERNAME'), Cypress.env('PASSWORD')); }); - it('Ensure you can import a number of json test dashboards from a specific test directory', () => { + it.skip('Ensure you can import a number of json test dashboards from a specific test directory', () => { e2e.flows.importDashboard(testDashboard, 1000); }); }); From ce8a874bf0c6af272c7084b6a0838683ffcbc019 Mon Sep 17 00:00:00 2001 From: Andres Martinez Gotor Date: Tue, 25 Feb 2025 13:37:41 +0100 Subject: [PATCH 17/33] Advisor: Preinstall app plugin if enabled (#101289) --- pkg/setting/setting_plugins.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pkg/setting/setting_plugins.go b/pkg/setting/setting_plugins.go index d798c1206c9ae..8a459cc768754 100644 --- a/pkg/setting/setting_plugins.go +++ b/pkg/setting/setting_plugins.go @@ -55,6 +55,9 @@ func (cfg *Cfg) readPluginSettings(iniFile *ini.File) error { for _, plugin := range defaultPreinstallPlugins { preinstallPlugins[plugin.ID] = plugin } + if cfg.IsFeatureToggleEnabled("grafanaAdvisor") { // Use literal string to avoid circular dependency + preinstallPlugins["grafana-advisor-app"] = InstallPlugin{"grafana-advisor-app", "", ""} + } // Add the plugins defined in the configuration for _, plugin := range rawInstallPlugins { parts := strings.Split(plugin, "@") From 1a65154e746d01b69787a8c56a0d34cfe54bfa53 Mon Sep 17 00:00:00 2001 From: Will Assis <35489495+gassiss@users.noreply.github.com> Date: Tue, 25 Feb 2025 09:38:32 -0300 Subject: [PATCH 18/33] fix (unified-storage): Fix error when trying to get parents of folder as a viewer (#101245) * Fix error when trying to get parents of folder as a viewer with unified-storage enabled --- .../folder/folderimpl/unifiedstore.go | 8 ++ .../folder/folderimpl/unifiedstore_test.go | 87 +++++++++++++++++++ 2 files changed, 95 insertions(+) diff --git a/pkg/services/folder/folderimpl/unifiedstore.go b/pkg/services/folder/folderimpl/unifiedstore.go index 12452a7f2de02..b1615eb4b1e68 100644 --- a/pkg/services/folder/folderimpl/unifiedstore.go +++ b/pkg/services/folder/folderimpl/unifiedstore.go @@ -2,7 +2,9 @@ package folderimpl import ( "context" + "errors" "fmt" + "net/http" "strings" apierrors "k8s.io/apimachinery/pkg/api/errors" @@ -144,6 +146,12 @@ func (ss *FolderUnifiedStoreImpl) GetParents(ctx context.Context, q folder.GetPa for parentUid != "" { out, err := ss.k8sclient.Get(ctx, parentUid, q.OrgID, v1.GetOptions{}) if err != nil { + var statusError *apierrors.StatusError + if errors.As(err, &statusError) && statusError.ErrStatus.Code == http.StatusForbidden { + // If we get a Forbidden error when requesting the parent folder, it means the user does not have access + // to it, nor its parents. 
So we can stop looping + break + } return nil, err } diff --git a/pkg/services/folder/folderimpl/unifiedstore_test.go b/pkg/services/folder/folderimpl/unifiedstore_test.go index 89c68f6d0df60..f056ae96ba562 100644 --- a/pkg/services/folder/folderimpl/unifiedstore_test.go +++ b/pkg/services/folder/folderimpl/unifiedstore_test.go @@ -2,6 +2,7 @@ package folderimpl import ( "context" + "net/http" "testing" claims "github.com/grafana/authlib/types" @@ -12,6 +13,8 @@ import ( "github.com/grafana/grafana/pkg/storage/unified/resource" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" + apierrors "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/selection" ) @@ -85,6 +88,90 @@ func TestComputeFullPath(t *testing.T) { } } +func TestGetParents(t *testing.T) { + mockCli := new(client.MockK8sHandler) + store := FolderUnifiedStoreImpl{ + k8sclient: mockCli, + } + + ctx := context.Background() + orgID := int64(1) + + t.Run("should return list of parent folders of a given folder uid", func(t *testing.T) { + mockCli.On("Get", mock.Anything, "parentone", orgID, mock.Anything, mock.Anything).Return(&unstructured.Unstructured{ + Object: map[string]interface{}{ + "metadata": map[string]interface{}{ + "name": "parentone", + "annotations": map[string]interface{}{"grafana.app/folder": "parenttwo"}, + }, + }, + }, nil).Once() + mockCli.On("Get", mock.Anything, "parenttwo", orgID, mock.Anything, mock.Anything).Return(&unstructured.Unstructured{ + Object: map[string]interface{}{ + "metadata": map[string]interface{}{ + "name": "parenttwo", + "annotations": map[string]interface{}{"grafana.app/folder": "parentthree"}, + }, + }, + }, nil).Once() + mockCli.On("Get", mock.Anything, "parentthree", orgID, mock.Anything, mock.Anything).Return(&unstructured.Unstructured{ + Object: map[string]interface{}{ + "metadata": map[string]interface{}{ + "name": "parentthree", + "annotations": map[string]interface{}{"grafana.app/folder": "parentfour"}, + }, + }, + }, nil).Once() + mockCli.On("Get", mock.Anything, "parentfour", orgID, mock.Anything, mock.Anything).Return(&unstructured.Unstructured{ + Object: map[string]interface{}{ + "metadata": map[string]interface{}{ + "name": "parentfour", + }, + }, + }, nil).Once() + result, err := store.GetParents(ctx, folder.GetParentsQuery{ + UID: "parentone", + OrgID: orgID, + }) + + require.NoError(t, err) + require.Len(t, result, 3) + require.Equal(t, "parentfour", result[0].UID) + require.Equal(t, "parentthree", result[1].UID) + require.Equal(t, "parenttwo", result[2].UID) + }) + + t.Run("should stop if user doesnt have access to the parent folder", func(t *testing.T) { + mockCli.On("Get", mock.Anything, "parentone", orgID, mock.Anything, mock.Anything).Return(&unstructured.Unstructured{ + Object: map[string]interface{}{ + "metadata": map[string]interface{}{ + "name": "parentone", + "annotations": map[string]interface{}{"grafana.app/folder": "parenttwo"}, + }, + }, + }, nil).Once() + mockCli.On("Get", mock.Anything, "parenttwo", orgID, mock.Anything, mock.Anything).Return(&unstructured.Unstructured{ + Object: map[string]interface{}{ + "metadata": map[string]interface{}{ + "name": "parenttwo", + "annotations": map[string]interface{}{"grafana.app/folder": "parentthree"}, + }, + }, + }, nil).Once() + mockCli.On("Get", mock.Anything, "parentthree", orgID, mock.Anything, mock.Anything).Return(nil, &apierrors.StatusError{ + ErrStatus: metav1.Status{Code: 
http.StatusForbidden}, + }).Once() + result, err := store.GetParents(ctx, folder.GetParentsQuery{ + UID: "parentone", + OrgID: orgID, + }) + + require.NoError(t, err) + require.Len(t, result, 1) + require.Equal(t, "parenttwo", result[0].UID) + }) +} + func TestGetChildren(t *testing.T) { mockCli := new(client.MockK8sHandler) store := FolderUnifiedStoreImpl{ From f3433fd47235de1301457d76ad50ebd8e8b4e70d Mon Sep 17 00:00:00 2001 From: Gabriel MABILLE Date: Tue, 25 Feb 2025 13:44:40 +0100 Subject: [PATCH 19/33] RBAC: Remove accessControlOnCall feature toggle (#101222) * RBAC: Remove accessControlOnCall feature toggle * Leave the other one in place * Tests * frontend * Readd empty ft to frontend test * Remove legacy RBAC check * Fix test * no need for context * Remove unused variable * Remove unecessary param * remove unecessary param from tests * More tests :D --- .../feature-toggles/index.md | 1 - .../src/types/featureToggles.gen.ts | 1 - pkg/api/api.go | 2 +- pkg/api/pluginproxy/ds_proxy.go | 3 +- pkg/api/pluginproxy/ds_proxy_test.go | 2 +- pkg/api/pluginproxy/pluginproxy.go | 3 +- pkg/api/pluginproxy/pluginproxy_test.go | 2 +- pkg/middleware/auth.go | 10 +- pkg/middleware/auth_test.go | 12 +- pkg/services/accesscontrol/acimpl/service.go | 9 +- .../accesscontrol/acimpl/service_test.go | 1 - pkg/services/accesscontrol/api/api.go | 10 +- pkg/services/accesscontrol/api/api_test.go | 7 +- .../resourcepermissions/service.go | 2 +- .../resourcepermissions/service_test.go | 12 +- pkg/services/featuremgmt/registry.go | 8 - pkg/services/featuremgmt/toggles_gen.csv | 1 - pkg/services/featuremgmt/toggles_gen.go | 4 - pkg/services/featuremgmt/toggles_gen.json | 1 + pkg/services/navtree/navtreeimpl/applinks.go | 5 +- .../navtree/navtreeimpl/applinks_test.go | 167 +++++++----------- .../plugins/components/AppRootPage.test.tsx | 4 +- .../plugins/components/AppRootPage.tsx | 2 +- 23 files changed, 91 insertions(+), 178 deletions(-) diff --git a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md index 2b446b110474f..699ecbe13c533 100644 --- a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md +++ b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md @@ -28,7 +28,6 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general- | `featureHighlights` | Highlight Grafana Enterprise features | | | `correlations` | Correlations page | Yes | | `cloudWatchCrossAccountQuerying` | Enables cross-account querying in CloudWatch datasources | Yes | -| `accessControlOnCall` | Access control primitives for OnCall | Yes | | `nestedFolders` | Enable folder nesting | Yes | | `logsContextDatasourceUi` | Allow datasource to provide custom UI for context view | Yes | | `lokiQuerySplitting` | Split large interval queries into subqueries with smaller time intervals | Yes | diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index cda45576882f5..b64ba1ed23832 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -43,7 +43,6 @@ export interface FeatureToggles { cloudWatchCrossAccountQuerying?: boolean; showDashboardValidationWarnings?: boolean; mysqlAnsiQuotes?: boolean; - accessControlOnCall?: boolean; nestedFolders?: boolean; alertingBacktesting?: boolean; editPanelCSVDragAndDrop?: boolean; diff --git a/pkg/api/api.go b/pkg/api/api.go index 
5b085c89d00d1..18e52419ece2d 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -66,7 +66,7 @@ func (hs *HTTPServer) registerRoutes() { reqSignedInNoAnonymous := middleware.ReqSignedInNoAnonymous reqGrafanaAdmin := middleware.ReqGrafanaAdmin reqOrgAdmin := middleware.ReqOrgAdmin - reqRoleForAppRoute := middleware.RoleAppPluginAuth(hs.AccessControl, hs.pluginStore, hs.Features, hs.log) + reqRoleForAppRoute := middleware.RoleAppPluginAuth(hs.AccessControl, hs.pluginStore, hs.log) reqSnapshotPublicModeOrCreate := middleware.SnapshotPublicModeOrCreate(hs.Cfg, hs.AccessControl) reqSnapshotPublicModeOrDelete := middleware.SnapshotPublicModeOrDelete(hs.Cfg, hs.AccessControl) redirectFromLegacyPanelEditURL := middleware.RedirectFromLegacyPanelEditURL(hs.Cfg) diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index 1fac3d64fb6ca..c8f158138bfb5 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -340,8 +340,7 @@ func (proxy *DataSourceProxy) validateRequest() error { func (proxy *DataSourceProxy) hasAccessToRoute(route *plugins.Route) bool { ctxLogger := logger.FromContext(proxy.ctx.Req.Context()) - useRBAC := proxy.features.IsEnabled(proxy.ctx.Req.Context(), featuremgmt.FlagAccessControlOnCall) && route.ReqAction != "" - if useRBAC { + if route.ReqAction != "" { routeEval := pluginac.GetDataSourceRouteEvaluator(proxy.ds.UID, route.ReqAction) hasAccess := routeEval.Evaluate(proxy.ctx.GetPermissions()) if !hasAccess { diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go index e6b56e7c88235..a07e36f61dae7 100644 --- a/pkg/api/pluginproxy/ds_proxy_test.go +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -1094,7 +1094,7 @@ func setupDSProxyTest(t *testing.T, ctx *contextmodel.ReqContext, ds *datasource cfg := setting.NewCfg() secretsService := secretsmng.SetupTestService(t, fakes.NewFakeSecretsStore()) secretsStore := secretskvs.NewSQLSecretsKVStore(dbtest.NewFakeDB(), secretsService, log.NewNopLogger()) - features := featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) + features := featuremgmt.WithFeatures() dsService, err := datasourceservice.ProvideService(nil, secretsService, secretsStore, cfg, features, acimpl.ProvideAccessControl(features), &actest.FakePermissionsService{}, quotatest.New(false, nil), &pluginstore.FakePluginStore{}, &pluginfakes.FakePluginClient{}, plugincontext.ProvideBaseService(cfg, pluginconfig.NewFakePluginRequestConfigProvider())) diff --git a/pkg/api/pluginproxy/pluginproxy.go b/pkg/api/pluginproxy/pluginproxy.go index 8288c15276f92..c804cdc0136f4 100644 --- a/pkg/api/pluginproxy/pluginproxy.go +++ b/pkg/api/pluginproxy/pluginproxy.go @@ -129,8 +129,7 @@ func (proxy *PluginProxy) HandleRequest() { } func (proxy *PluginProxy) hasAccessToRoute(route *plugins.Route) bool { - useRBAC := proxy.features.IsEnabled(proxy.ctx.Req.Context(), featuremgmt.FlagAccessControlOnCall) && route.ReqAction != "" - if useRBAC { + if route.ReqAction != "" { routeEval := pluginac.GetPluginRouteEvaluator(proxy.ps.PluginID, route.ReqAction) hasAccess := ac.HasAccess(proxy.accessControl, proxy.ctx)(routeEval) if !hasAccess { diff --git a/pkg/api/pluginproxy/pluginproxy_test.go b/pkg/api/pluginproxy/pluginproxy_test.go index ae122d558a4ac..14034835c4936 100644 --- a/pkg/api/pluginproxy/pluginproxy_test.go +++ b/pkg/api/pluginproxy/pluginproxy_test.go @@ -557,7 +557,7 @@ func TestPluginProxyRoutesAccessControl(t *testing.T) { SecureJSONData: map[string][]byte{}, } cfg := &setting.Cfg{} - proxy, err 
:= NewPluginProxy(ps, testRoutes, ctx, tc.proxyPath, cfg, secretsService, tracing.InitializeTracerForTest(), &http.Transport{}, acimpl.ProvideAccessControl(featuremgmt.WithFeatures()), featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall)) + proxy, err := NewPluginProxy(ps, testRoutes, ctx, tc.proxyPath, cfg, secretsService, tracing.InitializeTracerForTest(), &http.Transport{}, acimpl.ProvideAccessControl(featuremgmt.WithFeatures()), featuremgmt.WithFeatures()) require.NoError(t, err) proxy.HandleRequest() diff --git a/pkg/middleware/auth.go b/pkg/middleware/auth.go index 815e3af6cc7b6..f9a3a6a842d18 100644 --- a/pkg/middleware/auth.go +++ b/pkg/middleware/auth.go @@ -16,7 +16,6 @@ import ( "github.com/grafana/grafana/pkg/services/authn" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" "github.com/grafana/grafana/pkg/services/dashboards" - "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/org" "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol" "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore" @@ -138,9 +137,7 @@ func CanAdminPlugins(cfg *setting.Cfg, accessControl ac.AccessControl) func(c *c } } -func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, features featuremgmt.FeatureToggles, - logger log.Logger, -) func(c *contextmodel.ReqContext) { +func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, logger log.Logger) func(c *contextmodel.ReqContext) { return func(c *contextmodel.ReqContext) { pluginID := web.Params(c.Req)[":id"] p, exists := ps.Plugin(c.Req.Context(), pluginID) @@ -164,12 +161,11 @@ func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, fea } if normalizeIncludePath(u.Path) == path { - useRBAC := features.IsEnabledGlobally(featuremgmt.FlagAccessControlOnCall) && i.RequiresRBACAction() - if useRBAC && !hasAccess(pluginaccesscontrol.GetPluginRouteEvaluator(pluginID, i.Action)) { + if i.RequiresRBACAction() && !hasAccess(pluginaccesscontrol.GetPluginRouteEvaluator(pluginID, i.Action)) { logger.Debug("Plugin include is covered by RBAC, user doesn't have access", "plugin", pluginID, "include", i.Name) permitted = false break - } else if !useRBAC && !c.HasUserRole(i.Role) { + } else if !i.RequiresRBACAction() && !c.HasUserRole(i.Role) { permitted = false break } diff --git a/pkg/middleware/auth_test.go b/pkg/middleware/auth_test.go index b9b83ae9baeb2..fdca1d04ee3ef 100644 --- a/pkg/middleware/auth_test.go +++ b/pkg/middleware/auth_test.go @@ -204,11 +204,10 @@ func TestRoleAppPluginAuth(t *testing.T) { 0: tc.role, }, }) - features := featuremgmt.WithFeatures() logger := &logtest.Fake{} ac := &actest.FakeAccessControl{} - sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, ps, features, logger), func(c *contextmodel.ReqContext) { + sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, ps, logger), func(c *contextmodel.ReqContext) { c.JSON(http.StatusOK, map[string]interface{}{}) }) sc.fakeReq("GET", path).exec() @@ -227,10 +226,9 @@ func TestRoleAppPluginAuth(t *testing.T) { 0: org.RoleViewer, }, }) - features := featuremgmt.WithFeatures() logger := &logtest.Fake{} ac := &actest.FakeAccessControl{} - sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, &pluginstore.FakePluginStore{}, features, logger), func(c *contextmodel.ReqContext) { + sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, &pluginstore.FakePluginStore{}, logger), func(c *contextmodel.ReqContext) { c.JSON(http.StatusOK, map[string]interface{}{}) }) 
sc.fakeReq("GET", "/a/test-app/test").exec() @@ -245,7 +243,6 @@ func TestRoleAppPluginAuth(t *testing.T) { 0: org.RoleViewer, }, }) - features := featuremgmt.WithFeatures() logger := &logtest.Fake{} ac := &actest.FakeAccessControl{} sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, pluginstore.NewFakePluginStore(pluginstore.Plugin{ @@ -259,7 +256,7 @@ func TestRoleAppPluginAuth(t *testing.T) { }, }, }, - }), features, logger), func(c *contextmodel.ReqContext) { + }), logger), func(c *contextmodel.ReqContext) { c.JSON(http.StatusOK, map[string]interface{}{}) }) sc.fakeReq("GET", "/a/test-app/notExistingPath").exec() @@ -307,7 +304,6 @@ func TestRoleAppPluginAuth(t *testing.T) { }, }) logger := &logtest.Fake{} - features := featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) ac := &actest.FakeAccessControl{ ExpectedEvaluate: tc.evalResult, ExpectedErr: tc.evalErr, @@ -327,7 +323,7 @@ func TestRoleAppPluginAuth(t *testing.T) { }, }) - sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, ps, features, logger), func(c *contextmodel.ReqContext) { + sc.m.Get("/a/:id/*", RoleAppPluginAuth(ac, ps, logger), func(c *contextmodel.ReqContext) { c.JSON(http.StatusOK, map[string]interface{}{}) }) sc.fakeReq("GET", path).exec() diff --git a/pkg/services/accesscontrol/acimpl/service.go b/pkg/services/accesscontrol/acimpl/service.go index d98c5dc8c2a90..1a2751978cdf8 100644 --- a/pkg/services/accesscontrol/acimpl/service.go +++ b/pkg/services/accesscontrol/acimpl/service.go @@ -68,7 +68,7 @@ func ProvideService( lock, ) - api.NewAccessControlAPI(routeRegister, accessControl, service, userService, features).RegisterAPIEndpoints() + api.NewAccessControlAPI(routeRegister, accessControl, service, userService).RegisterAPIEndpoints() if err := accesscontrol.DeclareFixedRoles(service, cfg); err != nil { return nil, err } @@ -472,14 +472,9 @@ func (s *Service) RegisterFixedRoles(ctx context.Context) error { // DeclarePluginRoles allow the caller to declare, to the service, plugin roles and their assignments // to organization roles ("Viewer", "Editor", "Admin") or "Grafana Admin" func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs []plugins.RoleRegistration) error { - ctx, span := tracer.Start(ctx, "accesscontrol.acimpl.DeclarePluginRoles") + _, span := tracer.Start(ctx, "accesscontrol.acimpl.DeclarePluginRoles") defer span.End() - // Protect behind feature toggle - if !s.features.IsEnabled(ctx, featuremgmt.FlagAccessControlOnCall) { - return nil - } - acRegs := pluginutils.ToRegistrations(ID, name, regs) for _, r := range acRegs { if err := pluginutils.ValidatePluginRole(ID, r.Role); err != nil { diff --git a/pkg/services/accesscontrol/acimpl/service_test.go b/pkg/services/accesscontrol/acimpl/service_test.go index 74acae53652b9..2d991d98ab67e 100644 --- a/pkg/services/accesscontrol/acimpl/service_test.go +++ b/pkg/services/accesscontrol/acimpl/service_test.go @@ -253,7 +253,6 @@ func TestService_DeclarePluginRoles(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ac := setupTestEnv(t) - ac.features = featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) // Reset the registations ac.registrations = accesscontrol.RegistrationList{} diff --git a/pkg/services/accesscontrol/api/api.go b/pkg/services/accesscontrol/api/api.go index 1620ce261a776..be79a93eaf569 100644 --- a/pkg/services/accesscontrol/api/api.go +++ b/pkg/services/accesscontrol/api/api.go @@ -17,20 +17,17 @@ import ( "github.com/grafana/grafana/pkg/middleware/requestmeta" ac 
"github.com/grafana/grafana/pkg/services/accesscontrol" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" - "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/user" ) var tracer = otel.Tracer("github.com/grafana/grafana/pkg/services/accesscontrol/api") -func NewAccessControlAPI(router routing.RouteRegister, accesscontrol ac.AccessControl, service ac.Service, - userSvc user.Service, features featuremgmt.FeatureToggles) *AccessControlAPI { +func NewAccessControlAPI(router routing.RouteRegister, accesscontrol ac.AccessControl, service ac.Service, userSvc user.Service) *AccessControlAPI { return &AccessControlAPI{ RouteRegister: router, Service: service, userSvc: userSvc, AccessControl: accesscontrol, - features: features, } } @@ -39,7 +36,6 @@ type AccessControlAPI struct { AccessControl ac.AccessControl RouteRegister routing.RouteRegister userSvc user.Service - features featuremgmt.FeatureToggles } func (api *AccessControlAPI) RegisterAPIEndpoints() { @@ -48,9 +44,7 @@ func (api *AccessControlAPI) RegisterAPIEndpoints() { api.RouteRegister.Group("/api/access-control", func(rr routing.RouteRegister) { rr.Get("/user/actions", middleware.ReqSignedIn, routing.Wrap(api.getUserActions)) rr.Get("/user/permissions", middleware.ReqSignedIn, routing.Wrap(api.getUserPermissions)) - if api.features.IsEnabledGlobally(featuremgmt.FlagAccessControlOnCall) { - rr.Get("/users/permissions/search", authorize(ac.EvalPermission(ac.ActionUsersPermissionsRead)), routing.Wrap(api.searchUsersPermissions)) - } + rr.Get("/users/permissions/search", authorize(ac.EvalPermission(ac.ActionUsersPermissionsRead)), routing.Wrap(api.searchUsersPermissions)) }, requestmeta.SetOwner(requestmeta.TeamAuth)) } diff --git a/pkg/services/accesscontrol/api/api_test.go b/pkg/services/accesscontrol/api/api_test.go index 4068b3204d7b5..1826ac65100b9 100644 --- a/pkg/services/accesscontrol/api/api_test.go +++ b/pkg/services/accesscontrol/api/api_test.go @@ -12,7 +12,6 @@ import ( ac "github.com/grafana/grafana/pkg/services/accesscontrol" "github.com/grafana/grafana/pkg/services/accesscontrol/actest" "github.com/grafana/grafana/pkg/services/datasources" - "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/services/user/usertest" "github.com/grafana/grafana/pkg/util" @@ -42,7 +41,7 @@ func TestAPI_getUserActions(t *testing.T) { for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { acSvc := actest.FakeService{ExpectedPermissions: tt.permissions} - api := NewAccessControlAPI(routing.NewRouteRegister(), actest.FakeAccessControl{}, acSvc, &usertest.FakeUserService{}, featuremgmt.WithFeatures()) + api := NewAccessControlAPI(routing.NewRouteRegister(), actest.FakeAccessControl{}, acSvc, &usertest.FakeUserService{}) api.RegisterAPIEndpoints() server := webtest.NewServer(t, api.RouteRegister) @@ -95,7 +94,7 @@ func TestAPI_getUserPermissions(t *testing.T) { for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { acSvc := actest.FakeService{ExpectedPermissions: tt.permissions} - api := NewAccessControlAPI(routing.NewRouteRegister(), actest.FakeAccessControl{}, acSvc, &usertest.FakeUserService{}, featuremgmt.WithFeatures()) + api := NewAccessControlAPI(routing.NewRouteRegister(), actest.FakeAccessControl{}, acSvc, &usertest.FakeUserService{}) api.RegisterAPIEndpoints() server := webtest.NewServer(t, api.RouteRegister) @@ -192,7 +191,7 @@ func 
TestAccessControlAPI_searchUsersPermissions(t *testing.T) { mockUserSvc := usertest.NewMockService(t) mockUserSvc.On("GetByUID", mock.Anything, &user.GetUserByUIDQuery{UID: "user_2_uid"}).Return(&user.User{ID: 2}, nil).Maybe() mockUserSvc.On("GetByUID", mock.Anything, &user.GetUserByUIDQuery{UID: "non_existent_uid"}).Return(nil, user.ErrUserNotFound).Maybe() - api := NewAccessControlAPI(routing.NewRouteRegister(), accessControl, acSvc, mockUserSvc, featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall)) + api := NewAccessControlAPI(routing.NewRouteRegister(), accessControl, acSvc, mockUserSvc) api.RegisterAPIEndpoints() server := webtest.NewServer(t, api.RouteRegister) diff --git a/pkg/services/accesscontrol/resourcepermissions/service.go b/pkg/services/accesscontrol/resourcepermissions/service.go index 2a6f89a210615..37334ce35ebdd 100644 --- a/pkg/services/accesscontrol/resourcepermissions/service.go +++ b/pkg/services/accesscontrol/resourcepermissions/service.go @@ -583,7 +583,7 @@ func (a *ActionSetSvc) RegisterActionSets(ctx context.Context, pluginID string, ctx, span := tracer.Start(ctx, "accesscontrol.resourcepermissions.RegisterActionSets") defer span.End() - if !a.features.IsEnabled(ctx, featuremgmt.FlagAccessActionSets) || !a.features.IsEnabled(ctx, featuremgmt.FlagAccessControlOnCall) { + if !a.features.IsEnabled(ctx, featuremgmt.FlagAccessActionSets) { return nil } for _, reg := range registrations { diff --git a/pkg/services/accesscontrol/resourcepermissions/service_test.go b/pkg/services/accesscontrol/resourcepermissions/service_test.go index 29d534cf218cd..3022dd6fb46ba 100644 --- a/pkg/services/accesscontrol/resourcepermissions/service_test.go +++ b/pkg/services/accesscontrol/resourcepermissions/service_test.go @@ -328,7 +328,7 @@ func TestStore_RegisterActionSet(t *testing.T) { tests := []actionSetTest{ { desc: "should be able to register a plugin action set if the right feature toggles are enabled", - features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets, featuremgmt.FlagAccessControlOnCall), + features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets), pluginID: "test-app", pluginActions: []plugins.ActionSet{ { @@ -345,7 +345,7 @@ func TestStore_RegisterActionSet(t *testing.T) { }, { desc: "should not register plugin action set if feature toggles are missing", - features: featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall), + features: featuremgmt.WithFeatures(), pluginID: "test-app", pluginActions: []plugins.ActionSet{ { @@ -357,7 +357,7 @@ func TestStore_RegisterActionSet(t *testing.T) { }, { desc: "should be able to register multiple plugin action sets", - features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets, featuremgmt.FlagAccessControlOnCall), + features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets), pluginID: "test-app", pluginActions: []plugins.ActionSet{ { @@ -382,7 +382,7 @@ func TestStore_RegisterActionSet(t *testing.T) { }, { desc: "action set actions should be added not replaced", - features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets, featuremgmt.FlagAccessControlOnCall), + features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets), pluginID: "test-app", pluginActions: []plugins.ActionSet{ { @@ -425,7 +425,7 @@ func TestStore_RegisterActionSet(t *testing.T) { }, { desc: "should not be able to register an action that doesn't have a plugin prefix", - features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets, featuremgmt.FlagAccessControlOnCall), + features: 
featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets), pluginID: "test-app", pluginActions: []plugins.ActionSet{ { @@ -441,7 +441,7 @@ func TestStore_RegisterActionSet(t *testing.T) { }, { desc: "should not be able to register action set that is not in the allow list", - features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets, featuremgmt.FlagAccessControlOnCall), + features: featuremgmt.WithFeatures(featuremgmt.FlagAccessActionSets), pluginID: "test-app", pluginActions: []plugins.ActionSet{ { diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index 6ea80687ab7a5..937cd429810eb 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -198,14 +198,6 @@ var ( Stage: FeatureStageExperimental, Owner: grafanaSearchAndStorageSquad, }, - { - Name: "accessControlOnCall", - Description: "Access control primitives for OnCall", - Stage: FeatureStageGeneralAvailability, - Owner: identityAccessTeam, - HideFromAdminPage: true, - Expression: "true", // enabled by default - }, { Name: "nestedFolders", Description: "Enable folder nesting", diff --git a/pkg/services/featuremgmt/toggles_gen.csv b/pkg/services/featuremgmt/toggles_gen.csv index 1a74645d2a312..f6c7593139201 100644 --- a/pkg/services/featuremgmt/toggles_gen.csv +++ b/pkg/services/featuremgmt/toggles_gen.csv @@ -24,7 +24,6 @@ grpcServer,preview,@grafana/search-and-storage,false,false,false cloudWatchCrossAccountQuerying,GA,@grafana/aws-datasources,false,false,false showDashboardValidationWarnings,experimental,@grafana/dashboards-squad,false,false,false mysqlAnsiQuotes,experimental,@grafana/search-and-storage,false,false,false -accessControlOnCall,GA,@grafana/identity-access-team,false,false,false nestedFolders,GA,@grafana/search-and-storage,false,false,false alertingBacktesting,experimental,@grafana/alerting-squad,false,false,false editPanelCSVDragAndDrop,experimental,@grafana/dataviz-squad,false,false,true diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index 043d9446a3af5..eb23fb7815bd4 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -107,10 +107,6 @@ const ( // Use double quotes to escape keyword in a MySQL query FlagMysqlAnsiQuotes = "mysqlAnsiQuotes" - // FlagAccessControlOnCall - // Access control primitives for OnCall - FlagAccessControlOnCall = "accessControlOnCall" - // FlagNestedFolders // Enable folder nesting FlagNestedFolders = "nestedFolders" diff --git a/pkg/services/featuremgmt/toggles_gen.json b/pkg/services/featuremgmt/toggles_gen.json index 67a1638768440..fcf2d51730408 100644 --- a/pkg/services/featuremgmt/toggles_gen.json +++ b/pkg/services/featuremgmt/toggles_gen.json @@ -52,6 +52,7 @@ "name": "accessControlOnCall", "resourceVersion": "1726562036211", "creationTimestamp": "2022-10-19T16:10:09Z", + "deletionTimestamp": "2025-02-24T14:40:54Z", "annotations": { "grafana.app/updatedTimestamp": "2024-09-17 08:33:56.211355566 +0000 UTC" } diff --git a/pkg/services/navtree/navtreeimpl/applinks.go b/pkg/services/navtree/navtreeimpl/applinks.go index 5c6b88756d340..4c71eb150ed53 100644 --- a/pkg/services/navtree/navtreeimpl/applinks.go +++ b/pkg/services/navtree/navtreeimpl/applinks.go @@ -268,13 +268,12 @@ func (s *ServiceImpl) addPluginToSection(c *contextmodel.ReqContext, treeRoot *n func (s *ServiceImpl) hasAccessToInclude(c *contextmodel.ReqContext, pluginID string) func(include *plugins.Includes) bool { hasAccess := 
ac.HasAccess(s.accessControl, c) return func(include *plugins.Includes) bool { - useRBAC := s.features.IsEnabledGlobally(featuremgmt.FlagAccessControlOnCall) && include.RequiresRBACAction() - if useRBAC && !hasAccess(pluginaccesscontrol.GetPluginRouteEvaluator(pluginID, include.Action)) { + if include.RequiresRBACAction() && !hasAccess(pluginaccesscontrol.GetPluginRouteEvaluator(pluginID, include.Action)) { s.log.Debug("plugin include is covered by RBAC, user doesn't have access", "plugin", pluginID, "include", include.Name) return false - } else if !useRBAC && !c.HasUserRole(include.Role) { + } else if !include.RequiresRBACAction() && !c.HasUserRole(include.Role) { return false } return true diff --git a/pkg/services/navtree/navtreeimpl/applinks_test.go b/pkg/services/navtree/navtreeimpl/applinks_test.go index b064430e7fc55..a5a7fc15e99a2 100644 --- a/pkg/services/navtree/navtreeimpl/applinks_test.go +++ b/pkg/services/navtree/navtreeimpl/applinks_test.go @@ -450,114 +450,67 @@ func TestAddAppLinksAccessControl(t *testing.T) { }, } - t.Run("Without plugin RBAC - Enforce role", func(t *testing.T) { - t.Run("Should not add app links when the user cannot access app plugins", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{} - user.OrgRole = identity.RoleAdmin - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - require.Len(t, treeRoot.Children, 0) - }) - t.Run(" Should add all includes when the user is an editor", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{ - 1: {pluginaccesscontrol.ActionAppAccess: []string{"*"}}, - } - user.OrgRole = identity.RoleEditor - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - appsNode := treeRoot.FindById(navtree.NavIDApps) - require.Len(t, appsNode.Children, 1) - require.Equal(t, "Test app1 name", appsNode.Children[0].Text) - require.Equal(t, "/a/test-app1/home", appsNode.Children[0].Url) - require.Len(t, appsNode.Children[0].Children, 2) - require.Equal(t, "/a/test-app1/catalog", appsNode.Children[0].Children[0].Url) - require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[1].Url) - }) - t.Run("Should add two includes when the user is a viewer", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{ - 1: {pluginaccesscontrol.ActionAppAccess: []string{"*"}}, - } - user.OrgRole = identity.RoleViewer - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - appsNode := treeRoot.FindById(navtree.NavIDApps) - require.Len(t, appsNode.Children, 1) - require.Equal(t, "Test app1 name", appsNode.Children[0].Text) - require.Equal(t, "/a/test-app1/home", appsNode.Children[0].Url) - require.Len(t, appsNode.Children[0].Children, 1) - require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[0].Url) - }) + t.Run("Should not see any includes with no app access", func(t *testing.T) { + treeRoot := navtree.NavTreeRoot{} + user.Permissions = map[int64]map[string][]string{ + 1: {pluginaccesscontrol.ActionAppAccess: []string{"plugins:id:not-the-test-app1"}}, + } + user.OrgRole = identity.RoleNone + service.features = featuremgmt.WithFeatures() + + err := service.addAppLinks(&treeRoot, reqCtx) + require.NoError(t, err) + require.Len(t, treeRoot.Children, 0) }) + t.Run("Should only see the announcements as a none role user with app access", func(t *testing.T) { + treeRoot := navtree.NavTreeRoot{} 
+ user.Permissions = map[int64]map[string][]string{ + 1: {pluginaccesscontrol.ActionAppAccess: []string{"plugins:id:test-app1"}}, + } + user.OrgRole = identity.RoleNone + service.features = featuremgmt.WithFeatures() - t.Run("With plugin RBAC - Enforce action first", func(t *testing.T) { - t.Run("Should not see any includes with no app access", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{ - 1: {pluginaccesscontrol.ActionAppAccess: []string{"plugins:id:not-the-test-app1"}}, - } - user.OrgRole = identity.RoleNone - service.features = featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - require.Len(t, treeRoot.Children, 0) - }) - t.Run("Should only see the announcements as a none role user with app access", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{ - 1: {pluginaccesscontrol.ActionAppAccess: []string{"plugins:id:test-app1"}}, - } - user.OrgRole = identity.RoleNone - service.features = featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - appsNode := treeRoot.FindById(navtree.NavIDApps) - require.Len(t, appsNode.Children, 1) - require.Equal(t, "Test app1 name", appsNode.Children[0].Text) - require.Len(t, appsNode.Children[0].Children, 1) - require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[0].Url) - }) - t.Run("Should now see the catalog as a viewer with catalog read", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{ - 1: {pluginaccesscontrol.ActionAppAccess: []string{"plugins:id:test-app1"}, catalogReadAction: []string{}}, - } - user.OrgRole = identity.RoleViewer - service.features = featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - appsNode := treeRoot.FindById(navtree.NavIDApps) - require.Len(t, appsNode.Children, 1) - require.Equal(t, "Test app1 name", appsNode.Children[0].Text) - require.Equal(t, "/a/test-app1/home", appsNode.Children[0].Url) - require.Len(t, appsNode.Children[0].Children, 2) - require.Equal(t, "/a/test-app1/catalog", appsNode.Children[0].Children[0].Url) - require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[1].Url) - }) - t.Run("Should not see the catalog include as an editor without catalog read", func(t *testing.T) { - treeRoot := navtree.NavTreeRoot{} - user.Permissions = map[int64]map[string][]string{ - 1: {pluginaccesscontrol.ActionAppAccess: []string{"*"}}, - } - user.OrgRole = identity.RoleEditor - service.features = featuremgmt.WithFeatures(featuremgmt.FlagAccessControlOnCall) - - err := service.addAppLinks(&treeRoot, reqCtx) - require.NoError(t, err) - appsNode := treeRoot.FindById(navtree.NavIDApps) - require.Len(t, appsNode.Children, 1) - require.Equal(t, "Test app1 name", appsNode.Children[0].Text) - require.Equal(t, "/a/test-app1/home", appsNode.Children[0].Url) - require.Len(t, appsNode.Children[0].Children, 1) - require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[0].Url) - }) + err := service.addAppLinks(&treeRoot, reqCtx) + require.NoError(t, err) + appsNode := treeRoot.FindById(navtree.NavIDApps) + require.Len(t, appsNode.Children, 1) + require.Equal(t, "Test app1 name", appsNode.Children[0].Text) + require.Len(t, 
appsNode.Children[0].Children, 1) + require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[0].Url) + }) + t.Run("Should now see the catalog as a viewer with catalog read", func(t *testing.T) { + treeRoot := navtree.NavTreeRoot{} + user.Permissions = map[int64]map[string][]string{ + 1: {pluginaccesscontrol.ActionAppAccess: []string{"plugins:id:test-app1"}, catalogReadAction: []string{}}, + } + user.OrgRole = identity.RoleViewer + service.features = featuremgmt.WithFeatures() + + err := service.addAppLinks(&treeRoot, reqCtx) + require.NoError(t, err) + appsNode := treeRoot.FindById(navtree.NavIDApps) + require.Len(t, appsNode.Children, 1) + require.Equal(t, "Test app1 name", appsNode.Children[0].Text) + require.Equal(t, "/a/test-app1/home", appsNode.Children[0].Url) + require.Len(t, appsNode.Children[0].Children, 2) + require.Equal(t, "/a/test-app1/catalog", appsNode.Children[0].Children[0].Url) + require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[1].Url) + }) + t.Run("Should not see the catalog include as an editor without catalog read", func(t *testing.T) { + treeRoot := navtree.NavTreeRoot{} + user.Permissions = map[int64]map[string][]string{ + 1: {pluginaccesscontrol.ActionAppAccess: []string{"*"}}, + } + user.OrgRole = identity.RoleEditor + service.features = featuremgmt.WithFeatures() + + err := service.addAppLinks(&treeRoot, reqCtx) + require.NoError(t, err) + appsNode := treeRoot.FindById(navtree.NavIDApps) + require.Len(t, appsNode.Children, 1) + require.Equal(t, "Test app1 name", appsNode.Children[0].Text) + require.Equal(t, "/a/test-app1/home", appsNode.Children[0].Url) + require.Len(t, appsNode.Children[0].Children, 1) + require.Equal(t, "/a/test-app1/announcements", appsNode.Children[0].Children[0].Url) }) } diff --git a/public/app/features/plugins/components/AppRootPage.test.tsx b/public/app/features/plugins/components/AppRootPage.test.tsx index c9e5ec616be8e..594028bcebf2a 100644 --- a/public/app/features/plugins/components/AppRootPage.test.tsx +++ b/public/app/features/plugins/components/AppRootPage.test.tsx @@ -30,9 +30,7 @@ jest.mock('../plugin_loader', () => ({ jest.mock('@grafana/runtime', () => ({ ...jest.requireActual('@grafana/runtime'), config: { - featureToggles: { - accessControlOnCall: true, - }, + featureToggles: {}, apps: {}, theme2: { breakpoints: { diff --git a/public/app/features/plugins/components/AppRootPage.tsx b/public/app/features/plugins/components/AppRootPage.tsx index 95a280027df99..22e324e74781e 100644 --- a/public/app/features/plugins/components/AppRootPage.tsx +++ b/public/app/features/plugins/components/AppRootPage.tsx @@ -135,7 +135,7 @@ export function AppRootPage({ pluginId, pluginNavSection }: Props) { } // Check if action exists and give access if user has the required permission. 
- if (pluginInclude?.action && config.featureToggles.accessControlOnCall) { + if (pluginInclude?.action) { return contextSrv.hasPermission(pluginInclude.action); } From 2e78bcfb412ca2578808420feecfdbe1683fd00f Mon Sep 17 00:00:00 2001 From: Leon Sorokin Date: Tue, 25 Feb 2025 07:50:32 -0600 Subject: [PATCH 20/33] Transformations: Add round() to Unary mode of `Add field from calc` (#101295) --- packages/grafana-data/src/utils/unaryOperators.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/grafana-data/src/utils/unaryOperators.ts b/packages/grafana-data/src/utils/unaryOperators.ts index a7e9a85b78292..e440382f31e48 100644 --- a/packages/grafana-data/src/utils/unaryOperators.ts +++ b/packages/grafana-data/src/utils/unaryOperators.ts @@ -4,6 +4,7 @@ export enum UnaryOperationID { Abs = 'abs', Exp = 'exp', Ln = 'ln', + Round = 'round', Floor = 'floor', Ceil = 'ceil', } @@ -35,6 +36,12 @@ export const unaryOperators = new Registry(() => { operation: (value: number) => Math.log(value), unaryOperationID: UnaryOperationID.Ln, }, + { + id: UnaryOperationID.Round, + name: 'Round', + operation: (value: number) => Math.round(value), + unaryOperationID: UnaryOperationID.Round, + }, { id: UnaryOperationID.Floor, name: 'Floor', From 8d7108d7747eb450d36a1113671ad5bc30a8e870 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Tue, 25 Feb 2025 15:03:29 +0100 Subject: [PATCH 21/33] Dashboard: Add new elements logic (#101162) --- .../dashboard-scene/scene/DashboardScene.tsx | 12 +++-- .../scene/layout-rows/RowsLayoutManager.tsx | 13 ------ .../scene/layout-tabs/TabsLayoutManager.tsx | 4 -- .../scene/layouts-shared/addNew.ts | 45 +++++++++++++++++++ .../scene/types/DashboardLayoutManager.ts | 10 ----- 5 files changed, 54 insertions(+), 30 deletions(-) create mode 100644 public/app/features/dashboard-scene/scene/layouts-shared/addNew.ts diff --git a/public/app/features/dashboard-scene/scene/DashboardScene.tsx b/public/app/features/dashboard-scene/scene/DashboardScene.tsx index 3048549bcb9eb..9978a002fe75c 100644 --- a/public/app/features/dashboard-scene/scene/DashboardScene.tsx +++ b/public/app/features/dashboard-scene/scene/DashboardScene.tsx @@ -78,7 +78,9 @@ import { isUsingAngularDatasourcePlugin, isUsingAngularPanelPlugin } from './ang import { setupKeyboardShortcuts } from './keyboardShortcuts'; import { DashboardGridItem } from './layout-default/DashboardGridItem'; import { DefaultGridLayoutManager } from './layout-default/DefaultGridLayoutManager'; +import { addNewRowTo, addNewTabTo } from './layouts-shared/addNew'; import { DashboardLayoutManager } from './types/DashboardLayoutManager'; +import { LayoutParent } from './types/LayoutParent'; export const PERSISTED_PROPS = ['title', 'description', 'tags', 'editable', 'graphTooltip', 'links', 'meta', 'preload']; export const PANEL_SEARCH_VAR = 'systemPanelFilterVar'; @@ -141,7 +143,7 @@ export interface DashboardSceneState extends SceneObjectState { editPane: DashboardEditPane; } -export class DashboardScene extends SceneObjectBase { +export class DashboardScene extends SceneObjectBase implements LayoutParent { static Component = DashboardSceneRenderer; /** @@ -593,11 +595,11 @@ export class DashboardScene extends SceneObjectBase { } public onCreateNewRow() { - this.state.body.addNewRow(); + addNewRowTo(this.state.body); } public onCreateNewTab() { - this.state.body.addNewTab(); + addNewTabTo(this.state.body); } public onCreateNewPanel(): VizPanel { @@ -613,6 +615,10 @@ export class DashboardScene extends 
SceneObjectBase { layout.activateRepeaters?.(); } + public getLayout(): DashboardLayoutManager { + return this.state.body; + } + /** * Called by the SceneQueryRunner to provide contextual parameters (tracking) props for the request */ diff --git a/public/app/features/dashboard-scene/scene/layout-rows/RowsLayoutManager.tsx b/public/app/features/dashboard-scene/scene/layout-rows/RowsLayoutManager.tsx index 54fb186937d1e..89a75f2976963 100644 --- a/public/app/features/dashboard-scene/scene/layout-rows/RowsLayoutManager.tsx +++ b/public/app/features/dashboard-scene/scene/layout-rows/RowsLayoutManager.tsx @@ -3,11 +3,9 @@ import { t } from 'app/core/internationalization'; import { isClonedKey } from '../../utils/clone'; import { dashboardSceneGraph } from '../../utils/dashboardSceneGraph'; -import { getDashboardSceneFor } from '../../utils/utils'; import { DashboardGridItem } from '../layout-default/DashboardGridItem'; import { DefaultGridLayoutManager } from '../layout-default/DefaultGridLayoutManager'; import { RowRepeaterBehavior } from '../layout-default/RowRepeaterBehavior'; -import { TabsLayoutManager } from '../layout-tabs/TabsLayoutManager'; import { DashboardLayoutManager } from '../types/DashboardLayoutManager'; import { LayoutRegistryItem } from '../types/LayoutRegistryItem'; @@ -85,17 +83,6 @@ export class RowsLayoutManager extends SceneObjectBase i this.setState({ rows: [...this.state.rows, new RowItem()] }); } - public addNewTab() { - const shouldAddTab = this.hasVizPanels(); - const tabsLayout = TabsLayoutManager.createFromLayout(this); - - if (shouldAddTab) { - tabsLayout.addNewTab(); - } - - getDashboardSceneFor(this).switchLayout(tabsLayout); - } - public editModeChanged(isEditing: boolean) { this.state.rows.forEach((row) => row.getLayout().editModeChanged?.(isEditing)); } diff --git a/public/app/features/dashboard-scene/scene/layout-tabs/TabsLayoutManager.tsx b/public/app/features/dashboard-scene/scene/layout-tabs/TabsLayoutManager.tsx index 3e2c338b26f8b..cc4bb9c5a0bee 100644 --- a/public/app/features/dashboard-scene/scene/layout-tabs/TabsLayoutManager.tsx +++ b/public/app/features/dashboard-scene/scene/layout-tabs/TabsLayoutManager.tsx @@ -95,10 +95,6 @@ export class TabsLayoutManager extends SceneObjectBase i return false; } - public addNewRow() { - this.getCurrentTab().getLayout().addNewRow(); - } - public addNewTab() { const currentTab = new TabItem(); this.setState({ tabs: [...this.state.tabs, currentTab], currentTabIndex: this.state.tabs.length }); diff --git a/public/app/features/dashboard-scene/scene/layouts-shared/addNew.ts b/public/app/features/dashboard-scene/scene/layouts-shared/addNew.ts new file mode 100644 index 0000000000000..11385b322a2b4 --- /dev/null +++ b/public/app/features/dashboard-scene/scene/layouts-shared/addNew.ts @@ -0,0 +1,45 @@ +import { SceneObject } from '@grafana/scenes'; + +import { DefaultGridLayoutManager } from '../layout-default/DefaultGridLayoutManager'; +import { RowsLayoutManager } from '../layout-rows/RowsLayoutManager'; +import { TabsLayoutManager } from '../layout-tabs/TabsLayoutManager'; +import { isLayoutParent } from '../types/LayoutParent'; + +export function addNewTabTo(sceneObject: SceneObject) { + if (sceneObject instanceof TabsLayoutManager) { + sceneObject.addNewTab(); + return; + } + + const layoutParent = sceneObject.parent!; + if (!isLayoutParent(layoutParent)) { + throw new Error('Parent layout is not a LayoutParent'); + } + + layoutParent.switchLayout(TabsLayoutManager.createFromLayout(layoutParent.getLayout())); +} + 
+export function addNewRowTo(sceneObject: SceneObject) { + if (sceneObject instanceof RowsLayoutManager) { + sceneObject.addNewRow(); + return; + } + + if (sceneObject instanceof DefaultGridLayoutManager) { + sceneObject.addNewRow(); + return; + } + + if (sceneObject instanceof TabsLayoutManager) { + const currentTab = sceneObject.getCurrentTab(); + addNewRowTo(currentTab.state.layout); + return; + } + + const layoutParent = sceneObject.parent!; + if (!isLayoutParent(layoutParent)) { + throw new Error('Parent layout is not a LayoutParent'); + } + + layoutParent.switchLayout(RowsLayoutManager.createFromLayout(layoutParent.getLayout())); +} diff --git a/public/app/features/dashboard-scene/scene/types/DashboardLayoutManager.ts b/public/app/features/dashboard-scene/scene/types/DashboardLayoutManager.ts index 6041799d8db43..53862b2578dda 100644 --- a/public/app/features/dashboard-scene/scene/types/DashboardLayoutManager.ts +++ b/public/app/features/dashboard-scene/scene/types/DashboardLayoutManager.ts @@ -44,16 +44,6 @@ export interface DashboardLayoutManager extends SceneObject { */ hasVizPanels(): boolean; - /** - * Add row - */ - addNewRow(): void; - - /** - * Add tab - */ - addNewTab(): void; - /** * Notify the layout manager that the edit mode has changed * @param isEditing From c1d9d4d15a90c330d1846441dacf61126ea7c7a5 Mon Sep 17 00:00:00 2001 From: Karl Persson <23356117+kalleep@users.noreply.github.com> Date: Tue, 25 Feb 2025 15:06:25 +0100 Subject: [PATCH 22/33] User: Handle unique constraints errors (#101274) * Handle unique constraints errors --- pkg/services/user/userimpl/store.go | 33 +++++++++++++++++++++++- pkg/services/user/userimpl/store_test.go | 13 ++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/pkg/services/user/userimpl/store.go b/pkg/services/user/userimpl/store.go index f26cf95158fcb..6ada365b1b00a 100644 --- a/pkg/services/user/userimpl/store.go +++ b/pkg/services/user/userimpl/store.go @@ -2,11 +2,15 @@ package userimpl import ( "context" + "errors" "fmt" "strconv" "strings" "time" + "github.com/go-sql-driver/mysql" + "github.com/mattn/go-sqlite3" + "github.com/grafana/grafana/pkg/events" "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/log" @@ -72,7 +76,7 @@ func (ss *sqlStore) Insert(ctx context.Context, cmd *user.User) (int64, error) { return nil }) if err != nil { - return 0, err + return 0, handleSQLError(err) } return cmd.ID, nil @@ -580,3 +584,30 @@ func setOptional[T any](v *T, add func(v T)) { add(*v) } } + +func handleSQLError(err error) error { + if isUniqueConstraintError(err) { + return user.ErrUserAlreadyExists + } + return err +} + +func isUniqueConstraintError(err error) bool { + // check mysql error code + var me *mysql.MySQLError + if errors.As(err, &me) && me.Number == 1062 { + return true + } + + // for postgres we check the error message + if strings.Contains(err.Error(), "duplicate key value") { + return true + } + + var se sqlite3.Error + if errors.As(err, &se) && se.ExtendedCode == sqlite3.ErrConstraintUnique { + return true + } + + return false +} diff --git a/pkg/services/user/userimpl/store_test.go b/pkg/services/user/userimpl/store_test.go index 9d84d64cf17d8..bca7ac13a4965 100644 --- a/pkg/services/user/userimpl/store_test.go +++ b/pkg/services/user/userimpl/store_test.go @@ -68,6 +68,19 @@ func TestIntegrationUserDataAccess(t *testing.T) { require.NoError(t, err) }) + t.Run("error on duplicated user", func(t *testing.T) { + _, err := userStore.Insert(context.Background(), + &user.User{ 
+ Email: "test@email.com", + Name: "test1", + Login: "test1", + Created: time.Now(), + Updated: time.Now(), + }, + ) + require.ErrorIs(t, err, user.ErrUserAlreadyExists) + }) + t.Run("get user", func(t *testing.T) { _, err := userStore.GetByEmail(context.Background(), &user.GetUserByEmailQuery{Email: "test@email.com"}, From d83db31a230b18ef57842ba3a9228a7b7a344cb8 Mon Sep 17 00:00:00 2001 From: Isabella Siu Date: Tue, 25 Feb 2025 09:16:55 -0500 Subject: [PATCH 23/33] Elasticsearch: Replace level in adhoc filters with level field name (#100315) Elasticsearch: replace level in adhoc filters with level field name --- .../datasource/elasticsearch/datasource.test.ts | 7 +++++++ .../plugins/datasource/elasticsearch/datasource.ts | 2 +- .../plugins/datasource/elasticsearch/modifyQuery.ts | 11 ++++++++--- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/datasource.test.ts b/public/app/plugins/datasource/elasticsearch/datasource.test.ts index 9cab4431395bb..f80d969bfb0a4 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.test.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.test.ts @@ -812,6 +812,13 @@ describe('ElasticDatasource', () => { const query = ds.addAdHocFilters('', filters); expect(query).toBe('field\\:name:/field value\\//'); }); + + it('should replace level with the log level field', () => { + const ds = createElasticDatasource({ jsonData: { logLevelField: 'level_field' } }); + const filters = [{ key: 'level', operator: '=', value: 'foo', condition: '' }]; + const query = ds.addAdHocFilters('', filters); + expect(query).toBe('level_field:"foo"'); + }); }); }); diff --git a/public/app/plugins/datasource/elasticsearch/datasource.ts b/public/app/plugins/datasource/elasticsearch/datasource.ts index 34d22b1e33ae6..bccb678babf11 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.ts @@ -1108,7 +1108,7 @@ export class ElasticDatasource } let finalQuery = query; adhocFilters.forEach((filter) => { - finalQuery = addAddHocFilter(finalQuery, filter); + finalQuery = addAddHocFilter(finalQuery, filter, this.logLevelField); }); return finalQuery; diff --git a/public/app/plugins/datasource/elasticsearch/modifyQuery.ts b/public/app/plugins/datasource/elasticsearch/modifyQuery.ts index 3971e3b139607..467d5bea494fc 100644 --- a/public/app/plugins/datasource/elasticsearch/modifyQuery.ts +++ b/public/app/plugins/datasource/elasticsearch/modifyQuery.ts @@ -83,7 +83,7 @@ function concatenate(query: string, filter: string, condition = 'AND'): string { /** * Adds a label:"value" expression to the query. */ -export function addAddHocFilter(query: string, filter: AdHocVariableFilter): string { +export function addAddHocFilter(query: string, filter: AdHocVariableFilter, logLevelField?: string): string { if (!filter.key || !filter.value) { return query; } @@ -94,15 +94,20 @@ export function addAddHocFilter(query: string, filter: AdHocVariableFilter): str value: filter.value.toString(), }; + let key = filter.key; + if (logLevelField && key === 'level') { + key = logLevelField; + } + const equalityFilters = ['=', '!=']; if (equalityFilters.includes(filter.operator)) { - return addFilterToQuery(query, filter.key, filter.value, filter.operator === '=' ? '' : '-'); + return addFilterToQuery(query, key, filter.value, filter.operator === '=' ? 
'' : '-'); } /** * Keys and values in ad hoc filters may contain characters such as * colons, which needs to be escaped. */ - const key = escapeFilter(filter.key); + key = escapeFilter(key); const value = escapeFilterValue(filter.value); const regexValue = escapeFilterValue(filter.value, false); let addHocFilter = ''; From 6eb335a8ceeedaa25d5af882c7634e81c7a05629 Mon Sep 17 00:00:00 2001 From: Alexander Akhmetov Date: Tue, 25 Feb 2025 15:49:08 +0100 Subject: [PATCH 24/33] Alerting: API to read rule groups using mimirtool (#100674) --- .../ngalert/api/api_convert_prometheus.go | 169 ++++++++++- .../api/api_convert_prometheus_test.go | 280 +++++++++++++++++- pkg/services/ngalert/api/api_provisioning.go | 4 +- pkg/services/ngalert/api/tooling/api.json | 1 + .../definitions/convert_prometheus_api.go | 20 +- pkg/services/ngalert/api/tooling/post.json | 20 +- pkg/services/ngalert/api/tooling/spec.json | 20 +- pkg/services/ngalert/models/alert_rule.go | 18 ++ pkg/services/ngalert/models/testing.go | 8 + .../ngalert/provisioning/alert_rules.go | 43 ++- .../ngalert/provisioning/alert_rules_test.go | 4 +- pkg/services/ngalert/store/alert_rule.go | 32 ++ pkg/services/ngalert/store/alert_rule_test.go | 58 ++++ pkg/services/ngalert/tests/fakes/rules.go | 3 +- .../alerting/api_convert_prometheus_test.go | 166 ++++++++--- pkg/tests/api/alerting/testing.go | 111 +++++-- public/api-merged.json | 1 + public/openapi3.json | 1 + 18 files changed, 835 insertions(+), 124 deletions(-) diff --git a/pkg/services/ngalert/api/api_convert_prometheus.go b/pkg/services/ngalert/api/api_convert_prometheus.go index c27bf9b118af2..e9dec2d44819a 100644 --- a/pkg/services/ngalert/api/api_convert_prometheus.go +++ b/pkg/services/ngalert/api/api_convert_prometheus.go @@ -1,15 +1,21 @@ package api import ( + "errors" "fmt" "net/http" "strconv" "strings" + "time" + + prommodel "github.com/prometheus/common/model" + "gopkg.in/yaml.v3" "github.com/grafana/grafana/pkg/api/response" "github.com/grafana/grafana/pkg/apimachinery/errutil" "github.com/grafana/grafana/pkg/infra/log" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" + "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/datasources" "github.com/grafana/grafana/pkg/services/folder" apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" @@ -17,6 +23,7 @@ import ( "github.com/grafana/grafana/pkg/services/ngalert/prom" "github.com/grafana/grafana/pkg/services/ngalert/provisioning" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" ) const ( @@ -39,6 +46,18 @@ func errInvalidHeaderValue(header string) error { return errInvalidHeaderValueBase.Build(errutil.TemplateData{Public: map[string]any{"Header": header}}) } +// ConvertPrometheusSrv converts Prometheus rules to Grafana rules +// and retrieves them in a Prometheus-compatible format. +// +// It is designed to support mimirtool integration, so that rules that work with Mimir +// can be imported into Grafana. It works similarly to the provisioning API, +// where once a rule group is created, it is marked as "provisioned" (via provenance mechanism) +// and is not editable in the UI. +// +// This service returns only rule groups that were initially imported from Prometheus-compatible sources. +// Rule groups not imported from Prometheus are excluded because their original rule definitions are unavailable. 
+// When a rule group is converted from Prometheus to Grafana, the original definition is preserved alongside +// the Grafana rule and used for reading requests here. type ConvertPrometheusSrv struct { cfg *setting.UnifiedAlertingSettings logger log.Logger @@ -57,27 +76,117 @@ func NewConvertPrometheusSrv(cfg *setting.UnifiedAlertingSettings, logger log.Lo } } +// RouteConvertPrometheusGetRules returns all Grafana-managed alert rules in all namespaces (folders) +// that were imported from a Prometheus-compatible source. +// It responds with a YAML containing a mapping of folders to arrays of Prometheus rule groups. func (srv *ConvertPrometheusSrv) RouteConvertPrometheusGetRules(c *contextmodel.ReqContext) response.Response { - return response.Error(501, "Not implemented", nil) + logger := srv.logger.FromContext(c.Req.Context()) + + filterOpts := &provisioning.FilterOptions{ + ImportedPrometheusRule: util.Pointer(true), + } + groups, err := srv.alertRuleService.GetAlertGroupsWithFolderFullpath(c.Req.Context(), c.SignedInUser, filterOpts) + if err != nil { + logger.Error("Failed to get alert groups", "error", err) + return errorToResponse(err) + } + + namespaces, err := grafanaNamespacesToPrometheus(groups) + if err != nil { + logger.Error("Failed to convert Grafana rules to Prometheus format", "error", err) + return errorToResponse(err) + } + + return response.YAML(http.StatusOK, namespaces) } +// RouteConvertPrometheusDeleteNamespace deletes all rule groups that were imported from a Prometheus-compatible source +// within a specified namespace. func (srv *ConvertPrometheusSrv) RouteConvertPrometheusDeleteNamespace(c *contextmodel.ReqContext, namespaceTitle string) response.Response { return response.Error(501, "Not implemented", nil) } +// RouteConvertPrometheusDeleteRuleGroup deletes a specific rule group if it was imported from a Prometheus-compatible source. func (srv *ConvertPrometheusSrv) RouteConvertPrometheusDeleteRuleGroup(c *contextmodel.ReqContext, namespaceTitle string, group string) response.Response { return response.Error(501, "Not implemented", nil) } +// RouteConvertPrometheusGetNamespace returns the Grafana-managed alert rules for a specified namespace (folder). +// It responds with a YAML containing a mapping of a single folder to an array of Prometheus rule groups. 
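Both read endpoints serialize the same shape: a map from folder full path to an array of Prometheus rule groups, marshalled as YAML. A rough, self-contained sketch of that shape, using simplified stand-in types rather than Grafana's actual `apimodels` definitions, is shown below.

```go
// Illustrative sketch only: stand-in types approximating the response shape of
// the Prometheus-conversion read endpoints. Type and field names here are
// assumptions, not the real Grafana definitions.
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

type promRule struct {
	Alert  string            `yaml:"alert"`
	Expr   string            `yaml:"expr"`
	For    string            `yaml:"for,omitempty"`
	Labels map[string]string `yaml:"labels,omitempty"`
}

type promRuleGroup struct {
	Name     string     `yaml:"name"`
	Interval string     `yaml:"interval"`
	Rules    []promRule `yaml:"rules"`
}

func main() {
	// Folder full path -> rule groups, as returned by the rules endpoints.
	namespaces := map[string][]promRuleGroup{
		"My Folder": {
			{
				Name:     "Test Group",
				Interval: "1m",
				Rules: []promRule{
					{
						Alert:  "HighErrorRate",
						Expr:   "vector(1) > 0",
						For:    "5m",
						Labels: map[string]string{"severity": "critical"},
					},
				},
			},
		},
	}

	out, err := yaml.Marshal(namespaces)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```

Running this prints a YAML document of the same general form as the response for a single folder containing one imported group.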
func (srv *ConvertPrometheusSrv) RouteConvertPrometheusGetNamespace(c *contextmodel.ReqContext, namespaceTitle string) response.Response { - return response.Error(501, "Not implemented", nil) + logger := srv.logger.FromContext(c.Req.Context()) + + logger.Debug("Looking up folder in the root by title", "folder_title", namespaceTitle) + namespace, err := srv.ruleStore.GetNamespaceInRootByTitle(c.Req.Context(), namespaceTitle, c.SignedInUser.GetOrgID(), c.SignedInUser) + if err != nil { + logger.Error("Failed to get folder", "error", err) + return namespaceErrorResponse(err) + } + + filterOpts := &provisioning.FilterOptions{ + ImportedPrometheusRule: util.Pointer(true), + NamespaceUIDs: []string{namespace.UID}, + } + groups, err := srv.alertRuleService.GetAlertGroupsWithFolderFullpath(c.Req.Context(), c.SignedInUser, filterOpts) + if err != nil { + logger.Error("Failed to get alert groups", "error", err) + return errorToResponse(err) + } + + ns, err := grafanaNamespacesToPrometheus(groups) + if err != nil { + logger.Error("Failed to convert Grafana rules to Prometheus format", "error", err) + return errorToResponse(err) + } + + return response.YAML(http.StatusOK, ns) } +// RouteConvertPrometheusGetRuleGroup retrieves a single rule group for a given namespace (folder) +// in Prometheus-compatible YAML format if it was imported from a Prometheus-compatible source. func (srv *ConvertPrometheusSrv) RouteConvertPrometheusGetRuleGroup(c *contextmodel.ReqContext, namespaceTitle string, group string) response.Response { - // Just to make the mimirtool rules load work. It first checks if the group exists, and if the endpoint returns 501 it fails. - return response.YAML(http.StatusOK, apimodels.PrometheusRuleGroup{}) + logger := srv.logger.FromContext(c.Req.Context()) + + logger.Debug("Looking up folder in the root by title", "folder_title", namespaceTitle) + namespace, err := srv.ruleStore.GetNamespaceInRootByTitle(c.Req.Context(), namespaceTitle, c.SignedInUser.GetOrgID(), c.SignedInUser) + if err != nil { + logger.Error("Failed to get folder", "error", err) + return namespaceErrorResponse(err) + } + + filterOpts := &provisioning.FilterOptions{ + ImportedPrometheusRule: util.Pointer(true), + NamespaceUIDs: []string{namespace.UID}, + RuleGroups: []string{group}, + } + groupsWithFolders, err := srv.alertRuleService.GetAlertGroupsWithFolderFullpath(c.Req.Context(), c.SignedInUser, filterOpts) + if err != nil { + logger.Error("Failed to get alert group", "error", err) + return errorToResponse(err) + } + if len(groupsWithFolders) == 0 { + return response.Error(http.StatusNotFound, "Rule group not found", nil) + } + if len(groupsWithFolders) > 1 { + logger.Error("Multiple rule groups found when only one was expected", "folder_title", namespaceTitle, "group", group) + // It shouldn't happen, but if we get more than 1 group, we return an error. + return response.Error(http.StatusInternalServerError, "Multiple rule groups found", nil) + } + + promGroup, err := grafanaRuleGroupToPrometheus(groupsWithFolders[0].Title, groupsWithFolders[0].Rules) + if err != nil { + logger.Error("Failed to convert Grafana rule to Prometheus format", "error", err) + return errorToResponse(err) + } + + return response.YAML(http.StatusOK, promGroup) } +// RouteConvertPrometheusPostRuleGroup converts a Prometheus rule group into a Grafana rule group +// and creates or updates it within the specified namespace (folder). 
+// +// If the group already exists and was not imported from a Prometheus-compatible source initially, +// it will not be replaced and an error will be returned. func (srv *ConvertPrometheusSrv) RouteConvertPrometheusPostRuleGroup(c *contextmodel.ReqContext, namespaceTitle string, promGroup apimodels.PrometheusRuleGroup) response.Response { logger := srv.logger.FromContext(c.Req.Context()) logger = logger.New("folder_title", namespaceTitle, "group", promGroup.Name) @@ -101,6 +210,7 @@ func (srv *ConvertPrometheusSrv) RouteConvertPrometheusPostRuleGroup(c *contextm group, err := srv.convertToGrafanaRuleGroup(c, ds, ns.UID, promGroup, logger) if err != nil { + logger.Error("Failed to convert Prometheus rules to Grafana rules", "error", err) return errorToResponse(err) } @@ -202,3 +312,54 @@ func parseBooleanHeader(header string, headerName string) (bool, error) { } return val, nil } + +func grafanaNamespacesToPrometheus(groups []models.AlertRuleGroupWithFolderFullpath) (map[string][]apimodels.PrometheusRuleGroup, error) { + result := map[string][]apimodels.PrometheusRuleGroup{} + + for _, group := range groups { + promGroup, err := grafanaRuleGroupToPrometheus(group.Title, group.Rules) + if err != nil { + return nil, err + } + result[group.FolderFullpath] = append(result[group.FolderFullpath], promGroup) + } + + return result, nil +} + +func grafanaRuleGroupToPrometheus(group string, rules []models.AlertRule) (apimodels.PrometheusRuleGroup, error) { + if len(rules) == 0 { + return apimodels.PrometheusRuleGroup{}, nil + } + + interval := time.Duration(rules[0].IntervalSeconds) * time.Second + promGroup := apimodels.PrometheusRuleGroup{ + Name: group, + Interval: prommodel.Duration(interval), + Rules: make([]apimodels.PrometheusRule, len(rules)), + } + + for i, rule := range rules { + promDefinition := rule.PrometheusRuleDefinition() + if promDefinition == "" { + return apimodels.PrometheusRuleGroup{}, fmt.Errorf("failed to get the Prometheus definition of the rule with UID %s", rule.UID) + } + var r apimodels.PrometheusRule + if err := yaml.Unmarshal([]byte(promDefinition), &r); err != nil { + return apimodels.PrometheusRuleGroup{}, fmt.Errorf("failed to unmarshal Prometheus rule definition of the rule with UID %s: %w", rule.UID, err) + } + promGroup.Rules[i] = r + } + + return promGroup, nil +} + +func namespaceErrorResponse(err error) response.Response { + if errors.Is(err, dashboards.ErrFolderAccessDenied) { + // If there is no such folder, the error is ErrFolderAccessDenied. + // We should return 404 in this case, otherwise mimirtool does not work correctly. 
+ return response.Empty(http.StatusNotFound) + } + + return toNamespaceErrorResponse(err) +} diff --git a/pkg/services/ngalert/api/api_convert_prometheus_test.go b/pkg/services/ngalert/api/api_convert_prometheus_test.go index 8dea6f3f17816..05f8434733436 100644 --- a/pkg/services/ngalert/api/api_convert_prometheus_test.go +++ b/pkg/services/ngalert/api/api_convert_prometheus_test.go @@ -1,6 +1,7 @@ package api import ( + "context" "net/http" "net/http/httptest" "testing" @@ -8,14 +9,17 @@ import ( prommodel "github.com/prometheus/common/model" "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" "github.com/grafana/grafana/pkg/infra/log" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" "github.com/grafana/grafana/pkg/services/datasources" dsfakes "github.com/grafana/grafana/pkg/services/datasources/fakes" + "github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/services/folder/foldertest" acfakes "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes" apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" + "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/services/ngalert/provisioning" "github.com/grafana/grafana/pkg/services/ngalert/tests/fakes" "github.com/grafana/grafana/pkg/services/user" @@ -45,7 +49,7 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { } t.Run("without datasource UID header should return 400", func(t *testing.T) { - srv, _ := createConvertPrometheusSrv(t) + srv, _, _, _ := createConvertPrometheusSrv(t) rc := createRequestCtx() rc.Req.Header.Set(datasourceUIDHeader, "") @@ -56,7 +60,7 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { }) t.Run("with invalid datasource should return error", func(t *testing.T) { - srv, _ := createConvertPrometheusSrv(t) + srv, _, _, _ := createConvertPrometheusSrv(t) rc := createRequestCtx() rc.Req.Header.Set(datasourceUIDHeader, "non-existing-ds") @@ -66,7 +70,7 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { }) t.Run("with rule group without evaluation interval should return 202", func(t *testing.T) { - srv, _ := createConvertPrometheusSrv(t) + srv, _, _, _ := createConvertPrometheusSrv(t) rc := createRequestCtx() response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", simpleGroup) @@ -103,7 +107,7 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - srv, _ := createConvertPrometheusSrv(t) + srv, _, _, _ := createConvertPrometheusSrv(t) rc := createRequestCtx() rc.Req.Header.Set(tc.headerName, tc.headerValue) @@ -136,7 +140,7 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - srv, _ := createConvertPrometheusSrv(t) + srv, _, _, _ := createConvertPrometheusSrv(t) rc := createRequestCtx() rc.Req.Header.Set(tc.headerName, tc.headerValue) @@ -148,7 +152,7 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { }) t.Run("with valid request should return 202", func(t *testing.T) { - srv, _ := createConvertPrometheusSrv(t) + srv, _, _, _ := createConvertPrometheusSrv(t) rc := createRequestCtx() response := srv.RouteConvertPrometheusPostRuleGroup(rc, "test", simpleGroup) @@ -156,7 +160,267 @@ func TestRouteConvertPrometheusPostRuleGroup(t *testing.T) { }) } -func createConvertPrometheusSrv(t *testing.T) (*ConvertPrometheusSrv, datasources.CacheService) { +func 
TestRouteConvertPrometheusGetRuleGroup(t *testing.T) { + promRule := apimodels.PrometheusRule{ + Alert: "test alert", + Expr: "vector(1) > 0", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "critical", + }, + Annotations: map[string]string{ + "summary": "test alert", + }, + } + promRuleYAML, err := yaml.Marshal(promRule) + require.NoError(t, err) + + t.Run("with non-existent folder should return 404", func(t *testing.T) { + srv, _, _, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + response := srv.RouteConvertPrometheusGetRuleGroup(rc, "non-existent", "test") + require.Equal(t, http.StatusNotFound, response.Status(), string(response.Body())) + }) + + t.Run("with non-existent group should return 404", func(t *testing.T) { + srv, _, _, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + response := srv.RouteConvertPrometheusGetRuleGroup(rc, "test", "non-existent") + require.Equal(t, http.StatusNotFound, response.Status(), string(response.Body())) + }) + + t.Run("with valid request should return 200", func(t *testing.T) { + srv, _, ruleStore, folderService := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + // Create two folders in the root folder + fldr := randFolder() + fldr.ParentUID = "" + folderService.ExpectedFolder = fldr + folderService.ExpectedFolders = []*folder.Folder{fldr} + ruleStore.Folders[1] = append(ruleStore.Folders[1], fldr) + + // Create rules in both folders + groupKey := models.GenerateGroupKey(rc.SignedInUser.OrgID) + groupKey.NamespaceUID = fldr.UID + groupKey.RuleGroup = "test-group" + rule := models.RuleGen. + With(models.RuleGen.WithGroupKey(groupKey)). + With(models.RuleGen.WithTitle("TestAlert")). + With(models.RuleGen.WithIntervalSeconds(60)). + With(models.RuleGen.WithPrometheusOriginalRuleDefinition(string(promRuleYAML))). + GenerateRef() + ruleStore.PutRule(context.Background(), rule) + + // Create a rule in another group + groupKeyNotFromProm := models.GenerateGroupKey(rc.SignedInUser.OrgID) + groupKeyNotFromProm.NamespaceUID = fldr.UID + groupKeyNotFromProm.RuleGroup = "test-group-2" + ruleInOtherFolder := models.RuleGen. + With(models.RuleGen.WithGroupKey(groupKeyNotFromProm)). + With(models.RuleGen.WithTitle("in another group")). + With(models.RuleGen.WithIntervalSeconds(60)). 
+ GenerateRef() + ruleStore.PutRule(context.Background(), ruleInOtherFolder) + + getResp := srv.RouteConvertPrometheusGetRuleGroup(rc, fldr.Title, groupKey.RuleGroup) + require.Equal(t, http.StatusOK, getResp.Status()) + + var respGroup apimodels.PrometheusRuleGroup + err := yaml.Unmarshal(getResp.Body(), &respGroup) + require.NoError(t, err) + + require.Equal(t, groupKey.RuleGroup, respGroup.Name) + require.Equal(t, prommodel.Duration(time.Duration(rule.IntervalSeconds)*time.Second), respGroup.Interval) + require.Len(t, respGroup.Rules, 1) + require.Equal(t, promRule.Alert, respGroup.Rules[0].Alert) + }) +} + +func TestRouteConvertPrometheusGetNamespace(t *testing.T) { + promRule1 := apimodels.PrometheusRule{ + Alert: "test alert", + Expr: "vector(1) > 0", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "critical", + }, + Annotations: map[string]string{ + "summary": "test alert", + }, + } + + promRule2 := apimodels.PrometheusRule{ + Alert: "test alert 2", + Expr: "vector(1) > 0", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "also critical", + }, + Annotations: map[string]string{ + "summary": "test alert 2", + }, + } + + promGroup1 := apimodels.PrometheusRuleGroup{ + Name: "Test Group", + Interval: prommodel.Duration(1 * time.Minute), + Rules: []apimodels.PrometheusRule{ + promRule1, + }, + } + promGroup2 := apimodels.PrometheusRuleGroup{ + Name: "Test Group 2", + Interval: prommodel.Duration(1 * time.Minute), + Rules: []apimodels.PrometheusRule{ + promRule2, + }, + } + + t.Run("with non-existent folder should return 404", func(t *testing.T) { + srv, _, _, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + response := srv.RouteConvertPrometheusGetNamespace(rc, "non-existent") + require.Equal(t, http.StatusNotFound, response.Status()) + }) + + t.Run("with valid request should return 200", func(t *testing.T) { + srv, _, ruleStore, folderService := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + // Create two folders in the root folder + fldr := randFolder() + fldr.ParentUID = "" + fldr2 := randFolder() + fldr2.ParentUID = "" + folderService.ExpectedFolders = []*folder.Folder{fldr, fldr2} + ruleStore.Folders[1] = append(ruleStore.Folders[1], fldr, fldr2) + + // Create a Grafana rule for each Prometheus rule + for _, promGroup := range []apimodels.PrometheusRuleGroup{promGroup1, promGroup2} { + groupKey := models.GenerateGroupKey(rc.SignedInUser.OrgID) + groupKey.NamespaceUID = fldr.UID + groupKey.RuleGroup = promGroup.Name + promRuleYAML, err := yaml.Marshal(promGroup.Rules[0]) + require.NoError(t, err) + rule := models.RuleGen. + With(models.RuleGen.WithGroupKey(groupKey)). + With(models.RuleGen.WithTitle(promGroup.Rules[0].Alert)). + With(models.RuleGen.WithIntervalSeconds(60)). + With(models.RuleGen.WithPrometheusOriginalRuleDefinition(string(promRuleYAML))). 
+ GenerateRef() + ruleStore.PutRule(context.Background(), rule) + } + + response := srv.RouteConvertPrometheusGetNamespace(rc, fldr.Title) + require.Equal(t, http.StatusOK, response.Status()) + + var respNamespaces map[string][]apimodels.PrometheusRuleGroup + err := yaml.Unmarshal(response.Body(), &respNamespaces) + require.NoError(t, err) + + require.Len(t, respNamespaces, 1) + require.Contains(t, respNamespaces, fldr.Fullpath) + require.ElementsMatch(t, respNamespaces[fldr.Fullpath], []apimodels.PrometheusRuleGroup{promGroup1, promGroup2}) + }) +} + +func TestRouteConvertPrometheusGetRules(t *testing.T) { + promRule1 := apimodels.PrometheusRule{ + Alert: "test alert", + Expr: "vector(1) > 0", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "critical", + }, + Annotations: map[string]string{ + "summary": "test alert", + }, + } + + promRule2 := apimodels.PrometheusRule{ + Alert: "test alert 2", + Expr: "vector(1) > 0", + For: util.Pointer(prommodel.Duration(5 * time.Minute)), + Labels: map[string]string{ + "severity": "also critical", + }, + Annotations: map[string]string{ + "summary": "test alert 2", + }, + } + + promGroup1 := apimodels.PrometheusRuleGroup{ + Name: "Test Group", + Interval: prommodel.Duration(1 * time.Minute), + Rules: []apimodels.PrometheusRule{ + promRule1, + }, + } + promGroup2 := apimodels.PrometheusRuleGroup{ + Name: "Test Group 2", + Interval: prommodel.Duration(1 * time.Minute), + Rules: []apimodels.PrometheusRule{ + promRule2, + }, + } + + t.Run("with no rules should return empty response", func(t *testing.T) { + srv, _, _, _ := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + response := srv.RouteConvertPrometheusGetRules(rc) + require.Equal(t, http.StatusOK, response.Status()) + + var respNamespaces map[string][]apimodels.PrometheusRuleGroup + err := yaml.Unmarshal(response.Body(), &respNamespaces) + require.NoError(t, err) + require.Empty(t, respNamespaces) + }) + + t.Run("with rules should return 200 with rules", func(t *testing.T) { + srv, _, ruleStore, folderService := createConvertPrometheusSrv(t) + rc := createRequestCtx() + + // Create a folder in the root + fldr := randFolder() + fldr.ParentUID = "" + folderService.ExpectedFolders = []*folder.Folder{fldr} + ruleStore.Folders[1] = append(ruleStore.Folders[1], fldr) + + // Create a Grafana rule for each Prometheus rule + for _, promGroup := range []apimodels.PrometheusRuleGroup{promGroup1, promGroup2} { + groupKey := models.GenerateGroupKey(rc.SignedInUser.OrgID) + groupKey.NamespaceUID = fldr.UID + groupKey.RuleGroup = promGroup.Name + promRuleYAML, err := yaml.Marshal(promGroup.Rules[0]) + require.NoError(t, err) + rule := models.RuleGen. + With(models.RuleGen.WithGroupKey(groupKey)). + With(models.RuleGen.WithTitle(promGroup.Rules[0].Alert)). + With(models.RuleGen.WithIntervalSeconds(60)). + With(models.RuleGen.WithPrometheusOriginalRuleDefinition(string(promRuleYAML))). 
+ GenerateRef() + ruleStore.PutRule(context.Background(), rule) + } + + response := srv.RouteConvertPrometheusGetRules(rc) + require.Equal(t, http.StatusOK, response.Status()) + + var respNamespaces map[string][]apimodels.PrometheusRuleGroup + err := yaml.Unmarshal(response.Body(), &respNamespaces) + require.NoError(t, err) + + require.Len(t, respNamespaces, 1) + require.Contains(t, respNamespaces, fldr.Fullpath) + require.ElementsMatch(t, respNamespaces[fldr.Fullpath], []apimodels.PrometheusRuleGroup{promGroup1, promGroup2}) + }) +} + +func createConvertPrometheusSrv(t *testing.T) (*ConvertPrometheusSrv, datasources.CacheService, *fakes.RuleStore, *foldertest.FakeService) { t.Helper() ruleStore := fakes.NewRuleStore(t) @@ -195,7 +459,7 @@ func createConvertPrometheusSrv(t *testing.T) (*ConvertPrometheusSrv, datasource srv := NewConvertPrometheusSrv(cfg, log.NewNopLogger(), ruleStore, dsCache, alertRuleService) - return srv, dsCache + return srv, dsCache, ruleStore, folderService } func createRequestCtx() *contextmodel.ReqContext { diff --git a/pkg/services/ngalert/api/api_provisioning.go b/pkg/services/ngalert/api/api_provisioning.go index 06805da5b198f..6eb8f38a3c40c 100644 --- a/pkg/services/ngalert/api/api_provisioning.go +++ b/pkg/services/ngalert/api/api_provisioning.go @@ -77,7 +77,7 @@ type AlertRuleService interface { DeleteRuleGroup(ctx context.Context, user identity.Requester, folder, group string, provenance alerting_models.Provenance) error GetAlertRuleWithFolderFullpath(ctx context.Context, u identity.Requester, ruleUID string) (provisioning.AlertRuleWithFolderFullpath, error) GetAlertRuleGroupWithFolderFullpath(ctx context.Context, u identity.Requester, folder, group string) (alerting_models.AlertRuleGroupWithFolderFullpath, error) - GetAlertGroupsWithFolderFullpath(ctx context.Context, u identity.Requester, folderUIDs []string) ([]alerting_models.AlertRuleGroupWithFolderFullpath, error) + GetAlertGroupsWithFolderFullpath(ctx context.Context, u identity.Requester, opts *provisioning.FilterOptions) ([]alerting_models.AlertRuleGroupWithFolderFullpath, error) } func (srv *ProvisioningSrv) RouteGetPolicyTree(c *contextmodel.ReqContext) response.Response { @@ -452,7 +452,7 @@ func (srv *ProvisioningSrv) RouteGetAlertRulesExport(c *contextmodel.ReqContext) return srv.RouteGetAlertRuleGroupExport(c, folderUIDs[0], group) } - groupsWithFullpath, err := srv.alertRules.GetAlertGroupsWithFolderFullpath(c.Req.Context(), c.SignedInUser, folderUIDs) + groupsWithFullpath, err := srv.alertRules.GetAlertGroupsWithFolderFullpath(c.Req.Context(), c.SignedInUser, &provisioning.FilterOptions{NamespaceUIDs: folderUIDs}) if err != nil { return response.ErrOrFallback(http.StatusInternalServerError, "failed to get alert rules", err) } diff --git a/pkg/services/ngalert/api/tooling/api.json b/pkg/services/ngalert/api/tooling/api.json index 0a2fb0af0d4d0..7fc64a8291f67 100644 --- a/pkg/services/ngalert/api/tooling/api.json +++ b/pkg/services/ngalert/api/tooling/api.json @@ -4932,6 +4932,7 @@ "type": "object" }, "gettableAlerts": { + "description": "GettableAlerts gettable alerts", "items": { "$ref": "#/definitions/gettableAlert", "type": "object" diff --git a/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go b/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go index 5b6454d1d8477..9442ba9fc0fb7 100644 --- a/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go +++ b/pkg/services/ngalert/api/tooling/definitions/convert_prometheus_api.go @@ -6,10 
+6,10 @@ import ( // swagger:route GET /convert/prometheus/config/v1/rules convert_prometheus RouteConvertPrometheusGetRules // -// Gets all namespaces with their rule groups in Prometheus format. +// Gets all Grafana-managed alert rules that were imported from Prometheus-compatible sources, grouped by namespace. // // Produces: -// - application/json +// - application/yaml // // Responses: // 200: PrometheusNamespace @@ -18,10 +18,10 @@ import ( // swagger:route GET /convert/prometheus/config/v1/rules/{NamespaceTitle} convert_prometheus RouteConvertPrometheusGetNamespace // -// Gets rules in prometheus format for a given namespace. +// Gets Grafana-managed alert rules that were imported from Prometheus-compatible sources for a specified namespace (folder). // // Produces: -// - application/json +// - application/yaml // // Responses: // 200: PrometheusNamespace @@ -30,10 +30,10 @@ import ( // swagger:route GET /convert/prometheus/config/v1/rules/{NamespaceTitle}/{Group} convert_prometheus RouteConvertPrometheusGetRuleGroup // -// Gets a rule group in Prometheus format. +// Gets a single rule group in Prometheus-compatible format if it was imported from a Prometheus-compatible source. // // Produces: -// - application/json +// - application/yaml // // Responses: // 200: PrometheusRuleGroup @@ -42,7 +42,9 @@ import ( // swagger:route POST /convert/prometheus/config/v1/rules/{NamespaceTitle} convert_prometheus RouteConvertPrometheusPostRuleGroup // -// Creates or updates a rule group in Prometheus format. +// Converts a Prometheus rule group into a Grafana rule group and creates or updates it within the specified namespace. +// If the group already exists and was not imported from a Prometheus-compatible source initially, +// it will not be replaced and an error will be returned. // // Consumes: // - application/yaml @@ -59,7 +61,7 @@ import ( // swagger:route DELETE /convert/prometheus/config/v1/rules/{NamespaceTitle} convert_prometheus RouteConvertPrometheusDeleteNamespace // -// Deletes all rule groups in the given namespace. +// Deletes all rule groups that were imported from Prometheus-compatible sources within the specified namespace. // // Produces: // - application/json @@ -70,7 +72,7 @@ import ( // swagger:route DELETE /convert/prometheus/config/v1/rules/{NamespaceTitle}/{Group} convert_prometheus RouteConvertPrometheusDeleteRuleGroup // -// Deletes a rule group in Prometheus format. +// Deletes a specific rule group if it was imported from a Prometheus-compatible source. 
// // Produces: // - application/json diff --git a/pkg/services/ngalert/api/tooling/post.json b/pkg/services/ngalert/api/tooling/post.json index 825af6c52afae..b48c13e2d7eb9 100644 --- a/pkg/services/ngalert/api/tooling/post.json +++ b/pkg/services/ngalert/api/tooling/post.json @@ -4770,7 +4770,6 @@ "type": "object" }, "alertGroups": { - "description": "AlertGroups alert groups", "items": { "$ref": "#/definitions/alertGroup", "type": "object" @@ -6403,7 +6402,7 @@ "get": { "operationId": "RouteConvertPrometheusGetRules", "produces": [ - "application/json" + "application/yaml" ], "responses": { "200": { @@ -6425,7 +6424,7 @@ } } }, - "summary": "Gets all namespaces with their rule groups in Prometheus format.", + "summary": "Gets all Grafana-managed alert rules that were imported from Prometheus-compatible sources, grouped by namespace.", "tags": [ "convert_prometheus" ] @@ -6459,7 +6458,7 @@ } } }, - "summary": "Deletes all rule groups in the given namespace.", + "summary": "Deletes all rule groups that were imported from Prometheus-compatible sources within the specified namespace.", "tags": [ "convert_prometheus" ] @@ -6475,7 +6474,7 @@ } ], "produces": [ - "application/json" + "application/yaml" ], "responses": { "200": { @@ -6497,7 +6496,7 @@ } } }, - "summary": "Gets rules in prometheus format for a given namespace.", + "summary": "Gets Grafana-managed alert rules that were imported from Prometheus-compatible sources for a specified namespace (folder).", "tags": [ "convert_prometheus" ] @@ -6506,6 +6505,7 @@ "consumes": [ "application/yaml" ], + "description": "If the group already exists and was not imported from a Prometheus-compatible source initially,\nit will not be replaced and an error will be returned.", "operationId": "RouteConvertPrometheusPostRuleGroup", "parameters": [ { @@ -6554,7 +6554,7 @@ } } }, - "summary": "Creates or updates a rule group in Prometheus format.", + "summary": "Converts a Prometheus rule group into a Grafana rule group and creates or updates it within the specified namespace.", "tags": [ "convert_prometheus" ], @@ -6595,7 +6595,7 @@ } } }, - "summary": "Deletes a rule group in Prometheus format.", + "summary": "Deletes a specific rule group if it was imported from a Prometheus-compatible source.", "tags": [ "convert_prometheus" ] @@ -6617,7 +6617,7 @@ } ], "produces": [ - "application/json" + "application/yaml" ], "responses": { "200": { @@ -6639,7 +6639,7 @@ } } }, - "summary": "Gets a rule group in Prometheus format.", + "summary": "Gets a single rule group in Prometheus-compatible format if it was imported from a Prometheus-compatible source.", "tags": [ "convert_prometheus" ] diff --git a/pkg/services/ngalert/api/tooling/spec.json b/pkg/services/ngalert/api/tooling/spec.json index e8f3f798fdae2..571a497cc6b72 100644 --- a/pkg/services/ngalert/api/tooling/spec.json +++ b/pkg/services/ngalert/api/tooling/spec.json @@ -1105,12 +1105,12 @@ "/convert/prometheus/config/v1/rules": { "get": { "produces": [ - "application/json" + "application/yaml" ], "tags": [ "convert_prometheus" ], - "summary": "Gets all namespaces with their rule groups in Prometheus format.", + "summary": "Gets all Grafana-managed alert rules that were imported from Prometheus-compatible sources, grouped by namespace.", "operationId": "RouteConvertPrometheusGetRules", "responses": { "200": { @@ -1137,12 +1137,12 @@ "/convert/prometheus/config/v1/rules/{NamespaceTitle}": { "get": { "produces": [ - "application/json" + "application/yaml" ], "tags": [ "convert_prometheus" ], - "summary": 
"Gets rules in prometheus format for a given namespace.", + "summary": "Gets Grafana-managed alert rules that were imported from Prometheus-compatible sources for a specified namespace (folder).", "operationId": "RouteConvertPrometheusGetNamespace", "parameters": [ { @@ -1174,6 +1174,7 @@ } }, "post": { + "description": "If the group already exists and was not imported from a Prometheus-compatible source initially,\nit will not be replaced and an error will be returned.", "consumes": [ "application/yaml" ], @@ -1183,7 +1184,7 @@ "tags": [ "convert_prometheus" ], - "summary": "Creates or updates a rule group in Prometheus format.", + "summary": "Converts a Prometheus rule group into a Grafana rule group and creates or updates it within the specified namespace.", "operationId": "RouteConvertPrometheusPostRuleGroup", "parameters": [ { @@ -1238,7 +1239,7 @@ "tags": [ "convert_prometheus" ], - "summary": "Deletes all rule groups in the given namespace.", + "summary": "Deletes all rule groups that were imported from Prometheus-compatible sources within the specified namespace.", "operationId": "RouteConvertPrometheusDeleteNamespace", "parameters": [ { @@ -1267,12 +1268,12 @@ "/convert/prometheus/config/v1/rules/{NamespaceTitle}/{Group}": { "get": { "produces": [ - "application/json" + "application/yaml" ], "tags": [ "convert_prometheus" ], - "summary": "Gets a rule group in Prometheus format.", + "summary": "Gets a single rule group in Prometheus-compatible format if it was imported from a Prometheus-compatible source.", "operationId": "RouteConvertPrometheusGetRuleGroup", "parameters": [ { @@ -1316,7 +1317,7 @@ "tags": [ "convert_prometheus" ], - "summary": "Deletes a rule group in Prometheus format.", + "summary": "Deletes a specific rule group if it was imported from a Prometheus-compatible source.", "operationId": "RouteConvertPrometheusDeleteRuleGroup", "parameters": [ { @@ -8710,7 +8711,6 @@ } }, "alertGroups": { - "description": "AlertGroups alert groups", "type": "array", "items": { "type": "object", diff --git a/pkg/services/ngalert/models/alert_rule.go b/pkg/services/ngalert/models/alert_rule.go index 66be69d0079ff..bef50b1b4e70e 100644 --- a/pkg/services/ngalert/models/alert_rule.go +++ b/pkg/services/ngalert/models/alert_rule.go @@ -394,6 +394,22 @@ func WithoutInternalLabels() LabelOption { } } +func (alertRule *AlertRule) ImportedFromPrometheus() bool { + if alertRule.Metadata.PrometheusStyleRule == nil { + return false + } + + return alertRule.Metadata.PrometheusStyleRule.OriginalRuleDefinition != "" +} + +func (alertRule *AlertRule) PrometheusRuleDefinition() string { + if !alertRule.ImportedFromPrometheus() { + return "" + } + + return alertRule.Metadata.PrometheusStyleRule.OriginalRuleDefinition +} + // GetLabels returns the labels specified as part of the alert rule. 
func (alertRule *AlertRule) GetLabels(opts ...LabelOption) map[string]string { labels := alertRule.Labels @@ -806,6 +822,8 @@ type ListAlertRulesQuery struct { ReceiverName string TimeIntervalName string + + ImportedPrometheusRule *bool } // CountAlertRulesQuery is the query for counting alert rules diff --git a/pkg/services/ngalert/models/testing.go b/pkg/services/ngalert/models/testing.go index 04869e537500a..6d5530570009a 100644 --- a/pkg/services/ngalert/models/testing.go +++ b/pkg/services/ngalert/models/testing.go @@ -216,6 +216,14 @@ func (a *AlertRuleMutators) WithEditorSettingsSimplifiedNotificationsSection(ena } } +func (a *AlertRuleMutators) WithPrometheusOriginalRuleDefinition(definition string) AlertRuleMutator { + return func(rule *AlertRule) { + rule.Metadata.PrometheusStyleRule = &PrometheusStyleRule{ + OriginalRuleDefinition: definition, + } + } +} + func (a *AlertRuleMutators) WithGroupIndex(groupIndex int) AlertRuleMutator { return func(rule *AlertRule) { rule.RuleGroupIndex = groupIndex diff --git a/pkg/services/ngalert/provisioning/alert_rules.go b/pkg/services/ngalert/provisioning/alert_rules.go index e58e674eff231..7e6f838727344 100644 --- a/pkg/services/ngalert/provisioning/alert_rules.go +++ b/pkg/services/ngalert/provisioning/alert_rules.go @@ -262,12 +262,41 @@ func (service *AlertRuleService) CreateAlertRule(ctx context.Context, user ident return rule, nil } +// FilterOptions provides filtering for alert rule queries. +// All fields are optional and will be applied as filters if provided. +type FilterOptions struct { + ImportedPrometheusRule *bool + RuleGroups []string + NamespaceUIDs []string +} + +func (opts *FilterOptions) apply(q models.ListAlertRulesQuery) models.ListAlertRulesQuery { + if opts == nil { + return q + } + + if opts.ImportedPrometheusRule != nil { + q.ImportedPrometheusRule = opts.ImportedPrometheusRule + } + + if len(opts.NamespaceUIDs) > 0 { + q.NamespaceUIDs = opts.NamespaceUIDs + } + + if len(opts.RuleGroups) > 0 { + q.RuleGroups = opts.RuleGroups + } + + return q +} + func (service *AlertRuleService) GetRuleGroup(ctx context.Context, user identity.Requester, namespaceUID, group string) (models.AlertRuleGroup, error) { q := models.ListAlertRulesQuery{ OrgID: user.GetOrgID(), NamespaceUIDs: []string{namespaceUID}, RuleGroups: []string{group}, } + ruleList, err := service.ruleStore.ListAlertRules(ctx, &q) if err != nil { return models.AlertRuleGroup{}, err @@ -748,15 +777,17 @@ func (service *AlertRuleService) GetAlertRuleGroupWithFolderFullpath(ctx context return res, nil } -// GetAlertGroupsWithFolderFullpath returns all groups with folder's full path in the folders identified by folderUID that have at least one alert. If argument folderUIDs is nil or empty - returns groups in all folders. -func (service *AlertRuleService) GetAlertGroupsWithFolderFullpath(ctx context.Context, user identity.Requester, folderUIDs []string) ([]models.AlertRuleGroupWithFolderFullpath, error) { +// GetAlertGroupsWithFolderFullpath returns all groups that have at least one alert with the full folder path for each group. + +// It queries all alert rules for the user's organization, applies optional filtering specified in filterOpts, +// and groups the rules by groups. The function then fetches folder details (including the full path) +// for each namespace (folder UID) associated with the rule groups. If the user lacks blanket read permissions, +// only the groups that the user is authorized to view are returned. 
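The `FilterOptions` type above follows a simple optional-filter pattern: nil or empty fields leave the query untouched, while set fields narrow it. A condensed, runnable sketch of the same idea with stand-in types (not the real `ngalert` models):

```go
// Minimal sketch of the optional-filter pattern: only fields that are set
// modify the query. The structs are simplified stand-ins for illustration.
package main

import "fmt"

type listQuery struct {
	OrgID                  int64
	NamespaceUIDs          []string
	RuleGroups             []string
	ImportedPrometheusRule *bool
}

type filterOptions struct {
	ImportedPrometheusRule *bool
	NamespaceUIDs          []string
	RuleGroups             []string
}

func (o *filterOptions) apply(q listQuery) listQuery {
	if o == nil {
		return q
	}
	if o.ImportedPrometheusRule != nil {
		q.ImportedPrometheusRule = o.ImportedPrometheusRule
	}
	if len(o.NamespaceUIDs) > 0 {
		q.NamespaceUIDs = o.NamespaceUIDs
	}
	if len(o.RuleGroups) > 0 {
		q.RuleGroups = o.RuleGroups
	}
	return q
}

func main() {
	imported := true
	q := listQuery{OrgID: 1}
	// Narrow the query to imported rules in a single folder.
	q = (&filterOptions{
		ImportedPrometheusRule: &imported,
		NamespaceUIDs:          []string{"folder-uid"},
	}).apply(q)
	fmt.Printf("%+v\n", q)
}
```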
+func (service *AlertRuleService) GetAlertGroupsWithFolderFullpath(ctx context.Context, user identity.Requester, filterOpts *FilterOptions) ([]models.AlertRuleGroupWithFolderFullpath, error) { q := models.ListAlertRulesQuery{ OrgID: user.GetOrgID(), } - - if len(folderUIDs) > 0 { - q.NamespaceUIDs = folderUIDs - } + q = filterOpts.apply(q) ruleList, err := service.ruleStore.ListAlertRules(ctx, &q) if err != nil { diff --git a/pkg/services/ngalert/provisioning/alert_rules_test.go b/pkg/services/ngalert/provisioning/alert_rules_test.go index 88ff3dcd8fd24..3a10b26fd3901 100644 --- a/pkg/services/ngalert/provisioning/alert_rules_test.go +++ b/pkg/services/ngalert/provisioning/alert_rules_test.go @@ -1644,7 +1644,7 @@ func TestProvisiongWithFullpath(t *testing.T) { require.NoError(t, err) assert.Equal(t, namespaceTitle, res2.FolderFullpath) - res3, err := ruleService.GetAlertGroupsWithFolderFullpath(context.Background(), &signedInUser, []string{namespaceUID}) + res3, err := ruleService.GetAlertGroupsWithFolderFullpath(context.Background(), &signedInUser, &FilterOptions{NamespaceUIDs: []string{namespaceUID}}) require.NoError(t, err) assert.Equal(t, namespaceTitle, res3[0].FolderFullpath) }) @@ -1675,7 +1675,7 @@ func TestProvisiongWithFullpath(t *testing.T) { require.NoError(t, err) assert.Equal(t, "my-namespace/my-other-namespace containing multiple \\/\\/", res2.FolderFullpath) - res3, err := ruleService.GetAlertGroupsWithFolderFullpath(context.Background(), &signedInUser, []string{otherNamespaceUID}) + res3, err := ruleService.GetAlertGroupsWithFolderFullpath(context.Background(), &signedInUser, &FilterOptions{NamespaceUIDs: []string{otherNamespaceUID}}) require.NoError(t, err) assert.Equal(t, "my-namespace/my-other-namespace containing multiple \\/\\/", res3[0].FolderFullpath) }) diff --git a/pkg/services/ngalert/store/alert_rule.go b/pkg/services/ngalert/store/alert_rule.go index 0d11d9eaaaa8c..c95c8de835fa3 100644 --- a/pkg/services/ngalert/store/alert_rule.go +++ b/pkg/services/ngalert/store/alert_rule.go @@ -554,6 +554,13 @@ func (st DBstore) ListAlertRules(ctx context.Context, query *ngmodels.ListAlertR } } + if query.ImportedPrometheusRule != nil { + q, err = st.filterImportedPrometheusRules(*query.ImportedPrometheusRule, q) + if err != nil { + return err + } + } + q = q.Asc("namespace_uid", "rule_group", "rule_group_idx", "id") alertRules := make([]*ngmodels.AlertRule, 0) @@ -593,6 +600,14 @@ func (st DBstore) ListAlertRules(ctx context.Context, query *ngmodels.ListAlertR continue } } + if query.ImportedPrometheusRule != nil { // remove false-positive hits from the result + hasOriginalRuleDefinition := converted.Metadata.PrometheusStyleRule != nil && len(converted.Metadata.PrometheusStyleRule.OriginalRuleDefinition) > 0 + if *query.ImportedPrometheusRule && !hasOriginalRuleDefinition { + continue + } else if !*query.ImportedPrometheusRule && hasOriginalRuleDefinition { + continue + } + } // MySQL (and potentially other databases) can use case-insensitive comparison. // This code makes sure we return groups that only exactly match the filter. 
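 			// For example, a query filtering on rule group "CPU" should not also return a group named "cpu".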
if groupsMap != nil { @@ -928,6 +943,23 @@ func (st DBstore) filterByContentInNotificationSettings(value string, sess *xorm return sess.And(fmt.Sprintf("notification_settings %s ?", st.SQLStore.GetDialect().LikeStr()), "%"+search+"%"), nil } +func (st DBstore) filterImportedPrometheusRules(value bool, sess *xorm.Session) (*xorm.Session, error) { + if value { + // Filter for rules that have both prometheus_style_rule and original_rule_definition in metadata + return sess.And( + "metadata LIKE ? AND metadata LIKE ?", + "%prometheus_style_rule%", + "%original_rule_definition%", + ), nil + } + // Filter for rules that don't have prometheus_style_rule and original_rule_definition in metadata + return sess.And( + "metadata NOT LIKE ? AND metadata NOT LIKE ?", + "%prometheus_style_rule%", + "%original_rule_definition%", + ), nil +} + func (st DBstore) RenameReceiverInNotificationSettings(ctx context.Context, orgID int64, oldReceiver, newReceiver string, validateProvenance func(ngmodels.Provenance) bool, dryRun bool) ([]ngmodels.AlertRuleKey, []ngmodels.AlertRuleKey, error) { // fetch entire rules because Update method requires it because it copies rules to version table rules, err := st.ListAlertRules(ctx, &ngmodels.ListAlertRulesQuery{ diff --git a/pkg/services/ngalert/store/alert_rule_test.go b/pkg/services/ngalert/store/alert_rule_test.go index 6f6f9453e1848..1f833a3e10f3e 100644 --- a/pkg/services/ngalert/store/alert_rule_test.go +++ b/pkg/services/ngalert/store/alert_rule_test.go @@ -1828,6 +1828,64 @@ func TestIntegration_AlertRuleVersionsCleanup(t *testing.T) { }) } +func TestIntegration_ListAlertRules(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + sqlStore := db.InitTestDB(t) + cfg := setting.NewCfg() + cfg.UnifiedAlerting = setting.UnifiedAlertingSettings{ + BaseInterval: time.Duration(rand.Int63n(100)) * time.Second, + } + folderService := setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()) + b := &fakeBus{} + orgID := int64(1) + ruleGen := models.RuleGen + ruleGen = ruleGen.With( + ruleGen.WithIntervalMatching(cfg.UnifiedAlerting.BaseInterval), + ruleGen.WithOrgID(orgID), + ) + t.Run("filter by ImportedPrometheusRule", func(t *testing.T) { + store := createTestStore(sqlStore, folderService, &logtest.Fake{}, cfg.UnifiedAlerting, b) + regularRule := createRule(t, store, ruleGen) + importedRule := createRule(t, store, ruleGen.With( + models.RuleMuts.WithPrometheusOriginalRuleDefinition("data"), + )) + tc := []struct { + name string + importedPrometheusRule *bool + expectedRules []*models.AlertRule + }{ + { + name: "should return only imported prometheus rules when filter is true", + importedPrometheusRule: util.Pointer(true), + expectedRules: []*models.AlertRule{importedRule}, + }, + { + name: "should return only non-imported rules when filter is false", + importedPrometheusRule: util.Pointer(false), + expectedRules: []*models.AlertRule{regularRule}, + }, + { + name: "should return all rules when filter is not set", + importedPrometheusRule: nil, + expectedRules: []*models.AlertRule{regularRule, importedRule}, + }, + } + for _, tt := range tc { + t.Run(tt.name, func(t *testing.T) { + query := &models.ListAlertRulesQuery{ + OrgID: orgID, + ImportedPrometheusRule: tt.importedPrometheusRule, + } + result, err := store.ListAlertRules(context.Background(), query) + require.NoError(t, err) + require.ElementsMatch(t, tt.expectedRules, result) + }) + } + }) +} + func createTestStore( sqlStore db.DB, folderService folder.Service, diff --git 
a/pkg/services/ngalert/tests/fakes/rules.go b/pkg/services/ngalert/tests/fakes/rules.go index 15ebf90f40cf3..2fa4714b19d30 100644 --- a/pkg/services/ngalert/tests/fakes/rules.go +++ b/pkg/services/ngalert/tests/fakes/rules.go @@ -11,6 +11,7 @@ import ( "github.com/grafana/grafana/pkg/apimachinery/identity" "github.com/grafana/grafana/pkg/infra/metrics" + "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/util" @@ -289,7 +290,7 @@ func (f *RuleStore) GetNamespaceInRootByTitle(ctx context.Context, title string, } } - return nil, fmt.Errorf("namespace with title '%s' not found", title) + return nil, dashboards.ErrFolderNotFound } func (f *RuleStore) UpdateAlertRules(_ context.Context, _ *models.UserUID, q []models.UpdateRule) error { diff --git a/pkg/tests/api/alerting/api_convert_prometheus_test.go b/pkg/tests/api/alerting/api_convert_prometheus_test.go index 326063a15ce86..674d708250f44 100644 --- a/pkg/tests/api/alerting/api_convert_prometheus_test.go +++ b/pkg/tests/api/alerting/api_convert_prometheus_test.go @@ -1,6 +1,7 @@ package alerting import ( + "net/http" "testing" "time" @@ -15,31 +16,8 @@ import ( "github.com/grafana/grafana/pkg/util" ) -func TestIntegrationConvertPrometheusEndpoints(t *testing.T) { - testinfra.SQLiteIntegrationTest(t) - - // Setup Grafana and its Database - dir, path := testinfra.CreateGrafDir(t, testinfra.GrafanaOpts{ - DisableLegacyAlerting: true, - EnableUnifiedAlerting: true, - DisableAnonymous: true, - AppModeProduction: true, - EnableFeatureToggles: []string{"alertingConversionAPI"}, - }) - - grafanaListedAddr, env := testinfra.StartGrafanaEnv(t, dir, path) - - // Create a user to make authenticated requests - createUser(t, env.SQLStore, env.Cfg, user.CreateUserCommand{ - DefaultOrgRole: string(org.RoleAdmin), - Password: "password", - Login: "admin", - }) - - apiClient := newAlertingApiClient(grafanaListedAddr, "admin", "password") - namespace := "test-namespace" - - promGroup1 := apimodels.PrometheusRuleGroup{ +var ( + promGroup1 = apimodels.PrometheusRuleGroup{ Name: "test-group-1", Interval: prommodel.Duration(60 * time.Second), Rules: []apimodels.PrometheusRule{ @@ -80,7 +58,7 @@ func TestIntegrationConvertPrometheusEndpoints(t *testing.T) { }, } - promGroup2 := apimodels.PrometheusRuleGroup{ + promGroup2 = apimodels.PrometheusRuleGroup{ Name: "test-group-2", Interval: prommodel.Duration(60 * time.Second), Rules: []apimodels.PrometheusRule{ @@ -99,24 +77,128 @@ func TestIntegrationConvertPrometheusEndpoints(t *testing.T) { }, } + promGroup3 = apimodels.PrometheusRuleGroup{ + Name: "test-group-3", + Interval: prommodel.Duration(60 * time.Second), + Rules: []apimodels.PrometheusRule{ + { + Alert: "ServiceDown", + Expr: "up == 0", + For: util.Pointer(prommodel.Duration(2 * time.Minute)), + Labels: map[string]string{ + "severity": "critical", + }, + Annotations: map[string]string{ + "annotation-1": "value-1", + }, + }, + }, + } +) + +func TestIntegrationConvertPrometheusEndpoints(t *testing.T) { + testinfra.SQLiteIntegrationTest(t) + + // Setup Grafana and its Database + dir, path := testinfra.CreateGrafDir(t, testinfra.GrafanaOpts{ + DisableLegacyAlerting: true, + EnableUnifiedAlerting: true, + DisableAnonymous: true, + AppModeProduction: true, + EnableFeatureToggles: []string{"alertingConversionAPI"}, + }) + + grafanaListedAddr, env := testinfra.StartGrafanaEnv(t, dir, path) + + // Create users to make 
authenticated requests + createUser(t, env.SQLStore, env.Cfg, user.CreateUserCommand{ + DefaultOrgRole: string(org.RoleAdmin), + Password: "password", + Login: "admin", + }) + apiClient := newAlertingApiClient(grafanaListedAddr, "admin", "password") + + createUser(t, env.SQLStore, env.Cfg, user.CreateUserCommand{ + DefaultOrgRole: string(org.RoleViewer), + Password: "password", + Login: "viewer", + }) + viewerClient := newAlertingApiClient(grafanaListedAddr, "viewer", "password") + + namespace1 := "test-namespace-1" + namespace2 := "test-namespace-2" + ds := apiClient.CreateDatasource(t, datasources.DS_PROMETHEUS) - t.Run("create two rule groups and get them back", func(t *testing.T) { - apiClient.ConvertPrometheusPostRuleGroup(t, namespace, ds.Body.Datasource.UID, promGroup1, nil) - apiClient.ConvertPrometheusPostRuleGroup(t, namespace, ds.Body.Datasource.UID, promGroup2, nil) + t.Run("create rule groups and get them back", func(t *testing.T) { + _, status, body := apiClient.ConvertPrometheusPostRuleGroup(t, namespace1, ds.Body.Datasource.UID, promGroup1, nil) + requireStatusCode(t, http.StatusAccepted, status, body) + _, status, body = apiClient.ConvertPrometheusPostRuleGroup(t, namespace1, ds.Body.Datasource.UID, promGroup2, nil) + requireStatusCode(t, http.StatusAccepted, status, body) + + // create a third group in a different namespace + _, status, body = apiClient.ConvertPrometheusPostRuleGroup(t, namespace2, ds.Body.Datasource.UID, promGroup3, nil) + requireStatusCode(t, http.StatusAccepted, status, body) - ns, _, _ := apiClient.GetAllRulesWithStatus(t) + // And a non-provisioned rule in another namespace + namespace3UID := util.GenerateShortUID() + apiClient.CreateFolder(t, namespace3UID, "folder") + createRule(t, apiClient, namespace3UID) - require.Len(t, ns[namespace], 2) + // Now get the first group + group1 := apiClient.ConvertPrometheusGetRuleGroupRules(t, namespace1, promGroup1.Name) + require.Equal(t, promGroup1, group1) - rulesByGroupName := map[string][]apimodels.GettableExtendedRuleNode{} - for _, group := range ns[namespace] { - rulesByGroupName[group.Name] = append(rulesByGroupName[group.Name], group.Rules...) 
+ // Get namespace1 + ns1 := apiClient.ConvertPrometheusGetNamespaceRules(t, namespace1) + expectedNs1 := map[string][]apimodels.PrometheusRuleGroup{ + namespace1: {promGroup1, promGroup2}, } + require.Equal(t, expectedNs1, ns1) - require.Len(t, rulesByGroupName[promGroup1.Name], 3) - require.Len(t, rulesByGroupName[promGroup2.Name], 1) + // Get all namespaces + namespaces := apiClient.ConvertPrometheusGetAllRules(t) + expectedNamespaces := map[string][]apimodels.PrometheusRuleGroup{ + namespace1: {promGroup1, promGroup2}, + namespace2: {promGroup3}, + } + require.Equal(t, expectedNamespaces, namespaces) + }) + + t.Run("without permissions to create folders cannot create rule groups either", func(t *testing.T) { + _, status, raw := viewerClient.ConvertPrometheusPostRuleGroup(t, namespace1, ds.Body.Datasource.UID, promGroup1, nil) + requireStatusCode(t, http.StatusForbidden, status, raw) }) +} + +func TestIntegrationConvertPrometheusEndpoints_CreatePausedRules(t *testing.T) { + testinfra.SQLiteIntegrationTest(t) + + // Setup Grafana and its Database + dir, path := testinfra.CreateGrafDir(t, testinfra.GrafanaOpts{ + DisableLegacyAlerting: true, + EnableUnifiedAlerting: true, + DisableAnonymous: true, + AppModeProduction: true, + EnableFeatureToggles: []string{"alertingConversionAPI"}, + }) + + grafanaListedAddr, env := testinfra.StartGrafanaEnv(t, dir, path) + + // Create users to make authenticated requests + createUser(t, env.SQLStore, env.Cfg, user.CreateUserCommand{ + DefaultOrgRole: string(org.RoleAdmin), + Password: "password", + Login: "admin", + }) + apiClient := newAlertingApiClient(grafanaListedAddr, "admin", "password") + + ds := apiClient.CreateDatasource(t, datasources.DS_PROMETHEUS) + + namespace1 := "test-namespace-1" + + namespace1UID := util.GenerateShortUID() + apiClient.CreateFolder(t, namespace1UID, namespace1) t.Run("when pausing header is set, rules should be paused", func(t *testing.T) { tests := []struct { @@ -155,21 +237,17 @@ func TestIntegrationConvertPrometheusEndpoints(t *testing.T) { if tc.alertPaused { headers["X-Grafana-Alerting-Alert-Rules-Paused"] = "true" } - apiClient.ConvertPrometheusPostRuleGroup(t, namespace, ds.Body.Datasource.UID, promGroup1, headers) - ns, _, _ := apiClient.GetAllRulesWithStatus(t) + apiClient.ConvertPrometheusPostRuleGroup(t, namespace1, ds.Body.Datasource.UID, promGroup1, headers) - rulesByGroupName := map[string][]apimodels.GettableExtendedRuleNode{} - for _, group := range ns[namespace] { - rulesByGroupName[group.Name] = append(rulesByGroupName[group.Name], group.Rules...) 
- } + gr, _, _ := apiClient.GetRulesGroupWithStatus(t, namespace1UID, promGroup1.Name) - require.Len(t, rulesByGroupName[promGroup1.Name], 3) + require.Len(t, gr.Rules, 3) pausedRecordingRules := 0 pausedAlertRules := 0 - for _, rule := range rulesByGroupName[promGroup1.Name] { + for _, rule := range gr.Rules { if rule.GrafanaManagedAlert.IsPaused { if rule.GrafanaManagedAlert.Record != nil { pausedRecordingRules++ diff --git a/pkg/tests/api/alerting/testing.go b/pkg/tests/api/alerting/testing.go index de927dc890ccc..6594eaa4bb6f4 100644 --- a/pkg/tests/api/alerting/testing.go +++ b/pkg/tests/api/alerting/testing.go @@ -546,14 +546,14 @@ func (a apiClient) PostSilence(t *testing.T, s apimodels.PostableSilence) (apimo req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/api/alertmanager/grafana/api/v2/silences", a.url), bytes.NewReader(b)) require.NoError(t, err) req.Header.Set("Content-Type", "application/json") - return sendRequest[apimodels.PostSilencesOKBody](t, req, http.StatusAccepted) + return sendRequestJSON[apimodels.PostSilencesOKBody](t, req, http.StatusAccepted) } func (a apiClient) GetSilence(t *testing.T, id string) (apimodels.GettableSilence, int, string) { t.Helper() req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/alertmanager/grafana/api/v2/silence/%s", a.url, id), nil) require.NoError(t, err) - return sendRequest[apimodels.GettableSilence](t, req, http.StatusOK) + return sendRequestJSON[apimodels.GettableSilence](t, req, http.StatusOK) } func (a apiClient) GetSilences(t *testing.T, filters ...string) (apimodels.GettableSilences, int, string) { @@ -568,7 +568,7 @@ func (a apiClient) GetSilences(t *testing.T, filters ...string) (apimodels.Getta req, err := http.NewRequest(http.MethodGet, u.String(), nil) require.NoError(t, err) - return sendRequest[apimodels.GettableSilences](t, req, http.StatusOK) + return sendRequestJSON[apimodels.GettableSilences](t, req, http.StatusOK) } func (a apiClient) DeleteSilence(t *testing.T, id string) (any, int, string) { @@ -580,7 +580,7 @@ func (a apiClient) DeleteSilence(t *testing.T, id string) (any, int, string) { Message string `json:"message"` } - return sendRequest[dynamic](t, req, http.StatusOK) + return sendRequestJSON[dynamic](t, req, http.StatusOK) } func (a apiClient) GetRulesGroup(t *testing.T, folder string, group string) (apimodels.RuleGroupConfigResponse, int) { @@ -694,7 +694,7 @@ func (a apiClient) GetRuleGroupProvisioning(t *testing.T, folderUID string, grou req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/v1/provisioning/folder/%s/rule-groups/%s", a.url, folderUID, groupName), nil) require.NoError(t, err) - return sendRequest[apimodels.AlertRuleGroup](t, req, http.StatusOK) + return sendRequestJSON[apimodels.AlertRuleGroup](t, req, http.StatusOK) } func (a apiClient) CreateOrUpdateRuleGroupProvisioning(t *testing.T, group apimodels.AlertRuleGroup) (apimodels.AlertRuleGroup, int, string) { @@ -709,7 +709,7 @@ func (a apiClient) CreateOrUpdateRuleGroupProvisioning(t *testing.T, group apimo require.NoError(t, err) req.Header.Add("Content-Type", "application/json") - return sendRequest[apimodels.AlertRuleGroup](t, req, http.StatusOK) + return sendRequestJSON[apimodels.AlertRuleGroup](t, req, http.StatusOK) } func (a apiClient) SubmitRuleForBacktesting(t *testing.T, config apimodels.BacktestConfig) (int, string) { @@ -808,7 +808,7 @@ func (a apiClient) GetAllMuteTimingsWithStatus(t *testing.T) (apimodels.MuteTimi req, err := http.NewRequest(http.MethodGet, 
fmt.Sprintf("%s/api/v1/provisioning/mute-timings", a.url), nil) require.NoError(t, err) - return sendRequest[apimodels.MuteTimings](t, req, http.StatusOK) + return sendRequestJSON[apimodels.MuteTimings](t, req, http.StatusOK) } func (a apiClient) GetMuteTimingByNameWithStatus(t *testing.T, name string) (apimodels.MuteTimeInterval, int, string) { @@ -817,7 +817,7 @@ func (a apiClient) GetMuteTimingByNameWithStatus(t *testing.T, name string) (api req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/v1/provisioning/mute-timings/%s", a.url, name), nil) require.NoError(t, err) - return sendRequest[apimodels.MuteTimeInterval](t, req, http.StatusOK) + return sendRequestJSON[apimodels.MuteTimeInterval](t, req, http.StatusOK) } func (a apiClient) CreateMuteTimingWithStatus(t *testing.T, interval apimodels.MuteTimeInterval) (apimodels.MuteTimeInterval, int, string) { @@ -832,7 +832,7 @@ func (a apiClient) CreateMuteTimingWithStatus(t *testing.T, interval apimodels.M req.Header.Add("Content-Type", "application/json") require.NoError(t, err) - return sendRequest[apimodels.MuteTimeInterval](t, req, http.StatusCreated) + return sendRequestJSON[apimodels.MuteTimeInterval](t, req, http.StatusCreated) } func (a apiClient) EnsureMuteTiming(t *testing.T, interval apimodels.MuteTimeInterval) { @@ -854,7 +854,7 @@ func (a apiClient) UpdateMuteTimingWithStatus(t *testing.T, interval apimodels.M req.Header.Add("Content-Type", "application/json") require.NoError(t, err) - return sendRequest[apimodels.MuteTimeInterval](t, req, http.StatusAccepted) + return sendRequestJSON[apimodels.MuteTimeInterval](t, req, http.StatusAccepted) } func (a apiClient) DeleteMuteTimingWithStatus(t *testing.T, name string) (int, string) { @@ -908,7 +908,7 @@ func (a apiClient) GetRouteWithStatus(t *testing.T) (apimodels.Route, int, strin req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/v1/provisioning/policies", a.url), nil) require.NoError(t, err) - return sendRequest[apimodels.Route](t, req, http.StatusOK) + return sendRequestJSON[apimodels.Route](t, req, http.StatusOK) } func (a apiClient) GetRoute(t *testing.T) apimodels.Route { @@ -989,7 +989,7 @@ func (a apiClient) GetRuleHistoryWithStatus(t *testing.T, ruleUID string) (data. 
req, err := http.NewRequest(http.MethodGet, u.String(), nil) require.NoError(t, err) - return sendRequest[data.Frame](t, req, http.StatusOK) + return sendRequestJSON[data.Frame](t, req, http.StatusOK) } func (a apiClient) GetAllTimeIntervalsWithStatus(t *testing.T) ([]apimodels.GettableTimeIntervals, int, string) { @@ -998,7 +998,7 @@ func (a apiClient) GetAllTimeIntervalsWithStatus(t *testing.T) ([]apimodels.Gett req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/v1/notifications/time-intervals", a.url), nil) require.NoError(t, err) - return sendRequest[[]apimodels.GettableTimeIntervals](t, req, http.StatusOK) + return sendRequestJSON[[]apimodels.GettableTimeIntervals](t, req, http.StatusOK) } func (a apiClient) GetTimeIntervalByNameWithStatus(t *testing.T, name string) (apimodels.GettableTimeIntervals, int, string) { @@ -1007,7 +1007,7 @@ func (a apiClient) GetTimeIntervalByNameWithStatus(t *testing.T, name string) (a req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/v1/notifications/time-intervals/%s", a.url, name), nil) require.NoError(t, err) - return sendRequest[apimodels.GettableTimeIntervals](t, req, http.StatusOK) + return sendRequestJSON[apimodels.GettableTimeIntervals](t, req, http.StatusOK) } func (a apiClient) CreateReceiverWithStatus(t *testing.T, receiver apimodels.EmbeddedContactPoint) (apimodels.EmbeddedContactPoint, int, string) { @@ -1022,7 +1022,7 @@ func (a apiClient) CreateReceiverWithStatus(t *testing.T, receiver apimodels.Emb req.Header.Add("Content-Type", "application/json") require.NoError(t, err) - return sendRequest[apimodels.EmbeddedContactPoint](t, req, http.StatusAccepted) + return sendRequestJSON[apimodels.EmbeddedContactPoint](t, req, http.StatusAccepted) } func (a apiClient) EnsureReceiver(t *testing.T, receiver apimodels.EmbeddedContactPoint) { @@ -1075,33 +1075,33 @@ func (a apiClient) GetAlertmanagerConfigWithStatus(t *testing.T) (apimodels.Gett req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/alertmanager/grafana/config/api/v1/alerts", a.url), nil) require.NoError(t, err) - return sendRequest[apimodels.GettableUserConfig](t, req, http.StatusOK) + return sendRequestJSON[apimodels.GettableUserConfig](t, req, http.StatusOK) } func (a apiClient) GetActiveAlertsWithStatus(t *testing.T) (apimodels.AlertGroups, int, string) { t.Helper() req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/alertmanager/grafana/api/v2/alerts/groups", a.url), nil) require.NoError(t, err) - return sendRequest[apimodels.AlertGroups](t, req, http.StatusOK) + return sendRequestJSON[apimodels.AlertGroups](t, req, http.StatusOK) } func (a apiClient) GetRuleVersionsWithStatus(t *testing.T, ruleUID string) (apimodels.GettableRuleVersions, int, string) { t.Helper() req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/ruler/grafana/api/v1/rule/%s/versions", a.url, ruleUID), nil) require.NoError(t, err) - return sendRequest[apimodels.GettableRuleVersions](t, req, http.StatusOK) + return sendRequestJSON[apimodels.GettableRuleVersions](t, req, http.StatusOK) } func (a apiClient) GetRuleByUID(t *testing.T, ruleUID string) apimodels.GettableExtendedRuleNode { t.Helper() req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/ruler/grafana/api/v1/rule/%s", a.url, ruleUID), nil) require.NoError(t, err) - rule, status, raw := sendRequest[apimodels.GettableExtendedRuleNode](t, req, http.StatusOK) + rule, status, raw := sendRequestJSON[apimodels.GettableExtendedRuleNode](t, req, http.StatusOK) requireStatusCode(t, 
http.StatusOK, status, raw) return rule } -func (a apiClient) ConvertPrometheusPostRuleGroup(t *testing.T, namespaceTitle, datasourceUID string, promGroup apimodels.PrometheusRuleGroup, headers map[string]string) { +func (a apiClient) ConvertPrometheusPostRuleGroup(t *testing.T, namespaceTitle, datasourceUID string, promGroup apimodels.PrometheusRuleGroup, headers map[string]string) (apimodels.ConvertPrometheusResponse, int, string) { t.Helper() data, err := yaml.Marshal(promGroup) @@ -1116,30 +1116,85 @@ func (a apiClient) ConvertPrometheusPostRuleGroup(t *testing.T, namespaceTitle, req.Header.Add(key, value) } - _, status, raw := sendRequest[apimodels.ConvertPrometheusResponse](t, req, http.StatusAccepted) - requireStatusCode(t, http.StatusAccepted, status, raw) + return sendRequestJSON[apimodels.ConvertPrometheusResponse](t, req, http.StatusAccepted) +} + +func (a apiClient) ConvertPrometheusGetRuleGroupRules(t *testing.T, namespaceTitle, groupName string) apimodels.PrometheusRuleGroup { + t.Helper() + req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/convert/prometheus/config/v1/rules/%s/%s", a.url, namespaceTitle, groupName), nil) + require.NoError(t, err) + rule, status, raw := sendRequestYAML[apimodels.PrometheusRuleGroup](t, req, http.StatusOK) + requireStatusCode(t, http.StatusOK, status, raw) + return rule } -func sendRequest[T any](t *testing.T, req *http.Request, successStatusCode int) (T, int, string) { +func (a apiClient) ConvertPrometheusGetNamespaceRules(t *testing.T, namespaceTitle string) map[string][]apimodels.PrometheusRuleGroup { + t.Helper() + req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/convert/prometheus/config/v1/rules/%s", a.url, namespaceTitle), nil) + require.NoError(t, err) + ns, status, raw := sendRequestYAML[map[string][]apimodels.PrometheusRuleGroup](t, req, http.StatusOK) + requireStatusCode(t, http.StatusOK, status, raw) + return ns +} + +func (a apiClient) ConvertPrometheusGetAllRules(t *testing.T) map[string][]apimodels.PrometheusRuleGroup { + t.Helper() + req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/api/convert/prometheus/config/v1/rules", a.url), nil) + require.NoError(t, err) + result, status, raw := sendRequestYAML[map[string][]apimodels.PrometheusRuleGroup](t, req, http.StatusOK) + requireStatusCode(t, http.StatusOK, status, raw) + return result +} + +func sendRequestRaw(t *testing.T, req *http.Request) ([]byte, int, error) { t.Helper() client := &http.Client{} resp, err := client.Do(req) - require.NoError(t, err) + if err != nil { + return nil, 0, err + } defer func() { _ = resp.Body.Close() }() + body, err := io.ReadAll(resp.Body) - require.NoError(t, err) + if err != nil { + return nil, 0, err + } + + return body, resp.StatusCode, nil +} +func sendRequestJSON[T any](t *testing.T, req *http.Request, successStatusCode int) (T, int, string) { + t.Helper() var result T - if resp.StatusCode != successStatusCode { - return result, resp.StatusCode, string(body) + body, statusCode, err := sendRequestRaw(t, req) + require.NoError(t, err) + + if statusCode != successStatusCode { + return result, statusCode, string(body) } err = json.Unmarshal(body, &result) require.NoError(t, err) - return result, resp.StatusCode, string(body) + return result, statusCode, string(body) +} + +func sendRequestYAML[T any](t *testing.T, req *http.Request, successStatusCode int) (T, int, string) { + t.Helper() + var result T + + body, statusCode, err := sendRequestRaw(t, req) + require.NoError(t, err) + + if statusCode != 
successStatusCode { + return result, statusCode, string(body) + } + + err = yaml.Unmarshal(body, &result) + require.NoError(t, err) + return result, statusCode, string(body) } func requireStatusCode(t *testing.T, expected, actual int, response string) { diff --git a/public/api-merged.json b/public/api-merged.json index dc455b6a36f8b..9ddc287f6f69f 100644 --- a/public/api-merged.json +++ b/public/api-merged.json @@ -22771,6 +22771,7 @@ } }, "gettableAlerts": { + "description": "GettableAlerts gettable alerts", "type": "array", "items": { "type": "object", diff --git a/public/openapi3.json b/public/openapi3.json index fabc90f92cf41..8f339a39b59b3 100644 --- a/public/openapi3.json +++ b/public/openapi3.json @@ -12838,6 +12838,7 @@ "type": "object" }, "gettableAlerts": { + "description": "GettableAlerts gettable alerts", "items": { "$ref": "#/components/schemas/gettableAlert" }, From 1302ee48b994cfb2243fbc595459d5ef293e712a Mon Sep 17 00:00:00 2001 From: Andreas Christou Date: Tue, 25 Feb 2025 14:59:58 +0000 Subject: [PATCH 25/33] OpenTSDB: Support v2.4 (#100673) * Add version 2.4 to frontend * Update settings and types - Set all properties on backend for consistency * Update query logic to parse new and old format - Minor naming updates - Extract logic for initial frame creation - When parsing old api responses, ensure data is in ascending order - Update tests * Update docs and provisioning file * Fix lint * Update docs/sources/datasources/opentsdb/_index.md Co-authored-by: Larissa Wandzura <126723338+lwandz13@users.noreply.github.com> * Update docs/sources/datasources/opentsdb/_index.md Co-authored-by: Larissa Wandzura <126723338+lwandz13@users.noreply.github.com> * Review nit --------- Co-authored-by: Larissa Wandzura <126723338+lwandz13@users.noreply.github.com> --- devenv/datasources.yaml | 8 + docs/sources/datasources/opentsdb/_index.md | 8 +- pkg/tsdb/opentsdb/opentsdb.go | 148 +++++++++++++----- pkg/tsdb/opentsdb/opentsdb_test.go | 129 ++++++++++++++- pkg/tsdb/opentsdb/types.go | 15 +- .../opentsdb/components/OpenTsdbDetails.tsx | 1 + 6 files changed, 263 insertions(+), 46 deletions(-) diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index 452d2d3d1bc55..00efa243c1c90 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -136,6 +136,14 @@ datasources: tsdbResolution: 1 tsdbVersion: 3 + - name: gdev-opentsdb-v2.4 + type: opentsdb + access: proxy + url: http://localhost:4242 + jsonData: + tsdbResolution: 1 + tsdbVersion: 4 + - name: gdev-elasticsearch type: elasticsearch uid: gdev-elasticsearch diff --git a/docs/sources/datasources/opentsdb/_index.md b/docs/sources/datasources/opentsdb/_index.md index 560ba731bb54e..07a5a66add01f 100644 --- a/docs/sources/datasources/opentsdb/_index.md +++ b/docs/sources/datasources/opentsdb/_index.md @@ -62,7 +62,7 @@ To configure basic settings for the data source, complete the following steps: | **Default** | Default data source that will be be pre-selected for new panels. | | **URL** | The HTTP protocol, IP, and port of your OpenTSDB server (default port is usually 4242). | | **Allowed cookies** | Listing of cookies to forward to the data source. | -| **Version** | The OpenTSDB version. | +| **Version** | The OpenTSDB version (supported versions are: 2.4, 2.3, 2.2 and versions less than 2.1). | | **Resolution** | Metrics from OpenTSDB may have data points with either second or millisecond resolution. | | **Lookup limit** | Default is 1000. | @@ -98,9 +98,13 @@ can be used to query OpenTSDB. 
Fill Policy is also introduced in OpenTSDB 2.2. While using OpenTSDB 2.2 data source, make sure you use either Filters or Tags as they are mutually exclusive. If used together, might give you weird results. {{% /admonition %}} +{{% admonition type="note" %}} +When using OpenTSDB 2.4 with alerting, queries are executed with the parameter `arrays=true`. This causes OpenTSDB to return data points as an array of arrays instead of a map of key-value pairs. Grafana then converts this data into the appropriate data frame format. +{{% /admonition %}} + ### Auto complete suggestions -As soon as you start typing metric names, tag names and tag values , you should see highlighted auto complete suggestions for them. +As you begin typing metric names, tag names, or tag values, highlighted autocomplete suggestions will appear. The autocomplete only works if the OpenTSDB suggest API is enabled. ## Templating queries diff --git a/pkg/tsdb/opentsdb/opentsdb.go b/pkg/tsdb/opentsdb/opentsdb.go index 15a1fe270466d..798b27e57abdc 100644 --- a/pkg/tsdb/opentsdb/opentsdb.go +++ b/pkg/tsdb/opentsdb/opentsdb.go @@ -8,6 +8,8 @@ import ( "net/http" "net/url" "path" + "sort" + "strconv" "strings" "time" @@ -35,12 +37,21 @@ func ProvideService(httpClientProvider httpclient.Provider) *Service { } type datasourceInfo struct { - HTTPClient *http.Client - URL string + HTTPClient *http.Client + URL string + TSDBVersion float32 + TSDBResolution int32 + LookupLimit int32 } type DsAccess string +type JSONData struct { + TSDBVersion float32 `json:"tsdbVersion"` + TSDBResolution int32 `json:"tsdbResolution"` + LookupLimit int32 `json:"lookupLimit"` +} + func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc { return func(ctx context.Context, settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) { opts, err := settings.HTTPClientOptions(ctx) @@ -53,9 +64,18 @@ func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.Inst return nil, err } + jsonData := JSONData{} + err = json.Unmarshal(settings.JSONData, &jsonData) + if err != nil { + return nil, fmt.Errorf("error reading settings: %w", err) + } + model := &datasourceInfo{ - HTTPClient: client, - URL: settings.URL, + HTTPClient: client, + URL: settings.URL, + TSDBVersion: jsonData.TSDBVersion, + TSDBResolution: jsonData.TSDBResolution, + LookupLimit: jsonData.LookupLimit, } return model, nil @@ -69,7 +89,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) q := req.Queries[0] - myRefID := q.RefID + refID := q.RefID tsdbQuery.Start = q.TimeRange.From.UnixNano() / int64(time.Millisecond) tsdbQuery.End = q.TimeRange.To.UnixNano() / int64(time.Millisecond) @@ -106,7 +126,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) } }() - result, err := s.parseResponse(logger, res, myRefID) + result, err := s.parseResponse(logger, res, refID, dsInfo.TSDBVersion) if err != nil { return &backend.QueryDataResponse{}, err } @@ -120,9 +140,11 @@ func (s *Service) createRequest(ctx context.Context, logger log.Logger, dsInfo * return nil, err } u.Path = path.Join(u.Path, "api/query") - queryParams := u.Query() - queryParams.Set("arrays", "true") - u.RawQuery = queryParams.Encode() + if dsInfo.TSDBVersion == 4 { + queryParams := u.Query() + queryParams.Set("arrays", "true") + u.RawQuery = queryParams.Encode() + } postData, err := json.Marshal(data) if err != nil { @@ -140,7 +162,67 @@ func (s *Service) createRequest(ctx context.Context, logger log.Logger, 
dsInfo * return req, nil } -func (s *Service) parseResponse(logger log.Logger, res *http.Response, myRefID string) (*backend.QueryDataResponse, error) { +func createInitialFrame(val OpenTsdbCommon, length int, refID string) *data.Frame { + labels := data.Labels{} + for label, value := range val.Tags { + labels[label] = value + } + + frame := data.NewFrameOfFieldTypes(val.Metric, length, data.FieldTypeTime, data.FieldTypeFloat64) + frame.Meta = &data.FrameMeta{Type: data.FrameTypeTimeSeriesMulti, TypeVersion: data.FrameTypeVersion{0, 1}} + frame.RefID = refID + timeField := frame.Fields[0] + timeField.Name = data.TimeSeriesTimeFieldName + dataField := frame.Fields[1] + dataField.Name = val.Metric + dataField.Labels = labels + + return frame +} + +// Parse response function for OpenTSDB version 2.4 +func parseResponse24(responseData []OpenTsdbResponse24, refID string, frames data.Frames) data.Frames { + for _, val := range responseData { + frame := createInitialFrame(val.OpenTsdbCommon, len(val.DataPoints), refID) + + for i, point := range val.DataPoints { + frame.SetRow(i, time.Unix(int64(point[0]), 0).UTC(), point[1]) + } + + frames = append(frames, frame) + } + + return frames +} + +// Parse response function for OpenTSDB versions < 2.4 +func parseResponseLT24(responseData []OpenTsdbResponse, refID string, frames data.Frames) (data.Frames, error) { + for _, val := range responseData { + frame := createInitialFrame(val.OpenTsdbCommon, len(val.DataPoints), refID) + + // Order the timestamps in ascending order to avoid issues like https://github.com/grafana/grafana/issues/38729 + timestamps := make([]string, 0, len(val.DataPoints)) + for timestamp := range val.DataPoints { + timestamps = append(timestamps, timestamp) + } + sort.Strings(timestamps) + + for i, timeString := range timestamps { + timestamp, err := strconv.ParseInt(timeString, 10, 64) + if err != nil { + logger.Info("Failed to unmarshal opentsdb timestamp", "timestamp", timeString) + return frames, err + } + frame.SetRow(i, time.Unix(timestamp, 0).UTC(), val.DataPoints[timeString]) + } + + frames = append(frames, frame) + } + + return frames, nil +} + +func (s *Service) parseResponse(logger log.Logger, res *http.Response, refID string, tsdbVersion float32) (*backend.QueryDataResponse, error) { resp := backend.NewQueryDataResponse() body, err := io.ReadAll(res.Body) @@ -158,38 +240,34 @@ func (s *Service) parseResponse(logger log.Logger, res *http.Response, myRefID s return nil, fmt.Errorf("request failed, status: %s", res.Status) } + frames := data.Frames{} + var responseData []OpenTsdbResponse - err = json.Unmarshal(body, &responseData) - if err != nil { - logger.Info("Failed to unmarshal opentsdb response", "error", err, "status", res.Status, "body", string(body)) - return nil, err - } + var responseData24 []OpenTsdbResponse24 + if tsdbVersion == 4 { + err = json.Unmarshal(body, &responseData24) + if err != nil { + logger.Info("Failed to unmarshal opentsdb response", "error", err, "status", res.Status, "body", string(body)) + return nil, err + } - frames := data.Frames{} - for _, val := range responseData { - labels := data.Labels{} - for label, value := range val.Tags { - labels[label] = value + frames = parseResponse24(responseData24, refID, frames) + } else { + err = json.Unmarshal(body, &responseData) + if err != nil { + logger.Info("Failed to unmarshal opentsdb response", "error", err, "status", res.Status, "body", string(body)) + return nil, err } - frame := data.NewFrameOfFieldTypes(val.Metric, len(val.DataPoints), 
data.FieldTypeTime, data.FieldTypeFloat64) - frame.Meta = &data.FrameMeta{Type: data.FrameTypeTimeSeriesMulti, TypeVersion: data.FrameTypeVersion{0, 1}} - frame.RefID = myRefID - timeField := frame.Fields[0] - timeField.Name = data.TimeSeriesTimeFieldName - dataField := frame.Fields[1] - dataField.Name = "value" - dataField.Labels = labels - - points := val.DataPoints - for i, point := range points { - frame.SetRow(i, time.Unix(int64(point[0]), 0).UTC(), point[1]) + frames, err = parseResponseLT24(responseData, refID, frames) + if err != nil { + return nil, err } - frames = append(frames, frame) } - result := resp.Responses[myRefID] + + result := resp.Responses[refID] result.Frames = frames - resp.Responses[myRefID] = result + resp.Responses[refID] = result return resp, nil } diff --git a/pkg/tsdb/opentsdb/opentsdb_test.go b/pkg/tsdb/opentsdb/opentsdb_test.go index 6c5e142c250c5..44c21f6df153b 100644 --- a/pkg/tsdb/opentsdb/opentsdb_test.go +++ b/pkg/tsdb/opentsdb/opentsdb_test.go @@ -33,12 +33,13 @@ func TestOpenTsdbExecutor(t *testing.T) { t.Run("Parse response should handle invalid JSON", func(t *testing.T) { response := `{ invalid }` - result, err := service.parseResponse(logger, &http.Response{Body: io.NopCloser(strings.NewReader(response))}, "A") + tsdbVersion := float32(4) + result, err := service.parseResponse(logger, &http.Response{Body: io.NopCloser(strings.NewReader(response))}, "A", tsdbVersion) require.Nil(t, result) require.Error(t, err) }) - t.Run("Parse response should handle JSON", func(t *testing.T) { + t.Run("Parse response should handle JSON (v2.4 and above)", func(t *testing.T) { response := ` [ { @@ -57,7 +58,7 @@ func TestOpenTsdbExecutor(t *testing.T) { data.NewField("Time", nil, []time.Time{ time.Date(2014, 7, 16, 20, 55, 46, 0, time.UTC), }), - data.NewField("value", map[string]string{"env": "prod", "app": "grafana"}, []float64{ + data.NewField("test", map[string]string{"env": "prod", "app": "grafana"}, []float64{ 50}), ) testFrame.Meta = &data.FrameMeta{ @@ -65,10 +66,124 @@ func TestOpenTsdbExecutor(t *testing.T) { TypeVersion: data.FrameTypeVersion{0, 1}, } testFrame.RefID = "A" + tsdbVersion := float32(4) resp := http.Response{Body: io.NopCloser(strings.NewReader(response))} resp.StatusCode = 200 - result, err := service.parseResponse(logger, &resp, "A") + result, err := service.parseResponse(logger, &resp, "A", tsdbVersion) + require.NoError(t, err) + + frame := result.Responses["A"] + + if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" { + t.Errorf("Result mismatch (-want +got):\n%s", diff) + } + }) + + t.Run("Parse response should handle JSON (v2.3 and below)", func(t *testing.T) { + response := ` + [ + { + "metric": "test", + "dps": { + "1405544146": 50.0 + }, + "tags" : { + "env": "prod", + "app": "grafana" + } + } + ]` + + testFrame := data.NewFrame("test", + data.NewField("Time", nil, []time.Time{ + time.Date(2014, 7, 16, 20, 55, 46, 0, time.UTC), + }), + data.NewField("test", map[string]string{"env": "prod", "app": "grafana"}, []float64{ + 50}), + ) + testFrame.Meta = &data.FrameMeta{ + Type: data.FrameTypeTimeSeriesMulti, + TypeVersion: data.FrameTypeVersion{0, 1}, + } + testFrame.RefID = "A" + tsdbVersion := float32(3) + + resp := http.Response{Body: io.NopCloser(strings.NewReader(response))} + resp.StatusCode = 200 + result, err := service.parseResponse(logger, &resp, "A", tsdbVersion) + require.NoError(t, err) + + frame := result.Responses["A"] + + if diff := cmp.Diff(testFrame, frame.Frames[0], 
data.FrameTestCompareOptions()...); diff != "" { + t.Errorf("Result mismatch (-want +got):\n%s", diff) + } + }) + + t.Run("Parse response should handle unordered JSON (v2.3 and below)", func(t *testing.T) { + response := ` + [ + { + "metric": "test", + "dps": { + "1405094109": 55.0, + "1405124146": 124.0, + "1405124212": 1284.0, + "1405019246": 50.0, + "1408352146": 812.0, + "1405534153": 153.0, + "1405124397": 9035.0, + "1401234774": 215.0, + "1409712532": 356.0, + "1491523811": 8953.0, + "1405239823": 258.0 + }, + "tags" : { + "env": "prod", + "app": "grafana" + } + } + ]` + + testFrame := data.NewFrame("test", + data.NewField("Time", nil, []time.Time{ + time.Date(2014, 5, 27, 23, 52, 54, 0, time.UTC), + time.Date(2014, 7, 10, 19, 7, 26, 0, time.UTC), + time.Date(2014, 7, 11, 15, 55, 9, 0, time.UTC), + time.Date(2014, 7, 12, 0, 15, 46, 0, time.UTC), + time.Date(2014, 7, 12, 0, 16, 52, 0, time.UTC), + time.Date(2014, 7, 12, 0, 19, 57, 0, time.UTC), + time.Date(2014, 7, 13, 8, 23, 43, 0, time.UTC), + time.Date(2014, 7, 16, 18, 9, 13, 0, time.UTC), + time.Date(2014, 8, 18, 8, 55, 46, 0, time.UTC), + time.Date(2014, 9, 3, 2, 48, 52, 0, time.UTC), + time.Date(2017, 4, 7, 0, 10, 11, 0, time.UTC), + }), + data.NewField("test", map[string]string{"env": "prod", "app": "grafana"}, []float64{ + 215, + 50, + 55, + 124, + 1284, + 9035, + 258, + 153, + 812, + 356, + 8953, + }), + ) + testFrame.Meta = &data.FrameMeta{ + Type: data.FrameTypeTimeSeriesMulti, + TypeVersion: data.FrameTypeVersion{0, 1}, + } + testFrame.RefID = "A" + tsdbVersion := float32(3) + + resp := http.Response{Body: io.NopCloser(strings.NewReader(response))} + resp.StatusCode = 200 + result, err := service.parseResponse(logger, &resp, "A", tsdbVersion) require.NoError(t, err) frame := result.Responses["A"] @@ -99,7 +214,7 @@ func TestOpenTsdbExecutor(t *testing.T) { data.NewField("Time", nil, []time.Time{ time.Date(2014, 7, 16, 20, 55, 46, 0, time.UTC), }), - data.NewField("value", map[string]string{"env": "prod", "app": "grafana"}, []float64{ + data.NewField("test", map[string]string{"env": "prod", "app": "grafana"}, []float64{ 50}), ) testFrame.Meta = &data.FrameMeta{ @@ -108,9 +223,11 @@ func TestOpenTsdbExecutor(t *testing.T) { } testFrame.RefID = myRefid + tsdbVersion := float32(4) + resp := http.Response{Body: io.NopCloser(strings.NewReader(response))} resp.StatusCode = 200 - result, err := service.parseResponse(logger, &resp, myRefid) + result, err := service.parseResponse(logger, &resp, myRefid, tsdbVersion) require.NoError(t, err) if diff := cmp.Diff(testFrame, result.Responses[myRefid].Frames[0], data.FrameTestCompareOptions()...); diff != "" { diff --git a/pkg/tsdb/opentsdb/types.go b/pkg/tsdb/opentsdb/types.go index 171a24e20670a..19d2ba7519794 100644 --- a/pkg/tsdb/opentsdb/types.go +++ b/pkg/tsdb/opentsdb/types.go @@ -6,8 +6,17 @@ type OpenTsdbQuery struct { Queries []map[string]any `json:"queries"` } +type OpenTsdbCommon struct { + Metric string `json:"metric"` + Tags map[string]string `json:"tags"` +} + type OpenTsdbResponse struct { - Metric string `json:"metric"` - Tags map[string]string `json:"tags"` - DataPoints [][]float64 `json:"dps"` + OpenTsdbCommon + DataPoints map[string]float64 `json:"dps"` +} + +type OpenTsdbResponse24 struct { + OpenTsdbCommon + DataPoints [][]float64 `json:"dps"` } diff --git a/public/app/plugins/datasource/opentsdb/components/OpenTsdbDetails.tsx b/public/app/plugins/datasource/opentsdb/components/OpenTsdbDetails.tsx index ba45bde7b2939..2c913d550146c 100644 --- 
a/public/app/plugins/datasource/opentsdb/components/OpenTsdbDetails.tsx +++ b/public/app/plugins/datasource/opentsdb/components/OpenTsdbDetails.tsx @@ -9,6 +9,7 @@ const tsdbVersions = [ { label: '<=2.1', value: 1 }, { label: '==2.2', value: 2 }, { label: '==2.3', value: 3 }, + { label: '==2.4', value: 4 }, ]; const tsdbResolutions = [ From c5250311fce1c3c62c5f993b4e9e8ddf7e1eafe8 Mon Sep 17 00:00:00 2001 From: Matias Chomicki Date: Tue, 25 Feb 2025 15:06:48 +0000 Subject: [PATCH 26/33] Logs: Re-run Loki queries in Explore when direction and sort order are changed (#99994) --- public/app/features/explore/Logs/Logs.test.tsx | 9 ++++++--- public/app/features/explore/Logs/Logs.tsx | 6 +++--- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/public/app/features/explore/Logs/Logs.test.tsx b/public/app/features/explore/Logs/Logs.test.tsx index 1749d92d48dfa..958bad54ad41c 100644 --- a/public/app/features/explore/Logs/Logs.test.tsx +++ b/public/app/features/explore/Logs/Logs.test.tsx @@ -48,17 +48,18 @@ jest.mock('../state/explorePane', () => ({ changePanelState: (exploreId: string, panel: 'logs', panelState: {} | ExploreLogsPanelState) => { return fakeChangePanelState(exploreId, panel, panelState); }, - changeQueries: (args: { queries: DataQuery[]; exploreId: string | undefined }) => { - return fakeChangeQueries(args); - }, })); const fakeChangeQueries = jest.fn().mockReturnValue({ type: 'fakeChangeQueries' }); +const fakeRunQueries = jest.fn().mockReturnValue({ type: 'fakeRunQueries' }); jest.mock('../state/query', () => ({ ...jest.requireActual('../state/query'), changeQueries: (args: { queries: DataQuery[]; exploreId: string | undefined }) => { return fakeChangeQueries(args); }, + runQueries: (args: { queries: DataQuery[]; exploreId: string | undefined }) => { + return fakeRunQueries(args); + }, })); describe('Logs', () => { @@ -388,6 +389,7 @@ describe('Logs', () => { expect(logRows.length).toBe(3); expect(logRows[0].textContent).toContain('log message 1'); expect(logRows[2].textContent).toContain('log message 3'); + expect(fakeRunQueries).not.toHaveBeenCalled(); }); it('should sync the query direction when changing the order of loki queries', async () => { @@ -399,6 +401,7 @@ describe('Logs', () => { exploreId: 'left', queries: [{ ...query, direction: LokiQueryDirection.Forward }], }); + expect(fakeRunQueries).toHaveBeenCalledWith({ exploreId: 'left' }); }); it('should not change the query direction when changing the order of non-loki queries', async () => { diff --git a/public/app/features/explore/Logs/Logs.tsx b/public/app/features/explore/Logs/Logs.tsx index ae6c8e1f7d0cc..feefbb1bee73d 100644 --- a/public/app/features/explore/Logs/Logs.tsx +++ b/public/app/features/explore/Logs/Logs.tsx @@ -75,7 +75,7 @@ import { import { useContentOutlineContext } from '../ContentOutline/ContentOutlineContext'; import { getUrlStateFromPaneState } from '../hooks/useStateSync'; import { changePanelState } from '../state/explorePane'; -import { changeQueries } from '../state/query'; +import { changeQueries, runQueries } from '../state/query'; import { LogsFeedback } from './LogsFeedback'; import { LogsMetaRow } from './LogsMetaRow'; @@ -478,12 +478,11 @@ const UnthemedLogs: React.FunctionComponent = (props: Props) => { if (query.datasource?.type !== 'loki' || !isLokiQuery(query)) { return query; } - hasLokiQueries = true; - if (query.direction === LokiQueryDirection.Scan) { // Don't override Scan. 
When the direction is Scan it means that the user specifically assigned this direction to the query. return query; } + hasLokiQueries = true; const newDirection = newSortOrder === LogsSortOrder.Ascending ? LokiQueryDirection.Forward : LokiQueryDirection.Backward; if (newDirection !== query.direction) { @@ -494,6 +493,7 @@ const UnthemedLogs: React.FunctionComponent = (props: Props) => { if (hasLokiQueries) { dispatch(changeQueries({ exploreId, queries: newQueries })); + dispatch(runQueries({ exploreId })); } } From 53e91fd5e8703b20c360c2a42f87dfb10b4b55f1 Mon Sep 17 00:00:00 2001 From: Georges Chaudy Date: Tue, 25 Feb 2025 17:28:31 +0100 Subject: [PATCH 27/33] unistore: close event stream on context cancelation (#101293) * add tests for broacaster * fix sql notifier not closing the stream * fix sql notifier not closing the stream * close sub * fix broadcaster test * fix broadcaster test * suggestion --- .../unified/resource/broadcaster_test.go | 38 +++++++++++++++++++ pkg/storage/unified/resource/server.go | 5 +-- pkg/storage/unified/sql/notifier_sql.go | 2 + pkg/storage/unified/sql/notifier_sql_test.go | 37 ++++++++++++++++++ .../unified/testing/storage_backend.go | 10 ++++- 5 files changed, 86 insertions(+), 6 deletions(-) diff --git a/pkg/storage/unified/resource/broadcaster_test.go b/pkg/storage/unified/resource/broadcaster_test.go index 8eedfa01ce564..3ae056a8a772a 100644 --- a/pkg/storage/unified/resource/broadcaster_test.go +++ b/pkg/storage/unified/resource/broadcaster_test.go @@ -104,3 +104,41 @@ func TestCache(t *testing.T) { // slice should return all values require.Equal(t, []int{4, 5, 6, 7, 8, 9, 10, 11, 12, 13}, c.Slice()) } + +func TestBroadcaster(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + ch := make(chan int) + input := []int{1, 2, 3} + go func() { + for _, v := range input { + ch <- v + } + }() + t.Cleanup(func() { + close(ch) + }) + + b, err := NewBroadcaster(ctx, func(out chan<- int) error { + go func() { + for v := range ch { + out <- v + } + }() + return nil + }) + require.NoError(t, err) + + sub, err := b.Subscribe(ctx) + require.NoError(t, err) + + for _, expected := range input { + v, ok := <-sub + require.True(t, ok) + require.Equal(t, expected, v) + } + + // cancel the context should close the stream + cancel() + _, ok := <-sub + require.False(t, ok) +} diff --git a/pkg/storage/unified/resource/server.go b/pkg/storage/unified/resource/server.go index 7853f9125dbed..148de06595a71 100644 --- a/pkg/storage/unified/resource/server.go +++ b/pkg/storage/unified/resource/server.go @@ -919,10 +919,7 @@ func (s *server) initWatcher() error { return err } go func() { - for { - // pipe all events - v := <-events - + for v := range events { if v == nil { s.log.Error("received nil event") continue diff --git a/pkg/storage/unified/sql/notifier_sql.go b/pkg/storage/unified/sql/notifier_sql.go index a4d0a008fccfa..9940f7c882f4a 100644 --- a/pkg/storage/unified/sql/notifier_sql.go +++ b/pkg/storage/unified/sql/notifier_sql.go @@ -120,6 +120,8 @@ func (p *pollingNotifier) poller(ctx context.Context, since groupResourceRV, str for { select { + case <-ctx.Done(): + return case <-p.done: return case <-t.C: diff --git a/pkg/storage/unified/sql/notifier_sql_test.go b/pkg/storage/unified/sql/notifier_sql_test.go index 693d63ea4851e..07fcfac3365cb 100644 --- a/pkg/storage/unified/sql/notifier_sql_test.go +++ b/pkg/storage/unified/sql/notifier_sql_test.go @@ -357,4 +357,41 @@ func TestPollingNotifier(t *testing.T) { t.Fatal("timeout waiting for events 
channel to close") } }) + + t.Run("stops polling when context is cancelled", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithCancel(context.Background()) + + cfg := &pollingNotifierConfig{ + dialect: sqltemplate.SQLite, + pollingInterval: 10 * time.Millisecond, + watchBufferSize: 10, + log: log.NewNopLogger(), + tracer: noop.NewTracerProvider().Tracer("test"), + batchLock: &batchLock{}, + listLatestRVs: func(ctx context.Context) (groupResourceRV, error) { return nil, nil }, + historyPoll: func(ctx context.Context, grp string, res string, since int64) ([]*historyPollResponse, error) { + return nil, nil + }, + done: make(chan struct{}), + } + + notifier, err := newPollingNotifier(cfg) + require.NoError(t, err) + require.NotNil(t, notifier) + + events, err := notifier.notify(ctx) + require.NoError(t, err) + require.NotNil(t, events) + + cancel() + + select { + case _, ok := <-events: + require.False(t, ok, "events channel should be closed") + case <-time.After(time.Second): + t.Fatal("timeout waiting for events channel to close") + } + }) } diff --git a/pkg/storage/unified/testing/storage_backend.go b/pkg/storage/unified/testing/storage_backend.go index d34cf3c0c71bf..d19f9d2b05a66 100644 --- a/pkg/storage/unified/testing/storage_backend.go +++ b/pkg/storage/unified/testing/storage_backend.go @@ -51,7 +51,7 @@ func RunStorageBackendTest(t *testing.T, newBackend NewBackendFunc, opts *TestOp fn func(*testing.T, resource.StorageBackend) }{ {TestHappyPath, runTestIntegrationBackendHappyPath}, - {TestWatchWriteEvents, runTestIntegrationBackendWatchWriteEventsFromLastest}, + {TestWatchWriteEvents, runTestIntegrationBackendWatchWriteEvents}, {TestList, runTestIntegrationBackendList}, {TestBlobSupport, runTestIntegrationBlobSupport}, {TestGetResourceStats, runTestIntegrationBackendGetResourceStats}, @@ -272,7 +272,7 @@ func runTestIntegrationBackendGetResourceStats(t *testing.T, backend resource.St }) } -func runTestIntegrationBackendWatchWriteEventsFromLastest(t *testing.T, backend resource.StorageBackend) { +func runTestIntegrationBackendWatchWriteEvents(t *testing.T, backend resource.StorageBackend) { ctx := testutil.NewTestContext(t, time.Now().Add(5*time.Second)) // Create a few resources before initing the watch @@ -287,6 +287,12 @@ func runTestIntegrationBackendWatchWriteEventsFromLastest(t *testing.T, backend _, err = writeEvent(ctx, backend, "item2", resource.WatchEvent_ADDED) require.NoError(t, err) require.Equal(t, "item2", (<-stream).Key.Name) + + // Should close the stream + ctx.Cancel() + + _, ok := <-stream + require.False(t, ok) } func runTestIntegrationBackendList(t *testing.T, backend resource.StorageBackend) { From bc00462875375ec26d6b0c6a81625df2f413418e Mon Sep 17 00:00:00 2001 From: Adela Almasan <88068998+adela-almasan@users.noreply.github.com> Date: Tue, 25 Feb 2025 10:52:06 -0600 Subject: [PATCH 28/33] Table: Enable actions option (#101069) --- public/app/plugins/panel/table/module.tsx | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/public/app/plugins/panel/table/module.tsx b/public/app/plugins/panel/table/module.tsx index f930e45de19b9..0d6466426da81 100644 --- a/public/app/plugins/panel/table/module.tsx +++ b/public/app/plugins/panel/table/module.tsx @@ -6,6 +6,7 @@ import { ReducerID, standardEditorsRegistry, identityOverrideProcessor, + FieldConfigProperty, } from '@grafana/data'; import { TableCellOptions, TableCellDisplayMode, defaultTableFieldOptions, TableCellHeight } from '@grafana/schema'; @@ -23,6 +24,11 @@ export const plugin = new 
PanelPlugin(TablePanel) .setPanelChangeHandler(tablePanelChangedHandler) .setMigrationHandler(tableMigrationHandler) .useFieldConfig({ + standardOptions: { + [FieldConfigProperty.Actions]: { + hideFromDefaults: false, + }, + }, useCustomConfig: (builder) => { builder .addNumberInput({ From 142a100915fd0362ecd98469b81b143fd3491b3e Mon Sep 17 00:00:00 2001 From: Ben Sully Date: Tue, 25 Feb 2025 17:07:17 +0000 Subject: [PATCH 29/33] fix(timeseries): allow annotations without color/isRegion/timeEnd (#101301) --- .../panel/timeseries/plugins/AnnotationsPlugin2.tsx | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/public/app/plugins/panel/timeseries/plugins/AnnotationsPlugin2.tsx b/public/app/plugins/panel/timeseries/plugins/AnnotationsPlugin2.tsx index 7a4c2b2db58b8..f01152e2f88cb 100644 --- a/public/app/plugins/panel/timeseries/plugins/AnnotationsPlugin2.tsx +++ b/public/app/plugins/panel/timeseries/plugins/AnnotationsPlugin2.tsx @@ -142,7 +142,7 @@ export const AnnotationsPlugin2 = ({ let yKey = config.scales[1].props.scaleKey; for (let i = 0; i < frame.length; i++) { - let color = getColorByName(vals.color[i] || DEFAULT_ANNOTATION_COLOR_HEX8); + let color = getColorByName(vals.color?.[i] || DEFAULT_ANNOTATION_COLOR_HEX8); let x0 = u.valToPos(vals.xMin[i], xKey, true); let x1 = u.valToPos(vals.xMax[i], xKey, true); @@ -173,12 +173,12 @@ export const AnnotationsPlugin2 = ({ ctx.setLineDash([5, 5]); for (let i = 0; i < vals.time.length; i++) { - let color = getColorByName(vals.color[i] || DEFAULT_ANNOTATION_COLOR_HEX8); + let color = getColorByName(vals.color?.[i] || DEFAULT_ANNOTATION_COLOR_HEX8); let x0 = u.valToPos(vals.time[i], 'x', true); renderLine(ctx, y0, y1, x0, color); - if (vals.isRegion[i]) { + if (vals.isRegion?.[i]) { let x1 = u.valToPos(vals.timeEnd[i], 'x', true); renderLine(ctx, y0, y1, x1, color); @@ -216,14 +216,14 @@ export const AnnotationsPlugin2 = ({ let markers: React.ReactNode[] = []; for (let i = 0; i < vals.time.length; i++) { - let color = getColorByName(vals.color[i] || DEFAULT_ANNOTATION_COLOR); + let color = getColorByName(vals.color?.[i] || DEFAULT_ANNOTATION_COLOR); let left = Math.round(plot.valToPos(vals.time[i], 'x')) || 0; // handles -0 let style: React.CSSProperties | null = null; let className = ''; let isVisible = true; - if (vals.isRegion[i]) { - let right = Math.round(plot.valToPos(vals.timeEnd[i], 'x')) || 0; // handles -0 + if (vals.isRegion?.[i]) { + let right = Math.round(plot.valToPos(vals.timeEnd?.[i], 'x')) || 0; // handles -0 isVisible = left < plot.rect.width && right > 0; From 4538c8cad96ed6e2817152de0edf97c789ab8788 Mon Sep 17 00:00:00 2001 From: Eric Leijonmarck Date: Tue, 25 Feb 2025 17:30:58 +0000 Subject: [PATCH 30/33] DS proxy: Remove ft `datasourceProxyDisableRBAC` and logic (#101239) delete ft datasourceproxy --- .../grafana-data/src/types/featureToggles.gen.ts | 1 - pkg/api/pluginproxy/ds_proxy.go | 12 ++---------- pkg/services/featuremgmt/registry.go | 8 -------- pkg/services/featuremgmt/toggles_gen.csv | 1 - pkg/services/featuremgmt/toggles_gen.go | 4 ---- pkg/services/featuremgmt/toggles_gen.json | 1 + 6 files changed, 3 insertions(+), 24 deletions(-) diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index b64ba1ed23832..85ba3f238c5ce 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -172,7 +172,6 @@ export interface FeatureToggles { 
newDashboardSharingComponent?: boolean; alertingListViewV2?: boolean; dashboardRestore?: boolean; - datasourceProxyDisableRBAC?: boolean; alertingDisableSendAlertsExternal?: boolean; preserveDashboardStateWhenNavigating?: boolean; alertingCentralAlertHistory?: boolean; diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index c8f158138bfb5..ce8bdf2770fc4 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -306,16 +306,8 @@ func (proxy *DataSourceProxy) validateRequest() error { continue } - if proxy.features.IsEnabled(proxy.ctx.Req.Context(), featuremgmt.FlagDatasourceProxyDisableRBAC) { - // TODO(aarongodin): following logic can be removed with FlagDatasourceProxyDisableRBAC as it is covered by - // proxy.hasAccessToRoute(..) - if route.ReqRole.IsValid() && !proxy.ctx.HasUserRole(route.ReqRole) { - return errors.New("plugin proxy route access denied") - } - } else { - if !proxy.hasAccessToRoute(route) { - return errors.New("plugin proxy route access denied") - } + if !proxy.hasAccessToRoute(route) { + return errors.New("plugin proxy route access denied") } proxy.matchedRoute = route diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index 937cd429810eb..a3a8a86191d49 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -1170,14 +1170,6 @@ var ( HideFromAdminPage: true, Expression: "false", // enabled by default }, - { - Name: "datasourceProxyDisableRBAC", - Description: "Disables applying a plugin route's ReqAction field to authorization", - Stage: FeatureStageGeneralAvailability, - Owner: identityAccessTeam, - HideFromDocs: true, - Expression: "false", - }, { Name: "alertingDisableSendAlertsExternal", Description: "Disables the ability to send alerts to an external Alertmanager datasource.", diff --git a/pkg/services/featuremgmt/toggles_gen.csv b/pkg/services/featuremgmt/toggles_gen.csv index f6c7593139201..065323e011e67 100644 --- a/pkg/services/featuremgmt/toggles_gen.csv +++ b/pkg/services/featuremgmt/toggles_gen.csv @@ -153,7 +153,6 @@ logsExploreTableDefaultVisualization,experimental,@grafana/observability-logs,fa newDashboardSharingComponent,GA,@grafana/sharing-squad,false,false,true alertingListViewV2,experimental,@grafana/alerting-squad,false,false,true dashboardRestore,experimental,@grafana/search-and-storage,false,false,false -datasourceProxyDisableRBAC,GA,@grafana/identity-access-team,false,false,false alertingDisableSendAlertsExternal,experimental,@grafana/alerting-squad,false,false,false preserveDashboardStateWhenNavigating,experimental,@grafana/dashboards-squad,false,false,false alertingCentralAlertHistory,experimental,@grafana/alerting-squad,false,false,true diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index eb23fb7815bd4..b2d704e7a103f 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -623,10 +623,6 @@ const ( // Enables deleted dashboard restore feature FlagDashboardRestore = "dashboardRestore" - // FlagDatasourceProxyDisableRBAC - // Disables applying a plugin route's ReqAction field to authorization - FlagDatasourceProxyDisableRBAC = "datasourceProxyDisableRBAC" - // FlagAlertingDisableSendAlertsExternal // Disables the ability to send alerts to an external Alertmanager datasource. 
FlagAlertingDisableSendAlertsExternal = "alertingDisableSendAlertsExternal" diff --git a/pkg/services/featuremgmt/toggles_gen.json b/pkg/services/featuremgmt/toggles_gen.json index fcf2d51730408..0d6059d55208c 100644 --- a/pkg/services/featuremgmt/toggles_gen.json +++ b/pkg/services/featuremgmt/toggles_gen.json @@ -1277,6 +1277,7 @@ "name": "datasourceProxyDisableRBAC", "resourceVersion": "1720021873452", "creationTimestamp": "2024-05-21T13:05:16Z", + "deletionTimestamp": "2025-02-24T17:23:43Z", "annotations": { "grafana.app/updatedTimestamp": "2024-07-03 15:51:13.452477 +0000 UTC" } From cd7a1d515c27d4ac11148102023f7d91e4f9704f Mon Sep 17 00:00:00 2001 From: Adela Almasan <88068998+adela-almasan@users.noreply.github.com> Date: Tue, 25 Feb 2025 13:54:07 -0600 Subject: [PATCH 31/33] Canvas: Fix oneClick migration (#101311) --- public/app/plugins/panel/canvas/migrations.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/canvas/migrations.ts b/public/app/plugins/panel/canvas/migrations.ts index 479a540bff738..2292359802995 100644 --- a/public/app/plugins/panel/canvas/migrations.ts +++ b/public/app/plugins/panel/canvas/migrations.ts @@ -64,9 +64,9 @@ export const canvasMigrationHandler = (panel: PanelModel): Partial => { const root = panel.options?.root; if (root?.elements) { for (const element of root.elements) { - if (element.oneClickMode === OneClickMode.Link || element.oneClickLinks) { + if ((element.oneClickMode === OneClickMode.Link || element.oneClickLinks) && element.links?.length) { element.links[0].oneClick = true; - } else if (element.oneClickMode === OneClickMode.Action) { + } else if (element.oneClickMode === OneClickMode.Action && element.actions?.length) { element.actions[0].oneClick = true; } From 2681a93b478f6783f975803372374da1af49a7d7 Mon Sep 17 00:00:00 2001 From: Yuri Tseretyan Date: Tue, 25 Feb 2025 15:05:29 -0500 Subject: [PATCH 32/33] Fix permissions for Update Alerting Module action (#101223) * add id-token permission * use alerting-team app --- .github/workflows/alerting-update-module.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/alerting-update-module.yml b/.github/workflows/alerting-update-module.yml index eece934525e72..1d919f3dc4b55 100644 --- a/.github/workflows/alerting-update-module.yml +++ b/.github/workflows/alerting-update-module.yml @@ -13,6 +13,7 @@ jobs: permissions: contents: write pull-requests: write + id-token: write steps: - name: Checkout repository @@ -93,8 +94,8 @@ jobs: uses: grafana/shared-workflows/actions/get-vault-secrets@28361cdb22223e5f1e34358c86c20908e7248760 # 1.1.0 with: repo_secrets: | - GITHUB_APP_ID=github-app:app-id - GITHUB_APP_PRIVATE_KEY=github-app:private-key + GITHUB_APP_ID=alerting-team:app-id + GITHUB_APP_PRIVATE_KEY=alerting-team:private-key - name: "Generate token" id: generate_token @@ -127,4 +128,4 @@ jobs: if: steps.create-pr.outputs.pull-request-url != '' run: | echo "## Pull Request Created" >> $GITHUB_STEP_SUMMARY - echo "🔗 [View Pull Request](${{ steps.create-pr.outputs.pull-request-url }})" >> $GITHUB_STEP_SUMMARY \ No newline at end of file + echo "🔗 [View Pull Request](${{ steps.create-pr.outputs.pull-request-url }})" >> $GITHUB_STEP_SUMMARY From a7ecb19c3149b3c43bf9cf08a10f4842dbb59975 Mon Sep 17 00:00:00 2001 From: Jev Forsberg <46619047+baldm0mma@users.noreply.github.com> Date: Tue, 25 Feb 2025 16:23:28 -0700 Subject: [PATCH 33/33] Chore: Update base alpine docker image (#101320) * baldm0mma/ update base image arg * 
baldm0mma/ update alpine image * baldm0mma/ skip failing test * baldm0mma/ specifiy patch * baldm0mma/ flaky test? --- .drone.yml | 82 ++++++++++++++++----------------- Dockerfile | 2 +- scripts/drone/utils/images.star | 2 +- 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/.drone.yml b/.drone.yml index 6883dcac6b60d..491c607a83b6b 100644 --- a/.drone.yml +++ b/.drone.yml @@ -18,7 +18,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd @@ -69,7 +69,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - go install github.com/bazelbuild/buildtools/buildifier@latest @@ -112,7 +112,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -170,7 +170,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -309,7 +309,7 @@ steps: path: /github-app - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -427,7 +427,7 @@ steps: path: /github-app - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -517,7 +517,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd @@ -642,7 +642,7 @@ steps: path: /github-app - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - mkdir -p bin @@ -713,7 +713,7 @@ steps: -a targz:grafana:linux/arm/v7 -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu -a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7 -a docker:grafana:linux/arm/v7:ubuntu --go-version=1.23.5 --yarn-cache=$$YARN_CACHE_FOLDER - --build-id=$$DRONE_BUILD_NUMBER --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.20.6 + --build-id=$$DRONE_BUILD_NUMBER --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.21.3 --tag-format='{{ .version_base }}-{{ .buildID }}-{{ .arch }}' --ubuntu-tag-format='{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' --verify='false' --grafana-dir=$$PWD > packages.txt @@ -770,7 +770,7 @@ steps: GF_APP_MODE: development GF_SERVER_HTTP_PORT: "3001" GF_SERVER_ROUTER_LOGGING: "1" - image: alpine:3.20.6 + image: alpine:3.21.3 name: grafana-server - commands: - ./bin/build e2e-tests --port 3001 --suite dashboards-suite @@ -1114,7 +1114,7 @@ steps: name: compile-build-cmd - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -1280,7 +1280,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1637,7 +1637,7 @@ 
services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1708,7 +1708,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1766,7 +1766,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1842,7 +1842,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -1922,7 +1922,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd @@ -1988,7 +1988,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -2063,7 +2063,7 @@ steps: path: /github-app - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - mkdir -p bin @@ -2133,7 +2133,7 @@ steps: -a targz:grafana:linux/arm/v7 -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu -a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7 -a docker:grafana:linux/arm/v7:ubuntu --go-version=1.23.5 --yarn-cache=$$YARN_CACHE_FOLDER - --build-id=$$DRONE_BUILD_NUMBER --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.20.6 + --build-id=$$DRONE_BUILD_NUMBER --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.21.3 --tag-format='{{ .version_base }}-{{ .buildID }}-{{ .arch }}' --ubuntu-tag-format='{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' --verify='false' --grafana-dir=$$PWD > packages.txt @@ -2194,7 +2194,7 @@ steps: GF_APP_MODE: development GF_SERVER_HTTP_PORT: "3001" GF_SERVER_ROUTER_LOGGING: "1" - image: alpine:3.20.6 + image: alpine:3.21.3 name: grafana-server - commands: - ./bin/build e2e-tests --port 3001 --suite dashboards-suite @@ -2611,7 +2611,7 @@ steps: name: compile-build-cmd - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -2856,7 +2856,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -2912,7 +2912,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -2986,7 +2986,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -3064,7 +3064,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags 
-static' ./pkg/build/cmd @@ -3169,7 +3169,7 @@ steps: name: compile-build-cmd - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -3372,7 +3372,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - mkdir -p bin @@ -3504,7 +3504,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - mkdir -p bin @@ -3994,7 +3994,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.20.6 + ALPINE_BASE: alpine:3.21.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4069,7 +4069,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.20.6 + ALPINE_BASE: alpine:3.21.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4186,7 +4186,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.20.6 + ALPINE_BASE: alpine:3.21.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4288,7 +4288,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -4342,7 +4342,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -4423,7 +4423,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.20.6 + ALPINE_BASE: alpine:3.21.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4567,7 +4567,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.20.6 + ALPINE_BASE: alpine:3.21.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4695,7 +4695,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.20.6 + ALPINE_BASE: alpine:3.21.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4838,7 +4838,7 @@ steps: name: grabpl - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.20.6 + image: alpine:3.21.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -5262,7 +5262,7 @@ steps: - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM node:22-bookworm - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM google/cloud-sdk:431.0.0 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM grafana/grafana-ci-deploy:1.3.3 - - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM alpine:3.20.6 + - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM alpine:3.21.3 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM ubuntu:22.04 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM byrnedo/alpine-curl:0.1.8 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM plugins/slack @@ -5300,7 +5300,7 @@ steps: - trivy --exit-code 1 --severity HIGH,CRITICAL node:22-bookworm - trivy --exit-code 1 --severity HIGH,CRITICAL google/cloud-sdk:431.0.0 - trivy --exit-code 1 --severity HIGH,CRITICAL grafana/grafana-ci-deploy:1.3.3 - - trivy --exit-code 1 --severity 
HIGH,CRITICAL alpine:3.20.6 + - trivy --exit-code 1 --severity HIGH,CRITICAL alpine:3.21.3 - trivy --exit-code 1 --severity HIGH,CRITICAL ubuntu:22.04 - trivy --exit-code 1 --severity HIGH,CRITICAL byrnedo/alpine-curl:0.1.8 - trivy --exit-code 1 --severity HIGH,CRITICAL plugins/slack @@ -5564,6 +5564,6 @@ kind: secret name: gcr_credentials --- kind: signature -hmac: 558d477c002eb799c23f6631aafc7df933518e445e59f34ceb989e73f4dc60bc +hmac: f16a4715c7a4e6a4ffb1fe041b42fb966310fd5da455239614e9a239493aff82 ... diff --git a/Dockerfile b/Dockerfile index 9914a48dce0c9..09cba46da26af 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ # to maintain formatting of multiline commands in vscode, add the following to settings.json: # "docker.languageserver.formatter.ignoreMultilineInstructions": true -ARG BASE_IMAGE=alpine:3.20 +ARG BASE_IMAGE=alpine:3.21 ARG JS_IMAGE=node:22-alpine ARG JS_PLATFORM=linux/amd64 ARG GO_IMAGE=golang:1.23.5-alpine diff --git a/scripts/drone/utils/images.star b/scripts/drone/utils/images.star index af160dacd5aec..d51f9905df0d0 100644 --- a/scripts/drone/utils/images.star +++ b/scripts/drone/utils/images.star @@ -16,7 +16,7 @@ images = { "node_deb": "node:{}-bookworm".format(nodejs_version[:2]), "cloudsdk": "google/cloud-sdk:431.0.0", "publish": "grafana/grafana-ci-deploy:1.3.3", - "alpine": "alpine:3.20.6", + "alpine": "alpine:3.21.3", "ubuntu": "ubuntu:22.04", "curl": "byrnedo/alpine-curl:0.1.8", "plugins_slack": "plugins/slack",