, version: string) => {
- const search = new URLSearchParams(history.location.search);
- if (search.get('_v') === version) return;
- search.set('_v', version);
- const path =
- history.location.pathname +
- '?' +
- search.toString() +
- (history.location.hash ? '#' + history.location.hash : '');
- history.replace(path);
-};
diff --git a/src/plugins/share/common/url_service/locators/redirect/parse_search_params.ts b/src/plugins/share/common/url_service/locators/redirect/parse_search_params.ts
index a4711c30db5d0..c66bf56b1858f 100644
--- a/src/plugins/share/common/url_service/locators/redirect/parse_search_params.ts
+++ b/src/plugins/share/common/url_service/locators/redirect/parse_search_params.ts
@@ -22,7 +22,9 @@ import type { RedirectOptions } from './types';
* @param urlSearch Search part of URL path.
* @returns Parsed out locator ID, version, and locator params.
*/
-export function parseSearchParams(urlSearch: string): RedirectOptions {
+export function parseSearchParams<P extends SerializableRecord = unknown & SerializableRecord>(
+  urlSearch: string
+): RedirectOptions<P> {
const search = new URLSearchParams(urlSearch);
const id = search.get('l');
@@ -66,7 +68,7 @@ export function parseSearchParams(urlSearch: string): RedirectOptions {
throw new Error(message);
}
- let params: unknown & SerializableRecord;
+ let params: P;
try {
params = JSON.parse(paramsJson);
} catch {
diff --git a/src/plugins/telemetry/schema/oss_plugins.json b/src/plugins/telemetry/schema/oss_plugins.json
index ec17c9b9d1a3b..e04e83dc46feb 100644
--- a/src/plugins/telemetry/schema/oss_plugins.json
+++ b/src/plugins/telemetry/schema/oss_plugins.json
@@ -10091,12 +10091,6 @@
"description": "Non-default value of setting."
}
},
- "observability:profilingUseLegacyCo2Calculation": {
- "type": "boolean",
- "_meta": {
- "description": "Non-default value of setting."
- }
- },
"observability:profilingCostPervCPUPerHour": {
"type": "integer",
"_meta": {
diff --git a/src/plugins/unified_search/public/filter_bar/filter_item/filter_item.scss b/src/plugins/unified_search/public/filter_bar/filter_item/filter_item.scss
index 1c16adbfc8c13..362aec7264983 100644
--- a/src/plugins/unified_search/public/filter_bar/filter_item/filter_item.scss
+++ b/src/plugins/unified_search/public/filter_bar/filter_item/filter_item.scss
@@ -8,8 +8,8 @@
line-height: $euiSize;
border: none;
color: $euiTextColor;
- padding-top: $euiSizeM / 2 + 1px;
- padding-bottom: $euiSizeM / 2 + 1px;
+ padding-top: calc($euiSizeM / 2) + 1px;
+ padding-bottom: calc($euiSizeM / 2) + 1px;
white-space: normal; /* 1 */
&:not(.globalFilterItem-isDisabled) {
@@ -54,8 +54,8 @@
left: 0;
width: $euiSizeXS;
background-color: $kbnGlobalFilterItemBorderColor;
- border-top-left-radius: $euiBorderRadius / 2;
- border-bottom-left-radius: $euiBorderRadius / 2;
+ border-top-left-radius: calc($euiBorderRadius / 2);
+ border-bottom-left-radius: calc($euiBorderRadius / 2);
}
}
diff --git a/src/plugins/unified_search/public/saved_query_management/saved_query_management_list.scss b/src/plugins/unified_search/public/saved_query_management/saved_query_management_list.scss
index 7ce304310ae56..2e6f639ea792d 100644
--- a/src/plugins/unified_search/public/saved_query_management/saved_query_management_list.scss
+++ b/src/plugins/unified_search/public/saved_query_management/saved_query_management_list.scss
@@ -5,7 +5,7 @@
}
.kbnSavedQueryManagement__text {
- padding: $euiSizeM $euiSizeM ($euiSizeM / 2) $euiSizeM;
+ padding: $euiSizeM $euiSizeM calc($euiSizeM / 2) $euiSizeM;
}
.kbnSavedQueryManagement__list {
@@ -13,5 +13,5 @@
max-height: inherit; // Fixes overflow for applied max-height
// Left/Right padding is calculated to match the left alignment of the
// popover text and buttons
- padding: ($euiSizeM / 2) $euiSizeXS !important; // Override flush
+ padding: calc($euiSizeM / 2) $euiSizeXS !important; // Override flush
}
diff --git a/src/plugins/vis_default_editor/public/_agg_params.scss b/src/plugins/vis_default_editor/public/_agg_params.scss
index 81faa06681c0d..c56ef94c3a4ba 100644
--- a/src/plugins/vis_default_editor/public/_agg_params.scss
+++ b/src/plugins/vis_default_editor/public/_agg_params.scss
@@ -1,7 +1,7 @@
.visEditorAggParam--half {
margin: $euiSize 0;
display: inline-block;
- width: calc(50% - #{$euiSizeS / 2});
+ width: calc(50% - #{calc($euiSizeS / 2)});
}
.visEditorAggParam--half-size {
diff --git a/src/plugins/vis_types/timeseries/public/application/components/_vis_with_splits.scss b/src/plugins/vis_types/timeseries/public/application/components/_vis_with_splits.scss
index 9e09a6c3477f3..036cf3f6a8fbd 100644
--- a/src/plugins/vis_types/timeseries/public/application/components/_vis_with_splits.scss
+++ b/src/plugins/vis_types/timeseries/public/application/components/_vis_with_splits.scss
@@ -20,7 +20,7 @@
> .tvbVis {
// Apply the minimum height on the vis itself so it doesn't interfere with flex calculations
// Gauges are not completely square, so the height is just slightly less than the width
- min-height: $euiSize * 12 / 1.25;
+ min-height: calc($euiSize * 12 / 1.25);
}
}
diff --git a/src/plugins/vis_types/timeseries/public/application/lib/set_is_reversed.js b/src/plugins/vis_types/timeseries/public/application/lib/set_is_reversed.js
index c2a2b4bb86af1..e37c55ab8a246 100644
--- a/src/plugins/vis_types/timeseries/public/application/lib/set_is_reversed.js
+++ b/src/plugins/vis_types/timeseries/public/application/lib/set_is_reversed.js
@@ -7,9 +7,7 @@
*/
import color from 'color';
-import { getUISettings } from '../../services';
-
-const isDarkTheme = () => getUISettings().get('theme:darkMode');
+import { getCoreStart } from '../../services';
/**
* Returns true if the color that is passed has low luminosity
@@ -23,7 +21,7 @@ const isColorDark = (c) => {
* Defaults to checking `theme:darkMode`.
*/
export const isThemeDark = (currentTheme) => {
- let themeIsDark = currentTheme || isDarkTheme();
+ let themeIsDark = currentTheme || getCoreStart().theme.getTheme().darkMode;
// If passing a string, check the luminosity
if (typeof currentTheme === 'string') {
diff --git a/src/plugins/vis_types/timeseries/public/application/visualizations/views/_metric.scss b/src/plugins/vis_types/timeseries/public/application/visualizations/views/_metric.scss
index bc2ce4f1a9e44..d5eb056dd172e 100644
--- a/src/plugins/vis_types/timeseries/public/application/visualizations/views/_metric.scss
+++ b/src/plugins/vis_types/timeseries/public/application/visualizations/views/_metric.scss
@@ -101,7 +101,7 @@
.tvbVisMetric__label--additional {
@include euiTextTruncate;
font-size: .25em; /* 1 */
- padding: ($euiSizeXS / 2) 0 0;
+ padding: calc($euiSizeXS / 2) 0 0;
text-align: center;
color: $tvbValueColor;
line-height: 1.2; // Ensure the descenders don't get cut off
diff --git a/src/plugins/vis_types/vega/public/plugin.ts b/src/plugins/vis_types/vega/public/plugin.ts
index 54f319850d817..96e383979b854 100644
--- a/src/plugins/vis_types/vega/public/plugin.ts
+++ b/src/plugins/vis_types/vega/public/plugin.ts
@@ -20,7 +20,7 @@ import {
setData,
setDataViews,
setInjectedVars,
- setUISettings,
+ setThemeService,
setDocLinks,
setMapsEms,
setUsageCollectionStart,
@@ -77,8 +77,6 @@ export class VegaPlugin implements Plugin {
enableExternalUrls: this.initializerContext.config.get().enableExternalUrls,
});
- setUISettings(core.uiSettings);
-
const visualizationDependencies: Readonly<VegaVisualizationDependencies> = {
core,
plugins: {
@@ -104,6 +102,7 @@ export class VegaPlugin implements Plugin {
setDataViews(dataViews);
setDocLinks(core.docLinks);
setMapsEms(mapsEms);
+ setThemeService(core.theme);
setUsageCollectionStart(usageCollection);
}
}
diff --git a/src/plugins/vis_types/vega/public/services.ts b/src/plugins/vis_types/vega/public/services.ts
index 4b3e0ca72cdc3..04c5c5cf1f447 100644
--- a/src/plugins/vis_types/vega/public/services.ts
+++ b/src/plugins/vis_types/vega/public/services.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { NotificationsStart, IUiSettingsClient, DocLinksStart } from '@kbn/core/public';
+import type { NotificationsStart, DocLinksStart, ThemeServiceStart } from '@kbn/core/public';
import { DataPublicPluginStart } from '@kbn/data-plugin/public';
import { DataViewsPublicPluginStart } from '@kbn/data-views-plugin/public';
@@ -22,7 +22,6 @@ export const [getDataViews, setDataViews] =
export const [getNotifications, setNotifications] =
createGetterSetter<NotificationsStart>('Notifications');
-export const [getUISettings, setUISettings] = createGetterSetter<IUiSettingsClient>('UISettings');
export const [getMapsEms, setMapsEms] = createGetterSetter('mapsEms');
export const [getInjectedVars, setInjectedVars] = createGetterSetter<{
@@ -35,3 +34,6 @@ export const [getDocLinks, setDocLinks] = createGetterSetter('doc
export const [getUsageCollectionStart, setUsageCollectionStart] =
createGetterSetter<UsageCollectionStart>('UsageCollection');
+
+export const [getThemeService, setThemeService] =
+  createGetterSetter<ThemeServiceStart>('ThemeServiceStart');
diff --git a/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.test.ts b/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.test.ts
index eafe75534154a..a42d76681c4ff 100644
--- a/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.test.ts
+++ b/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.test.ts
@@ -24,7 +24,7 @@ import {
setInjectedVars,
setData,
setNotifications,
- setUISettings,
+ setThemeService,
setDataViews,
} from '../../services';
import { initVegaLayer, initTmsRasterLayer } from './layers';
@@ -121,7 +121,7 @@ describe('vega_map_view/view', () => {
setData(dataPluginStart);
setDataViews(dataViewsStart);
setNotifications(coreStart.notifications);
- setUISettings(coreStart.uiSettings);
+ setThemeService(coreStart.theme);
async function createVegaMapView() {
await vegaParser.parseAsync();
diff --git a/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.ts b/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.ts
index fe1d6a27f3605..7e4ca5a19dd6a 100644
--- a/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.ts
+++ b/src/plugins/vis_types/vega/public/vega_view/vega_map_view/view.ts
@@ -15,7 +15,7 @@ import { maplibregl } from '@kbn/mapbox-gl';
import { initTmsRasterLayer, initVegaLayer } from './layers';
import { VegaBaseView } from '../vega_base_view';
-import { getUISettings } from '../../services';
+import { getThemeService } from '../../services';
import { defaultMapConfig, defaultMabBoxStyle, vegaLayerId } from './constants';
import { validateZoomSettings, injectMapPropsIntoSpec } from './utils';
@@ -98,7 +98,7 @@ export class VegaMapView extends VegaBaseView {
const { mapStyle, emsTileServiceId } = this._parser.mapConfig;
//
if (mapStyle) {
- const isDarkMode: boolean = getUISettings().get('theme:darkMode');
+ const isDarkMode: boolean = getThemeService().getTheme().darkMode;
return emsTileServiceId
? emsTileServiceId
: await this._serviceSettings.getDefaultTmsLayer(isDarkMode);
diff --git a/src/plugins/vis_types/vislib/public/vislib/lib/layout/_layout.scss b/src/plugins/vis_types/vislib/public/vislib/lib/layout/_layout.scss
index 4612602d93f1c..8b92af5a4fdcf 100644
--- a/src/plugins/vis_types/vislib/public/vislib/lib/layout/_layout.scss
+++ b/src/plugins/vis_types/vislib/public/vislib/lib/layout/_layout.scss
@@ -203,7 +203,7 @@
}
.slice {
- stroke-width: $euiSizeXS / 2;
+ stroke-width: calc($euiSizeXS / 2);
stroke: $euiColorEmptyShade;
&:hover {
diff --git a/src/setup_node_env/exit_on_warning.js b/src/setup_node_env/exit_on_warning.js
index 5e7bae8254c04..dc6e321074224 100644
--- a/src/setup_node_env/exit_on_warning.js
+++ b/src/setup_node_env/exit_on_warning.js
@@ -46,6 +46,13 @@ var IGNORE_WARNINGS = [
// We need to discard that warning
name: 'ProductNotSupportedSecurityError',
},
+ // https://github.com/browserify/browserify-rsa/pull/20
+ {
+ name: 'DeprecationWarning',
+ code: 'DEP0170',
+ message:
+ 'The URL https://github.com:crypto-browserify/browserify-rsa.git is invalid. Future versions of Node.js will throw an error.',
+ },
];
if (process.noProcessWarnings !== true) {
diff --git a/src/setup_node_env/heap_snapshot.js b/src/setup_node_env/heap_snapshot.js
deleted file mode 100644
index 94e4b35e2f887..0000000000000
--- a/src/setup_node_env/heap_snapshot.js
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-var getopts = require('getopts');
-var path = require('path');
-var v8 = require('node:v8');
-var worker = require('node:worker_threads');
-
-var execOpts = getopts(process.execArgv);
-var envOpts = getopts(process.env.NODE_OPTIONS ? process.env.NODE_OPTIONS.split(/\s+/) : []);
-var diagnosticDir = execOpts['diagnostic-dir'] || envOpts['diagnostic-dir'];
-var heapSnapshotSignal = execOpts['heapsnapshot-signal'] || envOpts['heapsnapshot-signal'];
-var heapSnapshotSerial = 0;
-
-function getHeapSnapshotPath() {
- var now = new Date();
-
- var year = now.getFullYear();
- var month = String(now.getMonth() + 1).padStart(2, '0');
- var day = String(now.getDate()).padStart(2, '0');
- var hours = String(now.getHours()).padStart(2, '0');
- var minutes = String(now.getMinutes()).padStart(2, '0');
- var seconds = String(now.getSeconds()).padStart(2, '0');
-
- var date = `${year}${month}${day}`;
- var time = `${hours}${minutes}${seconds}`;
- var pid = process.pid;
- var threadId = worker.threadId;
- var serial = (++heapSnapshotSerial).toString().padStart(3, '0');
-
- return path.join(diagnosticDir, `Heap.${date}.${time}.${pid}.${threadId}.${serial}.heapsnapshot`);
-}
-
-if (diagnosticDir && heapSnapshotSignal) {
- process.removeAllListeners(heapSnapshotSignal);
-
- process.on(heapSnapshotSignal, function () {
- var heapSnapshotPath = getHeapSnapshotPath();
- v8.writeHeapSnapshot(heapSnapshotPath);
- });
-}
diff --git a/src/setup_node_env/setup_env.js b/src/setup_node_env/setup_env.js
index d3076a2c3b9cf..7b37d98011cfb 100644
--- a/src/setup_node_env/setup_env.js
+++ b/src/setup_node_env/setup_env.js
@@ -11,8 +11,6 @@ require('./exit_on_warning');
require('./harden');
// The following require statements MUST be executed before any others - END
-// @todo Remove when migrated to Node 20 (#162696)
-require('./heap_snapshot');
require('symbol-observable');
require('source-map-support').install();
require('./node_version_validator');
diff --git a/test/common/services/security/test_user.ts b/test/common/services/security/test_user.ts
index bc5dbf68698bc..e8d88d81bb167 100644
--- a/test/common/services/security/test_user.ts
+++ b/test/common/services/security/test_user.ts
@@ -61,7 +61,10 @@ export class TestUser extends FtrService {
});
if (this.browser && this.testSubjects && !options?.skipBrowserRefresh) {
- if (await this.testSubjects.exists('kibanaChrome', { allowHidden: true })) {
+ if (
+ (await this.browser.hasOpenWindow()) &&
+ (await this.testSubjects.exists('kibanaChrome', { allowHidden: true }))
+ ) {
await this.browser.refresh();
// accept alert if it pops up
const alert = await this.browser.getAlert();
diff --git a/test/functional/apps/discover/group1/_discover_histogram.ts b/test/functional/apps/discover/group1/_discover_histogram.ts
index bdaf14fca96e4..64e9b0e47dc90 100644
--- a/test/functional/apps/discover/group1/_discover_histogram.ts
+++ b/test/functional/apps/discover/group1/_discover_histogram.ts
@@ -33,8 +33,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const log = getService('log');
const queryBar = getService('queryBar');
- // FLAKY: https://github.com/elastic/kibana/issues/173586
- describe.skip('discover histogram', function describeIndexTests() {
+ describe('discover histogram', function describeIndexTests() {
before(async () => {
await esArchiver.loadIfNeeded('test/functional/fixtures/es_archiver/logstash_functional');
await esArchiver.load('test/functional/fixtures/es_archiver/long_window_logstash');
@@ -44,6 +43,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
);
await security.testUser.setRoles(['kibana_admin', 'long_window_logstash']);
await kibanaServer.uiSettings.replace(defaultSettings);
+ await PageObjects.timePicker.setDefaultAbsoluteRangeViaUiSettings();
await PageObjects.common.navigateToApp('discover');
});
after(async () => {
@@ -66,13 +66,10 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
it('should modify the time range when the histogram is brushed', async function () {
await PageObjects.common.navigateToApp('discover');
await PageObjects.discover.waitUntilSearchingHasFinished();
- await PageObjects.timePicker.setDefaultAbsoluteRange();
- await PageObjects.discover.waitUntilSearchingHasFinished();
// this is the number of renderings of the histogram needed when new data is fetched
let renderingCountInc = 1;
const prevRenderingCount = await elasticChart.getVisualizationRenderingCount();
- await PageObjects.timePicker.setDefaultAbsoluteRange();
- await PageObjects.discover.waitUntilSearchingHasFinished();
+ await queryBar.submitQuery();
await retry.waitFor('chart rendering complete', async () => {
const actualCount = await elasticChart.getVisualizationRenderingCount();
const expectedCount = prevRenderingCount + renderingCountInc;
@@ -108,8 +105,6 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
it('should update correctly when switching data views and brushing the histogram', async () => {
await PageObjects.common.navigateToApp('discover');
await PageObjects.discover.waitUntilSearchingHasFinished();
- await PageObjects.timePicker.setDefaultAbsoluteRange();
- await PageObjects.discover.waitUntilSearchingHasFinished();
await PageObjects.discover.selectIndexPattern('logstash-*');
await PageObjects.discover.waitUntilSearchingHasFinished();
await PageObjects.discover.selectIndexPattern('long-window-logstash-*');
@@ -282,7 +277,6 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
it('should recover from broken query search when clearing the query bar', async () => {
await PageObjects.common.navigateToApp('discover');
await PageObjects.discover.waitUntilSearchingHasFinished();
- await PageObjects.timePicker.setDefaultAbsoluteRange();
// Make sure the chart is visible
await testSubjects.click('unifiedHistogramChartOptionsToggle');
await testSubjects.click('unifiedHistogramChartToggle');
diff --git a/test/functional/apps/management/data_views/_data_view_create_delete.ts b/test/functional/apps/management/data_views/_data_view_create_delete.ts
index 245ac88606b50..e3bc2240887ad 100644
--- a/test/functional/apps/management/data_views/_data_view_create_delete.ts
+++ b/test/functional/apps/management/data_views/_data_view_create_delete.ts
@@ -187,8 +187,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
});
});
- // FLAKY: https://github.com/elastic/kibana/issues/173625
- describe.skip('index pattern edit', function () {
+ describe('index pattern edit', function () {
it('should update field list', async function () {
await PageObjects.settings.editIndexPattern(
'kibana_sample_data_flights',
diff --git a/test/functional/apps/management/data_views/_index_pattern_filter.ts b/test/functional/apps/management/data_views/_index_pattern_filter.ts
index 5c0dc5d0284f0..81ff2b450755d 100644
--- a/test/functional/apps/management/data_views/_index_pattern_filter.ts
+++ b/test/functional/apps/management/data_views/_index_pattern_filter.ts
@@ -24,13 +24,16 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
await PageObjects.settings.clickKibanaIndexPatterns();
});
+ after(async function () {
+ await kibanaServer.savedObjects.cleanStandardList();
+ });
+
beforeEach(async function () {
await PageObjects.settings.createIndexPattern('logstash-*');
});
afterEach(async function () {
await PageObjects.settings.removeIndexPattern();
- await kibanaServer.savedObjects.cleanStandardList();
});
it('should filter indexed fields by type', async function () {
diff --git a/test/functional/apps/management/data_views/_runtime_fields.ts b/test/functional/apps/management/data_views/_runtime_fields.ts
index d6aca3a88c925..cf0fb7f498974 100644
--- a/test/functional/apps/management/data_views/_runtime_fields.ts
+++ b/test/functional/apps/management/data_views/_runtime_fields.ts
@@ -61,6 +61,9 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
it('should modify runtime field', async function () {
await PageObjects.settings.filterField(fieldName);
await testSubjects.click('editFieldFormat');
+ await retry.try(async () => {
+ await testSubjects.existOrFail('flyoutTitle');
+ });
await PageObjects.settings.setFieldType('Long');
await PageObjects.settings.setFieldScriptWithoutToggle('emit(6);');
await PageObjects.settings.toggleRow('formatRow');
diff --git a/test/functional/page_objects/settings_page.ts b/test/functional/page_objects/settings_page.ts
index 5172d7d7d0ff4..cd650086e9e1a 100644
--- a/test/functional/page_objects/settings_page.ts
+++ b/test/functional/page_objects/settings_page.ts
@@ -177,9 +177,20 @@ export class SettingsPageObject extends FtrService {
async selectTimeFieldOption(selection: string) {
// open dropdown
const timefield = await this.getTimeFieldNameField();
- await timefield.click();
- await this.browser.pressKeys(selection);
- await this.browser.pressKeys(this.browser.keys.TAB);
+ const prevValue = await timefield.getAttribute('value');
+ const enabled = await timefield.isEnabled();
+
+ if (prevValue === selection || !enabled) {
+ return;
+ }
+ await this.retry.waitFor('time field dropdown have the right value', async () => {
+ await timefield.click();
+ await timefield.type(this.browser.keys.DELETE, { charByChar: true });
+ await this.browser.pressKeys(selection);
+ await this.browser.pressKeys(this.browser.keys.TAB);
+ const value = await timefield.getAttribute('value');
+ return value === selection;
+ });
}
async getTimeFieldOption(selection: string) {
@@ -192,7 +203,7 @@ export class SettingsPageObject extends FtrService {
async setNameField(dataViewName: string) {
const field = await this.getNameField();
- await field.clearValue();
+ await field.clearValueWithKeyboard();
await field.type(dataViewName);
}
@@ -486,7 +497,7 @@ export class SettingsPageObject extends FtrService {
async allowHiddenClick() {
await this.testSubjects.click('toggleAdvancedSetting');
const allowHiddenField = await this.testSubjects.find('allowHiddenField');
- (await allowHiddenField.findByTagName('button')).click();
+ await (await allowHiddenField.findByTagName('button')).click();
}
async createIndexPattern(
@@ -567,19 +578,26 @@ export class SettingsPageObject extends FtrService {
throw new Error('No Data View name provided for edit');
}
- this.clickEditIndexButton();
+ await this.clickEditIndexButton();
await this.header.waitUntilLoadingHasFinished();
await this.retry.try(async () => {
+ if (dataViewName) {
+ await this.setNameField(dataViewName);
+ }
await this.setIndexPatternField(indexPatternName);
+ await this.header.waitUntilLoadingHasFinished();
+ if (timefield) {
+ await this.selectTimeFieldOption(timefield);
+ }
+ const indexPatternSaveBtn = await this.getSaveIndexPatternButton();
+ await indexPatternSaveBtn.click();
+
+ const form = await this.testSubjects.findAll('indexPatternEditorForm');
+ const hasValidationErrors =
+ form.length !== 0 && (await form[0].getAttribute('data-validation-error')) === '1';
+ expect(hasValidationErrors).to.eql(false);
});
- if (dataViewName) {
- await this.setNameField(dataViewName);
- }
- if (timefield) {
- await this.selectTimeFieldOption(timefield);
- }
- await (await this.getSaveIndexPatternButton()).click();
if (errorCheck) {
await this.retry.try(async () => {
@@ -653,6 +671,10 @@ export class SettingsPageObject extends FtrService {
const currentName = await field.getAttribute('value');
this.log.debug(`setIndexPatternField set to ${currentName}`);
expect(currentName).to.eql(indexPatternName);
+ await this.retry.waitFor('validating the given index pattern should be finished', async () => {
+ const isValidating = await field.getAttribute('data-is-validating');
+ return isValidating === '0';
+ });
}
async getCreateIndexPatternGoToStep2Button() {
diff --git a/test/functional/services/remote/remote.ts b/test/functional/services/remote/remote.ts
index b3038130b0187..6ee7b6e8d9085 100644
--- a/test/functional/services/remote/remote.ts
+++ b/test/functional/services/remote/remote.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { NoSuchSessionError } from 'selenium-webdriver/lib/error';
+import { NoSuchSessionError, NoSuchWindowError } from 'selenium-webdriver/lib/error';
import { FtrProviderContext } from '../../ftr_provider_context';
import { initWebDriver, BrowserConfig } from './webdriver';
import { Browsers } from './browsers';
@@ -37,6 +37,10 @@ export async function RemoteProvider({ getService }: FtrProviderContext) {
// Avoid duplicating NoSuchSessionError error output on each hook
// https://developer.mozilla.org/en-US/docs/Web/WebDriver/Errors/InvalidSessionID
log.error('WebDriver session is no longer valid');
+ } else if (error instanceof NoSuchWindowError) {
+ // Avoid duplicating NoSuchWindowError error output on each hook
+ // https://developer.mozilla.org/en-US/docs/Web/WebDriver/Errors
+ log.error('Browser window is already closed');
} else {
throw error;
}
diff --git a/test/scripts/checks/baseline_plugin_public_api_docs.sh b/test/scripts/checks/baseline_plugin_public_api_docs.sh
deleted file mode 100755
index 72de7c0980a5f..0000000000000
--- a/test/scripts/checks/baseline_plugin_public_api_docs.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-###
-### rebuild plugin api docs to ensure it's not out of date
-###
-echo " -- building api docs"
-node --max-old-space-size=12000 scripts/build_api_docs
diff --git a/test/scripts/checks/bundle_limits.sh b/test/scripts/checks/bundle_limits.sh
deleted file mode 100755
index 10d9d9343fda4..0000000000000
--- a/test/scripts/checks/bundle_limits.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/build_kibana_platform_plugins --validate-limits
diff --git a/test/scripts/checks/commit/commit.sh b/test/scripts/checks/commit/commit.sh
deleted file mode 100755
index 180f6dfb56e29..0000000000000
--- a/test/scripts/checks/commit/commit.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-# Runs pre-commit hook script for the files touched in the last commit.
-# That way we can ensure a set of quick commit checks earlier as we removed
-# the pre-commit hook installation by default.
-# If files are more than 200 we will skip it and just use
-# the further ci steps that already check linting and file casing for the entire repo.
-"$(dirname "${0}")/commit_check_runner.sh"
diff --git a/test/scripts/checks/commit/commit_check_runner.sh b/test/scripts/checks/commit/commit_check_runner.sh
deleted file mode 100755
index 65ca9a6ecef06..0000000000000
--- a/test/scripts/checks/commit/commit_check_runner.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-run_quick_commit_checks() {
- echo "!!!!!!!! ATTENTION !!!!!!!!
-That check is intended to provide earlier CI feedback after we remove the automatic install for the local pre-commit hook.
-If you want, you can still manually install the pre-commit hook locally by running 'node scripts/register_git_hook locally'
-!!!!!!!!!!!!!!!!!!!!!!!!!!!
-"
-
- node scripts/precommit_hook.js --ref HEAD~1..HEAD --max-files 200
-}
-
-run_quick_commit_checks
diff --git a/test/scripts/checks/file_casing.sh b/test/scripts/checks/file_casing.sh
deleted file mode 100755
index 1a2240d0562ff..0000000000000
--- a/test/scripts/checks/file_casing.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/check_file_casing --quiet
diff --git a/test/scripts/checks/i18n.sh b/test/scripts/checks/i18n.sh
deleted file mode 100755
index 468b8394081e1..0000000000000
--- a/test/scripts/checks/i18n.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/i18n_check --ignore-missing
diff --git a/test/scripts/checks/jest_configs.sh b/test/scripts/checks/jest_configs.sh
deleted file mode 100755
index cebcbc63bb396..0000000000000
--- a/test/scripts/checks/jest_configs.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/check_jest_configs
diff --git a/test/scripts/checks/licenses.sh b/test/scripts/checks/licenses.sh
deleted file mode 100755
index 8a19cdc2fc126..0000000000000
--- a/test/scripts/checks/licenses.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/check_licenses --dev
diff --git a/test/scripts/checks/plugin_list_docs.sh b/test/scripts/checks/plugin_list_docs.sh
deleted file mode 100644
index b0f49d7845841..0000000000000
--- a/test/scripts/checks/plugin_list_docs.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-###
-### rebuild plugin list to ensure it's not out of date
-###
-echo " -- building plugin list docs"
-node scripts/build_plugin_list_docs
-
-###
-### verify no git modifications
-###
-GIT_CHANGES="$(git ls-files --modified)"
-if [ "$GIT_CHANGES" ]; then
- echo -e "\n${RED}ERROR: 'node scripts/build_plugin_list_docs' caused changes to the following files:${C_RESET}\n"
- echo -e "$GIT_CHANGES\n"
- exit 1
-fi
diff --git a/test/scripts/checks/telemetry.sh b/test/scripts/checks/telemetry.sh
deleted file mode 100755
index 09b2305f9d607..0000000000000
--- a/test/scripts/checks/telemetry.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/telemetry_check
diff --git a/test/scripts/checks/test_hardening.sh b/test/scripts/checks/test_hardening.sh
deleted file mode 100755
index 332edb0fcde68..0000000000000
--- a/test/scripts/checks/test_hardening.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/test_hardening
diff --git a/test/scripts/checks/test_projects.sh b/test/scripts/checks/test_projects.sh
deleted file mode 100755
index 6a1a8b958c4aa..0000000000000
--- a/test/scripts/checks/test_projects.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-yarn kbn run-in-packages test
diff --git a/test/scripts/checks/ts_projects.sh b/test/scripts/checks/ts_projects.sh
deleted file mode 100755
index 9963d10792f94..0000000000000
--- a/test/scripts/checks/ts_projects.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/check_ts_projects
diff --git a/test/scripts/checks/type_check_plugin_public_api_docs.sh b/test/scripts/checks/type_check_plugin_public_api_docs.sh
deleted file mode 100755
index b5fed38e192d2..0000000000000
--- a/test/scripts/checks/type_check_plugin_public_api_docs.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/build_ts_refs \
- --clean \
- --no-cache \
- --force
-
-node scripts/type_check
-
-echo " -- building api docs"
-node --max-old-space-size=12000 scripts/build_api_docs
diff --git a/test/scripts/checks/verify_notice.sh b/test/scripts/checks/verify_notice.sh
deleted file mode 100755
index 55dd1c04aaf8a..0000000000000
--- a/test/scripts/checks/verify_notice.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/notice --validate
diff --git a/test/scripts/jenkins_accessibility.sh b/test/scripts/jenkins_accessibility.sh
deleted file mode 100755
index fa582cf2d97d0..0000000000000
--- a/test/scripts/jenkins_accessibility.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --config test/accessibility/config.ts;
diff --git a/test/scripts/jenkins_apm_cypress.sh b/test/scripts/jenkins_apm_cypress.sh
deleted file mode 100755
index 2ccd7d760fba5..0000000000000
--- a/test/scripts/jenkins_apm_cypress.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-echo " -> Running APM cypress tests"
-cd "$XPACK_DIR"
-
-node plugins/apm/scripts/test/e2e.js
-
-echo ""
-echo ""
diff --git a/test/scripts/jenkins_build_kbn_sample_panel_action.sh b/test/scripts/jenkins_build_kbn_sample_panel_action.sh
deleted file mode 100755
index 67c3da246ed7c..0000000000000
--- a/test/scripts/jenkins_build_kbn_sample_panel_action.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-cd test/plugin_functional/plugins/kbn_sample_panel_action;
-if [[ ! -d "target" ]]; then
- yarn build;
-fi
-cd -;
diff --git a/test/scripts/jenkins_build_kibana.sh b/test/scripts/jenkins_build_kibana.sh
deleted file mode 100755
index 28d4feef3a4b9..0000000000000
--- a/test/scripts/jenkins_build_kibana.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env bash
-
-cd "$KIBANA_DIR"
-source src/dev/ci_setup/setup_env.sh
-
-if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
- ./test/scripts/jenkins_build_plugins.sh
-fi
-
-# doesn't persist, also set in kibanaPipeline.groovy
-export KBN_NP_PLUGINS_BUILT=true
-
-# Do not build kibana for code coverage run
-if [[ -z "$CODE_COVERAGE" ]] ; then
- echo " -> building and extracting default Kibana distributable for use in functional tests"
- node scripts/build --debug
-
- echo " -> shipping metrics from build to ci-stats"
- node scripts/ship_ci_stats \
- --metrics target/optimizer_bundle_metrics.json \
- --metrics build/kibana/node_modules/@kbn/ui-shared-deps-src/shared_built_assets/metrics.json
-
- linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
- installDir="$KIBANA_DIR/install/kibana"
- mkdir -p "$installDir"
- tar -xzf "$linuxBuild" -C "$installDir" --strip=1
- cp "$linuxBuild" "$WORKSPACE/kibana-default.tar.gz"
-
- mkdir -p "$WORKSPACE/kibana-build"
- cp -pR install/kibana/. $WORKSPACE/kibana-build/
-
- echo " -> Archive built plugins"
- shopt -s globstar
- tar -zcf \
- "$WORKSPACE/kibana-default-plugins.tar.gz" \
- x-pack/plugins/**/target/public \
- x-pack/test/**/target/public \
- examples/**/target/public \
- x-pack/examples/**/target/public \
- test/**/target/public
- shopt -u globstar
-fi
diff --git a/test/scripts/jenkins_build_load_testing.sh b/test/scripts/jenkins_build_load_testing.sh
deleted file mode 100755
index f64caa3c02cab..0000000000000
--- a/test/scripts/jenkins_build_load_testing.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env bash
-
-while getopts s: flag
-do
- case "${flag}" in
- s) simulations=${OPTARG};;
- esac
-done
-echo "Simulation classes: $simulations";
-
-cd "$KIBANA_DIR"
-source src/dev/ci_setup/setup_env.sh
-
-if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
- ./test/scripts/jenkins_xpack_build_plugins.sh
-fi
-
-echo " -> Configure Metricbeat monitoring"
-# Configure Metricbeat monitoring for Kibana and ElasticSearch, ingest monitoring data into Kibana Stats cluster
-# Getting the URL
-TOP="$(curl -L http://snapshots.elastic.co/latest/master.json)"
-MB_BUILD=$(echo $TOP | sed 's/.*"version" : "\(.*\)", "build_id.*/\1/')
-echo $MB_BUILD
-MB_BUILD_ID=$(echo $TOP | sed 's/.*"build_id" : "\(.*\)", "manifest_url.*/\1/')
-
-URL=https://snapshots.elastic.co/${MB_BUILD_ID}/downloads/beats/metricbeat/metricbeat-${MB_BUILD}-linux-x86_64.tar.gz
-URL=https://artifacts.elastic.co/downloads/beats/metricbeat/metricbeat-7.11.0-linux-x86_64.tar.gz
-echo $URL
-# Downloading the Metricbeat package
-while [ 1 ]; do
- wget -q --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 0 --continue --no-check-certificate --tries=3 $URL
- if [ $? = 0 ]; then break; fi; # check return value, break if successful (0)
- sleep 1s;
-done;
-
-# Install Metricbeat
-echo "untar metricbeat and config"
-#tar -xzf metricbeat-${MB_BUILD}-linux-x86_64.tar.gz
-tar -xzf metricbeat-7.11.0-linux-x86_64.tar.gz
-#mv metricbeat-${MB_BUILD}-linux-x86_64 metricbeat-install
-mv metricbeat-7.11.0-linux-x86_64 metricbeat-install
-
-# Configure Metricbeat
-echo " -> Changing metricbeat config"
-pushd ../kibana-load-testing
-cp cfg/metricbeat/elasticsearch-xpack.yml $KIBANA_DIR/metricbeat-install/modules.d/elasticsearch-xpack.yml
-cp cfg/metricbeat/kibana-xpack.yml $KIBANA_DIR/metricbeat-install/modules.d/kibana-xpack.yml
-echo "fields.build: ${BUILD_ID}" >> cfg/metricbeat/metricbeat.yml
-echo "path.config: ${KIBANA_DIR}/metricbeat-install" >> cfg/metricbeat/metricbeat.yml
-echo "cloud.auth: ${USER_FROM_VAULT}:${PASS_FROM_VAULT}" >> cfg/metricbeat/metricbeat.yml
-cp cfg/metricbeat/metricbeat.yml $KIBANA_DIR/metricbeat-install/metricbeat.yml
-# Disable system monitoring: enabled for now to have more data
-#mv $KIBANA_DIR/metricbeat-install/modules.d/system.yml $KIBANA_DIR/metricbeat-install/modules.d/system.yml.disabled
-echo " -> Building puppeteer project"
-cd puppeteer
-yarn install && yarn build
-popd
-
-# doesn't persist, also set in kibanaPipeline.groovy
-export KBN_NP_PLUGINS_BUILT=true
-
-echo " -> Building and extracting default Kibana distributable for use in functional tests"
-cd "$KIBANA_DIR"
-node scripts/build --debug
-linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
-installDir="$KIBANA_DIR/install/kibana"
-mkdir -p "$installDir"
-tar -xzf "$linuxBuild" -C "$installDir" --strip=1
-
-mkdir -p "$WORKSPACE/kibana-build"
-cp -pR install/kibana/. $WORKSPACE/kibana-build/
-
-echo " -> Setup env for tests"
-source test/scripts/jenkins_test_setup_xpack.sh
-
-# Start Metricbeat
-echo " -> Starting metricbeat"
-pushd $KIBANA_DIR/metricbeat-install
-nohup ./metricbeat > metricbeat.log 2>&1 &
-popd
-
-echo " -> Running gatling load testing"
-export GATLING_SIMULATIONS="$simulations"
-node scripts/functional_tests \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --config test/load/config.ts;
-
-
-echo " -> Simulations run is finished"
-
-# Show output of Metricbeat. Disabled. Enable for debug purposes
-#echo "output of metricbeat.log"
-#cat $KIBANA_DIR/metricbeat-install/metricbeat.log
diff --git a/test/scripts/jenkins_build_plugins.sh b/test/scripts/jenkins_build_plugins.sh
deleted file mode 100755
index dd1715065e799..0000000000000
--- a/test/scripts/jenkins_build_plugins.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-echo " -> building kibana platform plugins"
-node scripts/build_kibana_platform_plugins \
- --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
- --scan-dir "$KIBANA_DIR/test/health_gateway/plugins" \
- --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
- --scan-dir "$KIBANA_DIR/test/common/plugins" \
- --scan-dir "$KIBANA_DIR/examples" \
- --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
- --scan-dir "$KIBANA_DIR/test/common/plugins" \
- --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
- --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/plugins" \
- --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
- --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
- --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
- --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \
- --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \
- --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \
- --scan-dir "$XPACK_DIR/examples" \
- --workers 12
diff --git a/test/scripts/jenkins_ci_group.sh b/test/scripts/jenkins_ci_group.sh
deleted file mode 100755
index dde224823789b..0000000000000
--- a/test/scripts/jenkins_ci_group.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-if [[ -z "$CODE_COVERAGE" ]]; then
- echo " -> Running functional and api tests"
-
- node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --include-tag "ciGroup$CI_GROUP"
-
- if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then
- source test/scripts/jenkins_build_kbn_sample_panel_action.sh
- ./test/scripts/test/plugin_functional.sh
- ./test/scripts/test/health_gateway.sh
- ./test/scripts/test/interpreter_functional.sh
- fi
-else
- echo " -> Running Functional tests with code coverage"
- export NODE_OPTIONS=--max_old_space_size=8192
-
- echo " -> making hard link clones"
- cd ..
- cp -RlP kibana "kibana${CI_GROUP}"
- cd "kibana${CI_GROUP}"
-
- echo " -> running tests from the clone folder"
- node scripts/functional_tests --debug --include-tag "ciGroup$CI_GROUP" --exclude-tag "skipCoverage" || true;
-
- echo " -> moving junit output, silently fail in case of no report"
- mkdir -p ../kibana/target/junit
- mv target/junit/* ../kibana/target/junit/ || echo "copying junit failed"
-
- echo " -> copying screenshots and html for failures"
- cp -r test/functional/screenshots/* ../kibana/test/functional/screenshots/ || echo "copying screenshots failed"
- cp -r test/functional/failure_debug ../kibana/test/functional/ || echo "copying html failed"
-fi
diff --git a/test/scripts/jenkins_cloud.sh b/test/scripts/jenkins_cloud.sh
deleted file mode 100755
index 57798a9afcac1..0000000000000
--- a/test/scripts/jenkins_cloud.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-
-# This script runs kibana tests compatible with cloud.
-#
-# The cloud instance setup is done in the elastic/elastic-stack-testing framework,
-# where the following environment variables are set pointing to the cloud instance.
-#
-# export TEST_KIBANA_HOSTNAME
-# export TEST_KIBANA_PROTOCOL=
-# export TEST_KIBANA_PORT=
-# export TEST_KIBANA_USER=
-# export TEST_KIBANA_PASS=
-#
-# export TEST_ES_HOSTNAME=
-# export TEST_ES_PROTOCOL=
-# export TEST_ES_PORT=
-# export TEST_ES_USER=
-# export TEST_ES_PASS=
-#
-
-set -e
-
-source "$(dirname $0)/../../src/dev/ci_setup/setup.sh"
-
-export TEST_BROWSER_HEADLESS=1
-node scripts/functional_test_runner --debug --exclude-tag skipCloud $@
diff --git a/test/scripts/jenkins_firefox_smoke.sh b/test/scripts/jenkins_firefox_smoke.sh
deleted file mode 100755
index 4566b11822bf5..0000000000000
--- a/test/scripts/jenkins_firefox_smoke.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-node scripts/functional_tests \
- --bail --debug \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --include-tag "includeFirefox" \
- --config test/functional/config.firefox.js;
diff --git a/test/scripts/jenkins_fleet_cypress.sh b/test/scripts/jenkins_fleet_cypress.sh
deleted file mode 100755
index e43259c1c1c3f..0000000000000
--- a/test/scripts/jenkins_fleet_cypress.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-echo " -> Running fleet cypress tests"
-cd "$XPACK_DIR"
-
-cd x-pack/plugins/fleet
-yarn --cwd x-pack/plugins/fleet cypress:run
-
-echo ""
-echo ""
diff --git a/test/scripts/jenkins_osquery_cypress.sh b/test/scripts/jenkins_osquery_cypress.sh
deleted file mode 100755
index b4a9420ff9440..0000000000000
--- a/test/scripts/jenkins_osquery_cypress.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-echo " -> Running osquery cypress tests"
-cd "$XPACK_DIR"
-
-node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --config test/osquery_cypress/cli_config.ts
-
-echo ""
-echo ""
diff --git a/test/scripts/jenkins_plugin_functional.sh b/test/scripts/jenkins_plugin_functional.sh
deleted file mode 100755
index 984e648bf6b84..0000000000000
--- a/test/scripts/jenkins_plugin_functional.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-cd test/plugin_functional/plugins/kbn_sample_panel_action;
-if [[ ! -d "target" ]]; then
- yarn build;
-fi
-cd -;
-
-pwd
-
-./test/scripts/test/plugin_functional.sh
-./test/scripts/test/health_gateway.sh
-./test/scripts/test/interpreter_functional.sh
diff --git a/test/scripts/jenkins_runbld_junit.sh b/test/scripts/jenkins_runbld_junit.sh
deleted file mode 100755
index bcb6accd5f8cd..0000000000000
--- a/test/scripts/jenkins_runbld_junit.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-# This file just exists to give runbld something to invoke before processing junit reports
-echo 'Processing junit reports with runbld...'
diff --git a/test/scripts/jenkins_security_solution_cypress_chrome.sh b/test/scripts/jenkins_security_solution_cypress_chrome.sh
deleted file mode 100755
index 0605a319896ce..0000000000000
--- a/test/scripts/jenkins_security_solution_cypress_chrome.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-echo " -> Running security solution cypress tests"
-cd "$XPACK_DIR"
-
-node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --config test/security_solution_cypress/cli_config.ts
-
-echo ""
-echo ""
diff --git a/test/scripts/jenkins_setup.sh b/test/scripts/jenkins_setup.sh
deleted file mode 100755
index 8c8492d10e602..0000000000000
--- a/test/scripts/jenkins_setup.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/load_env_keys.sh
-source src/dev/ci_setup/extract_bootstrap_cache.sh
-source src/dev/ci_setup/setup.sh
-source src/dev/ci_setup/checkout_sibling_es.sh
\ No newline at end of file
diff --git a/test/scripts/jenkins_setup_parallel_workspace.sh b/test/scripts/jenkins_setup_parallel_workspace.sh
deleted file mode 100755
index 5274d05572e71..0000000000000
--- a/test/scripts/jenkins_setup_parallel_workspace.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-CURRENT_DIR=$(pwd)
-
-# Copy everything except node_modules into the current workspace
-rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules
-rsync -a ${WORKSPACE}/kibana/.??* .
-
-# Symlink all non-root, non-fixture node_modules into our new workspace
-cd ${WORKSPACE}/kibana
-find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
-find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
-cd "${CURRENT_DIR}"
-
-# Symlink all of the individual root-level node_modules into the node_modules/ directory
-mkdir -p node_modules
-ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/
-ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/
-
-# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked
-unlink node_modules/@kbn
-unlink node_modules/css-loader
-unlink node_modules/style-loader
-
-# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink
-unlink node_modules/val-loader
-
-cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/
-cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/
-cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/
-cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/
diff --git a/test/scripts/jenkins_storybook.sh b/test/scripts/jenkins_storybook.sh
deleted file mode 100755
index 058c58ed922eb..0000000000000
--- a/test/scripts/jenkins_storybook.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-cd "$KIBANA_DIR"
-
-yarn storybook --site apm
-yarn storybook --site canvas
-yarn storybook --site cell_actions
-yarn storybook --site ci_composite
-yarn storybook --site content_management
-yarn storybook --site custom_integrations
-yarn storybook --site dashboard
-yarn storybook --site dashboard_enhanced
-yarn storybook --site data
-yarn storybook --site embeddable
-yarn storybook --site expression_error
-yarn storybook --site expression_image
-yarn storybook --site expression_metric
-yarn storybook --site expression_repeat_image
-yarn storybook --site expression_reveal_image
-yarn storybook --site expression_shape
-yarn storybook --site expression_tagcloud
-yarn storybook --site fleet
-yarn storybook --site infra
-yarn storybook --site kibana_react
-yarn storybook --site lists
-yarn storybook --site observability
-yarn storybook --site presentation
-yarn storybook --site security_solution
-yarn storybook --site solution_side_nav
-yarn storybook --site shared_ux
-yarn storybook --site ui_actions_enhanced
diff --git a/test/scripts/jenkins_test_setup.sh b/test/scripts/jenkins_test_setup.sh
deleted file mode 100755
index 05b88aa2dd0a2..0000000000000
--- a/test/scripts/jenkins_test_setup.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-function post_work() {
- set +e
- if [[ -z "$REMOVE_KIBANA_INSTALL_DIR" && -z "$KIBANA_INSTALL_DIR" && -d "$KIBANA_INSTALL_DIR" ]]; then
- rm -rf "$REMOVE_KIBANA_INSTALL_DIR"
- fi
-}
-
-trap 'post_work' EXIT
-
-export TEST_BROWSER_HEADLESS=1
-
-source src/dev/ci_setup/setup_env.sh
-
-# For parallel workspaces, we should copy the .es directory from the root, because it should already have downloaded snapshots in it
-# This isn't part of jenkins_setup_parallel_workspace.sh just because not all tasks require ES
-if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then
- cp -R $WORKSPACE/kibana/.es ./
-fi
diff --git a/test/scripts/jenkins_test_setup_oss.sh b/test/scripts/jenkins_test_setup_oss.sh
deleted file mode 100755
index 29d396667c465..0000000000000
--- a/test/scripts/jenkins_test_setup_oss.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup.sh
-
-if [[ -z "$CODE_COVERAGE" ]]; then
- destDir="$WORKSPACE/kibana-build-${TASK_QUEUE_PROCESS_ID:-$CI_PARALLEL_PROCESS_NUMBER}"
-
- if [[ ! -d $destDir ]]; then
- mkdir -p $destDir
- cp -pR "$WORKSPACE/kibana-build/." $destDir/
- fi
-
- export KIBANA_INSTALL_DIR="$destDir"
-fi
diff --git a/test/scripts/jenkins_test_setup_xpack.sh b/test/scripts/jenkins_test_setup_xpack.sh
deleted file mode 100755
index 31acc4f4865e2..0000000000000
--- a/test/scripts/jenkins_test_setup_xpack.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup.sh
-
-if [[ -z "$CODE_COVERAGE" ]]; then
- destDir="$WORKSPACE/kibana-build-${TASK_QUEUE_PROCESS_ID:-$CI_PARALLEL_PROCESS_NUMBER}"
-
- if [[ ! -d $destDir ]]; then
- mkdir -p $destDir
- cp -pR "$WORKSPACE/kibana-build/." $destDir/
- fi
-
- export KIBANA_INSTALL_DIR="$(realpath $destDir)"
-
- cd "$XPACK_DIR"
-fi
diff --git a/test/scripts/jenkins_uptime_playwright.sh b/test/scripts/jenkins_uptime_playwright.sh
deleted file mode 100755
index 5bea30a223cd4..0000000000000
--- a/test/scripts/jenkins_uptime_playwright.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-echo " -> Running synthetics @elastic/synthetics tests"
-cd "$XPACK_DIR"
-
-node plugins/synthetics/scripts/e2e.js
-
-echo ""
-echo ""
diff --git a/test/scripts/jenkins_ux_synthetics.sh b/test/scripts/jenkins_ux_synthetics.sh
deleted file mode 100755
index acf2611e36b94..0000000000000
--- a/test/scripts/jenkins_ux_synthetics.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-echo " -> Running User Experience plugin @elastic/synthetics tests"
-cd "$XPACK_DIR"
-
-node plugins/ux/scripts/e2e.js
-
-echo ""
-echo ""
diff --git a/test/scripts/jenkins_xpack_accessibility.sh b/test/scripts/jenkins_xpack_accessibility.sh
deleted file mode 100755
index b1daa0ada1d50..0000000000000
--- a/test/scripts/jenkins_xpack_accessibility.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --config test/accessibility/config.ts;
diff --git a/test/scripts/jenkins_xpack_baseline.sh b/test/scripts/jenkins_xpack_baseline.sh
deleted file mode 100755
index a0a98ccd5a5e7..0000000000000
--- a/test/scripts/jenkins_xpack_baseline.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-source "$KIBANA_DIR/src/dev/ci_setup/setup_percy.sh"
-
-echo " -> building and extracting default Kibana distributable"
-cd "$KIBANA_DIR"
-node scripts/build --debug
-
-echo " -> shipping metrics from build to ci-stats"
-node scripts/ship_ci_stats \
- --metrics target/optimizer_bundle_metrics.json \
- --metrics build/kibana/node_modules/@kbn/ui-shared-deps-src/shared_built_assets/metrics.json
-
-linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
-installDir="$KIBANA_DIR/install/kibana"
-mkdir -p "$installDir"
-tar -xzf "$linuxBuild" -C "$installDir" --strip=1
-
-mkdir -p "$WORKSPACE/kibana-build"
-cp -pR install/kibana/. $WORKSPACE/kibana-build/
-
-cd "$KIBANA_DIR"
-source "test/scripts/jenkins_xpack_saved_objects_field_metrics.sh"
diff --git a/test/scripts/jenkins_xpack_build_plugins.sh b/test/scripts/jenkins_xpack_build_plugins.sh
deleted file mode 100755
index bdf6ee2455527..0000000000000
--- a/test/scripts/jenkins_xpack_build_plugins.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-echo " -> building kibana platform plugins"
-node scripts/build_kibana_platform_plugins \
- --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
- --scan-dir "$KIBANA_DIR/test/common/plugins" \
- --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
- --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/plugins" \
- --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
- --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
- --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
- --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \
- --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \
- --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \
- --scan-dir "$KIBANA_DIR/examples" \
- --scan-dir "$XPACK_DIR/examples" \
- --workers 12
diff --git a/test/scripts/jenkins_xpack_ci_group.sh b/test/scripts/jenkins_xpack_ci_group.sh
deleted file mode 100755
index 59bcf45a2089f..0000000000000
--- a/test/scripts/jenkins_xpack_ci_group.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-if [[ -z "$CODE_COVERAGE" ]]; then
- echo " -> Running functional and api tests"
-
- node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --include-tag "ciGroup$CI_GROUP"
-
- echo ""
- echo ""
-else
- echo " -> Running X-Pack functional tests with code coverage"
- export NODE_OPTIONS=--max_old_space_size=8192
-
- echo " -> making hard link clones"
- cd ..
- cp -RlP kibana "kibana${CI_GROUP}"
- cd "kibana${CI_GROUP}/x-pack"
-
- echo " -> running tests from the clone folder"
- node scripts/functional_tests --debug --include-tag "ciGroup$CI_GROUP" --exclude-tag "skipCoverage" || true;
-
- echo " -> moving junit output, silently fail in case of no report"
- mkdir -p ../../kibana/target/junit
- mv ../target/junit/* ../../kibana/target/junit/ || echo "copying junit failed"
-
- echo " -> copying screenshots and html for failures"
- cp -r test/functional/screenshots/* ../../kibana/x-pack/test/functional/screenshots/ || echo "copying screenshots failed"
- cp -r test/functional/failure_debug ../../kibana/x-pack/test/functional/ || echo "copying html failed"
-fi
diff --git a/test/scripts/jenkins_xpack_firefox_smoke.sh b/test/scripts/jenkins_xpack_firefox_smoke.sh
deleted file mode 100755
index de19d3867520d..0000000000000
--- a/test/scripts/jenkins_xpack_firefox_smoke.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --include-tag "includeFirefox" \
- --config test/functional/config.firefox.js \
- --config test/functional_embedded/config.firefox.ts;
diff --git a/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh b/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh
deleted file mode 100755
index fc3a7db06a43b..0000000000000
--- a/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_xpack.sh
-
-node scripts/functional_tests \
- --debug --bail \
- --kibana-install-dir "$KIBANA_INSTALL_DIR" \
- --config test/saved_objects_field_count/config.ts;
diff --git a/test/scripts/lint/eslint.sh b/test/scripts/lint/eslint.sh
deleted file mode 100755
index 8395df85c5d30..0000000000000
--- a/test/scripts/lint/eslint.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/eslint --no-cache
diff --git a/test/scripts/lint/stylelint.sh b/test/scripts/lint/stylelint.sh
deleted file mode 100755
index 2f500c7e14aaa..0000000000000
--- a/test/scripts/lint/stylelint.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/stylelint
diff --git a/test/scripts/test/api_integration.sh b/test/scripts/test/api_integration.sh
deleted file mode 100755
index 06263c38b0728..0000000000000
--- a/test/scripts/test/api_integration.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/functional_tests \
- --config test/api_integration/config.js \
- --bail \
- --debug
diff --git a/test/scripts/test/health_gateway.sh b/test/scripts/test/health_gateway.sh
deleted file mode 100755
index 18a9b81b083de..0000000000000
--- a/test/scripts/test/health_gateway.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-node scripts/functional_tests \
- --config test/health_gateway/config.ts \
- --bail \
- --debug \
- --kibana-install-dir $KIBANA_INSTALL_DIR
diff --git a/test/scripts/test/interpreter_functional.sh b/test/scripts/test/interpreter_functional.sh
deleted file mode 100755
index 2a40c81c34ad0..0000000000000
--- a/test/scripts/test/interpreter_functional.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-node scripts/functional_tests \
- --config test/interpreter_functional/config.ts \
- --bail \
- --debug \
- --kibana-install-dir $KIBANA_INSTALL_DIR
diff --git a/test/scripts/test/jest_integration.sh b/test/scripts/test/jest_integration.sh
deleted file mode 100755
index 3b27ba06842be..0000000000000
--- a/test/scripts/test/jest_integration.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node --max-old-space-size=5120 scripts/jest_integration --ci
diff --git a/test/scripts/test/jest_unit.sh b/test/scripts/test/jest_unit.sh
deleted file mode 100755
index f368554e35760..0000000000000
--- a/test/scripts/test/jest_unit.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-source src/dev/ci_setup/setup_env.sh
-
-node scripts/jest --ci --maxWorkers=6
diff --git a/test/scripts/test/plugin_functional.sh b/test/scripts/test/plugin_functional.sh
deleted file mode 100755
index 115ddb81d3e45..0000000000000
--- a/test/scripts/test/plugin_functional.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-node scripts/functional_tests \
- --config test/plugin_functional/config.ts \
- --bail \
- --debug
diff --git a/test/scripts/test/server_integration.sh b/test/scripts/test/server_integration.sh
deleted file mode 100755
index fa4c4c6ce2c35..0000000000000
--- a/test/scripts/test/server_integration.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-source test/scripts/jenkins_test_setup_oss.sh
-
-node scripts/functional_tests \
- --config test/server_integration/http/ssl/config.js \
- --config test/server_integration/http/ssl_redirect/config.js \
- --config test/server_integration/http/platform/config.ts \
- --config test/server_integration/http/ssl_with_p12/config.js \
- --config test/server_integration/http/ssl_with_p12_intermediate/config.js \
- --bail \
- --debug \
- --kibana-install-dir $KIBANA_INSTALL_DIR
-
-# Tests that must be run against source in order to build test plugins
-node scripts/functional_tests \
- --config test/server_integration/http/platform/config.status.ts \
- --bail \
- --debug
diff --git a/tsconfig.base.json b/tsconfig.base.json
index 178f70c927e28..d241aace37840 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -966,6 +966,8 @@
"@kbn/language-documentation-popover/*": ["packages/kbn-language-documentation-popover/*"],
"@kbn/lens-embeddable-utils": ["packages/kbn-lens-embeddable-utils"],
"@kbn/lens-embeddable-utils/*": ["packages/kbn-lens-embeddable-utils/*"],
+ "@kbn/lens-formula-docs": ["packages/kbn-lens-formula-docs"],
+ "@kbn/lens-formula-docs/*": ["packages/kbn-lens-formula-docs/*"],
"@kbn/lens-plugin": ["x-pack/plugins/lens"],
"@kbn/lens-plugin/*": ["x-pack/plugins/lens/*"],
"@kbn/license-api-guard-plugin": ["x-pack/plugins/license_api_guard"],
diff --git a/vars/agentInfo.groovy b/vars/agentInfo.groovy
deleted file mode 100644
index 166a86c169261..0000000000000
--- a/vars/agentInfo.groovy
+++ /dev/null
@@ -1,40 +0,0 @@
-def print() {
- catchError(catchInterruptions: false, buildResult: null) {
- def startTime = sh(script: "date -d '-3 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
- def endTime = sh(script: "date -d '+1 hour 30 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
-
- def resourcesUrl =
- (
- "https://infra-stats.elastic.co/app/kibana#/visualize/edit/8bd92360-1b92-11ea-b719-aba04518cc34" +
- "?_g=(time:(from:'${startTime}',to:'${endTime}'))" +
- "&_a=(query:'host.name:${env.NODE_NAME}')"
- )
- .replaceAll("'", '%27') // Need to escape ' because of the shell echo below, but can't really replace "'" with "\'" because of groovy sandbox
- .replaceAll(/\)$/, '%29') // This is just here because the URL parsing in the Jenkins console doesn't work right
-
- def logsStartTime = sh(script: "date -d '-3 minutes' +%s", returnStdout: true).trim()
- def logsUrl =
- (
- "https://infra-stats.elastic.co/app/infra#/logs" +
- "?_g=()&flyoutOptions=(flyoutId:!n,flyoutVisibility:hidden,surroundingLogsId:!n)" +
- "&logFilter=(expression:'host.name:${env.NODE_NAME}',kind:kuery)" +
- "&logPosition=(position:(time:${logsStartTime}000),streamLive:!f)"
- )
- .replaceAll("'", '%27')
- .replaceAll('\\)', '%29')
-
- sh script: """
- set +x
- echo 'Resource Graph:'
- echo '${resourcesUrl}'
- echo ''
- echo 'Agent Logs:'
- echo '${logsUrl}'
- echo ''
- echo 'SSH Command:'
- echo "ssh -F ssh_config \$(hostname --ip-address)"
- """, label: "Worker/Agent/Node debug links"
- }
-}
-
-return this
diff --git a/vars/buildState.groovy b/vars/buildState.groovy
deleted file mode 100644
index 365705661350c..0000000000000
--- a/vars/buildState.groovy
+++ /dev/null
@@ -1,30 +0,0 @@
-import groovy.transform.Field
-
-public static @Field JENKINS_BUILD_STATE = [:]
-
-def add(key, value) {
- if (!buildState.JENKINS_BUILD_STATE.containsKey(key)) {
- buildState.JENKINS_BUILD_STATE[key] = value
- return true
- }
-
- return false
-}
-
-def set(key, value) {
- buildState.JENKINS_BUILD_STATE[key] = value
-}
-
-def get(key) {
- return buildState.JENKINS_BUILD_STATE[key]
-}
-
-def has(key) {
- return buildState.JENKINS_BUILD_STATE.containsKey(key)
-}
-
-def get() {
- return buildState.JENKINS_BUILD_STATE
-}
-
-return this
diff --git a/vars/catchErrors.groovy b/vars/catchErrors.groovy
deleted file mode 100644
index 2a1b55d832606..0000000000000
--- a/vars/catchErrors.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-// Basically, this is a shortcut for catchError(catchInterruptions: false) {}
-// By default, catchError will swallow aborts/timeouts, which we almost never want
-// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps
-def call(Map params = [:], Closure closure) {
- try {
- closure()
- } catch (ex) {
- params.catchInterruptions = false
- catchError(params) {
- throw ex
- }
- }
-}
-
-return this
diff --git a/vars/esSnapshots.groovy b/vars/esSnapshots.groovy
deleted file mode 100644
index 884fbcdb17aeb..0000000000000
--- a/vars/esSnapshots.groovy
+++ /dev/null
@@ -1,50 +0,0 @@
-def promote(snapshotVersion, snapshotId) {
- def snapshotDestination = "${snapshotVersion}/archives/${snapshotId}"
- def MANIFEST_URL = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${snapshotDestination}/manifest.json"
-
- dir('verified-manifest') {
- def verifiedSnapshotFilename = 'manifest-latest-verified.json'
-
- sh """
- curl -O '${MANIFEST_URL}'
- mv manifest.json ${verifiedSnapshotFilename}
- """
-
- googleStorageUpload(
- credentialsId: 'kibana-ci-gcs-plugin',
- bucket: "gs://kibana-ci-es-snapshots-daily/${snapshotVersion}",
- pattern: verifiedSnapshotFilename,
- sharedPublicly: false,
- showInline: false,
- )
- }
-
- // This would probably be more efficient if we could just copy using gsutil and specifying buckets for src and dest
- // But we don't currently have access to the GCS credentials in a way that can be consumed easily from here...
- dir('transfer-to-permanent') {
- googleStorageDownload(
- credentialsId: 'kibana-ci-gcs-plugin',
- bucketUri: "gs://kibana-ci-es-snapshots-daily/${snapshotDestination}/*",
- localDirectory: '.',
- pathPrefix: snapshotDestination,
- )
-
- def manifestJson = readFile file: 'manifest.json'
- writeFile(
- file: 'manifest.json',
- text: manifestJson.replace("kibana-ci-es-snapshots-daily/${snapshotDestination}", "kibana-ci-es-snapshots-permanent/${snapshotVersion}")
- )
-
- // Ideally we would have some delete logic here before uploading,
- // But we don't currently have access to the GCS credentials in a way that can be consumed easily from here...
- googleStorageUpload(
- credentialsId: 'kibana-ci-gcs-plugin',
- bucket: "gs://kibana-ci-es-snapshots-permanent/${snapshotVersion}",
- pattern: '*.*',
- sharedPublicly: false,
- showInline: false,
- )
- }
-}
-
-return this
diff --git a/vars/getCheckoutInfo.groovy b/vars/getCheckoutInfo.groovy
deleted file mode 100644
index f9d797f8127c7..0000000000000
--- a/vars/getCheckoutInfo.groovy
+++ /dev/null
@@ -1,50 +0,0 @@
-def call(branchOverride) {
- def repoInfo = [
- branch: branchOverride ?: env.ghprbSourceBranch,
- targetBranch: env.ghprbTargetBranch,
- targetsTrackedBranch: true
- ]
-
- if (repoInfo.branch == null) {
- if (!(params.branch_specifier instanceof String)) {
- throw new Exception(
- "Unable to determine branch automatically, either pass a branch name to getCheckoutInfo() or use the branch_specifier param."
- )
- }
-
- // strip prefix from the branch specifier to make it consistent with ghprbSourceBranch
- repoInfo.branch = params.branch_specifier.replaceFirst(/^(refs\/heads\/|origin\/)/, "")
- }
-
- repoInfo.commit = sh(
- script: "git rev-parse HEAD",
- label: "determining checked out sha",
- returnStdout: true
- ).trim()
-
- if (repoInfo.targetBranch) {
- // Try to clone fetch from Github up to 8 times, waiting 15 secs between attempts
- retryWithDelay(8, 15) {
- sh(
- script: "git fetch origin ${repoInfo.targetBranch}",
- label: "fetch latest from '${repoInfo.targetBranch}' at origin"
- )
- }
-
- repoInfo.mergeBase = sh(
- script: "git merge-base HEAD FETCH_HEAD",
- label: "determining merge point with '${repoInfo.targetBranch}' at origin",
- returnStdout: true
- ).trim()
-
- def pkgJson = readFile("package.json")
- def releaseBranch = toJSON(pkgJson).branch
- repoInfo.targetsTrackedBranch = releaseBranch == repoInfo.targetBranch
- }
-
- print "repoInfo: ${repoInfo}"
-
- return repoInfo
-}
-
-return this
diff --git a/vars/githubCommitStatus.groovy b/vars/githubCommitStatus.groovy
deleted file mode 100644
index 175dbe0c90542..0000000000000
--- a/vars/githubCommitStatus.groovy
+++ /dev/null
@@ -1,57 +0,0 @@
-def defaultCommit() {
- if (buildState.has('checkoutInfo')) {
- return buildState.get('checkoutInfo').commit
- }
-}
-
-def onStart(commit = defaultCommit(), context = 'kibana-ci') {
- catchError {
- if (githubPr.isPr() || !commit) {
- return
- }
-
- create(commit, 'pending', 'Build started.', context)
- }
-}
-
-def onFinish(commit = defaultCommit(), context = 'kibana-ci') {
- catchError {
- if (githubPr.isPr() || !commit) {
- return
- }
-
- def status = buildUtils.getBuildStatus()
-
- if (status == 'SUCCESS' || status == 'UNSTABLE') {
- create(commit, 'success', 'Build completed successfully.', context)
- } else if(status == 'ABORTED') {
- create(commit, 'error', 'Build aborted or timed out.', context)
- } else {
- create(commit, 'error', 'Build failed.', context)
- }
- }
-}
-
-def trackBuild(commit, context, Closure closure) {
- onStart(commit, context)
- catchError {
- closure()
- }
- onFinish(commit, context)
-}
-
-// state: error|failure|pending|success
-def create(sha, state, description, context, targetUrl = null) {
- targetUrl = targetUrl ?: env.BUILD_URL
-
- withGithubCredentials {
- return githubApi.post("repos/elastic/kibana/statuses/${sha}", [
- state: state,
- description: description,
- context: context,
- target_url: targetUrl.toString()
- ])
- }
-}
-
-return this
diff --git a/vars/githubPr.groovy b/vars/githubPr.groovy
deleted file mode 100644
index 594d54f2c5b5e..0000000000000
--- a/vars/githubPr.groovy
+++ /dev/null
@@ -1,369 +0,0 @@
-/**
- Wraps the main/important part of a job, executes it, and then publishes a comment to GitHub with the status.
-
- It will check for the existence of GHPRB env variables before doing any actual PR work,
- so it can be used to wrap code that is executed in both PR and non-PR contexts.
-
- Inside the comment, it will hide a JSON blob containing build data (status, etc).
-
- Then, the next time it posts a comment, it will:
- 1. Read the previous comment and parse the json
- 2. Create a new comment, add a summary of up to 5 previous builds to it, and append this build's data to the hidden JSON
- 3. Delete the old comment
-
- So, there is only ever one build status comment on a PR at any given time, the most recent one.
-*/
-def withDefaultPrComments(closure) {
- catchErrors {
- // kibanaPipeline.notifyOnError() needs to know if comments are enabled, so lets track it with a global
- // isPr() just ensures this functionality is skipped for non-PR builds
- buildState.set('PR_COMMENTS_ENABLED', isPr())
- catchErrors {
- closure()
- }
- sendComment(true)
- }
-}
-
-def sendComment(isFinal = false) {
- if (!buildState.get('PR_COMMENTS_ENABLED')) {
- return
- }
-
- def status = buildUtils.getBuildStatus()
- if (status == "ABORTED") {
- return
- }
-
- def lastComment = getLatestBuildComment()
- def info = getLatestBuildInfo(lastComment) ?: [:]
- info.builds = (info.builds ?: []).takeRight(5) // Rotate out old builds
-
- // If two builds are running at the same time, the first one should not post a comment after the second one
- if (info.number && info.number.toInteger() > env.BUILD_NUMBER.toInteger()) {
- return
- }
-
- def shouldUpdateComment = !!info.builds.find { it.number == env.BUILD_NUMBER }
-
- def message = getNextCommentMessage(info, isFinal)
-
- if (shouldUpdateComment) {
- updateComment(lastComment.id, message)
- } else {
- createComment(message)
-
- if (lastComment && lastComment.user.login == 'kibanamachine') {
- deleteComment(lastComment.id)
- }
- }
-}
-
-// Checks whether or not this currently executing build was triggered via a PR in the elastic/kibana repo
-def isPr() {
- return !!(env.ghprbPullId && env.ghprbPullLink && env.ghprbPullLink =~ /\/elastic\/kibana\//)
-}
-
-def isTrackedBranchPr() {
- return isPr() && (env.ghprbTargetBranch == 'master' || env.ghprbTargetBranch == '6.8' || env.ghprbTargetBranch =~ /[7-8]\.[x0-9]+/)
-}
-
-def getLatestBuildComment() {
- return getComments()
- .reverse()
- .find { (it.user.login == 'elasticmachine' || it.user.login == 'kibanamachine') && it.body =~ /<!--PIPELINE/ }
-}
-
-def getBuildInfoFromComment(commentText) {
- def matches = commentText =~ /(?ms)<!--PIPELINE(.*?)PIPELINE-->/
- if (!matches || !matches[0]) {
- return null
- }
-
- return toJSON(matches[0][1].trim())
-}
-
-def getLatestBuildInfo() {
- return getLatestBuildInfo(getLatestBuildComment())
-}
-
-def getLatestBuildInfo(comment) {
- return comment ? getBuildInfoFromComment(comment.body) : null
-}
-
-def getHistoryText(builds) {
- if (!builds || builds.size() < 1) {
- return ""
- }
-
- def list = builds
- .reverse()
- .collect { build ->
- if (build.status == "SUCCESS") {
- return "* :green_heart: [Build #${build.number}](${build.url}) succeeded ${build.commit}"
- } else if(build.status == "UNSTABLE") {
- return "* :yellow_heart: [Build #${build.number}](${build.url}) was flaky ${build.commit}"
- } else {
- return "* :broken_heart: [Build #${build.number}](${build.url}) failed ${build.commit}"
- }
- }
- .join("\n")
-
- return "### History\n${list}"
-}
-
-def getTestFailuresMessage() {
- def failures = testUtils.getFailures()
- if (!failures) {
- return ""
- }
-
- def messages = []
- messages << "---\n\n### [Test Failures](${env.BUILD_URL}testReport)"
-
- failures.take(3).each { failure ->
- messages << """
-<details><summary>${failure.fullDisplayName}</summary>
-
-[Link to Jenkins](${failure.url})
-"""
-
- if (failure.stdOut) {
- messages << "\n#### Standard Out\n```\n${failure.stdOut}\n```"
- }
-
- if (failure.stdErr) {
- messages << "\n#### Standard Error\n```\n${failure.stdErr}\n```"
- }
-
- if (failure.stacktrace) {
- messages << "\n#### Stack Trace\n```\n${failure.stacktrace}\n```"
- }
-
- messages << "</details>\n\n---"
- }
-
- if (failures.size() > 3) {
- messages << "and ${failures.size() - 3} more failures, only showing the first 3."
- }
-
- return messages.join("\n")
-}
-
-def getBuildStatusIncludingMetrics() {
- def status = buildUtils.getBuildStatus()
-
- if (status == 'SUCCESS' && shouldCheckCiMetricSuccess() && !ciStats.getMetricsSuccess()) {
- return 'FAILURE'
- }
-
- return status
-}
-
-def getNextCommentMessage(previousCommentInfo = [:], isFinal = false) {
- def info = previousCommentInfo ?: [:]
- info.builds = previousCommentInfo.builds ?: []
-
- // When we update an in-progress comment, we need to remove the old version from the history
- info.builds = info.builds.findAll { it.number != env.BUILD_NUMBER }
-
- def messages = []
-
- def status = isFinal
- ? getBuildStatusIncludingMetrics()
- : buildUtils.getBuildStatus()
-
- def storybooksUrl = buildState.get('storybooksUrl')
- def storybooksMessage = storybooksUrl ? "* [Storybooks Preview](${storybooksUrl})" : "* Storybooks not built"
-
- if (!isFinal) {
- storybooksMessage = storybooksUrl ? storybooksMessage : "* Storybooks not built yet"
-
- def failuresPart = status != 'SUCCESS' ? ', with failures' : ''
- messages << """
- ## :hourglass_flowing_sand: Build in-progress${failuresPart}
- * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
- * Commit: ${getCommitHash()}
- ${storybooksMessage}
- * This comment will update when the build is complete
- """
- } else if (status == 'SUCCESS') {
- messages << """
- ## :green_heart: Build Succeeded
- * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
- * Commit: ${getCommitHash()}
- ${storybooksMessage}
- ${getDocsChangesLink()}
- """
- } else if(status == 'UNSTABLE') {
- def message = """
- ## :yellow_heart: Build succeeded, but was flaky
- * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
- * Commit: ${getCommitHash()}
- ${storybooksMessage}
- ${getDocsChangesLink()}
- """.stripIndent()
-
- def failures = retryable.getFlakyFailures()
- if (failures && failures.size() > 0) {
- def list = failures.collect { " * ${it.label}" }.join("\n")
- message += "* Flaky suites:\n${list}"
- }
-
- messages << message
- } else {
- messages << """
- ## :broken_heart: Build Failed
- * [continuous-integration/kibana-ci/pull-request](${env.BUILD_URL})
- * Commit: ${getCommitHash()}
- ${storybooksMessage}
- * [Pipeline Steps](${env.BUILD_URL}flowGraphTable) (look for red circles / failed steps)
- * [Interpreting CI Failures](https://www.elastic.co/guide/en/kibana/current/interpreting-ci-failures.html)
- ${getDocsChangesLink()}
- """
- }
-
- if (status != 'SUCCESS' && status != 'UNSTABLE') {
- try {
- def steps = getFailedSteps()
- if (steps?.size() > 0) {
- def list = steps.collect { "* [${it.displayName}](${it.logs})" }.join("\n")
- messages << "### Failed CI Steps\n${list}"
- }
- } catch (ex) {
- buildUtils.printStacktrace(ex)
- print "Error retrieving failed pipeline steps for PR comment, will skip this section"
- }
- }
-
- messages << getTestFailuresMessage()
-
- catchErrors {
- if (isFinal && isTrackedBranchPr()) {
- messages << ciStats.getMetricsReport()
- }
- }
-
- if (info.builds && info.builds.size() > 0) {
- messages << getHistoryText(info.builds)
- }
-
- messages << "To update your PR or re-run it, just comment with:\n`@elasticmachine merge upstream`"
-
- catchErrors {
- def assignees = getAssignees()
- if (assignees) {
- messages << "cc " + assignees.collect { "@${it}"}.join(" ")
- }
- }
-
- info.builds << [
- status: status,
- url: env.BUILD_URL,
- number: env.BUILD_NUMBER,
- commit: getCommitHash()
- ]
-
- messages << """
-
- """
-
- return messages
- .findAll { !!it } // No blank strings
- .collect { it.stripIndent().trim() } // This just allows us to indent various strings above, but leaves them un-indented in the comment
- .join("\n\n")
-}
-
-def createComment(message) {
- if (!isPr()) {
- error "Trying to post a GitHub PR comment on a non-PR or non-elastic PR build"
- }
-
- withGithubCredentials {
- return githubApi.post("repos/elastic/kibana/issues/${env.ghprbPullId}/comments", [ body: message ])
- }
-}
-
-def getComments() {
- withGithubCredentials {
- return githubIssues.getComments(env.ghprbPullId)
- }
-}
-
-def updateComment(commentId, message) {
- if (!isPr()) {
- error "Trying to post a GitHub PR comment on a non-PR or non-elastic PR build"
- }
-
- withGithubCredentials {
- def path = "repos/elastic/kibana/issues/comments/${commentId}"
- def json = toJSON([ body: message ]).toString()
-
- def resp = githubApi([ path: path ], [ method: "POST", data: json, headers: [ "X-HTTP-Method-Override": "PATCH" ] ])
- return toJSON(resp)
- }
-}
-
-def deleteComment(commentId) {
- withGithubCredentials {
- def path = "repos/elastic/kibana/issues/comments/${commentId}"
- return githubApi([ path: path ], [ method: "DELETE" ])
- }
-}
-
-def getCommitHash() {
- return env.ghprbActualCommit
-}
-
-def getDocsChangesLink() {
- def url = "https://kibana_${env.ghprbPullId}.docs-preview.app.elstc.co/diff"
-
- try {
- // httpRequest throws on status codes >400 and failures
- def resp = httpRequest([ method: "GET", url: url ])
-
- if (resp.contains("There aren't any differences!")) {
- return ""
- }
-
- return "* [Documentation Changes](${url})"
- } catch (ex) {
- print "Failed to reach ${url}"
- buildUtils.printStacktrace(ex)
- }
-
- return ""
-}
-
-def getFailedSteps() {
- return jenkinsApi.getFailedSteps()?.findAll { step ->
- step.displayName != 'Check out from version control'
- }
-}
-
-def shouldCheckCiMetricSuccess() {
- // disable ciMetrics success check when a PR is targetting a non-tracked branch
- if (buildState.has('checkoutInfo') && !buildState.get('checkoutInfo').targetsTrackedBranch) {
- return false
- }
-
- return true
-}
-
-def getPR() {
- withGithubCredentials {
- def path = "repos/elastic/kibana/pulls/${env.ghprbPullId}"
- return githubApi.get(path)
- }
-}
-
-def getAssignees() {
- def pr = getPR()
- if (!pr) {
- return []
- }
-
- return pr.assignees.collect { it.login }
-}
diff --git a/vars/jenkinsApi.groovy b/vars/jenkinsApi.groovy
deleted file mode 100644
index 57818593ffeb2..0000000000000
--- a/vars/jenkinsApi.groovy
+++ /dev/null
@@ -1,21 +0,0 @@
-def getSteps() {
- def url = "${env.BUILD_URL}api/json?tree=actions[nodes[iconColor,running,displayName,id,parents]]"
- def responseRaw = httpRequest([ method: "GET", url: url ])
- def response = toJSON(responseRaw)
-
- def graphAction = response?.actions?.find { it._class == "org.jenkinsci.plugins.workflow.job.views.FlowGraphAction" }
-
- return graphAction?.nodes
-}
-
-def getFailedSteps() {
- def steps = getSteps()
- def failedSteps = steps?.findAll { (it.iconColor == "red" || it.iconColor == "red_anime") && it._class == "org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode" }
- failedSteps.each { step ->
- step.logs = "${env.BUILD_URL}execution/node/${step.id}/log".toString()
- }
-
- return failedSteps
-}
-
-return this
diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy
deleted file mode 100644
index 374219d800600..0000000000000
--- a/vars/kibanaPipeline.groovy
+++ /dev/null
@@ -1,496 +0,0 @@
-def withPostBuildReporting(Map params, Closure closure) {
- try {
- closure()
- } finally {
- def parallelWorkspaces = []
- try {
- parallelWorkspaces = getParallelWorkspaces()
- } catch(ex) {
- print ex
- }
-
- if (params.runErrorReporter) {
- catchErrors {
- runErrorReporter([pwd()] + parallelWorkspaces)
- }
- }
-
- catchErrors {
- publishJunit()
- }
-
- catchErrors {
- def parallelWorkspace = "${env.WORKSPACE}/parallel"
- if (fileExists(parallelWorkspace)) {
- dir(parallelWorkspace) {
- def workspaceTasks = [:]
-
- parallelWorkspaces.each { workspaceDir ->
- workspaceTasks[workspaceDir] = {
- dir(workspaceDir) {
- catchErrors {
- runbld.junit()
- }
- }
- }
- }
-
- if (workspaceTasks) {
- parallel(workspaceTasks)
- }
- }
- }
- }
- }
-}
-
-def getParallelWorkspaces() {
- def workspaces = []
- def parallelWorkspace = "${env.WORKSPACE}/parallel"
- if (fileExists(parallelWorkspace)) {
- dir(parallelWorkspace) {
- // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace
- workspaces = findFiles(glob: '*/kibana/package.json')
- .collect {
- // get the paths to the kibana directories for the parallel workspaces
- return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/')
- }
- }
- }
-
- return workspaces
-}
-
-def notifyOnError(Closure closure) {
- try {
- closure()
- } catch (ex) {
- // If this is the first failed step, it's likely that the error hasn't propagated up far enough to mark the build as a failure
- currentBuild.result = 'FAILURE'
- catchErrors {
- githubPr.sendComment(false)
- }
- catchErrors {
- // an empty map is a valid config, but is falsey, so let's use .has()
- if (buildState.has('SLACK_NOTIFICATION_CONFIG')) {
- slackNotifications.sendFailedBuild(buildState.get('SLACK_NOTIFICATION_CONFIG'))
- }
- }
- throw ex
- }
-}
-
-def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) {
- // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue
- def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER
-
- def kibanaPort = "61${parallelId}1"
- def esPort = "62${parallelId}1"
- // Ports 62x2-62x9 kept open for ES nodes
- def esTransportPort = "63${parallelId}1-63${parallelId}9"
- def fleetPackageRegistryPort = "64${parallelId}1"
- def alertingProxyPort = "64${parallelId}2"
- def corsTestServerPort = "64${parallelId}3"
- // needed for https://github.com/elastic/kibana/issues/107246
- def proxyTestServerPort = "64${parallelId}4"
- def contextPropagationOnly = githubPr.isPr() ? "true" : "false"
-
- withEnv([
- "CI_GROUP=${parallelId}",
- "REMOVE_KIBANA_INSTALL_DIR=1",
- "CI_PARALLEL_PROCESS_NUMBER=${parallelId}",
- "TEST_KIBANA_HOST=localhost",
- "TEST_KIBANA_PORT=${kibanaPort}",
- "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
- "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
- "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
- "TEST_CORS_SERVER_PORT=${corsTestServerPort}",
- "TEST_PROXY_SERVER_PORT=${proxyTestServerPort}",
- "KBN_NP_PLUGINS_BUILT=true",
- "FLEET_PACKAGE_REGISTRY_PORT=${fleetPackageRegistryPort}",
- "ALERTING_PROXY_PORT=${alertingProxyPort}",
- "ELASTIC_APM_ACTIVE=true",
- "ELASTIC_APM_CONTEXT_PROPAGATION_ONLY=${contextPropagationOnly}",
- "ELASTIC_APM_TRANSACTION_SAMPLE_RATE=0.1",
- ] + additionalEnvs) {
- closure()
- }
-}
-
-def functionalTestProcess(String name, Closure closure) {
- return {
- notifyOnError {
- withFunctionalTestEnv(["JOB=${name}"], closure)
- }
- }
-}
-
-def functionalTestProcess(String name, String script) {
- return functionalTestProcess(name) {
- retryable(name) {
- runbld(script, "Execute ${name}")
- }
- }
-}
-
-def ossCiGroupProcess(ciGroup, withDelay = false) {
- return functionalTestProcess("ciGroup" + ciGroup) {
- if (withDelay && !(ciGroup instanceof String) && !(ciGroup instanceof GString)) {
- sleep((ciGroup-1)*30) // smooth out CPU spikes from ES startup
- }
-
- withEnv([
- "CI_GROUP=${ciGroup}",
- "JOB=kibana-ciGroup${ciGroup}",
- ]) {
- retryable("kibana-ciGroup${ciGroup}") {
- runbld("./test/scripts/jenkins_ci_group.sh", "Execute kibana-ciGroup${ciGroup}")
- }
- }
- }
-}
-
-def xpackCiGroupProcess(ciGroup, withDelay = false) {
- return functionalTestProcess("xpack-ciGroup" + ciGroup) {
- if (withDelay && !(ciGroup instanceof String) && !(ciGroup instanceof GString)) {
- sleep((ciGroup-1)*30) // smooth out CPU spikes from ES startup
- }
- withEnv([
- "CI_GROUP=${ciGroup}",
- "JOB=xpack-kibana-ciGroup${ciGroup}",
- ]) {
- retryable("xpack-kibana-ciGroup${ciGroup}") {
- runbld("./test/scripts/jenkins_xpack_ci_group.sh", "Execute xpack-kibana-ciGroup${ciGroup}")
- }
- }
- }
-}
-
-def uploadGcsArtifact(uploadPrefix, pattern) {
- googleStorageUpload(
- credentialsId: 'kibana-ci-gcs-plugin',
- bucket: "gs://${uploadPrefix}",
- pattern: pattern,
- sharedPublicly: true,
- showInline: true,
- )
-}
-
-def withGcsArtifactUpload(workerName, closure) {
- def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}"
- def ARTIFACT_PATTERNS = [
- 'target/junit/**/*',
- 'target/kibana-*',
- 'target/kibana-coverage/jest/**/*',
- 'target/kibana-security-solution/**/*.png',
- 'target/kibana-fleet/**/*.png',
- 'target/test-metrics/*',
- 'target/test-suites-ci-plan.json',
- 'test/**/screenshots/diff/*.png',
- 'test/**/screenshots/failure/*.png',
- 'test/**/screenshots/session/*.png',
- 'test/functional/failure_debug/html/*.html',
- 'x-pack/test/**/screenshots/diff/*.png',
- 'x-pack/test/**/screenshots/failure/*.png',
- 'x-pack/test/**/screenshots/session/*.png',
- 'x-pack/test/functional/failure_debug/html/*.html',
- '.es/**/*.hprof'
- ]
-
- withEnv([
- "GCS_UPLOAD_PREFIX=${uploadPrefix}"
- ], {
- try {
- closure()
- } finally {
- catchErrors {
- ARTIFACT_PATTERNS.each { pattern ->
- uploadGcsArtifact(uploadPrefix, pattern)
- }
-
- dir(env.WORKSPACE) {
- ARTIFACT_PATTERNS.each { pattern ->
- uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}")
- }
- }
- }
- }
- })
-}
-
-def publishJunit() {
- junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
-
- dir(env.WORKSPACE) {
- junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
- }
-}
-
-def sendMail(Map params = [:]) {
- // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success
- // The e-mail plugin for the infra e-mail depends upon this being set
- currentBuild.result = currentBuild.result ?: 'SUCCESS'
-
- def buildStatus = buildUtils.getBuildStatus()
- if (buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
- node('flyweight') {
- sendInfraMail()
- sendKibanaMail(params)
- }
- }
-}
-
-def sendInfraMail() {
- catchErrors {
- step([
- $class: 'Mailer',
- notifyEveryUnstableBuild: true,
- recipients: 'infra-root+build@elastic.co',
- sendToIndividuals: false
- ])
- }
-}
-
-def sendKibanaMail(Map params = [:]) {
- def config = [to: 'build-kibana@elastic.co'] + params
-
- catchErrors {
- def buildStatus = buildUtils.getBuildStatus()
- if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
- emailext(
- config.to,
- subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}",
- body: '${SCRIPT,template="groovy-html.template"}',
- mimeType: 'text/html',
- )
- }
- }
-}
-
-def bash(script, label) {
- sh(
- script: "#!/bin/bash\n${script}",
- label: label
- )
-}
-
-def doSetup() {
- notifyOnError {
- retryWithDelay(2, 15) {
- try {
- runbld("./test/scripts/jenkins_setup.sh", "Setup Build Environment and Dependencies")
- } catch (ex) {
- try {
- // Setup expects this directory to be missing, so we need to remove it before we do a retry
- bash("rm -rf ../elasticsearch", "Remove elasticsearch sibling directory, if it exists")
- } finally {
- throw ex
- }
- }
- }
- }
-}
-
-def getBuildArtifactBucket() {
- def dir = env.ghprbPullId ? "pr-${env.ghprbPullId}" : buildState.get('checkoutInfo').branch.replace("/", "__")
- return "gs://ci-artifacts.kibana.dev/default-build/${dir}/${buildState.get('checkoutInfo').commit}"
-}
-
-def buildKibana(maxWorkers = '') {
- notifyOnError {
- withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
- runbld("./test/scripts/jenkins_build_kibana.sh", "Build Kibana")
- }
-
- withGcpServiceAccount.fromVaultSecret('secret/kibana-issues/dev/ci-artifacts-key', 'value') {
- bash("""
- cd "${env.WORKSPACE}"
- gsutil -q -m cp 'kibana-default.tar.gz' '${getBuildArtifactBucket()}/'
- gsutil -q -m cp 'kibana-default-plugins.tar.gz' '${getBuildArtifactBucket()}/'
- """, "Upload Default Build artifacts to GCS")
- }
- }
-}
-
-def downloadDefaultBuildArtifacts() {
- withGcpServiceAccount.fromVaultSecret('secret/kibana-issues/dev/ci-artifacts-key', 'value') {
- bash("""
- cd "${env.WORKSPACE}"
- gsutil -q -m cp '${getBuildArtifactBucket()}/kibana-default.tar.gz' ./
- gsutil -q -m cp '${getBuildArtifactBucket()}/kibana-default-plugins.tar.gz' ./
- """, "Download Default Build artifacts from GCS")
- }
-}
-
-def runErrorReporter() {
- return runErrorReporter([pwd()])
-}
-
-def runErrorReporter(workspaces) {
- def status = buildUtils.getBuildStatus()
- def dryRun = status != "ABORTED" ? "" : "--no-github-update"
-
- def globs = workspaces.collect { "'${it}/target/junit/**/*.xml'" }.join(" ")
-
- bash(
- """
- source src/dev/ci_setup/setup_env.sh
- node scripts/report_failed_tests --no-index-errors ${dryRun} ${globs}
- """,
- "Report failed tests, if necessary"
- )
-}
-
-def call(Map params = [:], Closure closure) {
- def config = [timeoutMinutes: 135, checkPrChanges: false, setCommitStatus: false] + params
-
- stage("Kibana Pipeline") {
- timeout(time: config.timeoutMinutes, unit: 'MINUTES') {
- timestamps {
- ansiColor('xterm') {
- if (config.setCommitStatus) {
- buildState.set('shouldSetCommitStatus', true)
- }
- if (config.checkPrChanges && githubPr.isPr()) {
- pipelineLibraryTests()
-
- print "Checking PR for changes to determine if CI needs to be run..."
-
- if (prChanges.areChangesSkippable()) {
- print "No changes requiring CI found in PR, skipping."
- return
- }
- }
- try {
- closure()
- } finally {
- if (config.setCommitStatus) {
- githubCommitStatus.onFinish()
- }
- }
- }
- }
- }
- }
-}
-
-// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace
-// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh
-def withCiTaskQueue(Map options = [:], Closure closure) {
- def setupClosure = {
- // This can't use runbld, because it expects the source to be there, which isn't yet
- bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process")
- }
-
- def config = [parallel: 24, setup: setupClosure] + options
-
- withTaskQueue(config) {
- closure.call()
- }
-}
-
-def scriptTask(description, script) {
- return {
- withFunctionalTestEnv {
- notifyOnError {
- runbld(script, description)
- }
- }
- }
-}
-
-def scriptTaskDocker(description, script) {
- return {
- withDocker(scriptTask(description, script))
- }
-}
-
-def buildDocker() {
- sh(
- script: "./.ci/build_docker.sh",
- label: 'Build CI Docker image'
- )
-}
-
-def withDocker(Closure closure) {
- docker
- .image('kibana-ci')
- .inside(
- "-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4",
- closure
- )
-}
-
-def buildPlugins() {
- runbld('./test/scripts/jenkins_build_plugins.sh', 'Build OSS Plugins')
-}
-
-def withTasks(Map params = [:], Closure closure) {
- catchErrors {
- def config = [setupWork: {}, worker: [:], parallel: 24] + params
- def workerConfig = [name: 'ci-worker', size: 'xxl', ramDisk: true] + config.worker
-
- workers.ci(workerConfig) {
- withCiTaskQueue([parallel: config.parallel]) {
- parallel([
- docker: {
- retry(2) {
- buildDocker()
- }
- },
-
- // There are integration tests etc that require the plugins to be built first, so let's go ahead and build them before set up the parallel workspaces
- plugins: { buildPlugins() },
- ])
-
- config.setupWork()
-
- catchErrors {
- closure()
- }
- }
- }
- }
-}
-
-def allCiTasks() {
- parallel([
- general: {
- withTasks {
- tasks.check()
- tasks.lint()
- tasks.test()
- task {
- buildKibana(16)
- tasks.functionalOss()
- tasks.functionalXpack()
- }
- tasks.storybooksCi()
- }
- },
- jest: {
- workers.ci(name: 'jest', size: 'n2-standard-16', ramDisk: false) {
- catchErrors {
- scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh')()
- }
-
- catchErrors {
- runbld.junit()
- }
- }
- },
- ])
-}
-
-def pipelineLibraryTests() {
- return
- whenChanged(['vars/', '.ci/pipeline-library/']) {
- workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
- dir('.ci/pipeline-library') {
- sh './gradlew test'
- }
- }
- }
-}
-
-return this
diff --git a/vars/prChanges.groovy b/vars/prChanges.groovy
deleted file mode 100644
index a8a81cade844c..0000000000000
--- a/vars/prChanges.groovy
+++ /dev/null
@@ -1,82 +0,0 @@
-import groovy.transform.Field
-
-public static @Field PR_CHANGES_CACHE = []
-
-// if all the changed files in a PR match one of these regular
-// expressions then CI will be skipped for that PR
-def getSkippablePaths() {
- return [
- /^docs\//,
- /^rfcs\//,
- /^.ci\/.+\.yml$/,
- /^.ci\/es-snapshots\//,
- /^.ci\/pipeline-library\//,
- /^.ci\/Jenkinsfile_[^\/]+$/,
- /^\.github\//,
- /\.md$/,
- /^\.backportrc\.json$/,
- /^\.buildkite\//,
- ]
-}
-
-// exclusion regular expressions that will invalidate paths that
-// match one of the skippable path regular expressions
-def getNotSkippablePaths() {
- return [
- // this file is auto-generated and changes to it need to be validated with CI
- /^docs\/developer\/plugin-list.asciidoc$/,
- // don't skip CI on prs with changes to plugin readme files (?i) is for case-insensitive matching
- /(?i)\/plugins\/[^\/]+\/readme\.(md|asciidoc)$/,
- ]
-}
-
-def areChangesSkippable() {
- if (!githubPr.isPr()) {
- return false
- }
-
- try {
- def skippablePaths = getSkippablePaths()
- def notSkippablePaths = getNotSkippablePaths()
- def files = getChangedFiles()
-
- // 3000 is the max files GH API will return
- if (files.size() >= 3000) {
- return false
- }
-
- files = files.findAll { file ->
- def skippable = skippablePaths.find { regex -> file =~ regex} && !notSkippablePaths.find { regex -> file =~ regex }
- return !skippable
- }
-
- return files.size() < 1
- } catch (ex) {
- buildUtils.printStacktrace(ex)
- print "Error while checking to see if CI is skippable based on changes. Will run CI."
- return false
- }
-}
-
-def getChanges() {
- if (!PR_CHANGES_CACHE && env.ghprbPullId) {
- withGithubCredentials {
- def changes = githubPrs.getChanges(env.ghprbPullId)
- if (changes) {
- PR_CHANGES_CACHE.addAll(changes)
- }
- }
- }
-
- return PR_CHANGES_CACHE
-}
-
-def getChangedFiles() {
- def changes = getChanges()
- def changedFiles = changes.collect { it.filename }
- def renamedFiles = changes.collect { it.previousFilename }.findAll { it }
-
- return changedFiles + renamedFiles
-}
-
-return this
diff --git a/vars/retryWithDelay.groovy b/vars/retryWithDelay.groovy
deleted file mode 100644
index 83fd94c6f2b1e..0000000000000
--- a/vars/retryWithDelay.groovy
+++ /dev/null
@@ -1,18 +0,0 @@
-def call(retryTimes, delaySecs, closure) {
- retry(retryTimes) {
- try {
- closure()
- } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException ex) {
- throw ex // Immediately re-throw build abort exceptions, don't sleep first
- } catch (Exception ex) {
- sleep delaySecs
- throw ex
- }
- }
-}
-
-def call(retryTimes, Closure closure) {
- call(retryTimes, 15, closure)
-}
-
-return this
diff --git a/vars/retryable.groovy b/vars/retryable.groovy
deleted file mode 100644
index bfd021ddd8167..0000000000000
--- a/vars/retryable.groovy
+++ /dev/null
@@ -1,78 +0,0 @@
-import groovy.transform.Field
-
-public static @Field GLOBAL_RETRIES_ENABLED = false
-public static @Field MAX_GLOBAL_RETRIES = 1
-public static @Field CURRENT_GLOBAL_RETRIES = 0
-public static @Field FLAKY_FAILURES = []
-
-def setMax(max) {
- retryable.MAX_GLOBAL_RETRIES = max
-}
-
-def enable() {
- retryable.GLOBAL_RETRIES_ENABLED = true
-}
-
-def enable(max) {
- enable()
- setMax(max)
-}
-
-def haveReachedMaxRetries() {
- return retryable.CURRENT_GLOBAL_RETRIES >= retryable.MAX_GLOBAL_RETRIES
-}
-
-def getFlakyFailures() {
- return retryable.FLAKY_FAILURES
-}
-
-def printFlakyFailures() {
- catchErrors {
- def failures = getFlakyFailures()
-
- if (failures && failures.size() > 0) {
- print "This build had the following flaky failures:"
- failures.each {
- print "\n${it.label}"
- buildUtils.printStacktrace(it.exception)
- }
- }
- }
-}
-
-def call(label, Closure closure) {
- if (!retryable.GLOBAL_RETRIES_ENABLED) {
- closure()
- return
- }
-
- try {
- closure()
- } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException ex) {
- // If the build was aborted, don't retry the step
- throw ex
- } catch (Exception ex) {
- if (haveReachedMaxRetries()) {
- print "Couldn't retry '${label}', have already reached the max number of retries for this build."
- throw ex
- }
-
- retryable.CURRENT_GLOBAL_RETRIES++
- buildUtils.printStacktrace(ex)
- unstable "${label} failed but is retryable, trying a second time..."
-
- def JOB = env.JOB ? "${env.JOB}-retry" : ""
- withEnv([
- "JOB=${JOB}",
- ]) {
- closure()
- }
-
- retryable.FLAKY_FAILURES << [
- label: label,
- exception: ex,
- ]
-
- unstable "${label} failed on the first attempt, but succeeded on the second. Marking it as flaky."
- }
-}
diff --git a/vars/runbld.groovy b/vars/runbld.groovy
deleted file mode 100644
index 80416d4fa9a41..0000000000000
--- a/vars/runbld.groovy
+++ /dev/null
@@ -1,17 +0,0 @@
-def call(script, label, enableJunitProcessing = false) {
- // def extraConfig = enableJunitProcessing ? "" : "--config ${env.WORKSPACE}/kibana/.ci/runbld_no_junit.yml"
-
- sh(
- script: "bash ${script}",
- label: label ?: script
- )
-}
-
-def junit() {
- sh(
- script: "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh",
- label: "Process JUnit reports with runbld"
- )
-}
-
-return this
diff --git a/vars/slackNotifications.groovy b/vars/slackNotifications.groovy
deleted file mode 100644
index 02aad14d8ba3f..0000000000000
--- a/vars/slackNotifications.groovy
+++ /dev/null
@@ -1,228 +0,0 @@
-def getFailedBuildBlocks() {
- def messages = [
- getFailedSteps(),
- getTestFailures(),
- ]
-
- return messages
- .findAll { !!it } // No blank strings
- .collect { markdownBlock(it) }
-}
-
-def dividerBlock() {
- return [ type: "divider" ]
-}
-
-// If a message is longer than the limit, split it up by '\n' into parts, and return as many parts as will fit within the limit
-def shortenMessage(message, sizeLimit = 3000) {
- if (message.size() <= sizeLimit) {
- return message
- }
-
- def truncatedMessage = "[...truncated...]"
-
- def parts = message.split("\n")
- message = ""
-
- for(def part in parts) {
- if ((message.size() + part.size() + truncatedMessage.size() + 1) > sizeLimit) {
- break;
- }
- message += part+"\n"
- }
-
- message += truncatedMessage
-
- return message.size() <= sizeLimit ? message : truncatedMessage
-}
-
-def markdownBlock(message) {
- return [
- type: "section",
- text: [
- type: "mrkdwn",
- text: shortenMessage(message, 3000), // 3000 is max text length for `section`s only
- ],
- ]
-}
-
-def contextBlock(message) {
- return [
- type: "context",
- elements: [
- [
- type: 'mrkdwn',
- text: message, // Not sure what the size limit is here, I tried 10000s of characters and it still worked
- ]
- ]
- ]
-}
-
-def getFailedSteps() {
- try {
- def steps = jenkinsApi.getFailedSteps()?.findAll { step ->
- step.displayName != 'Check out from version control'
- }
-
- if (steps?.size() > 0) {
- def list = steps.collect { "• <${it.logs}|${it.displayName}>" }.join("\n")
- return "*Failed Steps*\n${list}"
- }
- } catch (ex) {
- buildUtils.printStacktrace(ex)
- print "Error retrieving failed pipeline steps for PR comment, will skip this section"
- }
-
- return ""
-}
-
-def getTestFailures() {
- def failures = testUtils.getFailures()
- if (!failures) {
- return ""
- }
-
- def messages = []
- messages << "*Test Failures*"
-
- def list = failures.take(10).collect {
- def name = it
- .fullDisplayName
- .split(/\./, 2)[-1]
- // Only the following three characters need to be escaped for link text, per Slack's docs
- .replaceAll('&', '&amp;')
- .replaceAll('<', '&lt;')
- .replaceAll('>', '&gt;')
-
- return "• <${it.url}|${name}>"
- }.join("\n")
-
- def moreText = failures.size() > 10 ? "\n• ...and ${failures.size()-10} more" : ""
- return "*Test Failures*\n${list}${moreText}"
-}
-
-def getDefaultDisplayName() {
- return "${env.JOB_NAME} ${env.BUILD_DISPLAY_NAME}"
-}
-
-def getDefaultContext(config = [:]) {
- def progressMessage = ""
- if (config && !config.isFinal) {
- progressMessage = "In-progress"
- } else {
- def duration = currentBuild.durationString.replace(' and counting', '')
- progressMessage = "${buildUtils.getBuildStatus().toLowerCase().capitalize()} after ${duration}"
- }
-
- return contextBlock([
- progressMessage,
- "",
- ].join(' Ā· '))
-}
-
-def getStatusIcon(config = [:]) {
- if (config && !config.isFinal) {
- return ':hourglass_flowing_sand:'
- }
-
- def status = buildUtils.getBuildStatus()
- if (status == 'UNSTABLE') {
- return ':yellow_heart:'
- }
-
- return ':broken_heart:'
-}
-
-def getBackupMessage(config) {
- return "${getStatusIcon(config)} ${config.title}\n\nFirst attempt at sending this notification failed. Please check the build."
-}
-
-def sendFailedBuild(Map params = [:]) {
- def config = [
- channel: '#kibana-operations-alerts',
- title: "*<${env.BUILD_URL}|${getDefaultDisplayName()}>*",
- message: getDefaultDisplayName(),
- color: 'danger',
- icon: ':jenkins:',
- username: 'Kibana Operations',
- isFinal: false,
- ] + params
-
- config.context = config.context ?: getDefaultContext(config)
-
- def title = "${getStatusIcon(config)} ${config.title}"
- def message = "${getStatusIcon(config)} ${config.message}"
-
- def blocks = [markdownBlock(title)]
- getFailedBuildBlocks().each { blocks << it }
- blocks << dividerBlock()
- blocks << config.context
-
- def channel = config.channel
- def timestamp = null
-
- def previousResp = buildState.get('SLACK_NOTIFICATION_RESPONSE')
- if (previousResp) {
- // When using `timestamp` to update a previous message, you have to use the channel ID from the previous response
- channel = previousResp.channelId
- timestamp = previousResp.ts
- }
-
- def resp = slackSend(
- channel: channel,
- timestamp: timestamp,
- username: config.username,
- iconEmoji: config.icon,
- color: config.color,
- message: message,
- blocks: blocks
- )
-
- if (!resp) {
- resp = slackSend(
- channel: config.channel,
- username: config.username,
- iconEmoji: config.icon,
- color: config.color,
- message: message,
- blocks: [markdownBlock(getBackupMessage(config))]
- )
- }
-
- if (resp) {
- buildState.set('SLACK_NOTIFICATION_RESPONSE', resp)
- }
-}
-
-def onFailure(Map options = [:]) {
- catchError {
- def status = buildUtils.getBuildStatus()
- if (status != "SUCCESS") {
- catchErrors {
- options.isFinal = true
- sendFailedBuild(options)
- }
- }
- }
-}
-
-def onFailure(Map options = [:], Closure closure) {
- if (options.disabled) {
- catchError {
- closure()
- }
-
- return
- }
-
- buildState.set('SLACK_NOTIFICATION_CONFIG', options)
-
- // try/finally will NOT work here, because the build status will not have been changed to ERROR when the finally{} block executes
- catchError {
- closure()
- }
-
- onFailure(options)
-}
-
-return this
diff --git a/vars/storybooks.groovy b/vars/storybooks.groovy
deleted file mode 100644
index f3c4a97a7d436..0000000000000
--- a/vars/storybooks.groovy
+++ /dev/null
@@ -1,83 +0,0 @@
-def getStorybooksBucket() {
- return "ci-artifacts.kibana.dev/storybooks"
-}
-
-def getDestinationDir() {
- return env.ghprbPullId ? "pr-${env.ghprbPullId}" : buildState.get('checkoutInfo').branch.replace("/", "__")
-}
-
-def getUrl() {
- return "https://${getStorybooksBucket()}/${getDestinationDir()}"
-}
-
-def getUrlLatest() {
- return "${getUrl()}/latest"
-}
-
-def getUrlForCommit() {
- return "${getUrl()}/${buildState.get('checkoutInfo').commit}"
-}
-
-def upload() {
- dir("built_assets/storybook") {
- sh "mv ci_composite composite"
-
- def storybooks = sh(
- script: 'ls -1d */',
- returnStdout: true
- ).trim()
- .split('\n')
- .collect { it.replace('/', '') }
- .findAll { it != 'composite' }
-
- def listHtml = storybooks.collect { """<li><a href="${getUrlForCommit()}/${it}">${it}</a></li>""" }.join("\n")
-
- def html = """
- <html>
- <body>
- <h1>Storybooks</h1>
- <p><a href="${getUrlForCommit()}/composite">Composite Storybook</a></p>
- <h2>All</h2>
- <ul>
- ${listHtml}
- </ul>
- </body>
- </html>
- """
-
- writeFile(file: 'index.html', text: html)
-
- withGcpServiceAccount.fromVaultSecret('secret/kibana-issues/dev/ci-artifacts-key', 'value') {
- kibanaPipeline.bash("""
- gsutil -q -m cp -r -z js,css,html,json,map,txt,svg '*' 'gs://${getStorybooksBucket()}/${getDestinationDir()}/${buildState.get('checkoutInfo').commit}/'
- gsutil -h "Cache-Control:no-cache, max-age=0, no-transform" cp -z html 'index.html' 'gs://${getStorybooksBucket()}/${getDestinationDir()}/latest/'
- """, "Upload Storybooks to GCS")
- }
-
- buildState.set('storybooksUrl', getUrlForCommit())
- }
-}
-
-def build() {
- withEnv(["STORYBOOK_BASE_URL=${getUrlForCommit()}"]) {
- kibanaPipeline.bash('test/scripts/jenkins_storybook.sh', 'Build Storybooks')
- }
-}
-
-def buildAndUpload() {
- def sha = buildState.get('checkoutInfo').commit
- def context = 'Build and Publish Storybooks'
-
- githubCommitStatus.create(sha, 'pending', 'Building Storybooks', context)
-
- try {
- build()
- upload()
- githubCommitStatus.create(sha, 'success', 'Storybooks built', context, getUrlForCommit())
- } catch(ex) {
- githubCommitStatus.create(sha, 'error', 'Building Storybooks failed', context)
- throw ex
- }
-}
-
-return this
diff --git a/vars/task.groovy b/vars/task.groovy
deleted file mode 100644
index 0c07b519b6fef..0000000000000
--- a/vars/task.groovy
+++ /dev/null
@@ -1,5 +0,0 @@
-def call(Closure closure) {
- withTaskQueue.addTask(closure)
-}
-
-return this
diff --git a/vars/tasks.groovy b/vars/tasks.groovy
deleted file mode 100644
index 9a1ea053e9c49..0000000000000
--- a/vars/tasks.groovy
+++ /dev/null
@@ -1,201 +0,0 @@
-def call(List closures) {
- withTaskQueue.addTasks(closures)
-}
-
-def check() {
- tasks([
- kibanaPipeline.scriptTask('Quick Commit Checks', 'test/scripts/checks/commit/commit.sh'),
- kibanaPipeline.scriptTask('Check Telemetry Schema', 'test/scripts/checks/telemetry.sh'),
- kibanaPipeline.scriptTask('Check TypeScript Projects', 'test/scripts/checks/ts_projects.sh'),
- kibanaPipeline.scriptTask('Check Jest Configs', 'test/scripts/checks/jest_configs.sh'),
- kibanaPipeline.scriptTask('Check @kbn/pm Distributable', 'test/scripts/checks/kbn_pm_dist.sh'),
- kibanaPipeline.scriptTask('Check Plugin List Docs', 'test/scripts/checks/plugin_list_docs.sh'),
- kibanaPipeline.scriptTask('Check Types and Public API Docs', 'test/scripts/checks/type_check_plugin_public_api_docs.sh'),
- kibanaPipeline.scriptTask('Check Bundle Limits', 'test/scripts/checks/bundle_limits.sh'),
- kibanaPipeline.scriptTask('Check i18n', 'test/scripts/checks/i18n.sh'),
- kibanaPipeline.scriptTask('Check File Casing', 'test/scripts/checks/file_casing.sh'),
- kibanaPipeline.scriptTask('Check Licenses', 'test/scripts/checks/licenses.sh'),
- kibanaPipeline.scriptTask('Check Plugins With Circular Dependencies', 'test/scripts/checks/plugins_with_circular_deps.sh'),
- kibanaPipeline.scriptTask('Verify NOTICE', 'test/scripts/checks/verify_notice.sh'),
- kibanaPipeline.scriptTask('Test Projects', 'test/scripts/checks/test_projects.sh'),
- kibanaPipeline.scriptTask('Test Hardening', 'test/scripts/checks/test_hardening.sh'),
- ])
-}
-
-def lint() {
- tasks([
- kibanaPipeline.scriptTask('Lint: eslint', 'test/scripts/lint/eslint.sh'),
- kibanaPipeline.scriptTask('Lint: stylelint', 'test/scripts/lint/stylelint.sh'),
- ])
-}
-
-def test() {
- tasks([
- // This task requires isolation because of hard-coded, conflicting ports and such, so let's use Docker here
- kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh'),
- kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'),
- ])
-}
-
-def ossCiGroups() {
- def ciGroups = 1..11
- tasks(ciGroups.collect { kibanaPipeline.ossCiGroupProcess(it, true) })
-}
-
-def xpackCiGroups() {
- def ciGroups = 1..13
- tasks(ciGroups.collect { kibanaPipeline.xpackCiGroupProcess(it, true) })
-}
-
-def xpackCiGroupDocker() {
- task {
- workers.ci(name: 'xpack-cigroups-docker', size: 'm', ramDisk: true) {
- kibanaPipeline.downloadDefaultBuildArtifacts()
- kibanaPipeline.bash("""
- cd '${env.WORKSPACE}'
- mkdir -p kibana-build
- tar -xzf kibana-default.tar.gz -C kibana-build --strip=1
- tar -xzf kibana-default-plugins.tar.gz -C kibana
- """, "Extract Default Build artifacts")
- kibanaPipeline.xpackCiGroupProcess('Docker', true)()
- }
- }
-}
-
-def functionalOss(Map params = [:]) {
- def config = params ?: [
- serverIntegration: true,
- ciGroups: true,
- firefox: true,
- accessibility: true,
- pluginFunctional: true,
- visualRegression: false,
- ]
-
- task {
- if (config.ciGroups) {
- ossCiGroups()
- }
-
- if (config.firefox) {
- task(kibanaPipeline.functionalTestProcess('oss-firefox', './test/scripts/jenkins_firefox_smoke.sh'))
- }
-
- if (config.accessibility) {
- task(kibanaPipeline.functionalTestProcess('oss-accessibility', './test/scripts/jenkins_accessibility.sh'))
- }
-
- if (config.pluginFunctional) {
- task(kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh'))
- }
-
- if (config.visualRegression) {
- task(kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh'))
- }
-
- if (config.serverIntegration) {
- task(kibanaPipeline.scriptTaskDocker('serverIntegration', './test/scripts/test/server_integration.sh'))
- }
- }
-}
-
-def functionalXpack(Map params = [:]) {
- def config = params ?: [
- ciGroups: true,
- firefox: true,
- accessibility: true,
- pluginFunctional: true,
- savedObjectsFieldMetrics:true,
- pageLoadMetrics: false,
- visualRegression: false,
- ]
-
- task {
- if (config.ciGroups) {
- xpackCiGroups()
- xpackCiGroupDocker()
- }
-
- if (config.firefox) {
- task(kibanaPipeline.functionalTestProcess('xpack-firefox', './test/scripts/jenkins_xpack_firefox_smoke.sh'))
- }
-
- if (config.accessibility) {
- task(kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'))
- }
-
- if (config.visualRegression) {
- task(kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'))
- }
-
- if (config.savedObjectsFieldMetrics) {
- task(kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'))
- }
-
- whenChanged([
- 'x-pack/plugins/security_solution/',
- 'x-pack/plugins/cases/',
- 'x-pack/plugins/timelines/',
- 'x-pack/plugins/lists/',
- 'x-pack/test/security_solution_cypress/',
- 'x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/',
- 'x-pack/plugins/triggers_actions_ui/public/application/context/actions_connectors_context.tsx',
- 'x-pack/plugins/triggers_actions_ui/public/application/sections/alerts_table/',
- ]) {
- if (githubPr.isPr()) {
- task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypressChrome', './test/scripts/jenkins_security_solution_cypress_chrome.sh'))
- // Temporarily disabled to figure out test flake
- // task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypressFirefox', './test/scripts/jenkins_security_solution_cypress_firefox.sh'))
- }
- }
-
- whenChanged([
- 'x-pack/plugins/apm/',
- ]) {
- if (githubPr.isPr()) {
- task(kibanaPipeline.functionalTestProcess('xpack-APMCypress', './test/scripts/jenkins_apm_cypress.sh'))
- }
- }
-
- whenChanged([
- 'x-pack/plugins/synthetics/',
- ]) {
- if (githubPr.isPr()) {
- task(kibanaPipeline.functionalTestProcess('xpack-UptimePlaywright', './test/scripts/jenkins_uptime_playwright.sh'))
- }
- }
-
- whenChanged([
- 'x-pack/plugins/ux/',
- ]) {
- if (githubPr.isPr()) {
- task(kibanaPipeline.functionalTestProcess('xpack-uxPluginSynthetics', './test/scripts/jenkins_ux_synthetics.sh'))
- }
- }
-
- whenChanged([
- 'x-pack/plugins/fleet/',
- ]) {
- if (githubPr.isPr()) {
- task(kibanaPipeline.functionalTestProcess('xpack-FleetCypress', './test/scripts/jenkins_fleet_cypress.sh'))
- }
- }
-
- whenChanged([
- 'x-pack/plugins/osquery/',
- ]) {
- if (githubPr.isPr()) {
- task(kibanaPipeline.functionalTestProcess('xpack-osqueryCypress', './test/scripts/jenkins_osquery_cypress.sh'))
- }
- }
-
- }
-}
-
-def storybooksCi() {
- task {
- storybooks.buildAndUpload()
- }
-}
-
-return this
diff --git a/vars/whenChanged.groovy b/vars/whenChanged.groovy
deleted file mode 100644
index c58ec83f2b051..0000000000000
--- a/vars/whenChanged.groovy
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- whenChanged('some/path') { yourCode() } can be used to execute pipeline code in PRs only when changes are detected on paths that you specify.
- The specified code blocks will also always be executed during the non-PR jobs for tracked branches.
-
- You have the option of passing in path prefixes, or regexes. Single or multiple.
- Path specifications are NOT globby, they are only prefixes.
- Specifying multiple will treat them as ORs.
-
- Example Usages:
- whenChanged('a/path/prefix/') { someCode() }
- whenChanged(startsWith: 'a/path/prefix/') { someCode() } // Same as above
- whenChanged(['prefix1/', 'prefix2/']) { someCode() }
- whenChanged(regex: /\.test\.js$/) { someCode() }
- whenChanged(regex: [/abc/, /xyz/]) { someCode() }
-*/
-
-def call(String startsWithString, Closure closure) {
- return whenChanged([ startsWith: startsWithString ], closure)
-}
-
-def call(List startsWithStrings, Closure closure) {
- return whenChanged([ startsWith: startsWithStrings ], closure)
-}
-
-def call(Map params, Closure closure) {
- if (!githubPr.isPr()) {
- return closure()
- }
-
- def files = prChanges.getChangedFiles()
- def hasMatch = false
-
- if (params.regex) {
- params.regex = [] + params.regex
- print "Checking PR for changes that match: ${params.regex.join(', ')}"
- hasMatch = !!files.find { file ->
- params.regex.find { regex -> file =~ regex }
- }
- }
-
- if (!hasMatch && params.startsWith) {
- params.startsWith = [] + params.startsWith
- print "Checking PR for changes that start with: ${params.startsWith.join(', ')}"
- hasMatch = !!files.find { file ->
- params.startsWith.find { str -> file.startsWith(str) }
- }
- }
-
- if (hasMatch) {
- print "Changes found, executing pipeline."
- closure()
- } else {
- print "No changes found, skipping."
- }
-}
-
-return this
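
The whenChanged helper deleted above gates PR-only pipeline work: outside a PR the closure always runs, and inside a PR it runs only when at least one changed file matches a supplied path prefix or regex, with multiple patterns treated as ORs. A minimal TypeScript sketch of that matching rule, using invented names purely for illustration:

interface WhenChangedOptions {
  startsWith?: string[]; // path prefixes, not globs
  regex?: RegExp[];      // matched anywhere in the file path
}

// True when at least one changed file matches a prefix or a regex (ORed, like the Groovy helper).
function shouldRun(changedFiles: string[], options: WhenChangedOptions): boolean {
  const byRegex = (options.regex ?? []).some((re) => changedFiles.some((file) => re.test(file)));
  const byPrefix = (options.startsWith ?? []).some((prefix) =>
    changedFiles.some((file) => file.startsWith(prefix))
  );
  return byRegex || byPrefix;
}

// e.g. only run Security Solution Cypress when the relevant plugins changed
const run = shouldRun(['x-pack/plugins/cases/server/routes.ts'], {
  startsWith: ['x-pack/plugins/security_solution/', 'x-pack/plugins/cases/'],
});
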
diff --git a/vars/withGithubCredentials.groovy b/vars/withGithubCredentials.groovy
deleted file mode 100644
index 224e49af1bd6f..0000000000000
--- a/vars/withGithubCredentials.groovy
+++ /dev/null
@@ -1,9 +0,0 @@
-def call(closure) {
- withCredentials([
- string(credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7', variable: 'GITHUB_TOKEN'),
- ]) {
- closure()
- }
-}
-
-return this
diff --git a/vars/withTaskQueue.groovy b/vars/withTaskQueue.groovy
deleted file mode 100644
index 8132d6264744f..0000000000000
--- a/vars/withTaskQueue.groovy
+++ /dev/null
@@ -1,154 +0,0 @@
-import groovy.transform.Field
-
-public static @Field TASK_QUEUES = [:]
-public static @Field TASK_QUEUES_COUNTER = 0
-
-/**
- withTaskQueue creates a queue of "tasks" (just plain closures to execute), and executes them with your desired level of concurrency.
- This way, you can define, for example, 40 things that need to execute, then only allow 10 of them to execute at once.
-
- Each "process" will execute in a separate, unique, empty directory.
- If you want each process to have a bootstrapped kibana repo, check out kibanaPipeline.withCiTaskQueue
-
- Using the queue currently requires an agent/worker.
-
- Usage:
-
- withTaskQueue(parallel: 10) {
- task { print "This is a task" }
-
- // This is the same as calling task() multiple times
- tasks([ { print "Another task" }, { print "And another task" } ])
-
- // Tasks can queue up subsequent tasks
- task {
- buildThing()
- task { print "I depend on buildThing()" }
- }
- }
-
- You can also define a setup task that each process should execute one time before executing tasks:
- withTaskQueue(parallel: 10, setup: { sh "my-setup-script.sh" }) {
- ...
- }
-
-*/
-def call(Map options = [:], Closure closure) {
- def config = [ parallel: 10 ] + options
- def counter = ++TASK_QUEUES_COUNTER
-
- // We're basically abusing withEnv() to create a "scope" for all steps inside of a withTaskQueue block
- // This way, we could have multiple task queue instances in the same pipeline
- withEnv(["TASK_QUEUE_ID=${counter}"]) {
- withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID] = [
- tasks: [],
- tmpFile: sh(script: 'mktemp', returnStdout: true).trim()
- ]
-
- closure.call()
-
- def processesExecuting = 0
- def processes = [:]
- def iterationId = 0
-
- for(def i = 1; i <= config.parallel; i++) {
- def j = i
- processes["task-queue-process-${j}"] = {
- catchErrors {
- withEnv([
- "TASK_QUEUE_PROCESS_ID=${j}",
- "TASK_QUEUE_ITERATION_ID=${++iterationId}"
- ]) {
- dir("${WORKSPACE}/parallel/${j}/kibana") {
- if (config.setup) {
- config.setup.call(j)
- }
-
- def isDone = false
- while(!isDone) { // TODO some kind of timeout?
- catchErrors {
- if (!getTasks().isEmpty()) {
- processesExecuting++
- catchErrors {
- def task
- try {
- task = getTasks().pop()
- } catch (java.util.NoSuchElementException ex) {
- return
- }
-
- task.call()
- }
- processesExecuting--
- // If a task finishes, and no new tasks were queued up, and nothing else is executing
- // Then all of the processes should wake up and exit
- if (processesExecuting < 1 && getTasks().isEmpty()) {
- taskNotify()
- }
- return
- }
-
- if (processesExecuting > 0) {
- taskSleep()
- return
- }
-
- // Queue is empty, no processes are executing
- isDone = true
- }
- }
- }
- }
- }
- }
- }
- parallel(processes)
- }
-}
-
-// If we sleep in a loop using Groovy code, Pipeline Steps is flooded with Sleep steps
-// So, instead, we just watch a file and `touch` it whenever something happens that could modify the queue
-// There's a 20 minute timeout just in case something goes wrong,
-// in which case this method will get called again if the process is actually supposed to be waiting.
-def taskSleep() {
- sh(script: """#!/bin/bash
- TIMESTAMP=\$(date '+%s' -d "0 seconds ago")
- for (( i=1; i<=240; i++ ))
- do
- if [ "\$(stat -c %Y '${getTmpFile()}')" -ge "\$TIMESTAMP" ]
- then
- break
- else
- sleep 5
- if [[ \$i == 240 ]]; then
- echo "Waited for new tasks for 20 minutes, exiting in case something went wrong"
- fi
- fi
- done
- """, label: "Waiting for new tasks...")
-}
-
-// Used to let the task queue processes know that either a new task has been queued up, or work is complete
-def taskNotify() {
- sh "touch '${getTmpFile()}'"
-}
-
-def getTasks() {
- return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tasks
-}
-
-def getTmpFile() {
- return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tmpFile
-}
-
-def addTask(Closure closure) {
- getTasks() << closure
- taskNotify()
-}
-
-def addTasks(List closures) {
- closures.reverse().each {
- getTasks() << it
- }
- taskNotify()
-}
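
withTaskQueue, removed above, implements bounded concurrency for Jenkins: a fixed number of processes pop closures from a shared queue, tasks may enqueue further tasks while they run, and idle processes block on a touched temp file instead of sleeping in a Groovy loop. A rough TypeScript sketch of the same bounded-concurrency idea, with async workers standing in for Jenkins processes (the names here are assumptions, not part of the pipeline library):

type Task = () => Promise<void>;

// Drain `queue` with at most `parallel` workers running at once.
// Tasks can push follow-up tasks onto the queue while they execute.
async function runTaskQueue(parallel: number, queue: Task[]): Promise<void> {
  const worker = async (): Promise<void> => {
    while (queue.length > 0) {
      const task = queue.shift();
      if (task) {
        await task();
      }
    }
  };
  // Simplification: a worker that has already drained the queue and exited will not pick up
  // tasks queued later, whereas the Jenkins helper keeps every process waiting until all work is idle.
  await Promise.all(Array.from({ length: parallel }, () => worker()));
}

// Example: three workers, five tasks, one of which queues a follow-up task.
const queue: Task[] = Array.from({ length: 5 }, (_, i) => async () => {
  console.log(`task ${i}`);
  if (i === 0) {
    queue.push(async () => console.log('follow-up to task 0'));
  }
});
void runTaskQueue(3, queue);
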
diff --git a/vars/workers.groovy b/vars/workers.groovy
deleted file mode 100644
index ea67ce415738f..0000000000000
--- a/vars/workers.groovy
+++ /dev/null
@@ -1,206 +0,0 @@
-// "Workers" in this file will spin up an instance, do some setup etc depending on the configuration, and then execute some work that you define
-// e.g. workers.base(name: 'my-worker') { sh "echo 'ready to execute some kibana scripts'" }
-
-def label(size) {
- switch(size) {
- case 'flyweight':
- return 'flyweight'
- case 's':
- return 'docker && linux && immutable'
- case 's-highmem':
- return 'docker && tests-s'
- case 'm':
- return 'docker && linux && immutable && gobld/machineType:n2-standard-8'
- case 'm-highmem':
- return 'docker && linux && immutable && gobld/machineType:n1-highmem-8'
- case 'l':
- return 'docker && tests-l'
- case 'xl':
- return 'docker && tests-xl'
- case 'xl-highmem':
- return 'docker && tests-xl-highmem'
- case 'xxl':
- return 'docker && tests-xxl && gobld/machineType:custom-64-327680'
- case 'n2-standard-16':
- return 'docker && linux && immutable && gobld/machineType:n2-standard-16'
- }
-
- error "unknown size '${size}'"
-}
-
-/*
- The base worker that all of the others use. Will clone the scm (assumed to be kibana), and run kibana bootstrap processes by default.
-
- Parameters:
- size - size of worker label to use, e.g. 's' or 'xl'
- ramDisk - Should the workspace be mounted in memory? Default: true
- bootstrapped - If true, download kibana dependencies, run kbn bootstrap, etc. Default: true
- name - Name of the worker for display purposes, filenames, etc.
- scm - Jenkins scm configuration for checking out code. Use `null` to disable checkout. Default: inherited from job
-*/
-def base(Map params, Closure closure) {
- def config = [size: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params
- if (!config.size) {
- error "You must specify an agent size, such as 'xl' or 's', when using workers.base()"
- }
-
- node(label(config.size)) {
- agentInfo.print()
-
- if (config.ramDisk) {
- // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
- def originalWorkspace = env.WORKSPACE
- ws('/tmp/workspace') {
- sh(
- script: """
- mkdir -p /dev/shm/workspace
- mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
- rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
- ln -s /dev/shm/workspace '${originalWorkspace}'
- """,
- label: "Move workspace to RAM - /dev/shm/workspace"
- )
- }
- }
-
- sh(
- script: "mkdir -p ${env.WORKSPACE}/tmp",
- label: "Create custom temp directory"
- )
-
- def checkoutInfo = [:]
-
- if (config.scm) {
- // Try to clone from Github up to 8 times, waiting 15 secs between attempts
- retryWithDelay(8, 15) {
- kibanaCheckout()
- }
-
- dir("kibana") {
- checkoutInfo = getCheckoutInfo()
-
- if (!buildState.has('checkoutInfo')) {
- buildState.set('checkoutInfo', checkoutInfo)
-
- if (buildState.get('shouldSetCommitStatus')) {
- githubCommitStatus.onStart()
- }
- }
- }
-
- ciStats.reportGitInfo(
- checkoutInfo.branch,
- checkoutInfo.commit,
- checkoutInfo.targetBranch,
- checkoutInfo.mergeBase
- )
- }
-
- withEnv([
- "CI=true",
- "HOME=${env.JENKINS_HOME}",
- "PR_NUMBER=${env.ghprbPullId ?: ''}",
- "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
- "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
- "PR_MERGE_BASE=${checkoutInfo.mergeBase ?: ''}",
- "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
- "TEST_BROWSER_HEADLESS=1",
- "GIT_COMMIT=${checkoutInfo.commit}",
- "GIT_BRANCH=${checkoutInfo.branch}",
- "TMPDIR=${env.WORKSPACE}/tmp", // For Chrome and anything else that respects it
- ]) {
- withCredentials([
- string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
- string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
- string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
- ]) {
- // scm is configured to check out to the ./kibana directory
- dir('kibana') {
- if (config.bootstrapped) {
- kibanaPipeline.doSetup()
- }
-
- closure()
- }
- }
- }
- }
-}
-
-// Worker for ci processes. Extends the base worker and adds GCS artifact upload, error reporting, junit processing
-def ci(Map params, Closure closure) {
- def config = [ramDisk: true, bootstrapped: true, runErrorReporter: true] + params
-
- return base(config) {
- kibanaPipeline.withGcsArtifactUpload(config.name) {
- kibanaPipeline.withPostBuildReporting(config) {
- closure()
- }
- }
- }
-}
-
-// Worker for running the current intake jobs. Just runs a single script after bootstrap.
-def intake(jobName, String script) {
- return {
- ci(name: jobName, size: 'm-highmem', ramDisk: true) {
- withEnv(["JOB=${jobName}"]) {
- kibanaPipeline.notifyOnError {
- runbld(script, "Execute ${jobName}")
- }
- }
- }
- }
-}
-
-// Worker for running functional tests. Runs a setup process (e.g. the kibana build) then executes a map of closures in parallel (e.g. one for each ciGroup)
-def functional(name, Closure setup, Map processes) {
- return {
- parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, size: 'xl')
- }
-}
-
-/*
- Creates a ci worker that can run a setup process, followed by a group of processes in parallel.
-
- Parameters:
- name: Name of the worker for display purposes, filenames, etc.
- setup: Closure to execute after the agent is bootstrapped, before starting the parallel work
- processes: Map of closures that will execute in parallel after setup. Each closure is passed a unique number.
- delayBetweenProcesses: Number of seconds to wait between starting the parallel processes. Useful to spread the load of heavy init processes, e.g. Elasticsearch starting up. Default: 0
- size: size of worker label to use, e.g. 's' or 'xl'
-*/
-def parallelProcesses(Map params) {
- def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, size: 'xl'] + params
-
- ci(size: config.size, name: config.name) {
- config.setup()
-
- def nextProcessNumber = 1
- def process = { processName, processClosure ->
- def processNumber = nextProcessNumber
- nextProcessNumber++
-
- return {
- if (config.delayBetweenProcesses && config.delayBetweenProcesses > 0) {
- // This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
- def delay = (processNumber-1)*config.delayBetweenProcesses
- sleep(delay)
- }
-
- withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}"]) {
- processClosure()
- }
- }
- }
-
- def processes = [:]
- config.processes.each { processName, processClosure ->
- processes[processName] = process(processName, processClosure)
- }
-
- parallel(processes)
- }
-}
-
-return this
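
workers.parallelProcesses, also removed here, runs a setup step and then a map of closures in parallel, starting process N after (N - 1) * delayBetweenProcesses seconds so that many Elasticsearch and Kibana instances do not boot at exactly the same time. A small TypeScript sketch of that staggered fan-out (function and parameter names are illustrative only):

type Process = () => Promise<void>;

const sleep = (seconds: number): Promise<void> =>
  new Promise<void>((resolve) => setTimeout(resolve, seconds * 1000));

// Start each named process with an increasing delay, then wait for all of them to finish.
async function parallelProcesses(
  processes: Record<string, Process>,
  delayBetweenProcesses = 0
): Promise<void> {
  await Promise.all(
    Object.entries(processes).map(async ([name, run], index) => {
      if (delayBetweenProcesses > 0) {
        await sleep(index * delayBetweenProcesses); // smooths CPU load from heavy startups
      }
      console.log(`starting ${name}`);
      await run();
    })
  );
}

// Example: two functional test groups, started 20 seconds apart.
void parallelProcesses(
  {
    ciGroup1: async () => console.log('ciGroup1 done'),
    ciGroup2: async () => console.log('ciGroup2 done'),
  },
  20
);
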
diff --git a/versions.json b/versions.json
index ce91f8f76bb7e..8406528bb4428 100644
--- a/versions.json
+++ b/versions.json
@@ -13,10 +13,16 @@
"currentMajor": true,
"previousMinor": true
},
+ {
+ "version": "8.11.4",
+ "branch": "8.11",
+ "currentMajor": true,
+ "previousMinor": true
+ },
{
"version": "7.17.17",
"branch": "7.17",
"previousMajor": true
}
]
-}
+}
\ No newline at end of file
diff --git a/x-pack/examples/gen_ai_streaming_response_example/server/plugin.ts b/x-pack/examples/gen_ai_streaming_response_example/server/plugin.ts
index 05f9612bcf73d..02be370d08e25 100644
--- a/x-pack/examples/gen_ai_streaming_response_example/server/plugin.ts
+++ b/x-pack/examples/gen_ai_streaming_response_example/server/plugin.ts
@@ -6,7 +6,7 @@
*/
import Boom from '@hapi/boom';
-import { CreateChatCompletionResponse } from 'openai';
+import type OpenAI from 'openai';
import { Readable } from 'stream';
import { Plugin, CoreSetup } from '@kbn/core/server';
import { schema } from '@kbn/config-schema';
@@ -112,7 +112,7 @@ export class GenAiStreamingResponseExamplePlugin implements Plugin {
}
return response.ok({
- body: executeResult.data as CreateChatCompletionResponse | Readable,
+ body: executeResult.data as OpenAI.ChatCompletion | Readable,
});
}
);
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.test.tsx
index beb2bd77d8512..cfdbcdc4a86b9 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.test.tsx
@@ -19,8 +19,8 @@ describe('AlertsSettings', () => {
it('updates the knowledgeBase settings when the switch is toggled', () => {
const knowledgeBase: KnowledgeBaseConfig = {
- alerts: false,
- assistantLangChain: false,
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
};
const setUpdatedKnowledgeBaseSettings = jest.fn();
@@ -36,8 +36,8 @@ describe('AlertsSettings', () => {
fireEvent.click(alertsSwitch);
expect(setUpdatedKnowledgeBaseSettings).toHaveBeenCalledWith({
- alerts: true,
- assistantLangChain: false,
+ isEnabledRAGAlerts: true,
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
});
});
@@ -45,8 +45,8 @@ describe('AlertsSettings', () => {
it('updates the knowledgeBase settings when the alerts range slider is changed', () => {
const setUpdatedKnowledgeBaseSettings = jest.fn();
const knowledgeBase: KnowledgeBaseConfig = {
- alerts: true,
- assistantLangChain: false,
+ isEnabledRAGAlerts: true,
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
};
@@ -61,17 +61,17 @@ describe('AlertsSettings', () => {
fireEvent.change(rangeSlider, { target: { value: '10' } });
expect(setUpdatedKnowledgeBaseSettings).toHaveBeenCalledWith({
- alerts: true,
- assistantLangChain: false,
+ isEnabledRAGAlerts: true,
+ isEnabledKnowledgeBase: false,
latestAlerts: 10,
});
});
- it('enables the alerts range slider when knowledgeBase.alerts is true', () => {
+ it('enables the alerts range slider when knowledgeBase.isEnabledRAGAlerts is true', () => {
const setUpdatedKnowledgeBaseSettings = jest.fn();
const knowledgeBase: KnowledgeBaseConfig = {
- alerts: true, // <-- true
- assistantLangChain: false,
+ isEnabledRAGAlerts: true, // <-- true
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
};
@@ -85,11 +85,11 @@ describe('AlertsSettings', () => {
expect(screen.getByTestId('alertsRange')).not.toBeDisabled();
});
- it('disables the alerts range slider when knowledgeBase.alerts is false', () => {
+ it('disables the alerts range slider when knowledgeBase.isEnabledRAGAlerts is false', () => {
const setUpdatedKnowledgeBaseSettings = jest.fn();
const knowledgeBase: KnowledgeBaseConfig = {
- alerts: false, // <-- false
- assistantLangChain: false,
+ isEnabledRAGAlerts: false, // <-- false
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
};
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.tsx b/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.tsx
index f23470bbbe7a7..6895d2f595e73 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/alerts/settings/alerts_settings.tsx
@@ -40,7 +40,7 @@ const AlertsSettingsComponent = ({ knowledgeBase, setUpdatedKnowledgeBaseSetting
(event: EuiSwitchEvent) => {
setUpdatedKnowledgeBaseSettings({
...knowledgeBase,
- alerts: event.target.checked,
+ isEnabledRAGAlerts: event.target.checked,
});
},
[knowledgeBase, setUpdatedKnowledgeBaseSettings]
@@ -58,7 +58,7 @@ const AlertsSettingsComponent = ({ knowledgeBase, setUpdatedKnowledgeBaseSetting
`}
>
-
-
- {i18n.ASK_QUESTIONS_ABOUT}
-
-
-
- {i18n.LATEST_AND_RISKIEST_OPEN_ALERTS}
+ {i18n.LATEST_AND_RISKIEST_OPEN_ALERTS(knowledgeBase.latestAlerts)}
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.test.tsx
index ebb5afe2f12a1..4c71c1e63f8b3 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.test.tsx
@@ -35,14 +35,13 @@ const messages: Message[] = [
{ content: 'This is a test', role: 'user', timestamp: new Date().toLocaleString() },
];
const fetchConnectorArgs: FetchConnectorExecuteAction = {
- alerts: false,
+ isEnabledRAGAlerts: false,
apiConfig,
- assistantLangChain: true,
+ isEnabledKnowledgeBase: true,
assistantStreamingEnabled: true,
http: mockHttp,
messages,
onNewReplacements: jest.fn(),
- ragOnAlerts: false,
};
describe('API tests', () => {
beforeEach(() => {
@@ -50,13 +49,13 @@ describe('API tests', () => {
});
describe('fetchConnectorExecuteAction', () => {
- it('calls the internal assistant API when assistantLangChain is true', async () => {
+ it('calls the internal assistant API when isEnabledKnowledgeBase is true', async () => {
await fetchConnectorExecuteAction(fetchConnectorArgs);
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
- body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"assistantLangChain":true}',
+ body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"isEnabledKnowledgeBase":true,"isEnabledRAGAlerts":false}',
headers: { 'Content-Type': 'application/json' },
method: 'POST',
signal: undefined,
@@ -64,10 +63,10 @@ describe('API tests', () => {
);
});
- it('calls the actions connector api with streaming when assistantStreamingEnabled is true when assistantLangChain is false', async () => {
+ it('calls the actions connector api with streaming when assistantStreamingEnabled is true when isEnabledKnowledgeBase is false', async () => {
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- assistantLangChain: false,
+ isEnabledKnowledgeBase: false,
};
await fetchConnectorExecuteAction(testProps);
@@ -75,7 +74,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
- body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeStream"},"assistantLangChain":false}',
+ body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeStream"},"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":false}',
method: 'POST',
asResponse: true,
rawResponse: true,
@@ -87,11 +86,10 @@ describe('API tests', () => {
it('calls the actions connector with the expected optional request parameters', async () => {
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- alerts: true,
+ isEnabledRAGAlerts: true,
alertsIndexPattern: '.alerts-security.alerts-default',
allow: ['a', 'b', 'c'],
allowReplacement: ['b', 'c'],
- ragOnAlerts: true,
replacements: { auuid: 'real.hostname' },
size: 30,
};
@@ -101,7 +99,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
- body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"assistantLangChain":true,"alertsIndexPattern":".alerts-security.alerts-default","allow":["a","b","c"],"allowReplacement":["b","c"],"replacements":{"auuid":"real.hostname"},"size":30}',
+ body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"isEnabledKnowledgeBase":true,"isEnabledRAGAlerts":true,"alertsIndexPattern":".alerts-security.alerts-default","allow":["a","b","c"],"allowReplacement":["b","c"],"replacements":{"auuid":"real.hostname"},"size":30}',
headers: {
'Content-Type': 'application/json',
},
@@ -111,10 +109,10 @@ describe('API tests', () => {
);
});
- it('calls the actions connector api with invoke when assistantStreamingEnabled is false when assistantLangChain is false', async () => {
+ it('calls the actions connector api with invoke when assistantStreamingEnabled is false when isEnabledKnowledgeBase is false', async () => {
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- assistantLangChain: false,
+ isEnabledKnowledgeBase: false,
assistantStreamingEnabled: false,
};
@@ -123,7 +121,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
- body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"assistantLangChain":false}',
+ body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":false}',
method: 'POST',
headers: {
'Content-Type': 'application/json',
@@ -133,11 +131,11 @@ describe('API tests', () => {
);
});
- it('calls the actions connector api with invoke when assistantStreamingEnabled is true when assistantLangChain is false and alerts is true', async () => {
+ it('calls the actions connector api with invoke when assistantStreamingEnabled is true when isEnabledKnowledgeBase is false and isEnabledRAGAlerts is true', async () => {
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- assistantLangChain: false,
- alerts: true,
+ isEnabledKnowledgeBase: false,
+ isEnabledRAGAlerts: true,
};
await fetchConnectorExecuteAction(testProps);
@@ -145,7 +143,7 @@ describe('API tests', () => {
expect(mockHttp.fetch).toHaveBeenCalledWith(
'/internal/elastic_assistant/actions/connector/foo/_execute',
{
- body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"assistantLangChain":false}',
+ body: '{"params":{"subActionParams":{"model":"gpt-4","messages":[{"role":"user","content":"This is a test"}],"n":1,"stop":null,"temperature":0.2},"subAction":"invokeAI"},"isEnabledKnowledgeBase":false,"isEnabledRAGAlerts":true}',
method: 'POST',
headers: {
'Content-Type': 'application/json',
@@ -170,7 +168,7 @@ describe('API tests', () => {
});
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- assistantLangChain: false,
+ isEnabledKnowledgeBase: false,
assistantStreamingEnabled: false,
};
@@ -188,7 +186,7 @@ describe('API tests', () => {
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- assistantLangChain: false,
+ isEnabledKnowledgeBase: false,
};
const result = await fetchConnectorExecuteAction(testProps);
@@ -207,7 +205,7 @@ describe('API tests', () => {
});
const testProps: FetchConnectorExecuteAction = {
...fetchConnectorArgs,
- assistantLangChain: false,
+ isEnabledKnowledgeBase: false,
};
const result = await fetchConnectorExecuteAction(testProps);
@@ -227,7 +225,7 @@ describe('API tests', () => {
expect(result).toEqual({ response: API_ERROR, isStream: false, isError: true });
});
- it('returns the value of the action_input property when assistantLangChain is true, and `content` has properly prefixed and suffixed JSON with the action_input property', async () => {
+ it('returns the value of the action_input property when isEnabledKnowledgeBase is true, and `content` has properly prefixed and suffixed JSON with the action_input property', async () => {
const response = '```json\n{"action_input": "value from action_input"}\n```';
(mockHttp.fetch as jest.Mock).mockResolvedValue({
@@ -244,7 +242,7 @@ describe('API tests', () => {
});
});
- it('returns the original content when assistantLangChain is true, and `content` has properly formatted JSON WITHOUT the action_input property', async () => {
+ it('returns the original content when isEnabledKnowledgeBase is true, and `content` has properly formatted JSON WITHOUT the action_input property', async () => {
const response = '```json\n{"some_key": "some value"}\n```';
(mockHttp.fetch as jest.Mock).mockResolvedValue({
@@ -257,7 +255,7 @@ describe('API tests', () => {
expect(result).toEqual({ response, isStream: false, isError: false });
});
- it('returns the original when assistantLangChain is true, and `content` is not JSON', async () => {
+ it('returns the original when isEnabledKnowledgeBase is true, and `content` is not JSON', async () => {
const response = 'plain text content';
(mockHttp.fetch as jest.Mock).mockResolvedValue({
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.tsx
index c2bdd4806a99a..f04b99c4e46e1 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/api.tsx
@@ -19,17 +19,16 @@ import {
import { PerformEvaluationParams } from './settings/evaluation_settings/use_perform_evaluation';
export interface FetchConnectorExecuteAction {
- alerts: boolean;
+ isEnabledRAGAlerts: boolean;
alertsIndexPattern?: string;
allow?: string[];
allowReplacement?: string[];
- assistantLangChain: boolean;
+ isEnabledKnowledgeBase: boolean;
assistantStreamingEnabled: boolean;
apiConfig: Conversation['apiConfig'];
http: HttpSetup;
messages: Message[];
onNewReplacements: (newReplacements: Record) => void;
- ragOnAlerts: boolean;
replacements?: Record;
signal?: AbortSignal | undefined;
size?: number;
@@ -46,16 +45,15 @@ export interface FetchConnectorExecuteResponse {
}
export const fetchConnectorExecuteAction = async ({
- alerts,
+ isEnabledRAGAlerts,
alertsIndexPattern,
allow,
allowReplacement,
- assistantLangChain,
+ isEnabledKnowledgeBase,
assistantStreamingEnabled,
http,
messages,
onNewReplacements,
- ragOnAlerts,
replacements,
apiConfig,
signal,
@@ -84,13 +82,12 @@ export const fetchConnectorExecuteAction = async ({
// tracked here: https://github.com/elastic/security-team/issues/7363
// In part 3 I will make enhancements to langchain to introduce streaming
// Once implemented, invokeAI can be removed
- const isStream = assistantStreamingEnabled && !assistantLangChain && !alerts;
+ const isStream = assistantStreamingEnabled && !isEnabledKnowledgeBase && !isEnabledRAGAlerts;
const optionalRequestParams = getOptionalRequestParams({
- alerts,
+ isEnabledRAGAlerts,
alertsIndexPattern,
allow,
allowReplacement,
- ragOnAlerts,
replacements,
size,
});
@@ -101,7 +98,8 @@ export const fetchConnectorExecuteAction = async ({
subActionParams: body,
subAction: 'invokeStream',
},
- assistantLangChain,
+ isEnabledKnowledgeBase,
+ isEnabledRAGAlerts,
...optionalRequestParams,
}
: {
@@ -109,7 +107,8 @@ export const fetchConnectorExecuteAction = async ({
subActionParams: body,
subAction: 'invokeAI',
},
- assistantLangChain,
+ isEnabledKnowledgeBase,
+ isEnabledRAGAlerts,
...optionalRequestParams,
};
@@ -190,9 +189,8 @@ export const fetchConnectorExecuteAction = async ({
return {
response: hasParsableResponse({
- alerts,
- assistantLangChain,
- ragOnAlerts,
+ isEnabledRAGAlerts,
+ isEnabledKnowledgeBase,
})
? getFormattedMessageContent(response.data)
: response.data,
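
With the rename above, every request to the connector _execute route carries both isEnabledKnowledgeBase and isEnabledRAGAlerts, streaming is only used when both are off, and the alert-related parameters are appended only when the RAG-on-alerts toggle is enabled. A hedged sketch of how such a body could be assembled, using simplified stand-in types rather than the plugin's real ones:

interface AssistantFlags {
  isEnabledKnowledgeBase: boolean;
  isEnabledRAGAlerts: boolean;
}

// Streaming (invokeStream) only applies when neither the knowledge base nor RAG on alerts is enabled.
function buildExecuteBody(
  subActionParams: Record<string, unknown>,
  flags: AssistantFlags,
  assistantStreamingEnabled: boolean,
  optionalAlertParams: Record<string, unknown>
): Record<string, unknown> {
  const isStream =
    assistantStreamingEnabled && !flags.isEnabledKnowledgeBase && !flags.isEnabledRAGAlerts;
  return {
    params: { subActionParams, subAction: isStream ? 'invokeStream' : 'invokeAI' },
    ...flags,
    // Only populated by the caller when isEnabledRAGAlerts is true, mirroring getOptionalRequestParams.
    ...optionalAlertParams,
  };
}

// Example: knowledge base on, so the request falls back to invokeAI even with streaming enabled.
const body = buildExecuteBody(
  { model: 'gpt-4', n: 1 },
  { isEnabledKnowledgeBase: true, isEnabledRAGAlerts: false },
  true,
  {}
);
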
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.test.tsx
index 972d3d9099cd0..34d56d100cbae 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.test.tsx
@@ -15,6 +15,7 @@ const assistantTelemetry = {
reportAssistantInvoked,
reportAssistantMessageSent: () => {},
reportAssistantQuickPrompt: () => {},
+ reportAssistantSettingToggled: () => {},
};
describe('AssistantOverlay', () => {
beforeEach(() => {
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.tsx
index ac72fc27dd891..e866cad765456 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/assistant_overlay/index.tsx
@@ -33,8 +33,7 @@ export const AssistantOverlay = React.memo(() => {
WELCOME_CONVERSATION_TITLE
);
const [promptContextId, setPromptContextId] = useState();
- const { assistantTelemetry, setShowAssistantOverlay, localStorageLastConversationId } =
- useAssistantContext();
+ const { assistantTelemetry, setShowAssistantOverlay, getConversationId } = useAssistantContext();
// Bind `showAssistantOverlay` in SecurityAssistantContext to this modal instance
const showOverlay = useCallback(
@@ -44,16 +43,18 @@ export const AssistantOverlay = React.memo(() => {
promptContextId: pid,
conversationId: cid,
}: ShowAssistantOverlayProps) => {
+ const newConversationId = getConversationId(cid);
if (so)
assistantTelemetry?.reportAssistantInvoked({
- conversationId: cid ?? 'unknown',
+ conversationId: newConversationId,
invokedBy: 'click',
});
+
setIsModalVisible(so);
setPromptContextId(pid);
- setConversationId(cid);
+ setConversationId(newConversationId);
},
- [assistantTelemetry]
+ [assistantTelemetry, getConversationId]
);
useEffect(() => {
setShowAssistantOverlay(showOverlay);
@@ -63,15 +64,15 @@ export const AssistantOverlay = React.memo(() => {
const handleShortcutPress = useCallback(() => {
// Try to restore the last conversation on shortcut pressed
if (!isModalVisible) {
- setConversationId(localStorageLastConversationId ?? WELCOME_CONVERSATION_TITLE);
+ setConversationId(getConversationId());
assistantTelemetry?.reportAssistantInvoked({
invokedBy: 'shortcut',
- conversationId: localStorageLastConversationId ?? WELCOME_CONVERSATION_TITLE,
+ conversationId: getConversationId(),
});
}
setIsModalVisible(!isModalVisible);
- }, [assistantTelemetry, isModalVisible, localStorageLastConversationId]);
+ }, [assistantTelemetry, isModalVisible, getConversationId]);
// Register keyboard listener to show the modal when cmd + ; is pressed
const onKeyDown = useCallback(
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.test.ts b/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.test.ts
index 0c3c5a579d274..b176a229bcca7 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.test.ts
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.test.ts
@@ -235,29 +235,12 @@ describe('getBlockBotConversation', () => {
});
describe('getOptionalRequestParams', () => {
- it('should return an empty object when ragOnAlerts is false', () => {
- const params = {
- alerts: true,
- alertsIndexPattern: 'indexPattern',
- allow: ['a', 'b', 'c'],
- allowReplacement: ['b', 'c'],
- ragOnAlerts: false, // <-- false
- replacements: { key: 'value' },
- size: 10,
- };
-
- const result = getOptionalRequestParams(params);
-
- expect(result).toEqual({});
- });
-
it('should return an empty object when alerts is false', () => {
const params = {
- alerts: false, // <-- false
+ isEnabledRAGAlerts: false, // <-- false
alertsIndexPattern: 'indexPattern',
allow: ['a', 'b', 'c'],
allowReplacement: ['b', 'c'],
- ragOnAlerts: true,
replacements: { key: 'value' },
size: 10,
};
@@ -267,13 +250,12 @@ describe('getBlockBotConversation', () => {
expect(result).toEqual({});
});
- it('should return the optional request params when ragOnAlerts is true and alerts is true', () => {
+ it('should return the optional request params when alerts is true', () => {
const params = {
- alerts: true,
+ isEnabledRAGAlerts: true,
alertsIndexPattern: 'indexPattern',
allow: ['a', 'b', 'c'],
allowReplacement: ['b', 'c'],
- ragOnAlerts: true,
replacements: { key: 'value' },
size: 10,
};
@@ -291,8 +273,7 @@ describe('getBlockBotConversation', () => {
it('should return (only) the optional request params that are defined when some optional params are not provided', () => {
const params = {
- alerts: true,
- ragOnAlerts: true,
+ isEnabledRAGAlerts: true,
allow: ['a', 'b', 'c'], // all the others are undefined
};
@@ -305,31 +286,37 @@ describe('getBlockBotConversation', () => {
});
describe('hasParsableResponse', () => {
- it('returns true when assistantLangChain is true', () => {
+ it('returns true when just isEnabledKnowledgeBase is true', () => {
+ const result = hasParsableResponse({
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: true,
+ });
+
+ expect(result).toBe(true);
+ });
+
+ it('returns true when just isEnabledRAGAlerts is true', () => {
const result = hasParsableResponse({
- alerts: false,
- assistantLangChain: true,
- ragOnAlerts: false,
+ isEnabledRAGAlerts: true,
+ isEnabledKnowledgeBase: false,
});
expect(result).toBe(true);
});
- it('returns true when ragOnAlerts is true and alerts is true', () => {
+ it('returns true when both isEnabledKnowledgeBase and isEnabledRAGAlerts are true', () => {
const result = hasParsableResponse({
- alerts: true,
- assistantLangChain: false,
- ragOnAlerts: true,
+ isEnabledRAGAlerts: true,
+ isEnabledKnowledgeBase: true,
});
expect(result).toBe(true);
});
- it('returns false when assistantLangChain, ragOnAlerts, and alerts are all false', () => {
+ it('returns false when both isEnabledKnowledgeBase and isEnabledRAGAlerts are false', () => {
const result = hasParsableResponse({
- alerts: false,
- assistantLangChain: false,
- ragOnAlerts: false,
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: false,
});
expect(result).toBe(false);
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.ts b/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.ts
index 688416d2e738c..f7ea3f52c8826 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.ts
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/helpers.ts
@@ -97,19 +97,17 @@ interface OptionalRequestParams {
}
export const getOptionalRequestParams = ({
- alerts,
+ isEnabledRAGAlerts,
alertsIndexPattern,
allow,
allowReplacement,
- ragOnAlerts,
replacements,
size,
}: {
- alerts: boolean;
+ isEnabledRAGAlerts: boolean;
alertsIndexPattern?: string;
allow?: string[];
allowReplacement?: string[];
- ragOnAlerts: boolean;
replacements?: Record;
size?: number;
}): OptionalRequestParams => {
@@ -119,10 +117,8 @@ export const getOptionalRequestParams = ({
const optionalReplacements = replacements ? { replacements } : undefined;
const optionalSize = size ? { size } : undefined;
- if (
- !ragOnAlerts || // the feature flag must be enabled
- !alerts // the settings toggle must also be enabled
- ) {
+ // the settings toggle must be enabled:
+ if (!isEnabledRAGAlerts) {
return {}; // don't send any optional params
}
@@ -136,11 +132,9 @@ export const getOptionalRequestParams = ({
};
export const hasParsableResponse = ({
- alerts,
- assistantLangChain,
- ragOnAlerts,
+ isEnabledRAGAlerts,
+ isEnabledKnowledgeBase,
}: {
- alerts: boolean;
- assistantLangChain: boolean;
- ragOnAlerts: boolean;
-}): boolean => assistantLangChain || (ragOnAlerts && alerts);
+ isEnabledRAGAlerts: boolean;
+ isEnabledKnowledgeBase: boolean;
+}): boolean => isEnabledKnowledgeBase || isEnabledRAGAlerts;
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/index.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/index.tsx
index 86e0f3a460055..190eee654bc67 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/index.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/index.tsx
@@ -83,7 +83,7 @@ const AssistantComponent: React.FC = ({
http,
promptContexts,
setLastConversationId,
- localStorageLastConversationId,
+ getConversationId,
title,
allSystemPrompts,
} = useAssistantContext();
@@ -113,12 +113,7 @@ const AssistantComponent: React.FC = ({
);
const [selectedConversationId, setSelectedConversationId] = useState(
- isAssistantEnabled
- ? // if a conversationId has been provided, use that
- // if not, check local storage
- // last resort, go to welcome conversation
- conversationId ?? localStorageLastConversationId ?? WELCOME_CONVERSATION_TITLE
- : WELCOME_CONVERSATION_TITLE
+ isAssistantEnabled ? getConversationId(conversationId) : WELCOME_CONVERSATION_TITLE
);
useEffect(() => {
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/prompt_editor/system_prompt/select_system_prompt/index.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/prompt_editor/system_prompt/select_system_prompt/index.test.tsx
index cb1050c895b8e..fc62c00a99727 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/prompt_editor/system_prompt/select_system_prompt/index.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/prompt_editor/system_prompt/select_system_prompt/index.test.tsx
@@ -48,6 +48,10 @@ const mockUseAssistantContext = {
},
],
setAllSystemPrompts: jest.fn(),
+ knowledgeBase: {
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: false,
+ },
};
jest.mock('../../../../assistant_context', () => {
const original = jest.requireActual('../../../../assistant_context');
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.test.tsx
index 517d52667c45a..46e5ecfb76f39 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.test.tsx
@@ -26,7 +26,7 @@ const mockUseAssistantContext = {
promptContexts: {},
allQuickPrompts: MOCK_QUICK_PROMPTS,
knowledgeBase: {
- assistantLangChain: true,
+ isEnabledKnowledgeBase: true,
},
};
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.tsx
index f475f052f180b..a4731ef54f801 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/quick_prompts/quick_prompts.tsx
@@ -41,7 +41,7 @@ export const QuickPrompts: React.FC = React.memo(
const contextFilteredQuickPrompts = useMemo(() => {
const registeredPromptContextTitles = Object.values(promptContexts).map((pc) => pc.category);
// If KB is enabled, include KNOWLEDGE_BASE_CATEGORY so KB dependent quick prompts are shown
- if (knowledgeBase.assistantLangChain) {
+ if (knowledgeBase.isEnabledKnowledgeBase) {
registeredPromptContextTitles.push(KNOWLEDGE_BASE_CATEGORY);
}
return allQuickPrompts.filter((quickPrompt) => {
@@ -54,7 +54,7 @@ export const QuickPrompts: React.FC = React.memo(
});
}
});
- }, [allQuickPrompts, knowledgeBase.assistantLangChain, promptContexts]);
+ }, [allQuickPrompts, knowledgeBase.isEnabledKnowledgeBase, promptContexts]);
// Overflow state
const [isOverflowPopoverOpen, setIsOverflowPopoverOpen] = useState(false);
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.test.tsx
index 975e5c8e27db7..af73fa31293b3 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.test.tsx
@@ -33,15 +33,18 @@ const setConversationsMock = jest.fn();
const setDefaultAllowMock = jest.fn();
const setDefaultAllowReplacementMock = jest.fn();
const setKnowledgeBaseMock = jest.fn();
-
+const reportAssistantSettingToggled = jest.fn();
const mockValues = {
+ assistantTelemetry: { reportAssistantSettingToggled },
conversations: mockConversations,
allSystemPrompts: mockSystemPrompts,
allQuickPrompts: mockQuickPrompts,
defaultAllow: initialDefaultAllow,
defaultAllowReplacement: initialDefaultAllowReplacement,
knowledgeBase: {
- assistantLangChain: true,
+ isEnabledRAGAlerts: true,
+ isEnabledKnowledgeBase: true,
+ latestAlerts: DEFAULT_LATEST_ALERTS,
},
setAllQuickPrompts: setAllQuickPromptsMock,
setConversations: setConversationsMock,
@@ -58,8 +61,8 @@ const updatedValues = {
defaultAllow: ['allow2'],
defaultAllowReplacement: ['replacement2'],
knowledgeBase: {
- alerts: false,
- assistantLangChain: false,
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
},
};
@@ -73,6 +76,9 @@ jest.mock('../../../assistant_context', () => {
});
describe('useSettingsUpdater', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
it('should set all state variables to their initial values when resetSettings is called', async () => {
await act(async () => {
const { result, waitForNextUpdate } = renderHook(() => useSettingsUpdater());
@@ -144,4 +150,46 @@ describe('useSettingsUpdater', () => {
expect(setKnowledgeBaseMock).toHaveBeenCalledWith(updatedValues.knowledgeBase);
});
});
+ it('should track which toggles have been updated when saveSettings is called', async () => {
+ await act(async () => {
+ const { result, waitForNextUpdate } = renderHook(() => useSettingsUpdater());
+ await waitForNextUpdate();
+ const { setUpdatedKnowledgeBaseSettings } = result.current;
+
+ setUpdatedKnowledgeBaseSettings(updatedValues.knowledgeBase);
+
+ result.current.saveSettings();
+ expect(reportAssistantSettingToggled).toHaveBeenCalledWith({
+ isEnabledKnowledgeBase: false,
+ isEnabledRAGAlerts: false,
+ });
+ });
+ });
+ it('should track only toggles that updated', async () => {
+ await act(async () => {
+ const { result, waitForNextUpdate } = renderHook(() => useSettingsUpdater());
+ await waitForNextUpdate();
+ const { setUpdatedKnowledgeBaseSettings } = result.current;
+
+ setUpdatedKnowledgeBaseSettings({
+ ...updatedValues.knowledgeBase,
+ isEnabledKnowledgeBase: true,
+ });
+ result.current.saveSettings();
+ expect(reportAssistantSettingToggled).toHaveBeenCalledWith({
+ isEnabledRAGAlerts: false,
+ });
+ });
+ });
+ it('if no toggles update, do not track anything', async () => {
+ await act(async () => {
+ const { result, waitForNextUpdate } = renderHook(() => useSettingsUpdater());
+ await waitForNextUpdate();
+ const { setUpdatedKnowledgeBaseSettings } = result.current;
+
+ setUpdatedKnowledgeBaseSettings(mockValues.knowledgeBase);
+ result.current.saveSettings();
+ expect(reportAssistantSettingToggled).not.toHaveBeenCalledWith();
+ });
+ });
});
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.tsx
index 0dfd6ebe2904c..63c9d7217e947 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/settings/use_settings_updater/use_settings_updater.tsx
@@ -34,6 +34,7 @@ export const useSettingsUpdater = (): UseSettingsUpdater => {
const {
allQuickPrompts,
allSystemPrompts,
+ assistantTelemetry,
conversations,
defaultAllow,
defaultAllowReplacement,
@@ -92,10 +93,27 @@ export const useSettingsUpdater = (): UseSettingsUpdater => {
setAllQuickPrompts(updatedQuickPromptSettings);
setAllSystemPrompts(updatedSystemPromptSettings);
setConversations(updatedConversationSettings);
+ const didUpdateKnowledgeBase =
+ knowledgeBase.isEnabledKnowledgeBase !== updatedKnowledgeBaseSettings.isEnabledKnowledgeBase;
+ const didUpdateRAGAlerts =
+ knowledgeBase.isEnabledRAGAlerts !== updatedKnowledgeBaseSettings.isEnabledRAGAlerts;
+ if (didUpdateKnowledgeBase || didUpdateRAGAlerts) {
+ assistantTelemetry?.reportAssistantSettingToggled({
+ ...(didUpdateKnowledgeBase
+ ? { isEnabledKnowledgeBase: updatedKnowledgeBaseSettings.isEnabledKnowledgeBase }
+ : {}),
+ ...(didUpdateRAGAlerts
+ ? { isEnabledRAGAlerts: updatedKnowledgeBaseSettings.isEnabledRAGAlerts }
+ : {}),
+ });
+ }
setKnowledgeBase(updatedKnowledgeBaseSettings);
setDefaultAllow(updatedDefaultAllow);
setDefaultAllowReplacement(updatedDefaultAllowReplacement);
}, [
+ assistantTelemetry,
+ knowledgeBase.isEnabledRAGAlerts,
+ knowledgeBase.isEnabledKnowledgeBase,
setAllQuickPrompts,
setAllSystemPrompts,
setConversations,
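
The saveSettings change above reports a settings-toggle telemetry event only for the toggles whose value actually changed, so an unchanged toggle never appears in the payload. A brief TypeScript sketch of that diffing step, with names assumed for illustration:

interface ToggleState {
  isEnabledKnowledgeBase: boolean;
  isEnabledRAGAlerts: boolean;
}

// Returns a payload containing only the toggles that changed between the saved and updated settings.
function changedToggles(saved: ToggleState, updated: ToggleState): Partial<ToggleState> {
  return {
    ...(saved.isEnabledKnowledgeBase !== updated.isEnabledKnowledgeBase
      ? { isEnabledKnowledgeBase: updated.isEnabledKnowledgeBase }
      : {}),
    ...(saved.isEnabledRAGAlerts !== updated.isEnabledRAGAlerts
      ? { isEnabledRAGAlerts: updated.isEnabledRAGAlerts }
      : {}),
  };
}

// Example: only the knowledge base toggle flipped, so only it is reported.
const payload = changedToggles(
  { isEnabledKnowledgeBase: false, isEnabledRAGAlerts: true },
  { isEnabledKnowledgeBase: true, isEnabledRAGAlerts: true }
); // { isEnabledKnowledgeBase: true }
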
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/types.ts b/x-pack/packages/kbn-elastic-assistant/impl/assistant/types.ts
index 303aae0f6ff9c..e7cb6f79f243a 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/types.ts
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/types.ts
@@ -17,7 +17,7 @@ export interface Prompt {
}
export interface KnowledgeBaseConfig {
- alerts: boolean;
- assistantLangChain: boolean;
+ isEnabledRAGAlerts: boolean;
+ isEnabledKnowledgeBase: boolean;
latestAlerts: number;
}
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.test.tsx
index 562a252bf8111..be94a164364aa 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.test.tsx
@@ -36,6 +36,9 @@ const mockConvo = {
};
describe('useConversation', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
it('should append a message to an existing conversation when called with valid conversationId and message', async () => {
await act(async () => {
const { result, waitForNextUpdate } = renderHook(() => useConversation(), {
@@ -63,6 +66,43 @@ describe('useConversation', () => {
});
});
+ it('should report telemetry when a message has been sent', async () => {
+ await act(async () => {
+ const reportAssistantMessageSent = jest.fn();
+ const { result, waitForNextUpdate } = renderHook(() => useConversation(), {
+ wrapper: ({ children }) => (
+ ({
+ [alertConvo.id]: alertConvo,
+ [welcomeConvo.id]: welcomeConvo,
+ }),
+ assistantTelemetry: {
+ reportAssistantInvoked: () => {},
+ reportAssistantQuickPrompt: () => {},
+ reportAssistantSettingToggled: () => {},
+ reportAssistantMessageSent,
+ },
+ }}
+ >
+ {children}
+
+ ),
+ });
+ await waitForNextUpdate();
+ result.current.appendMessage({
+ conversationId: welcomeConvo.id,
+ message,
+ });
+ expect(reportAssistantMessageSent).toHaveBeenCalledWith({
+ conversationId: 'Welcome',
+ isEnabledKnowledgeBase: false,
+ isEnabledRAGAlerts: false,
+ role: 'user',
+ });
+ });
+ });
+
it('should create a new conversation when called with valid conversationId and message', async () => {
await act(async () => {
const { result, waitForNextUpdate } = renderHook(() => useConversation(), {
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.tsx
index 3bd9f3fcbff71..11a21641b1bd4 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_conversation/index.tsx
@@ -75,7 +75,12 @@ interface UseConversation {
}
export const useConversation = (): UseConversation => {
- const { allSystemPrompts, assistantTelemetry, setConversations } = useAssistantContext();
+ const {
+ allSystemPrompts,
+ assistantTelemetry,
+ knowledgeBase: { isEnabledKnowledgeBase, isEnabledRAGAlerts },
+ setConversations,
+ } = useAssistantContext();
/**
* Removes the last message of conversation[] for a given conversationId
@@ -140,7 +145,12 @@ export const useConversation = (): UseConversation => {
*/
const appendMessage = useCallback(
({ conversationId, message }: AppendMessageProps): Message[] => {
- assistantTelemetry?.reportAssistantMessageSent({ conversationId, role: message.role });
+ assistantTelemetry?.reportAssistantMessageSent({
+ conversationId,
+ role: message.role,
+ isEnabledKnowledgeBase,
+ isEnabledRAGAlerts,
+ });
let messages: Message[] = [];
setConversations((prev: Record) => {
const prevConversation: Conversation | undefined = prev[conversationId];
@@ -161,7 +171,7 @@ export const useConversation = (): UseConversation => {
});
return messages;
},
- [assistantTelemetry, setConversations]
+ [isEnabledKnowledgeBase, isEnabledRAGAlerts, assistantTelemetry, setConversations]
);
const appendReplacements = useCallback(
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_send_messages/index.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_send_messages/index.tsx
index fcfbadb574bbd..eae7d7914e6a1 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_send_messages/index.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant/use_send_messages/index.tsx
@@ -37,7 +37,6 @@ export const useSendMessages = (): UseSendMessages => {
assistantStreamingEnabled,
defaultAllow,
defaultAllowReplacement,
- ragOnAlerts,
knowledgeBase,
} = useAssistantContext();
const [isLoading, setIsLoading] = useState(false);
@@ -48,15 +47,14 @@ export const useSendMessages = (): UseSendMessages => {
try {
return await fetchConnectorExecuteAction({
- alerts: knowledgeBase.alerts, // settings toggle
+ isEnabledRAGAlerts: knowledgeBase.isEnabledRAGAlerts, // settings toggle
alertsIndexPattern,
allow: defaultAllow,
allowReplacement: defaultAllowReplacement,
apiConfig,
- assistantLangChain: knowledgeBase.assistantLangChain,
+ isEnabledKnowledgeBase: knowledgeBase.isEnabledKnowledgeBase,
assistantStreamingEnabled,
http,
- ragOnAlerts, // feature flag
replacements,
messages,
size: knowledgeBase.latestAlerts,
@@ -71,10 +69,9 @@ export const useSendMessages = (): UseSendMessages => {
assistantStreamingEnabled,
defaultAllow,
defaultAllowReplacement,
- knowledgeBase.alerts,
- knowledgeBase.assistantLangChain,
+ knowledgeBase.isEnabledRAGAlerts,
+ knowledgeBase.isEnabledKnowledgeBase,
knowledgeBase.latestAlerts,
- ragOnAlerts,
]
);
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/constants.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/constants.tsx
index 780a2a04a9728..cc747a705b851 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/constants.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/constants.tsx
@@ -17,7 +17,7 @@ export const KNOWLEDGE_BASE_LOCAL_STORAGE_KEY = 'knowledgeBase';
export const DEFAULT_LATEST_ALERTS = 20;
export const DEFAULT_KNOWLEDGE_BASE_SETTINGS: KnowledgeBaseConfig = {
- alerts: false,
- assistantLangChain: false,
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
};
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.test.tsx
index 61f8352e0d325..84a2ac40a6f24 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.test.tsx
@@ -12,7 +12,11 @@ import { AssistantProvider, useAssistantContext } from '.';
import { httpServiceMock } from '@kbn/core-http-browser-mocks';
import { actionTypeRegistryMock } from '@kbn/triggers-actions-ui-plugin/public/application/action_type_registry.mock';
import { AssistantAvailability } from '../..';
+import { useLocalStorage } from 'react-use';
+jest.mock('react-use', () => ({
+ useLocalStorage: jest.fn().mockReturnValue(['456', jest.fn()]),
+}));
const actionTypeRegistry = actionTypeRegistryMock.create();
const mockGetInitialConversations = jest.fn(() => ({}));
const mockGetComments = jest.fn(() => []);
@@ -70,4 +74,23 @@ describe('AssistantContext', () => {
expect(mockHttp.fetch).toBeCalledWith(path);
});
+
+ test('getConversationId defaults to provided id', async () => {
+ const { result } = renderHook(useAssistantContext, { wrapper: ContextWrapper });
+ const id = result.current.getConversationId('123');
+ expect(id).toEqual('123');
+ });
+
+ test('getConversationId uses local storage id when no id is provided ', async () => {
+ const { result } = renderHook(useAssistantContext, { wrapper: ContextWrapper });
+ const id = result.current.getConversationId();
+ expect(id).toEqual('456');
+ });
+
+ test('getConversationId defaults to Welcome when no local storage id and no id is provided ', async () => {
+ (useLocalStorage as jest.Mock).mockReturnValue([undefined, jest.fn()]);
+ const { result } = renderHook(useAssistantContext, { wrapper: ContextWrapper });
+ const id = result.current.getConversationId();
+ expect(id).toEqual('Welcome');
+ });
});
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.tsx
index 4d0eec97f2639..50a3211f74f3c 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/index.tsx
@@ -13,6 +13,7 @@ import type { IToasts } from '@kbn/core-notifications-browser';
import { ActionTypeRegistryContract } from '@kbn/triggers-actions-ui-plugin/public';
import { useLocalStorage } from 'react-use';
import type { DocLinksStart } from '@kbn/core-doc-links-browser';
+import { WELCOME_CONVERSATION_TITLE } from '../assistant/use_conversation/translations';
import { updatePromptContexts } from './helpers';
import type {
PromptContext,
@@ -88,7 +89,6 @@ export interface AssistantProviderProps {
getInitialConversations: () => Record;
modelEvaluatorEnabled?: boolean;
nameSpace?: string;
- ragOnAlerts?: boolean;
setConversations: React.Dispatch>>;
setDefaultAllow: React.Dispatch>;
setDefaultAllowReplacement: React.Dispatch>;
@@ -136,11 +136,10 @@ export interface UseAssistantContext {
}) => EuiCommentProps[];
http: HttpSetup;
knowledgeBase: KnowledgeBaseConfig;
- localStorageLastConversationId: string | undefined;
+ getConversationId: (id?: string) => string;
promptContexts: Record;
modelEvaluatorEnabled: boolean;
nameSpace: string;
- ragOnAlerts: boolean;
registerPromptContext: RegisterPromptContext;
selectedSettingsTab: SettingsTabs;
setAllQuickPrompts: React.Dispatch<React.SetStateAction<QuickPrompt[]>>;
export const AssistantProvider: React.FC<AssistantProviderProps> = ({
getInitialConversations,
modelEvaluatorEnabled = false,
nameSpace = DEFAULT_ASSISTANT_NAMESPACE,
- ragOnAlerts = false,
setConversations,
setDefaultAllow,
setDefaultAllowReplacement,
@@ -292,6 +290,14 @@ export const AssistantProvider: React.FC = ({
[setConversations]
);
+ const getConversationId = useCallback(
+ // if a conversationId has been provided, use that
+ // if not, check local storage
+ // last resort, go to welcome conversation
+ (id?: string) => id ?? localStorageLastConversationId ?? WELCOME_CONVERSATION_TITLE,
+ [localStorageLastConversationId]
+ );
+
const value = useMemo(
() => ({
actionTypeRegistry,
@@ -315,11 +321,10 @@ export const AssistantProvider: React.FC = ({
docLinks,
getComments,
http,
- knowledgeBase: localStorageKnowledgeBase ?? DEFAULT_KNOWLEDGE_BASE_SETTINGS,
+ knowledgeBase: { ...DEFAULT_KNOWLEDGE_BASE_SETTINGS, ...localStorageKnowledgeBase },
modelEvaluatorEnabled,
promptContexts,
nameSpace,
- ragOnAlerts,
registerPromptContext,
selectedSettingsTab,
setAllQuickPrompts: setLocalStorageQuickPrompts,
@@ -334,7 +339,7 @@ export const AssistantProvider: React.FC = ({
title,
toasts,
unRegisterPromptContext,
- localStorageLastConversationId,
+ getConversationId,
setLastConversationId: setLocalStorageLastConversationId,
}),
[
@@ -358,14 +363,13 @@ export const AssistantProvider: React.FC = ({
getComments,
http,
localStorageKnowledgeBase,
- localStorageLastConversationId,
+ getConversationId,
localStorageQuickPrompts,
localStorageSystemPrompts,
modelEvaluatorEnabled,
nameSpace,
onConversationsUpdated,
promptContexts,
- ragOnAlerts,
registerPromptContext,
selectedSettingsTab,
setDefaultAllow,
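Two behavioural changes above are easy to miss in the hunk noise: the new `getConversationId` fallback chain and the switch from `??` to an object spread when hydrating `knowledgeBase` from local storage. A minimal standalone sketch of both patterns follows; the types and default values are simplified stand-ins, not the plugin's real exports.

```ts
// Standalone illustration of the two patterns above; the config type and
// defaults are simplified stand-ins for the plugin's real KnowledgeBaseConfig.
interface KnowledgeBaseConfig {
  isEnabledKnowledgeBase: boolean;
  isEnabledRAGAlerts: boolean;
  latestAlerts: number;
}

const DEFAULT_KNOWLEDGE_BASE_SETTINGS: KnowledgeBaseConfig = {
  isEnabledKnowledgeBase: false,
  isEnabledRAGAlerts: false,
  latestAlerts: 20, // illustrative default, not necessarily DEFAULT_LATEST_ALERTS
};

const WELCOME_CONVERSATION_TITLE = 'Welcome';

// Fallback chain: explicit id -> last id persisted in local storage -> Welcome.
const getConversationId = (lastStoredId?: string) => (id?: string) =>
  id ?? lastStoredId ?? WELCOME_CONVERSATION_TITLE;

// Spread-merge keeps defaults for keys missing from a stale stored value,
// whereas `stored ?? defaults` drops every default once anything is stored.
const mergeKnowledgeBase = (stored?: Partial<KnowledgeBaseConfig>): KnowledgeBaseConfig => ({
  ...DEFAULT_KNOWLEDGE_BASE_SETTINGS,
  ...stored,
});

console.log(getConversationId('456')()); // '456'
console.log(getConversationId(undefined)('123')); // '123'
console.log(getConversationId(undefined)()); // 'Welcome'
// An old value persisted before `isEnabledRAGAlerts` existed still gets its default.
console.log(mergeKnowledgeBase({ isEnabledKnowledgeBase: true }));
```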
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/types.tsx b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/types.tsx
index 982b74faabf8d..bf95b7d400240 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/types.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/assistant_context/types.tsx
@@ -67,8 +67,17 @@ export interface Conversation {
export interface AssistantTelemetry {
reportAssistantInvoked: (params: { invokedBy: string; conversationId: string }) => void;
- reportAssistantMessageSent: (params: { conversationId: string; role: string }) => void;
+ reportAssistantMessageSent: (params: {
+ conversationId: string;
+ role: string;
+ isEnabledKnowledgeBase: boolean;
+ isEnabledRAGAlerts: boolean;
+ }) => void;
reportAssistantQuickPrompt: (params: { conversationId: string; promptTitle: string }) => void;
+ reportAssistantSettingToggled: (params: {
+ isEnabledKnowledgeBase?: boolean;
+ isEnabledRAGAlerts?: boolean;
+ }) => void;
}
export interface AssistantAvailability {
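The widened `AssistantTelemetry` contract attaches the two feature flags to message-sent events and adds a settings-toggled event. A minimal sketch of a reporter satisfying that shape, with a console-backed implementation standing in for the real telemetry client and the other callbacks omitted:

```ts
// Simplified mirror of the AssistantTelemetry shape in the diff above.
interface AssistantTelemetry {
  reportAssistantMessageSent: (params: {
    conversationId: string;
    role: string;
    isEnabledKnowledgeBase: boolean;
    isEnabledRAGAlerts: boolean;
  }) => void;
  reportAssistantSettingToggled: (params: {
    isEnabledKnowledgeBase?: boolean;
    isEnabledRAGAlerts?: boolean;
  }) => void;
}

// A console-backed reporter is enough to exercise the contract in isolation.
const telemetry: AssistantTelemetry = {
  reportAssistantMessageSent: (params) => console.log('message_sent', params),
  reportAssistantSettingToggled: (params) => console.log('setting_toggled', params),
};

telemetry.reportAssistantMessageSent({
  conversationId: 'Welcome',
  role: 'user',
  isEnabledKnowledgeBase: true,
  isEnabledRAGAlerts: false,
});
telemetry.reportAssistantSettingToggled({ isEnabledRAGAlerts: true });
```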
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.test.tsx b/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.test.tsx
index 06c1b33bfda85..20ab3aab4a26f 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.test.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.test.tsx
@@ -22,7 +22,6 @@ const mockUseAssistantContext = {
prepend: jest.fn(),
},
},
- ragOnAlerts: true,
setAllSystemPrompts: jest.fn(),
setConversations: jest.fn(),
};
@@ -39,8 +38,8 @@ jest.mock('../assistant_context', () => {
const setUpdatedKnowledgeBaseSettings = jest.fn();
const defaultProps = {
knowledgeBase: {
- assistantLangChain: true,
- alerts: false,
+ isEnabledKnowledgeBase: true,
+ isEnabledRAGAlerts: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
},
setUpdatedKnowledgeBaseSettings,
@@ -118,16 +117,16 @@ describe('Knowledge base settings', () => {
fireEvent.click(getByTestId('esqlEnableButton'));
expect(mockSetup).toHaveBeenCalledWith('esql');
});
- it('On disable lang chain, set assistantLangChain to false', () => {
+ it('On disable lang chain, set isEnabledKnowledgeBase to false', () => {
const { getByTestId } = render(
);
- fireEvent.click(getByTestId('assistantLangChainSwitch'));
+ fireEvent.click(getByTestId('isEnabledKnowledgeBaseSwitch'));
expect(setUpdatedKnowledgeBaseSettings).toHaveBeenCalledWith({
- alerts: false,
- assistantLangChain: false,
+ isEnabledRAGAlerts: false,
+ isEnabledKnowledgeBase: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
});
@@ -139,17 +138,17 @@ describe('Knowledge base settings', () => {
);
- fireEvent.click(getByTestId('assistantLangChainSwitch'));
+ fireEvent.click(getByTestId('isEnabledKnowledgeBaseSwitch'));
expect(setUpdatedKnowledgeBaseSettings).toHaveBeenCalledWith({
- assistantLangChain: true,
- alerts: false,
+ isEnabledKnowledgeBase: true,
+ isEnabledRAGAlerts: false,
latestAlerts: DEFAULT_LATEST_ALERTS,
});
@@ -210,7 +209,7 @@ describe('Knowledge base settings', () => {
expect(queryByTestId('knowledgeBaseActionButton')).not.toBeInTheDocument();
});
- it('renders the alerts settings when ragOnAlerts is true', () => {
+ it('renders the alerts settings', () => {
const { getByTestId } = render(
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.tsx b/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.tsx
index bd41f5b888c93..f30215eaac521 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.tsx
+++ b/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/knowledge_base_settings.tsx
@@ -47,7 +47,7 @@ interface Props {
*/
export const KnowledgeBaseSettings: React.FC<Props> = React.memo(
({ knowledgeBase, setUpdatedKnowledgeBaseSettings }) => {
- const { http, ragOnAlerts } = useAssistantContext();
+ const { http } = useAssistantContext();
const {
data: kbStatus,
isLoading,
@@ -63,11 +63,11 @@ export const KnowledgeBaseSettings: React.FC = React.memo(
// Resource availability state
const isLoadingKb = isLoading || isFetching || isSettingUpKB || isDeletingUpKB;
- const isKnowledgeBaseAvailable = knowledgeBase.assistantLangChain && kbStatus?.elser_exists;
+ const isKnowledgeBaseAvailable = knowledgeBase.isEnabledKnowledgeBase && kbStatus?.elser_exists;
const isESQLAvailable =
- knowledgeBase.assistantLangChain && isKnowledgeBaseAvailable && isKnowledgeBaseEnabled;
+ knowledgeBase.isEnabledKnowledgeBase && isKnowledgeBaseAvailable && isKnowledgeBaseEnabled;
// Prevent enabling if elser doesn't exist, but always allow to disable
- const isSwitchDisabled = !kbStatus?.elser_exists && !knowledgeBase.assistantLangChain;
+ const isSwitchDisabled = !kbStatus?.elser_exists && !knowledgeBase.isEnabledKnowledgeBase;
// Calculated health state for EuiHealth component
const elserHealth = isElserEnabled ? 'success' : 'subdued';
@@ -75,13 +75,13 @@ export const KnowledgeBaseSettings: React.FC = React.memo(
const esqlHealth = isESQLEnabled ? 'success' : 'subdued';
//////////////////////////////////////////////////////////////////////////////////////////
- // Main `Knowledge Base` switch, which toggles the `assistantLangChain` UI feature toggle
+ // Main `Knowledge Base` switch, which toggles the `isEnabledKnowledgeBase` UI feature toggle
// setting that is saved to localstorage
const onEnableAssistantLangChainChange = useCallback(
(event: EuiSwitchEvent) => {
setUpdatedKnowledgeBaseSettings({
...knowledgeBase,
- assistantLangChain: event.target.checked,
+ isEnabledKnowledgeBase: event.target.checked,
});
// If enabling and ELSER exists, try to set up automatically
@@ -92,16 +92,16 @@ export const KnowledgeBaseSettings: React.FC = React.memo(
[kbStatus?.elser_exists, knowledgeBase, setUpdatedKnowledgeBaseSettings, setupKB]
);
- const assistantLangChainSwitch = useMemo(() => {
+ const isEnabledKnowledgeBaseSwitch = useMemo(() => {
return isLoadingKb ? (
) : (
= React.memo(
}, [
isLoadingKb,
isSwitchDisabled,
- knowledgeBase.assistantLangChain,
+ knowledgeBase.isEnabledKnowledgeBase,
onEnableAssistantLangChainChange,
]);
@@ -221,7 +221,7 @@ export const KnowledgeBaseSettings: React.FC = React.memo(
}
`}
>
- {assistantLangChainSwitch}
+ {isEnabledKnowledgeBaseSwitch}
@@ -303,12 +303,10 @@ export const KnowledgeBaseSettings: React.FC = React.memo(
- {ragOnAlerts && (
-
- )}
+
>
);
}
diff --git a/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/translations.ts b/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/translations.ts
index 03e989ab6a055..e1b176e9dcaa7 100644
--- a/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/translations.ts
+++ b/x-pack/packages/kbn-elastic-assistant/impl/knowledge_base/translations.ts
@@ -21,25 +21,27 @@ export const ASK_QUESTIONS_ABOUT = i18n.translate(
}
);
-export const LATEST_AND_RISKIEST_OPEN_ALERTS = i18n.translate(
- 'xpack.elasticAssistant.assistant.settings.knowledgeBaseSettings.latestAndRiskiestOpenAlertsLabel',
- {
- defaultMessage: 'latest and riskiest open and acknowledged alerts in your environment.',
- }
-);
+export const LATEST_AND_RISKIEST_OPEN_ALERTS = (alertsCount: number) =>
+ i18n.translate(
+ 'xpack.elasticAssistant.assistant.settings.knowledgeBaseSettings.latestAndRiskiestOpenAlertsLabel',
+ {
+ defaultMessage:
+ 'Send AI Assistant information about your {alertsCount} newest and riskiest open or acknowledged alerts.',
+ values: { alertsCount },
+ }
+ );
export const YOUR_ANONYMIZATION_SETTINGS = i18n.translate(
'xpack.elasticAssistant.assistant.settings.knowledgeBaseSettings.yourAnonymizationSettingsLabel',
{
- defaultMessage: 'Your Anonymization settings will be applied to the alerts.',
+ defaultMessage: 'Your anonymization settings will apply to these alerts.',
}
);
export const SELECT_FEWER_ALERTS = i18n.translate(
'xpack.elasticAssistant.assistant.settings.knowledgeBaseSettings.selectFewerAlertsLabel',
{
- defaultMessage:
- "Select fewer alerts if the model's maximum context length is frequently exceeded.",
+ defaultMessage: "Send fewer alerts if the model's context window is too small.",
}
);
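Turning the alerts label into a factory lets the copy interpolate the configured alert count instead of hard-coding prose around it. The same pattern without the `@kbn/i18n` dependency, as a plain template for illustration:

```ts
// Parameterised message factory: callers pass the live count instead of
// concatenating it around a static string.
const LATEST_AND_RISKIEST_OPEN_ALERTS = (alertsCount: number): string =>
  `Send AI Assistant information about your ${alertsCount} newest and riskiest open or acknowledged alerts.`;

console.log(LATEST_AND_RISKIEST_OPEN_ALERTS(20));
// "Send AI Assistant information about your 20 newest and riskiest open or acknowledged alerts."
```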
diff --git a/x-pack/packages/kbn-random-sampling/src/ui/slider_control/index.tsx b/x-pack/packages/kbn-random-sampling/src/ui/slider_control/index.tsx
index 1f7495e431298..40a9256f9ba07 100644
--- a/x-pack/packages/kbn-random-sampling/src/ui/slider_control/index.tsx
+++ b/x-pack/packages/kbn-random-sampling/src/ui/slider_control/index.tsx
@@ -8,7 +8,7 @@
import { EuiFlexGroup, EuiFlexItem, EuiRange, EuiText, useEuiTheme } from '@elastic/eui';
import React from 'react';
import { FormattedMessage } from '@kbn/i18n-react';
-import { TooltipWrapper } from './tooltip_wrapper';
+import { TooltipWrapper } from '@kbn/visualization-utils';
export interface ControlSliderProps {
/** Allowed values to show on the Control Slider */
diff --git a/x-pack/packages/kbn-random-sampling/src/ui/slider_control/tooltip_wrapper.tsx b/x-pack/packages/kbn-random-sampling/src/ui/slider_control/tooltip_wrapper.tsx
deleted file mode 100644
index 5ab7800e05349..0000000000000
--- a/x-pack/packages/kbn-random-sampling/src/ui/slider_control/tooltip_wrapper.tsx
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import React from 'react';
-import { EuiToolTip, EuiToolTipProps } from '@elastic/eui';
-
-export type TooltipWrapperProps = Partial<Omit<EuiToolTipProps, 'content'>> & {
- tooltipContent: string;
- /** When the condition is truthy, the tooltip will be shown */
- condition: boolean;
-};
-
-export const TooltipWrapper: React.FunctionComponent<TooltipWrapperProps> = ({
- children,
- condition,
- tooltipContent,
- ...tooltipProps
-}) => {
- return (
- <>
- {condition ? (
- <EuiToolTip content={tooltipContent} {...tooltipProps}>
- <>{children}</>
- </EuiToolTip>
- ) : (
- children
- )}
- </>
- );
-};
diff --git a/x-pack/packages/kbn-random-sampling/tsconfig.json b/x-pack/packages/kbn-random-sampling/tsconfig.json
index 5c5673e4b6787..b4b34fcb94036 100644
--- a/x-pack/packages/kbn-random-sampling/tsconfig.json
+++ b/x-pack/packages/kbn-random-sampling/tsconfig.json
@@ -13,6 +13,7 @@
],
"kbn_references": [
"@kbn/i18n-react",
+ "@kbn/visualization-utils",
],
"exclude": [
"target/**/*",
diff --git a/x-pack/performance/journeys/apm_service_inventory.ts b/x-pack/performance/journeys/apm_service_inventory.ts
index 84f2518575df8..cbef53858eea6 100644
--- a/x-pack/performance/journeys/apm_service_inventory.ts
+++ b/x-pack/performance/journeys/apm_service_inventory.ts
@@ -11,8 +11,6 @@ import { SynthtraceClient } from '../services/synthtrace';
import { generateData } from '../synthtrace_data/apm_data';
export const journey = new Journey({
- // FAILING VERSION BUMP: https://github.com/elastic/kibana/issues/172757
- skipped: true,
beforeSteps: async ({ kbnUrl, log, auth, es }) => {
// Install APM Package
const synthClient = new SynthtraceClient({
diff --git a/x-pack/plugins/actions/common/connector_feature_config.ts b/x-pack/plugins/actions/common/connector_feature_config.ts
index 61a67087da9fa..fb61ff35da6b7 100644
--- a/x-pack/plugins/actions/common/connector_feature_config.ts
+++ b/x-pack/plugins/actions/common/connector_feature_config.ts
@@ -26,6 +26,7 @@ export const CasesConnectorFeatureId = 'cases';
export const UptimeConnectorFeatureId = 'uptime';
export const SecurityConnectorFeatureId = 'siem';
export const GenerativeAIConnectorFeatureId = 'generativeAI';
+export const GenerativeAIForObservabilityConnectorFeatureId = 'generativeAIForObservability';
const compatibilityGenerativeAI = i18n.translate(
'xpack.actions.availableConnectorFeatures.compatibility.generativeAI',
@@ -34,6 +35,13 @@ const compatibilityGenerativeAI = i18n.translate(
}
);
+const compatibilityGenerativeAIForObservability = i18n.translate(
+ 'xpack.actions.availableConnectorFeatures.compatibility.generativeAIForObservability',
+ {
+ defaultMessage: 'Generative AI For Observability',
+ }
+);
+
const compatibilityAlertingRules = i18n.translate(
'xpack.actions.availableConnectorFeatures.compatibility.alertingRules',
{
@@ -86,12 +94,19 @@ export const GenerativeAIFeature: ConnectorFeatureConfig = {
compatibility: compatibilityGenerativeAI,
};
+export const GenerativeAIForObservabilityFeature: ConnectorFeatureConfig = {
+ id: GenerativeAIForObservabilityConnectorFeatureId,
+ name: compatibilityGenerativeAIForObservability,
+ compatibility: compatibilityGenerativeAIForObservability,
+};
+
const AllAvailableConnectorFeatures = {
[AlertingConnectorFeature.id]: AlertingConnectorFeature,
[CasesConnectorFeature.id]: CasesConnectorFeature,
[UptimeConnectorFeature.id]: UptimeConnectorFeature,
[SecuritySolutionFeature.id]: SecuritySolutionFeature,
[GenerativeAIFeature.id]: GenerativeAIFeature,
+ [GenerativeAIForObservabilityFeature.id]: GenerativeAIForObservabilityFeature,
};
export function areValidFeatures(ids: string[]) {
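The new observability feature id only becomes usable because every id a connector declares is checked against the `AllAvailableConnectorFeatures` map. A simplified standalone version of that registry-and-validation pattern, trimmed to the one new feature:

```ts
// Simplified stand-in for the connector feature registry shown in the diff.
interface ConnectorFeatureConfig {
  id: string;
  name: string;
  compatibility: string;
}

const GenerativeAIForObservabilityFeature: ConnectorFeatureConfig = {
  id: 'generativeAIForObservability',
  name: 'Generative AI For Observability',
  compatibility: 'Generative AI For Observability',
};

const AllAvailableConnectorFeatures: Record<string, ConnectorFeatureConfig> = {
  [GenerativeAIForObservabilityFeature.id]: GenerativeAIForObservabilityFeature,
};

// Mirrors areValidFeatures: every supplied id must exist in the registry.
function areValidFeatures(ids: string[]): boolean {
  return ids.every((id) => Boolean(AllAvailableConnectorFeatures[id]));
}

console.log(areValidFeatures(['generativeAIForObservability'])); // true
console.log(areValidFeatures(['unknownFeature'])); // false
```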
diff --git a/x-pack/plugins/actions/common/types.ts b/x-pack/plugins/actions/common/types.ts
index 44d7516595d19..2560cbae3f7e7 100644
--- a/x-pack/plugins/actions/common/types.ts
+++ b/x-pack/plugins/actions/common/types.ts
@@ -6,6 +6,7 @@
*/
import { LicenseType } from '@kbn/licensing-plugin/common/types';
+import { TaskErrorSource } from '@kbn/task-manager-plugin/common';
export {
AlertingConnectorFeatureId,
@@ -47,6 +48,7 @@ export interface ActionTypeExecutorResult {
serviceMessage?: string;
data?: Data;
retry?: null | boolean | Date;
+ errorSource?: TaskErrorSource;
}
export type ActionTypeExecutorRawResult<Data> = ActionTypeExecutorResult<Data> & {
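Adding `errorSource` to the executor result lets a connector attribute a failure to the user or the framework without throwing. A simplified sketch of the result shape and a helper a connector might use; the enum values are local stand-ins mirroring the diff, not imports from the task-manager plugin:

```ts
// Local stand-ins for TaskErrorSource and the executor result type above.
enum TaskErrorSource {
  FRAMEWORK = 'framework',
  USER = 'user',
}

interface ActionTypeExecutorResult<Data = unknown> {
  actionId: string;
  status: 'ok' | 'error';
  message?: string;
  serviceMessage?: string;
  data?: Data;
  retry?: null | boolean | Date;
  errorSource?: TaskErrorSource;
}

// Illustrative helper: a connector reporting a user-caused failure.
function userError(actionId: string, message: string): ActionTypeExecutorResult {
  return { actionId, status: 'error', message, retry: true, errorSource: TaskErrorSource.USER };
}

console.log(userError('test', 'test error message'));
```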
diff --git a/x-pack/plugins/actions/server/lib/action_executor.test.ts b/x-pack/plugins/actions/server/lib/action_executor.test.ts
index c58e16a11aa4a..9af58047878d1 100644
--- a/x-pack/plugins/actions/server/lib/action_executor.test.ts
+++ b/x-pack/plugins/actions/server/lib/action_executor.test.ts
@@ -23,6 +23,8 @@ import { securityMock } from '@kbn/security-plugin/server/mocks';
import { finished } from 'stream/promises';
import { PassThrough } from 'stream';
import { SecurityConnectorFeatureId } from '../../common';
+import { TaskErrorSource } from '@kbn/task-manager-plugin/common';
+import { getErrorSource } from '@kbn/task-manager-plugin/server/task_running';
const actionExecutor = new ActionExecutor({ isESOCanEncrypt: true });
const services = actionsMock.createServices();
@@ -720,6 +722,7 @@ test('successfully executes with system connector', async () => {
secrets: {},
params: { foo: true },
logger: loggerMock,
+ request: {},
});
expect(loggerMock.debug).toBeCalledWith(
@@ -812,6 +815,75 @@ test('successfully executes with system connector', async () => {
`);
});
+test('passes the Kibana request on the executor of a system action', async () => {
+ const actionType: jest.Mocked<ActionType> = {
+ id: '.cases',
+ name: 'Cases',
+ minimumLicenseRequired: 'platinum',
+ supportedFeatureIds: ['alerting'],
+ isSystemActionType: true,
+ validate: {
+ config: { schema: schema.any() },
+ secrets: { schema: schema.any() },
+ params: { schema: schema.any() },
+ },
+ executor: jest.fn(),
+ };
+
+ actionTypeRegistry.get.mockReturnValueOnce(actionType);
+ await actionExecutor.execute({ ...executeParams, actionId: 'system-connector-.cases' });
+
+ expect(actionType.executor).toHaveBeenCalledWith({
+ actionId: 'system-connector-.cases',
+ services: expect.anything(),
+ config: {},
+ secrets: {},
+ params: { foo: true },
+ logger: loggerMock,
+ request: {},
+ });
+});
+
+test('does not pass the Kibana request on the executor if the action is not a system action', async () => {
+ const actionType: jest.Mocked<ActionType> = {
+ id: 'test',
+ name: 'Test',
+ minimumLicenseRequired: 'basic',
+ supportedFeatureIds: ['alerting'],
+ validate: {
+ config: { schema: schema.object({ bar: schema.boolean() }) },
+ secrets: { schema: schema.object({ baz: schema.boolean() }) },
+ params: { schema: schema.object({ foo: schema.boolean() }) },
+ },
+ executor: jest.fn(),
+ };
+
+ const actionSavedObject = {
+ id: '1',
+ type: 'action',
+ attributes: {
+ name: '1',
+ actionTypeId: 'test',
+ config: {
+ bar: true,
+ },
+ secrets: {
+ baz: true,
+ },
+ isMissingSecrets: false,
+ },
+ references: [],
+ };
+
+ encryptedSavedObjectsClient.getDecryptedAsInternalUser.mockResolvedValueOnce(actionSavedObject);
+ actionTypeRegistry.get.mockReturnValueOnce(actionType);
+ await actionExecutor.execute(executeParams);
+
+ const args = actionType.executor.mock.calls[0][0];
+
+ expect(args.request).toBeUndefined();
+});
+
test('successfully authorize system actions', async () => {
const actionType: jest.Mocked<ActionType> = {
id: '.cases',
@@ -841,6 +913,53 @@ test('successfully authorize system actions', async () => {
});
});
+test('actionType Executor returns status "error" and an error message', async () => {
+ const actionType: jest.Mocked<ActionType> = {
+ id: 'test',
+ name: 'Test',
+ minimumLicenseRequired: 'basic',
+ supportedFeatureIds: ['alerting'],
+ validate: {
+ config: { schema: schema.any() },
+ secrets: { schema: schema.any() },
+ params: { schema: schema.any() },
+ },
+ executor: jest.fn().mockReturnValue({
+ actionId: 'test',
+ status: 'error',
+ message: 'test error message',
+ retry: true,
+ }),
+ };
+ const actionSavedObject = {
+ id: '1',
+ type: 'action',
+ attributes: {
+ name: '1',
+ actionTypeId: 'test',
+ config: {
+ bar: true,
+ },
+ secrets: {
+ baz: true,
+ },
+ isMissingSecrets: false,
+ },
+ references: [],
+ };
+ encryptedSavedObjectsClient.getDecryptedAsInternalUser.mockResolvedValueOnce(actionSavedObject);
+ actionTypeRegistry.get.mockReturnValueOnce(actionType);
+ const result = await actionExecutor.execute(executeParams);
+
+ expect(result).toEqual({
+ actionId: 'test',
+ errorSource: TaskErrorSource.USER,
+ message: 'test error message',
+ retry: true,
+ status: 'error',
+ });
+});
+
test('Execute of SentinelOne sub-actions require create privilege', async () => {
const actionType: jest.Mocked<ActionType> = {
id: '.sentinelone',
@@ -1060,6 +1179,7 @@ test('throws an error when config is invalid', async () => {
status: 'error',
retry: false,
message: `error validating action type config: [param1]: expected value of type [string] but got [undefined]`,
+ errorSource: TaskErrorSource.FRAMEWORK,
});
});
@@ -1099,6 +1219,7 @@ test('returns an error when connector is invalid', async () => {
status: 'error',
retry: false,
message: `error validating action type connector: config must be defined`,
+ errorSource: TaskErrorSource.FRAMEWORK,
});
});
@@ -1137,6 +1258,7 @@ test('throws an error when params is invalid', async () => {
status: 'error',
retry: false,
message: `error validating action params: [param1]: expected value of type [string] but got [undefined]`,
+ errorSource: TaskErrorSource.FRAMEWORK,
});
});
@@ -1144,9 +1266,13 @@ test('throws an error when failing to load action through savedObjectsClient', a
encryptedSavedObjectsClient.getDecryptedAsInternalUser.mockRejectedValueOnce(
new Error('No access')
);
- await expect(actionExecutor.execute(executeParams)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"No access"`
- );
+
+ try {
+ await actionExecutor.execute(executeParams);
+ } catch (e) {
+ expect(e.message).toBe('No access');
+ expect(getErrorSource(e)).toBe(TaskErrorSource.FRAMEWORK);
+ }
});
test('throws an error if actionType is not enabled', async () => {
@@ -1176,9 +1302,13 @@ test('throws an error if actionType is not enabled', async () => {
actionTypeRegistry.ensureActionTypeEnabled.mockImplementationOnce(() => {
throw new Error('not enabled for test');
});
- await expect(actionExecutor.execute(executeParams)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"not enabled for test"`
- );
+
+ try {
+ await actionExecutor.execute(executeParams);
+ } catch (e) {
+ expect(e.message).toBe('not enabled for test');
+ expect(getErrorSource(e)).toBe(TaskErrorSource.FRAMEWORK);
+ }
expect(actionTypeRegistry.ensureActionTypeEnabled).toHaveBeenCalledWith('test');
});
@@ -1278,6 +1408,7 @@ test('should not throws an error if actionType is system action', async () => {
secrets: {},
params: { foo: true },
logger: loggerMock,
+ request: {},
});
});
@@ -1293,11 +1424,15 @@ test('throws an error when passing isESOCanEncrypt with value of false', async (
inMemoryConnectors: [],
getActionsAuthorizationWithRequest,
});
- await expect(
- customActionExecutor.execute(executeParams)
- ).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Unable to execute action because the Encrypted Saved Objects plugin is missing encryption key. Please set xpack.encryptedSavedObjects.encryptionKey in the kibana.yml or use the bin/kibana-encryption-keys command."`
- );
+
+ try {
+ await customActionExecutor.execute(executeParams);
+ } catch (e) {
+ expect(e.message).toBe(
+ 'Unable to execute action because the Encrypted Saved Objects plugin is missing encryption key. Please set xpack.encryptedSavedObjects.encryptionKey in the kibana.yml or use the bin/kibana-encryption-keys command.'
+ );
+ expect(getErrorSource(e)).toBe(TaskErrorSource.USER);
+ }
});
test('should not throw error if action is preconfigured and isESOCanEncrypt is false', async () => {
@@ -1510,6 +1645,7 @@ test('should not throw error if action is system action and isESOCanEncrypt is f
secrets: {},
params: { foo: true },
logger: loggerMock,
+ request: {},
});
expect(loggerMock.debug).toBeCalledWith(
diff --git a/x-pack/plugins/actions/server/lib/action_executor.ts b/x-pack/plugins/actions/server/lib/action_executor.ts
index 69aab56e4e5b3..afcb96bbde8be 100644
--- a/x-pack/plugins/actions/server/lib/action_executor.ts
+++ b/x-pack/plugins/actions/server/lib/action_executor.ts
@@ -6,7 +6,7 @@
*/
import type { PublicMethodsOf } from '@kbn/utility-types';
-import { Logger, KibanaRequest } from '@kbn/core/server';
+import { KibanaRequest, Logger } from '@kbn/core/server';
import { cloneDeep } from 'lodash';
import { set } from '@kbn/safer-lodash-set';
import { withSpan } from '@kbn/apm-utils';
@@ -14,24 +14,25 @@ import { EncryptedSavedObjectsClient } from '@kbn/encrypted-saved-objects-plugin
import { SpacesServiceStart } from '@kbn/spaces-plugin/server';
import { IEventLogger, SAVED_OBJECT_REL_PRIMARY } from '@kbn/event-log-plugin/server';
import { SecurityPluginStart } from '@kbn/security-plugin/server';
+import { createTaskRunError, TaskErrorSource } from '@kbn/task-manager-plugin/server';
import { getGenAiTokenTracking, shouldTrackGenAiToken } from './gen_ai_token_tracking';
import {
- validateParams,
validateConfig,
- validateSecrets,
validateConnector,
+ validateParams,
+ validateSecrets,
} from './validate_with_schema';
import {
ActionType,
- ActionTypeExecutorResult,
+ ActionTypeConfig,
ActionTypeExecutorRawResult,
+ ActionTypeExecutorResult,
ActionTypeRegistryContract,
+ ActionTypeSecrets,
GetServicesFunction,
InMemoryConnector,
RawAction,
ValidatorServices,
- ActionTypeSecrets,
- ActionTypeConfig,
} from '../types';
import { EVENT_LOG_ACTIONS } from '../constants/event_log';
import { ActionExecutionSource } from './action_execution_source';
@@ -147,7 +148,11 @@ export class ActionExecutor {
}
if (!actionTypeRegistry.isActionExecutable(actionId, actionTypeId, { notifyUsage: true })) {
- actionTypeRegistry.ensureActionTypeEnabled(actionTypeId);
+ try {
+ actionTypeRegistry.ensureActionTypeEnabled(actionTypeId);
+ } catch (e) {
+ throw createTaskRunError(e, TaskErrorSource.FRAMEWORK);
+ }
}
const actionType = actionTypeRegistry.get(actionTypeId);
const configurationUtilities = actionTypeRegistry.getUtils();
@@ -259,12 +264,14 @@ export class ActionExecutor {
configurationUtilities,
logger,
source,
+ ...(actionType.isSystemActionType ? { request } : {}),
});
+
+ if (rawResult && rawResult.status === 'error') {
+ rawResult.errorSource = TaskErrorSource.USER;
+ }
} catch (err) {
- if (
- err.reason === ActionExecutionErrorReason.Validation ||
- err.reason === ActionExecutionErrorReason.Authorization
- ) {
+ if (err.reason === ActionExecutionErrorReason.Authorization) {
rawResult = err.result;
} else {
rawResult = {
@@ -274,6 +281,7 @@ export class ActionExecutor {
serviceMessage: err.message,
error: err,
retry: true,
+ errorSource: TaskErrorSource.USER,
};
}
}
@@ -450,31 +458,37 @@ export class ActionExecutor {
}
if (!this.isESOCanEncrypt) {
- throw new Error(
- `Unable to execute action because the Encrypted Saved Objects plugin is missing encryption key. Please set xpack.encryptedSavedObjects.encryptionKey in the kibana.yml or use the bin/kibana-encryption-keys command.`
+ throw createTaskRunError(
+ new Error(
+ `Unable to execute action because the Encrypted Saved Objects plugin is missing encryption key. Please set xpack.encryptedSavedObjects.encryptionKey in the kibana.yml or use the bin/kibana-encryption-keys command.`
+ ),
+ TaskErrorSource.USER
);
}
- const rawAction = await encryptedSavedObjectsClient.getDecryptedAsInternalUser(
- 'action',
- actionId,
- {
- namespace: namespace === 'default' ? undefined : namespace,
- }
- );
-
- const {
- attributes: { secrets, actionTypeId, config, name },
- } = rawAction;
+ try {
+ const rawAction = await encryptedSavedObjectsClient.getDecryptedAsInternalUser(
+ 'action',
+ actionId,
+ {
+ namespace: namespace === 'default' ? undefined : namespace,
+ }
+ );
+ const {
+ attributes: { secrets, actionTypeId, config, name },
+ } = rawAction;
- return {
- actionTypeId,
- name,
- config,
- secrets,
- actionId,
- rawAction: rawAction.attributes,
- };
+ return {
+ actionTypeId,
+ name,
+ config,
+ secrets,
+ actionId,
+ rawAction: rawAction.attributes,
+ };
+ } catch (e) {
+ throw createTaskRunError(e, TaskErrorSource.FRAMEWORK);
+ }
}
}
@@ -539,6 +553,7 @@ function validateAction(
status: 'error',
message: err.message,
retry: !!taskInfo,
+ errorSource: TaskErrorSource.FRAMEWORK,
});
}
}
@@ -584,6 +599,7 @@ const ensureAuthorizedToExecute = async ({
status: 'error',
message: error.message,
retry: false,
+ errorSource: TaskErrorSource.USER,
});
}
};
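The executor now classifies failures where they are raised: connector loading and validation problems become FRAMEWORK errors, while missing encryption keys, authorization failures, and error results returned by the connector itself become USER errors. A standalone sketch of the wrap-and-classify pattern; `TaskRunError`, `createTaskRunError`, and `getErrorSource` here are local stand-ins for the task-manager utilities imported above.

```ts
enum TaskErrorSource {
  FRAMEWORK = 'framework',
  USER = 'user',
}

// Stand-in for createTaskRunError: attach a source to an existing error.
class TaskRunError extends Error {
  constructor(cause: Error, public readonly errorSource: TaskErrorSource) {
    super(cause.message);
    this.stack = cause.stack;
  }
}
const createTaskRunError = (e: Error, source: TaskErrorSource) => new TaskRunError(e, source);

// Stand-in for getErrorSource: undefined when the error was never classified.
const getErrorSource = (e: unknown): TaskErrorSource | undefined =>
  e instanceof TaskRunError ? e.errorSource : undefined;

// Pattern from the diff: failures while loading the connector are framework-owned.
async function loadConnector(load: () => Promise<string>): Promise<string> {
  try {
    return await load();
  } catch (e) {
    throw createTaskRunError(e as Error, TaskErrorSource.FRAMEWORK);
  }
}

async function demo() {
  try {
    await loadConnector(async () => {
      throw new Error('No access');
    });
  } catch (e) {
    console.log((e as Error).message, getErrorSource(e)); // "No access framework"
  }
}
void demo();
```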
diff --git a/x-pack/plugins/actions/server/lib/get_token_count_from_openai_stream.ts b/x-pack/plugins/actions/server/lib/get_token_count_from_openai_stream.ts
index 0091faca468e3..0aa5fca22d0ff 100644
--- a/x-pack/plugins/actions/server/lib/get_token_count_from_openai_stream.ts
+++ b/x-pack/plugins/actions/server/lib/get_token_count_from_openai_stream.ts
@@ -9,7 +9,7 @@ import { encode } from 'gpt-tokenizer';
import { isEmpty, omitBy } from 'lodash';
import { Readable } from 'stream';
import { finished } from 'stream/promises';
-import { CreateChatCompletionRequest } from 'openai';
+import type OpenAI from 'openai';
import { Logger } from '@kbn/logging';
export async function getTokenCountFromOpenAIStream({
@@ -25,7 +25,9 @@ export async function getTokenCountFromOpenAIStream({
prompt: number;
completion: number;
}> {
- const chatCompletionRequest = JSON.parse(body) as CreateChatCompletionRequest;
+ const chatCompletionRequest = JSON.parse(
+ body
+ ) as OpenAI.ChatCompletionCreateParams.ChatCompletionCreateParamsStreaming;
// per https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
const tokensFromMessages = encode(
@@ -33,9 +35,9 @@ export async function getTokenCountFromOpenAIStream({
.map(
(msg) =>
`<|start|>${msg.role}\n${msg.content}\n${
- msg.name
+ 'name' in msg
? msg.name
- : msg.function_call
+ : 'function_call' in msg && msg.function_call
? msg.function_call.name + '\n' + msg.function_call.arguments
: ''
}<|end|>`
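The tokenizer change keeps the counting approach: frame each chat message as `<|start|>…<|end|>`, concatenate, and measure with `encode` from `gpt-tokenizer`. A trimmed-down sketch of that calculation with a simplified message shape (requires the `gpt-tokenizer` package):

```ts
import { encode } from 'gpt-tokenizer';

// Simplified chat message shape; the real code uses the OpenAI SDK types.
interface ChatMessage {
  role: string;
  content: string;
  name?: string;
  function_call?: { name: string; arguments: string };
}

// Mirrors the counting in the diff: frame each message, concatenate, and encode.
function countPromptTokens(messages: ChatMessage[]): number {
  const framed = messages
    .map(
      (msg) =>
        `<|start|>${msg.role}\n${msg.content}\n${
          msg.name
            ? msg.name
            : msg.function_call
            ? msg.function_call.name + '\n' + msg.function_call.arguments
            : ''
        }<|end|>`
    )
    .join('\n');
  return encode(framed).length;
}

console.log(
  countPromptTokens([
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Count my tokens, please.' },
  ])
);
```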
diff --git a/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts b/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts
index e8c66cff784c9..ecb3f75fc7794 100644
--- a/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts
+++ b/x-pack/plugins/actions/server/lib/task_runner_factory.test.ts
@@ -7,7 +7,7 @@
import sinon from 'sinon';
import { ActionExecutor } from './action_executor';
-import { ConcreteTaskInstance, TaskStatus } from '@kbn/task-manager-plugin/server';
+import { ConcreteTaskInstance, TaskErrorSource, TaskStatus } from '@kbn/task-manager-plugin/server';
import { TaskRunnerFactory } from './task_runner_factory';
import { actionTypeRegistryMock } from '../action_type_registry.mock';
import { actionExecutorMock } from './action_executor.mock';
@@ -25,6 +25,7 @@ import { inMemoryMetricsMock } from '../monitoring/in_memory_metrics.mock';
import { IN_MEMORY_METRICS } from '../monitoring';
import { pick } from 'lodash';
import {
+ getErrorSource,
isRetryableError,
isUnrecoverableError,
} from '@kbn/task-manager-plugin/server/task_running';
@@ -544,12 +545,13 @@ describe('Task Runner Factory', () => {
message: 'Error message',
data: { foo: true },
retry: false,
+ errorSource: TaskErrorSource.USER,
});
try {
await taskRunner.run();
- throw new Error('Should have thrown');
} catch (e) {
+ expect(getErrorSource(e)).toBe(TaskErrorSource.USER);
expect(isRetryableError(e)).toEqual(false);
}
});
@@ -853,6 +855,7 @@ describe('Task Runner Factory', () => {
throw new Error('Should have thrown');
} catch (e) {
expect(isUnrecoverableError(e)).toEqual(true);
+ expect(getErrorSource(e)).toBe(TaskErrorSource.USER);
}
});
@@ -887,6 +890,7 @@ describe('Task Runner Factory', () => {
message: 'Error message',
data: { foo: true },
retry: false,
+ errorSource: TaskErrorSource.FRAMEWORK,
});
let err;
@@ -900,6 +904,47 @@ describe('Task Runner Factory', () => {
expect(taskRunnerFactoryInitializerParams.logger.error as jest.Mock).toHaveBeenCalledWith(
`Action '2' failed: Error message`
);
+ expect(getErrorSource(err)).toBe(TaskErrorSource.FRAMEWORK);
+ });
+
+ test(`fallbacks to FRAMEWORK error if ActionExecutor does not return any type of source'`, async () => {
+ const taskRunner = taskRunnerFactory.create({
+ taskInstance: {
+ ...mockedTaskInstance,
+ attempts: 0,
+ },
+ });
+
+ mockedEncryptedSavedObjectsClient.getDecryptedAsInternalUser.mockResolvedValueOnce({
+ id: '3',
+ type: 'action_task_params',
+ attributes: {
+ actionId: '2',
+ params: { baz: true },
+ executionId: '123abc',
+ apiKey: Buffer.from('123:abc').toString('base64'),
+ },
+ references: [
+ {
+ id: '2',
+ name: 'actionRef',
+ type: 'action',
+ },
+ ],
+ });
+ mockedActionExecutor.execute.mockResolvedValueOnce({
+ status: 'error',
+ actionId: '2',
+ message: 'Error message',
+ data: { foo: true },
+ retry: false,
+ });
+
+ try {
+ await taskRunner.run();
+ } catch (e) {
+ expect(getErrorSource(e)).toBe(TaskErrorSource.FRAMEWORK);
+ }
});
test('will rethrow the error if the error is thrown instead of returned', async () => {
@@ -941,6 +986,7 @@ describe('Task Runner Factory', () => {
`Action '2' failed: Fail`
);
expect(thrownError).toEqual(err);
+ expect(getErrorSource(err)).toBe(TaskErrorSource.FRAMEWORK);
});
test('increments monitoring metrics after execution', async () => {
@@ -1158,6 +1204,11 @@ describe('Task Runner Factory', () => {
],
});
- await expect(taskRunner.run()).rejects.toThrow('test');
+ try {
+ await taskRunner.run();
+ } catch (e) {
+ expect(getErrorSource(e)).toBe(TaskErrorSource.FRAMEWORK);
+ expect(e).toEqual(error);
+ }
});
});
diff --git a/x-pack/plugins/actions/server/lib/task_runner_factory.ts b/x-pack/plugins/actions/server/lib/task_runner_factory.ts
index 22dc57d7dc0d3..cd22ea324ba5c 100644
--- a/x-pack/plugins/actions/server/lib/task_runner_factory.ts
+++ b/x-pack/plugins/actions/server/lib/task_runner_factory.ts
@@ -19,13 +19,16 @@ import {
SavedObjectReference,
} from '@kbn/core/server';
import {
+ createTaskRunError,
LoadIndirectParamsResult,
RunContext,
+ TaskErrorSource,
throwRetryableError,
throwUnrecoverableError,
} from '@kbn/task-manager-plugin/server';
import { EncryptedSavedObjectsClient } from '@kbn/encrypted-saved-objects-plugin/server';
import { LoadedIndirectParams } from '@kbn/task-manager-plugin/server/task';
+import { getErrorSource } from '@kbn/task-manager-plugin/server/task_running';
import { ActionExecutorContract, ActionInfo } from './action_executor';
import {
ActionTaskExecutorParams,
@@ -44,7 +47,7 @@ import {
} from './action_execution_source';
import { RelatedSavedObjects, validatedRelatedSavedObjects } from './related_saved_objects';
import { injectSavedObjectReferences } from './action_task_params_utils';
-import { InMemoryMetrics, IN_MEMORY_METRICS } from '../monitoring';
+import { IN_MEMORY_METRICS, InMemoryMetrics } from '../monitoring';
import { ActionTypeDisabledError } from './errors';
export interface TaskRunnerContext {
@@ -135,7 +138,8 @@ export class TaskRunnerFactory {
},
};
return actionData;
- } catch (error) {
+ } catch (err) {
+ const error = createTaskRunError(err, getErrorSource(err) || TaskErrorSource.FRAMEWORK);
actionData = { error };
return { error };
}
@@ -187,9 +191,9 @@ export class TaskRunnerFactory {
logger.error(`Action '${actionId}' failed: ${e.message}`);
if (e instanceof ActionTypeDisabledError) {
// We'll stop re-trying due to action being forbidden
- throwUnrecoverableError(e);
+ throwUnrecoverableError(createTaskRunError(e, TaskErrorSource.USER));
}
- throw e;
+ throw createTaskRunError(e, getErrorSource(e) || TaskErrorSource.FRAMEWORK);
}
inMemoryMetrics.increment(IN_MEMORY_METRICS.ACTION_EXECUTIONS);
@@ -199,7 +203,7 @@ export class TaskRunnerFactory {
// Task manager error handler only kicks in when an error thrown (at this time)
// So what we have to do is throw when the return status is `error`.
throw throwRetryableError(
- new Error(executorResult.message),
+ createTaskRunError(new Error(executorResult.message), executorResult.errorSource),
executorResult.retry as boolean | Date
);
}
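At the task-runner level an `error` result still has to be thrown so retry handling engages; the thrown error now carries the executor-supplied source and falls back to FRAMEWORK when none was set. A standalone sketch of that conversion with stand-in types rather than the task-manager API:

```ts
enum TaskErrorSource {
  FRAMEWORK = 'framework',
  USER = 'user',
}

interface ExecutorResult {
  status: 'ok' | 'error';
  message?: string;
  retry?: boolean | Date | null;
  errorSource?: TaskErrorSource;
}

class ClassifiedError extends Error {
  constructor(
    message: string,
    public readonly errorSource: TaskErrorSource,
    public readonly retry: boolean | Date
  ) {
    super(message);
  }
}

// Throw when the executor reported an error, defaulting the source to FRAMEWORK.
function throwIfError(result: ExecutorResult): void {
  if (result.status === 'error') {
    throw new ClassifiedError(
      result.message ?? 'unknown error',
      result.errorSource ?? TaskErrorSource.FRAMEWORK,
      result.retry ?? false
    );
  }
}

try {
  throwIfError({ status: 'error', message: 'Error message', retry: false });
} catch (e) {
  console.log((e as ClassifiedError).errorSource); // 'framework'
}
```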
diff --git a/x-pack/plugins/actions/server/types.ts b/x-pack/plugins/actions/server/types.ts
index 6a038d5092363..0d9cc99ac6186 100644
--- a/x-pack/plugins/actions/server/types.ts
+++ b/x-pack/plugins/actions/server/types.ts
@@ -76,6 +76,7 @@ export interface ActionTypeExecutorOptions<
taskInfo?: TaskInfo;
configurationUtilities: ActionsConfigurationUtilities;
source?: ActionExecutionSource;
+ request?: KibanaRequest;
}
export type ActionResult = Connector;
diff --git a/x-pack/plugins/alerting/public/application/maintenance_windows.tsx b/x-pack/plugins/alerting/public/application/maintenance_windows.tsx
index 6bbe3a080943e..bb2ba1847abac 100644
--- a/x-pack/plugins/alerting/public/application/maintenance_windows.tsx
+++ b/x-pack/plugins/alerting/public/application/maintenance_windows.tsx
@@ -78,7 +78,7 @@ export const renderApp = ({
}) => {
const { element, history, theme$ } = mountParams;
const i18nCore = core.i18n;
- const isDarkMode = core.uiSettings.get('theme:darkMode');
+ const isDarkMode = core.theme.getTheme().darkMode;
const queryClient = new QueryClient();
diff --git a/x-pack/plugins/alerting/server/alerts_client/alerts_client_error.ts b/x-pack/plugins/alerting/server/alerts_client/alerts_client_error.ts
new file mode 100644
index 0000000000000..e553618ea921d
--- /dev/null
+++ b/x-pack/plugins/alerting/server/alerts_client/alerts_client_error.ts
@@ -0,0 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export class AlertsClientError extends Error {
+ constructor() {
+ super(
+ `Expected alertsClient not to be null! There may have been an issue installing alert resources.`
+ );
+ Object.setPrototypeOf(this, AlertsClientError.prototype);
+ }
+}
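The explicit `Object.setPrototypeOf` call is what keeps `instanceof AlertsClientError` working when TypeScript downlevels the class to ES5, where extending the built-in `Error` otherwise loses the subclass prototype. A self-contained sketch of the same pattern:

```ts
class AlertsClientError extends Error {
  constructor() {
    super(
      'Expected alertsClient not to be null! There may have been an issue installing alert resources.'
    );
    // Restores the prototype chain so `instanceof AlertsClientError` holds
    // even when the compiled target downlevels class extension of built-ins.
    Object.setPrototypeOf(this, AlertsClientError.prototype);
  }
}

const err = new AlertsClientError();
console.log(err instanceof AlertsClientError); // true
console.log(err instanceof Error); // true
```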
diff --git a/x-pack/plugins/alerting/server/alerts_client/index.ts b/x-pack/plugins/alerting/server/alerts_client/index.ts
index a1c0a309e0dc4..c141ac8167e9b 100644
--- a/x-pack/plugins/alerting/server/alerts_client/index.ts
+++ b/x-pack/plugins/alerting/server/alerts_client/index.ts
@@ -9,3 +9,4 @@ export { type LegacyAlertsClientParams, LegacyAlertsClient } from './legacy_aler
export { AlertsClient } from './alerts_client';
export type { AlertRuleData } from './types';
export { sanitizeBulkErrorResponse } from './lib';
+export { AlertsClientError } from './alerts_client_error';
diff --git a/x-pack/plugins/alerting/server/index.ts b/x-pack/plugins/alerting/server/index.ts
index 5be648354e61f..916d1bd11a384 100644
--- a/x-pack/plugins/alerting/server/index.ts
+++ b/x-pack/plugins/alerting/server/index.ts
@@ -69,7 +69,7 @@ export {
isValidAlertIndexName,
InstallShutdownError,
} from './alerts_service';
-export { sanitizeBulkErrorResponse } from './alerts_client';
+export { sanitizeBulkErrorResponse, AlertsClientError } from './alerts_client';
export { getDataStreamAdapter } from './alerts_service/lib/data_stream_adapter';
export const plugin = async (initContext: PluginInitializerContext) => {
diff --git a/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.test.ts b/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.test.ts
index 5f5a120252b2a..e1febe893d4d6 100644
--- a/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.test.ts
+++ b/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.test.ts
@@ -13,6 +13,17 @@ import { createWrappedScopedClusterClientFactory } from './wrap_scoped_cluster_c
const esQuery = {
body: { query: { bool: { filter: { range: { '@timestamp': { gte: 0 } } } } } },
};
+const eqlQuery = {
+ index: 'foo',
+ query: 'process where process.name == "regsvr32.exe"',
+};
+const esqlQueryRequest = {
+ method: 'POST',
+ path: '/_esql',
+ body: {
+ query: 'from .kibana_task_manager',
+ },
+};
const logger = loggingSystemMock.create().get();
@@ -36,180 +47,358 @@ describe('wrapScopedClusterClient', () => {
jest.resetAllMocks();
});
- test('searches with asInternalUser when specified', async () => {
- const abortController = new AbortController();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
-
- scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
- const asInternalUserWrappedSearchFn = childClient.search;
-
- const wrappedSearchClient = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
- }).client();
- await wrappedSearchClient.asInternalUser.search(esQuery);
-
- expect(asInternalUserWrappedSearchFn).toHaveBeenCalledWith(esQuery, {
- signal: abortController.signal,
+ describe('search', () => {
+ test('uses asInternalUser when specified', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ const asInternalUserWrappedSearchFn = childClient.search;
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+ await wrappedSearchClient.asInternalUser.search(esQuery);
+
+ expect(asInternalUserWrappedSearchFn).toHaveBeenCalledWith(esQuery, {
+ signal: abortController.signal,
+ });
+ expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
});
- expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
- expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
- });
- test('searches with asCurrentUser when specified', async () => {
- const abortController = new AbortController();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
+ test('uses asCurrentUser when specified', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients(true);
- scopedClusterClient.asCurrentUser.child.mockReturnValue(childClient as unknown as Client);
- const asCurrentUserWrappedSearchFn = childClient.search;
-
- const wrappedSearchClient = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
- }).client();
- await wrappedSearchClient.asCurrentUser.search(esQuery);
-
- expect(asCurrentUserWrappedSearchFn).toHaveBeenCalledWith(esQuery, {
- signal: abortController.signal,
+ const asCurrentUserWrappedSearchFn = childClient.search;
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+ await wrappedSearchClient.asCurrentUser.search(esQuery);
+
+ expect(asCurrentUserWrappedSearchFn).toHaveBeenCalledWith(esQuery, {
+ signal: abortController.signal,
+ });
+ expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
});
- expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
- expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
- });
- test('uses search options when specified', async () => {
- const abortController = new AbortController();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
+ test('uses search options when specified', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ const asInternalUserWrappedSearchFn = childClient.search;
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+ await wrappedSearchClient.asInternalUser.search(esQuery, { ignore: [404] });
+
+ expect(asInternalUserWrappedSearchFn).toHaveBeenCalledWith(esQuery, {
+ ignore: [404],
+ signal: abortController.signal,
+ });
+ expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
+ });
- scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
- const asInternalUserWrappedSearchFn = childClient.search;
-
- const wrappedSearchClient = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
- }).client();
- await wrappedSearchClient.asInternalUser.search(esQuery, { ignore: [404] });
-
- expect(asInternalUserWrappedSearchFn).toHaveBeenCalledWith(esQuery, {
- ignore: [404],
- signal: abortController.signal,
+ test('re-throws error when an error is thrown', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ childClient.search.mockRejectedValueOnce(new Error('something went wrong!'));
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ wrappedSearchClient.asInternalUser.search
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"something went wrong!"`);
});
- expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
- expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
- });
- test('re-throws error when search throws error', async () => {
- const abortController = new AbortController();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
+ test('handles empty search result object', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
- scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
- const asInternalUserWrappedSearchFn = childClient.search;
-
- asInternalUserWrappedSearchFn.mockRejectedValueOnce(new Error('something went wrong!'));
- const wrappedSearchClient = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
- }).client();
-
- await expect(
- wrappedSearchClient.asInternalUser.search
- ).rejects.toThrowErrorMatchingInlineSnapshot(`"something went wrong!"`);
- });
+ const asInternalUserWrappedSearchFn = childClient.search;
+ // @ts-ignore incomplete return type
+ asInternalUserWrappedSearchFn.mockResolvedValue({});
- test('handles empty search result object', async () => {
- const abortController = new AbortController();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
+ const wrappedSearchClientFactory = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ });
- scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
- const asInternalUserWrappedSearchFn = childClient.search;
- // @ts-ignore incomplete return type
- asInternalUserWrappedSearchFn.mockResolvedValue({});
-
- const wrappedSearchClientFactory = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
+ const wrappedSearchClient = wrappedSearchClientFactory.client();
+ await wrappedSearchClient.asInternalUser.search(esQuery);
+
+ expect(asInternalUserWrappedSearchFn).toHaveBeenCalledTimes(1);
+ expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
+
+ const stats = wrappedSearchClientFactory.getMetrics();
+ expect(stats.numSearches).toEqual(1);
+ expect(stats.esSearchDurationMs).toEqual(0);
+ });
+
+ test('keeps track of number of queries', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ const asInternalUserWrappedSearchFn = childClient.search;
+ // @ts-ignore incomplete return type
+ asInternalUserWrappedSearchFn.mockResolvedValue({ took: 333 });
+
+ const wrappedSearchClientFactory = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ });
+ const wrappedSearchClient = wrappedSearchClientFactory.client();
+ await wrappedSearchClient.asInternalUser.search(esQuery);
+ await wrappedSearchClient.asInternalUser.search(esQuery);
+ await wrappedSearchClient.asInternalUser.search(esQuery);
+
+ expect(asInternalUserWrappedSearchFn).toHaveBeenCalledTimes(3);
+ expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
+
+ const stats = wrappedSearchClientFactory.getMetrics();
+ expect(stats.numSearches).toEqual(3);
+ expect(stats.esSearchDurationMs).toEqual(999);
+
+ expect(logger.debug).toHaveBeenCalledWith(
+ `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
+ );
});
- const wrappedSearchClient = wrappedSearchClientFactory.client();
- await wrappedSearchClient.asInternalUser.search(esQuery);
+ test('throws error when search throws abort error', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
- expect(asInternalUserWrappedSearchFn).toHaveBeenCalledTimes(1);
- expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
- expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
+ abortController.abort();
+ childClient.search.mockRejectedValueOnce(new Error('Request has been aborted by the user'));
- const stats = wrappedSearchClientFactory.getMetrics();
- expect(stats.numSearches).toEqual(1);
- expect(stats.esSearchDurationMs).toEqual(0);
+ const abortableSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ abortableSearchClient.asInternalUser.search
+ ).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"Search has been aborted due to cancelled execution"`
+ );
+ });
});
- test('keeps track of number of queries', async () => {
- const abortController = new AbortController();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
+ describe('eql.search', () => {
+ test('re-throws error when an error is thrown', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
- scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
- const asInternalUserWrappedSearchFn = childClient.search;
- // @ts-ignore incomplete return type
- asInternalUserWrappedSearchFn.mockResolvedValue({ took: 333 });
-
- const wrappedSearchClientFactory = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
+ childClient.eql.search.mockRejectedValueOnce(new Error('something went wrong!'));
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ wrappedSearchClient.asInternalUser.eql.search
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"something went wrong!"`);
+ });
+
+ test('keeps track of number of queries', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ const asInternalUserWrappedEqlSearchFn = childClient.eql.search;
+ // @ts-ignore incomplete return type
+ asInternalUserWrappedEqlSearchFn.mockResolvedValue({ took: 333 });
+
+ const wrappedSearchClientFactory = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ });
+ const wrappedSearchClient = wrappedSearchClientFactory.client();
+ await wrappedSearchClient.asInternalUser.eql.search(eqlQuery);
+ await wrappedSearchClient.asInternalUser.eql.search(eqlQuery);
+ await wrappedSearchClient.asInternalUser.eql.search(eqlQuery);
+
+ expect(asInternalUserWrappedEqlSearchFn).toHaveBeenCalledTimes(3);
+ expect(scopedClusterClient.asInternalUser.eql.search).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.eql.search).not.toHaveBeenCalled();
+
+ const stats = wrappedSearchClientFactory.getMetrics();
+ expect(stats.numSearches).toEqual(3);
+ expect(stats.esSearchDurationMs).toEqual(999);
+
+ expect(logger.debug).toHaveBeenCalledWith(
+ `executing eql query for rule .test-rule-type:abcdefg in space my-space - {\"index\":\"foo\",\"query\":\"process where process.name == \\\"regsvr32.exe\\\"\"} - with options {}`
+ );
+ });
+
+ test('throws error when eql search throws abort error', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ abortController.abort();
+ childClient.eql.search.mockRejectedValueOnce(
+ new Error('Request has been aborted by the user')
+ );
+
+ const abortableSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ abortableSearchClient.asInternalUser.eql.search
+ ).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"EQL search has been aborted due to cancelled execution"`
+ );
});
- const wrappedSearchClient = wrappedSearchClientFactory.client();
- await wrappedSearchClient.asInternalUser.search(esQuery);
- await wrappedSearchClient.asInternalUser.search(esQuery);
- await wrappedSearchClient.asInternalUser.search(esQuery);
-
- expect(asInternalUserWrappedSearchFn).toHaveBeenCalledTimes(3);
- expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
- expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
-
- const stats = wrappedSearchClientFactory.getMetrics();
- expect(stats.numSearches).toEqual(3);
- expect(stats.esSearchDurationMs).toEqual(999);
-
- expect(logger.debug).toHaveBeenCalledWith(
- `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
- );
});
- test('throws error when search throws abort error', async () => {
- const abortController = new AbortController();
- abortController.abort();
- const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
- const childClient = elasticsearchServiceMock.createElasticsearchClient();
+ describe('transport.request', () => {
+ describe('ES|QL', () => {
+ test('re-throws error when an error is thrown', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ childClient.transport.request.mockRejectedValueOnce(new Error('something went wrong!'));
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ wrappedSearchClient.asInternalUser.transport.request({ method: 'POST', path: '/_esql' })
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"something went wrong!"`);
+ });
+
+ test('keeps track of number of queries', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ const asInternalUserWrappedRequestFn = childClient.transport.request;
+ // @ts-ignore incomplete return type
+ asInternalUserWrappedRequestFn.mockResolvedValue({});
+
+ const wrappedSearchClientFactory = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ });
+ const wrappedSearchClient = wrappedSearchClientFactory.client();
+ await wrappedSearchClient.asInternalUser.transport.request(esqlQueryRequest);
+ await wrappedSearchClient.asInternalUser.transport.request(esqlQueryRequest);
+ await wrappedSearchClient.asInternalUser.transport.request(esqlQueryRequest);
+
+ expect(asInternalUserWrappedRequestFn).toHaveBeenCalledTimes(3);
+ expect(scopedClusterClient.asInternalUser.transport.request).not.toHaveBeenCalled();
+ expect(scopedClusterClient.asCurrentUser.transport.request).not.toHaveBeenCalled();
+
+ const stats = wrappedSearchClientFactory.getMetrics();
+ expect(stats.numSearches).toEqual(3);
+ expect(stats.totalSearchDurationMs).toBeGreaterThan(-1);
+
+ expect(logger.debug).toHaveBeenCalledWith(
+ `executing ES|QL query for rule .test-rule-type:abcdefg in space my-space - {\"method\":\"POST\",\"path\":\"/_esql\",\"body\":{\"query\":\"from .kibana_task_manager\"}} - with options {}`
+ );
+ });
+
+ test('throws error when es|ql search throws abort error', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ abortController.abort();
+ childClient.transport.request.mockRejectedValueOnce(
+ new Error('Request has been aborted by the user')
+ );
+
+ const abortableSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ abortableSearchClient.asInternalUser.transport.request({ method: 'POST', path: '/_esql' })
+ ).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"ES|QL search has been aborted due to cancelled execution"`
+ );
+ });
+ });
- scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
- childClient.search.mockRejectedValueOnce(new Error('Request has been aborted by the user'));
-
- const abortableSearchClient = createWrappedScopedClusterClientFactory({
- scopedClusterClient,
- rule,
- logger,
- abortController,
- }).client();
-
- await expect(
- abortableSearchClient.asInternalUser.search
- ).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Search has been aborted due to cancelled execution"`
- );
+ test('re-throws error when an error is thrown', async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ childClient.transport.request.mockRejectedValueOnce(new Error('something went wrong!'));
+
+ const wrappedSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ wrappedSearchClient.asInternalUser.transport.request({ method: '', path: '' })
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"something went wrong!"`);
+ });
+
+ test(`doesn't throw the abort error when a non es|ql request throws an error`, async () => {
+ const { abortController, scopedClusterClient, childClient } = getMockClusterClients();
+
+ abortController.abort();
+ childClient.transport.request.mockRejectedValueOnce(new Error('Some other error'));
+
+ const abortableSearchClient = createWrappedScopedClusterClientFactory({
+ scopedClusterClient,
+ rule,
+ logger,
+ abortController,
+ }).client();
+
+ await expect(
+ abortableSearchClient.asInternalUser.transport.request({
+ method: 'GET',
+ path: '/_cat/indices',
+ })
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"Some other error"`);
+ });
});
});
+
+function getMockClusterClients(asCurrentUser: boolean = false) {
+ const abortController = new AbortController();
+ const scopedClusterClient = elasticsearchServiceMock.createScopedClusterClient();
+ const childClient = elasticsearchServiceMock.createElasticsearchClient();
+
+ if (asCurrentUser) {
+ scopedClusterClient.asCurrentUser.child.mockReturnValue(childClient as unknown as Client);
+ } else {
+ scopedClusterClient.asInternalUser.child.mockReturnValue(childClient as unknown as Client);
+ }
+
+ return { abortController, scopedClusterClient, childClient };
+}
diff --git a/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.ts b/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.ts
index e1156d177116c..9ddd22a292b4a 100644
--- a/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.ts
+++ b/x-pack/plugins/alerting/server/lib/wrap_scoped_cluster_client.ts
@@ -10,15 +10,19 @@ import {
TransportResult,
TransportRequestOptionsWithMeta,
TransportRequestOptionsWithOutMeta,
+ TransportRequestParams,
} from '@elastic/elasticsearch';
import type {
SearchRequest,
SearchResponse,
AggregateName,
+ EqlSearchRequest,
+ EqlSearchResponse,
} from '@elastic/elasticsearch/lib/api/types';
import type {
SearchRequest as SearchRequestWithBody,
AggregationsAggregate,
+ EqlSearchRequest as EqlSearchRequestWithBody,
} from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { IScopedClusterClient, ElasticsearchClient, Logger } from '@kbn/core/server';
import { SearchMetrics, RuleInfo } from './types';
@@ -89,11 +93,133 @@ function wrapEsClient(opts: WrapEsClientOpts): ElasticsearchClient {
const wrappedClient = esClient.child({});
// Mutating the functions we want to wrap
+ wrappedClient.transport.request = getWrappedTransportRequestFn({
+ esClient: wrappedClient,
+ ...rest,
+ });
wrappedClient.search = getWrappedSearchFn({ esClient: wrappedClient, ...rest });
+ wrappedClient.eql.search = getWrappedEqlSearchFn({ esClient: wrappedClient, ...rest });
return wrappedClient;
}
+function getWrappedTransportRequestFn(opts: WrapEsClientOpts) {
+ const originalRequestFn = opts.esClient.transport.request;
+
+ // A bunch of overloads to make TypeScript happy
+ async function request<TResponse = unknown>(
+ params: TransportRequestParams,
+ options?: TransportRequestOptionsWithOutMeta
+ ): Promise<TResponse>;
+ async function request<TResponse = unknown, TContext = unknown>(
+ params: TransportRequestParams,
+ options?: TransportRequestOptionsWithMeta
+ ): Promise<TransportResult<TResponse, TContext>>;
+ async function request<TResponse = unknown>(
+ params: TransportRequestParams,
+ options?: TransportRequestOptions
+ ): Promise<TResponse>;
+ async function request<TResponse = unknown>(
+ params: TransportRequestParams,
+ options?: TransportRequestOptions
+ ): Promise<TResponse | TransportResult<TResponse, unknown>> {
+ // Wrap ES|QL requests with an abort signal
+ if (params.method === 'POST' && params.path === '/_esql') {
+ try {
+ const requestOptions = options ?? {};
+ const start = Date.now();
+ opts.logger.debug(
+ `executing ES|QL query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
+ opts.rule.spaceId
+ } - ${JSON.stringify(params)} - with options ${JSON.stringify(requestOptions)}`
+ );
+ const result = (await originalRequestFn.call(opts.esClient.transport, params, {
+ ...requestOptions,
+ signal: opts.abortController.signal,
+ })) as Promise<TResponse> | TransportResult<TResponse, unknown>;
+
+ const end = Date.now();
+ const durationMs = end - start;
+
+ opts.logMetricsFn({ esSearchDuration: 0, totalSearchDuration: durationMs });
+ return result;
+ } catch (e) {
+ if (opts.abortController.signal.aborted) {
+ throw new Error('ES|QL search has been aborted due to cancelled execution');
+ }
+ throw e;
+ }
+ }
+
+ // No wrap
+ return (await originalRequestFn.call(
+ opts.esClient.transport,
+ params,
+ options
+ )) as Promise<TResponse>;
+ }
+
+ return request;
+}
+
+function getWrappedEqlSearchFn(opts: WrapEsClientOpts) {
+ const originalEqlSearch = opts.esClient.eql.search;
+
+ // A bunch of overloads to make TypeScript happy
+ async function search<TEvent = unknown>(
+ params: EqlSearchRequest | EqlSearchRequestWithBody,
+ options?: TransportRequestOptionsWithOutMeta
+ ): Promise<EqlSearchResponse<TEvent>>;
+ async function search<TEvent = unknown>(
+ params: EqlSearchRequest | EqlSearchRequestWithBody,
+ options?: TransportRequestOptionsWithMeta
+ ): Promise<TransportResult<EqlSearchResponse<TEvent>, unknown>>;
+ async function search<TEvent = unknown>(
+ params: EqlSearchRequest | EqlSearchRequestWithBody,
+ options?: TransportRequestOptions
+ ): Promise<EqlSearchResponse<TEvent>>;
+ async function search<TEvent = unknown>(
+ params: EqlSearchRequest | EqlSearchRequestWithBody,
+ options?: TransportRequestOptions
+ ): Promise<EqlSearchResponse<TEvent> | TransportResult<EqlSearchResponse<TEvent>, unknown>> {
+ try {
+ const searchOptions = options ?? {};
+ const start = Date.now();
+ opts.logger.debug(
+ `executing eql query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
+ opts.rule.spaceId
+ } - ${JSON.stringify(params)} - with options ${JSON.stringify(searchOptions)}`
+ );
+ const result = (await originalEqlSearch.call(opts.esClient, params, {
+ ...searchOptions,
+ signal: opts.abortController.signal,
+ })) as TransportResult<EqlSearchResponse<TEvent>, unknown> | EqlSearchResponse<TEvent>;
+
+ const end = Date.now();
+ const durationMs = end - start;
+
+ let took: number | undefined = 0;
+ if (searchOptions.meta) {
+ // when meta: true, response is TransportResult<EqlSearchResponse<TEvent>, unknown>
+ took = (result as TransportResult<EqlSearchResponse<TEvent>, unknown>).body.took;
+ } else {
+ // when meta: false, response is EqlSearchResponse<TEvent>
+ took = (result as EqlSearchResponse<TEvent>).took;
+ }
+
+ opts.logMetricsFn({ esSearchDuration: took ?? 0, totalSearchDuration: durationMs });
+ return result;
+ } catch (e) {
+ if (opts.abortController.signal.aborted) {
+ throw new Error('EQL search has been aborted due to cancelled execution');
+ }
+ throw e;
+ }
+ }
+
+ return search;
+}
+
function getWrappedSearchFn(opts: WrapEsClientOpts) {
const originalSearch = opts.esClient.search;
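
The three wrappers above (search, eql.search, and transport.request) share one control flow: call the original client method with the rule execution's abort signal injected, time the call, record metrics, and translate the failure into a clearer message when the abort controller has fired. A minimal, self-contained sketch of that pattern, using illustrative names (wrapWithAbort, MetricsSink) rather than the Kibana helpers:

type MetricsSink = (metrics: { durationMs: number }) => void;

function wrapWithAbort<TParams, TResult>(
  original: (params: TParams, options?: { signal?: AbortSignal }) => Promise<TResult>,
  abortController: AbortController,
  logMetrics: MetricsSink
) {
  return async (params: TParams, options: { signal?: AbortSignal } = {}): Promise<TResult> => {
    const start = Date.now();
    try {
      // Forward the caller's options but force the rule execution's abort signal.
      const result = await original(params, { ...options, signal: abortController.signal });
      logMetrics({ durationMs: Date.now() - start });
      return result;
    } catch (e) {
      // When the controller has aborted, surface a clearer error than the raw transport failure.
      if (abortController.signal.aborted) {
        throw new Error('Search has been aborted due to cancelled execution');
      }
      throw e;
    }
  };
}

The real wrappers add per-API overloads and pull the `took` value out of the response for the esSearchDuration metric, but the try/catch shape is the same.
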
diff --git a/x-pack/plugins/alerting/server/task_runner/execution_handler.ts b/x-pack/plugins/alerting/server/task_runner/execution_handler.ts
index 29f56b630eaf8..3a464c8f30521 100644
--- a/x-pack/plugins/alerting/server/task_runner/execution_handler.ts
+++ b/x-pack/plugins/alerting/server/task_runner/execution_handler.ts
@@ -9,7 +9,11 @@ import type { PublicMethodsOf } from '@kbn/utility-types';
import { Logger } from '@kbn/core/server';
import { ALERT_UUID, getRuleDetailsRoute, triggersActionsRoute } from '@kbn/rule-data-utils';
import { asSavedObjectExecutionSource } from '@kbn/actions-plugin/server';
-import { isEphemeralTaskRejectedDueToCapacityError } from '@kbn/task-manager-plugin/server';
+import {
+ createTaskRunError,
+ isEphemeralTaskRejectedDueToCapacityError,
+ TaskErrorSource,
+} from '@kbn/task-manager-plugin/server';
import {
ExecuteOptions as EnqueueExecutionOptions,
ExecutionResponseItem,
@@ -360,10 +364,15 @@ export class ExecutionHandler<
if (!!bulkActions.length) {
for (const c of chunk(bulkActions, CHUNK_SIZE)) {
- const response = await this.actionsClient!.bulkEnqueueExecution(c);
- if (response.errors) {
+ let enqueueResponse;
+ try {
+ enqueueResponse = await this.actionsClient!.bulkEnqueueExecution(c);
+ } catch (e) {
+ throw createTaskRunError(e, TaskErrorSource.FRAMEWORK);
+ }
+ if (enqueueResponse.errors) {
bulkActionsResponse = bulkActionsResponse.concat(
- response.items.filter(
+ enqueueResponse.items.filter(
(i) => i.response === ExecutionResponseType.QUEUED_ACTIONS_LIMIT_ERROR
)
);
@@ -730,7 +739,13 @@ export class ExecutionHandler<
executionUuid: this.executionId,
};
}
- const alerts = await this.alertsClient.getSummarizedAlerts!(options);
+
+ let alerts;
+ try {
+ alerts = await this.alertsClient.getSummarizedAlerts!(options);
+ } catch (e) {
+ throw createTaskRunError(e, TaskErrorSource.FRAMEWORK);
+ }
/**
* We need to remove all new alerts with maintenance windows retrieved from
diff --git a/x-pack/plugins/alerting/server/task_runner/rule_loader.test.ts b/x-pack/plugins/alerting/server/task_runner/rule_loader.test.ts
index 380d436c95e65..f3417312881e8 100644
--- a/x-pack/plugins/alerting/server/task_runner/rule_loader.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/rule_loader.test.ts
@@ -19,6 +19,7 @@ import { ErrorWithReason, getReasonFromError } from '../lib/error_with_reason';
import { alertingEventLoggerMock } from '../lib/alerting_event_logger/alerting_event_logger.mock';
import { mockedRawRuleSO, mockedRule } from './fixtures';
import { RULE_SAVED_OBJECT_TYPE } from '../saved_objects';
+import { getErrorSource, TaskErrorSource } from '@kbn/task-manager-plugin/server/task_running';
// create mocks
const rulesClient = rulesClientMock.create();
@@ -142,6 +143,7 @@ describe('rule_loader', () => {
} catch (err) {
outcome = 'failure';
expect(getReasonFromError(err)).toBe(RuleExecutionStatusErrorReasons.Disabled);
+ expect(getErrorSource(err)).toBe(TaskErrorSource.FRAMEWORK);
}
expect(outcome).toBe('failure');
});
@@ -162,6 +164,7 @@ describe('rule_loader', () => {
outcome = 'failure';
expect(err.message).toBe('rule-type-not-enabled: 2112');
expect(getReasonFromError(err)).toBe(RuleExecutionStatusErrorReasons.License);
+ expect(getErrorSource(err)).toBe(TaskErrorSource.USER);
}
expect(outcome).toBe('failure');
});
@@ -178,6 +181,7 @@ describe('rule_loader', () => {
outcome = 'failure';
expect(err.message).toMatch('[bar]: expected value of type [boolean] but got [string]');
expect(getReasonFromError(err)).toBe(RuleExecutionStatusErrorReasons.Validate);
+ expect(getErrorSource(err)).toBe(TaskErrorSource.USER);
}
expect(outcome).toBe('failure');
});
@@ -229,8 +233,12 @@ describe('rule_loader', () => {
}
);
- const promise = getRuleAttributes(context, ruleId, spaceId);
- await expect(promise).rejects.toThrow('wops');
+ try {
+ await getRuleAttributes(context, ruleId, spaceId);
+ } catch (e) {
+ expect(e.message).toMatch('wops');
+ expect(getErrorSource(e)).toBe(TaskErrorSource.FRAMEWORK);
+ }
});
});
diff --git a/x-pack/plugins/alerting/server/task_runner/rule_loader.ts b/x-pack/plugins/alerting/server/task_runner/rule_loader.ts
index fb037b802ec9b..ccf40adbf0920 100644
--- a/x-pack/plugins/alerting/server/task_runner/rule_loader.ts
+++ b/x-pack/plugins/alerting/server/task_runner/rule_loader.ts
@@ -6,12 +6,13 @@
*/
import { addSpaceIdToPath } from '@kbn/spaces-plugin/server';
-import { CoreKibanaRequest, FakeRawRequest, Headers } from '@kbn/core/server';
+import { CoreKibanaRequest, FakeRawRequest, Headers, SavedObject } from '@kbn/core/server';
import { PublicMethodsOf } from '@kbn/utility-types';
import {
LoadedIndirectParams,
LoadIndirectParamsResult,
} from '@kbn/task-manager-plugin/server/task';
+import { createTaskRunError, TaskErrorSource } from '@kbn/task-manager-plugin/server';
import { TaskRunnerContext } from './task_runner_factory';
import { ErrorWithReason, validateRuleTypeParams } from '../lib';
import {
@@ -70,23 +71,33 @@ export function validateRule(
const { enabled, apiKey } = indirectParams;
if (!enabled) {
- throw new ErrorWithReason(
- RuleExecutionStatusErrorReasons.Disabled,
- new Error(`Rule failed to execute because rule ran after it was disabled.`)
+ throw createTaskRunError(
+ new ErrorWithReason(
+ RuleExecutionStatusErrorReasons.Disabled,
+ new Error(`Rule failed to execute because rule ran after it was disabled.`)
+ ),
+ TaskErrorSource.FRAMEWORK
);
}
+
alertingEventLogger.setRuleName(rule.name);
try {
ruleTypeRegistry.ensureRuleTypeEnabled(rule.alertTypeId);
} catch (err) {
- throw new ErrorWithReason(RuleExecutionStatusErrorReasons.License, err);
+ throw createTaskRunError(
+ new ErrorWithReason(RuleExecutionStatusErrorReasons.License, err),
+ TaskErrorSource.USER
+ );
}
let validatedParams: Params;
try {
validatedParams = validateRuleTypeParams(rule.params, paramValidator);
} catch (err) {
- throw new ErrorWithReason(RuleExecutionStatusErrorReasons.Validate, err);
+ throw createTaskRunError(
+ new ErrorWithReason(RuleExecutionStatusErrorReasons.Validate, err),
+ TaskErrorSource.USER
+ );
}
if (rule.monitoring) {
@@ -114,11 +125,17 @@ export async function getRuleAttributes(
): Promise> {
const namespace = context.spaceIdToNamespace(spaceId);
- const rawRule = await context.encryptedSavedObjectsClient.getDecryptedAsInternalUser(
- RULE_SAVED_OBJECT_TYPE,
- ruleId,
- { namespace }
- );
+ let rawRule: SavedObject;
+
+ try {
+ rawRule = await context.encryptedSavedObjectsClient.getDecryptedAsInternalUser(
+ RULE_SAVED_OBJECT_TYPE,
+ ruleId,
+ { namespace }
+ );
+ } catch (e) {
+ throw createTaskRunError(e, TaskErrorSource.FRAMEWORK);
+ }
const fakeRequest = getFakeKibanaRequest(context, spaceId, rawRule.attributes.apiKey);
const rulesClient = context.getRulesClientWithRequest(fakeRequest);
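
The rule_loader and execution_handler changes above all apply the same pattern: catch a failure, decide whether the user or the framework owns it, and rethrow through createTaskRunError so Task Manager can attribute it later via getErrorSource. A hedged sketch of the calling pattern; loadRule here is a stand-in for any framework-owned dependency call, such as decrypting the rule saved object:

import { createTaskRunError, TaskErrorSource } from '@kbn/task-manager-plugin/server';

// Stand-in for a framework-owned call, e.g. the encrypted saved objects client.
declare function loadRule(ruleId: string): Promise<unknown>;

async function loadRuleOrTagError(ruleId: string) {
  try {
    return await loadRule(ruleId);
  } catch (e) {
    // Failures in framework plumbing are attributed to the framework; rule parameter
    // validation and licensing failures elsewhere in this diff use TaskErrorSource.USER.
    throw createTaskRunError(e, TaskErrorSource.FRAMEWORK);
  }
}
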
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts b/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
index 23d5d5f576ce3..d395ff3364cd7 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner.test.ts
@@ -18,7 +18,11 @@ import {
RuleAction,
RuleAlertData,
} from '../types';
-import { ConcreteTaskInstance, isUnrecoverableError } from '@kbn/task-manager-plugin/server';
+import {
+ ConcreteTaskInstance,
+ isUnrecoverableError,
+ TaskErrorSource,
+} from '@kbn/task-manager-plugin/server';
import { TaskRunnerContext } from './task_runner_factory';
import { TaskRunner } from './task_runner';
import { encryptedSavedObjectsMock } from '@kbn/encrypted-saved-objects-plugin/server/mocks';
@@ -83,6 +87,7 @@ import { getMockMaintenanceWindow } from '../data/maintenance_window/test_helper
import { alertsClientMock } from '../alerts_client/alerts_client.mock';
import { MaintenanceWindow } from '../application/maintenance_window/types';
import { RULE_SAVED_OBJECT_TYPE } from '../saved_objects';
+import { getErrorSource } from '@kbn/task-manager-plugin/server/task_running';
jest.mock('uuid', () => ({
v4: () => '5f6aa57d-3e22-484e-bae8-cbed868f4d28',
@@ -1942,6 +1947,7 @@ describe('Task Runner', () => {
expect(loggerMeta?.tags).toEqual(['test', '1', 'rule-run-failed']);
expect(loggerMeta?.error?.stack_trace).toBeDefined();
expect(logger.error).toBeCalledTimes(1);
+ expect(getErrorSource(runnerResult.taskRunError as Error)).toBe(TaskErrorSource.USER);
});
test('recovers gracefully when the Rule Task Runner throws an exception when loading rule to prepare for run', async () => {
@@ -3326,6 +3332,7 @@ describe('Task Runner', () => {
expect(encryptedSavedObjectsClient.getDecryptedAsInternalUser).toHaveBeenCalledTimes(1);
expect(result).toEqual({ error });
+ expect(getErrorSource(result.error as Error)).toBe(TaskErrorSource.FRAMEWORK);
});
function testAlertingEventLogCalls({
diff --git a/x-pack/plugins/alerting/server/task_runner/task_runner.ts b/x-pack/plugins/alerting/server/task_runner/task_runner.ts
index 532dd7b1e12ba..208e46b88a1f6 100644
--- a/x-pack/plugins/alerting/server/task_runner/task_runner.ts
+++ b/x-pack/plugins/alerting/server/task_runner/task_runner.ts
@@ -12,12 +12,13 @@ import { v4 as uuidv4 } from 'uuid';
import { Logger } from '@kbn/core/server';
import {
ConcreteTaskInstance,
- throwUnrecoverableError,
createTaskRunError,
TaskErrorSource,
+ throwUnrecoverableError,
} from '@kbn/task-manager-plugin/server';
import { nanosToMillis } from '@kbn/event-log-plugin/server';
import { DEFAULT_NAMESPACE_STRING } from '@kbn/core-saved-objects-utils-server';
+import { getErrorSource } from '@kbn/task-manager-plugin/server/task_running';
import { ExecutionHandler, RunResult } from './execution_handler';
import { TaskRunnerContext } from './task_runner_factory';
import {
@@ -25,20 +26,20 @@ import {
ErrorWithReason,
executionStatusFromError,
executionStatusFromState,
- ruleExecutionStatusToRaw,
+ getNextRun,
isRuleSnoozed,
lastRunFromError,
- getNextRun,
+ ruleExecutionStatusToRaw,
} from '../lib';
import {
- RuleExecutionStatus,
- RuleExecutionStatusErrorReasons,
IntervalSchedule,
RawRuleExecutionStatus,
+ RawRuleLastRun,
RawRuleMonitoring,
+ RuleExecutionStatus,
+ RuleExecutionStatusErrorReasons,
RuleTaskState,
RuleTypeRegistry,
- RawRuleLastRun,
} from '../types';
import { asErr, asOk, isErr, isOk, map, resolveErr, Result } from '../lib/result_type';
import { taskInstanceToAlertTaskInstance } from './alert_task_instance';
@@ -47,18 +48,18 @@ import { partiallyUpdateRule, RULE_SAVED_OBJECT_TYPE } from '../saved_objects';
import {
AlertInstanceContext,
AlertInstanceState,
- RuleTypeParams,
- RuleTypeState,
parseDuration,
RawAlertInstance,
- RuleLastRunOutcomeOrderMap,
RuleAlertData,
- SanitizedRule,
+ RuleLastRunOutcomeOrderMap,
RuleNotifyWhen,
+ RuleTypeParams,
+ RuleTypeState,
+ SanitizedRule,
} from '../../common';
import { NormalizedRuleType, UntypedNormalizedRuleType } from '../rule_type_registry';
import { getEsErrorMessage } from '../lib/errors';
-import { InMemoryMetrics, IN_MEMORY_METRICS } from '../monitoring';
+import { IN_MEMORY_METRICS, InMemoryMetrics } from '../monitoring';
import {
RuleTaskInstance,
RuleTaskRunResult,
@@ -552,7 +553,10 @@ export class TaskRunner<
message: err,
stackTrace: err.stack,
};
- throw new ErrorWithReason(RuleExecutionStatusErrorReasons.Execute, err);
+ throw createTaskRunError(
+ new ErrorWithReason(RuleExecutionStatusErrorReasons.Execute, err),
+ TaskErrorSource.USER
+ );
}
}
@@ -838,7 +842,10 @@ export class TaskRunner<
const data = await getRuleAttributes(this.context, ruleId, spaceId);
this.ruleData = { data };
} catch (err) {
- const error = new ErrorWithReason(RuleExecutionStatusErrorReasons.Decrypt, err);
+ const error = createTaskRunError(
+ new ErrorWithReason(RuleExecutionStatusErrorReasons.Decrypt, err),
+ getErrorSource(err)
+ );
this.ruleData = { error };
}
return this.ruleData;
@@ -930,6 +937,14 @@ export class TaskRunner<
timings: this.timer.toJson(),
});
+ const getTaskRunError = (state: Result) => {
+ return isErr(state)
+ ? {
+ taskRunError: createTaskRunError(state.error, getErrorSource(state.error)),
+ }
+ : {};
+ };
+
return {
state: map(
stateWithMetrics,
@@ -977,9 +992,7 @@ export class TaskRunner<
return { interval: retryInterval };
}),
monitoring: this.ruleMonitoring.getMonitoring(),
- ...(isErr(schedule)
- ? { taskRunError: createTaskRunError(schedule.error, TaskErrorSource.FRAMEWORK) }
- : {}),
+ ...getTaskRunError(stateWithMetrics),
};
}
diff --git a/x-pack/plugins/apm/public/components/routing/app_root/index.tsx b/x-pack/plugins/apm/public/components/routing/app_root/index.tsx
index b4ff66d12629d..0a18ebb584898 100644
--- a/x-pack/plugins/apm/public/components/routing/app_root/index.tsx
+++ b/x-pack/plugins/apm/public/components/routing/app_root/index.tsx
@@ -8,7 +8,7 @@
import { APP_WRAPPER_CLASS } from '@kbn/core/public';
import {
KibanaContextProvider,
- useUiSetting$,
+ useDarkMode,
} from '@kbn/kibana-react-plugin/public';
import { RedirectAppLinks } from '@kbn/shared-ux-link-redirect-app';
import { Storage } from '@kbn/kibana-utils-plugin/public';
@@ -137,7 +137,7 @@ function MountApmHeaderActionMenu() {
}
export function ApmThemeProvider({ children }: { children: React.ReactNode }) {
- const [darkMode] = useUiSetting$('theme:darkMode');
+ const darkMode = useDarkMode(false);
return (
= ({
useFetchAlertData: () => [false, {}],
permissions: userCapabilities.generalCases,
basePath: '/',
- features: { alerts: { enabled: false } },
+ features: { alerts: { enabled: true, sync: false } },
})}
);
diff --git a/x-pack/plugins/cases/public/components/case_view/components/case_view_activity.tsx b/x-pack/plugins/cases/public/components/case_view/components/case_view_activity.tsx
index 0ba1f2214bfc0..1c579eaf08848 100644
--- a/x-pack/plugins/cases/public/components/case_view/components/case_view_activity.tsx
+++ b/x-pack/plugins/cases/public/components/case_view/components/case_view_activity.tsx
@@ -89,15 +89,6 @@ export const CaseViewActivity = ({
const { data: currentUserProfile, isFetching: isLoadingCurrentUserProfile } =
useGetCurrentUserProfile();
- const onShowAlertDetails = useCallback(
- (alertId: string, index: string) => {
- if (showAlertDetails) {
- showAlertDetails(alertId, index);
- }
- },
- [showAlertDetails]
- );
-
const { onUpdateField, isLoading, loadingKey } = useOnUpdateField({
caseData,
});
@@ -221,7 +212,7 @@ export const CaseViewActivity = ({
data={caseData}
casesConfiguration={casesConfiguration}
actionsNavigation={actionsNavigation}
- onShowAlertDetails={onShowAlertDetails}
+ onShowAlertDetails={showAlertDetails}
onUpdateField={onUpdateField}
statusActionButton={
permissions.update ? (
diff --git a/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.test.tsx b/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.test.tsx
index b85dbe7564cf9..165f974e8be2e 100644
--- a/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.test.tsx
+++ b/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.test.tsx
@@ -14,6 +14,7 @@ import { createAppMockRenderer } from '../../../common/mock';
import type { CaseUI } from '../../../../common';
import { CaseViewAlerts } from './case_view_alerts';
import * as api from '../../../containers/api';
+import type { FeatureIdsResponse } from '../../../containers/types';
jest.mock('../../../containers/api');
@@ -30,6 +31,13 @@ describe('CaseUI View Page activity tab', () => {
appMockRender = createAppMockRenderer();
appMockRender.coreStart.triggersActionsUi.getAlertsStateTable =
getAlertsStateTableMock.mockReturnValue();
+ appMockRender.coreStart.triggersActionsUi.alertsTableConfigurationRegistry.register({
+ id: 'case-details-alerts-observability',
+ columns: [],
+ ruleTypeIds: ['log-threshold'],
+ });
+ });
+ afterEach(() => {
jest.clearAllMocks();
});
@@ -46,7 +54,7 @@ describe('CaseUI View Page activity tab', () => {
expect(getAlertsStateTableMock).toHaveBeenCalledWith({
alertsTableConfigurationRegistry: expect.anything(),
configurationId: 'securitySolution-case',
- featureIds: ['siem', 'observability'],
+ featureIds: ['siem'],
id: 'case-details-alerts-securitySolution',
query: {
ids: {
@@ -60,7 +68,13 @@ describe('CaseUI View Page activity tab', () => {
it('should call the alerts table with correct props for observability', async () => {
const getFeatureIdsMock = jest.spyOn(api, 'getFeatureIds');
- getFeatureIdsMock.mockResolvedValueOnce(['observability']);
+ getFeatureIdsMock.mockResolvedValueOnce({
+ aggregations: {
+ consumer: { buckets: [{ doc_count: 1, key: 'observability' }] },
+ producer: { buckets: [] },
+ ruleTypeIds: { buckets: [{ doc_count: 1, key: 'log-threshold' }] },
+ },
+ } as unknown as FeatureIdsResponse);
appMockRender.render(
{
await waitFor(async () => {
expect(getAlertsStateTableMock).toHaveBeenCalledWith({
alertsTableConfigurationRegistry: expect.anything(),
- configurationId: 'observability',
+ configurationId: 'case-details-alerts-observability',
featureIds: ['observability'],
id: 'case-details-alerts-observability',
query: {
@@ -86,12 +100,23 @@ describe('CaseUI View Page activity tab', () => {
});
});
- it('should call the getFeatureIds with the correct registration context', async () => {
+ it('should call the getFeatureIds with the correct alert ID', async () => {
const getFeatureIdsMock = jest.spyOn(api, 'getFeatureIds');
- appMockRender.render();
+ appMockRender.render(
+
+ );
await waitFor(async () => {
expect(getFeatureIdsMock).toHaveBeenCalledWith({
- query: { registrationContext: ['matchme'] },
+ query: {
+ ids: {
+ values: ['alert-id-1'],
+ },
+ },
signal: expect.anything(),
});
});
diff --git a/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.tsx b/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.tsx
index 44487fe4f4a0e..59914fcae85a2 100644
--- a/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.tsx
+++ b/x-pack/plugins/cases/public/components/case_view/components/case_view_alerts.tsx
@@ -8,10 +8,12 @@
import React, { useMemo } from 'react';
import { EuiFlexItem, EuiFlexGroup, EuiProgress } from '@elastic/eui';
+import type { ValidFeatureId } from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
+import { AlertConsumers } from '@kbn/rule-registry-plugin/common/technical_rule_data_field_names';
import { SECURITY_SOLUTION_OWNER } from '../../../../common/constants';
import type { CaseUI } from '../../../../common';
import { useKibana } from '../../../common/lib/kibana';
-import { getManualAlertIds, getRegistrationContextFromAlerts } from './helpers';
+import { getManualAlertIds } from './helpers';
import { useGetFeatureIds } from '../../../containers/use_get_feature_ids';
import { CaseViewAlertsEmpty } from './case_view_alerts_empty';
import { CaseViewTabs } from '../case_view_tabs';
@@ -22,34 +24,49 @@ interface CaseViewAlertsProps {
export const CaseViewAlerts = ({ caseData }: CaseViewAlertsProps) => {
const { triggersActionsUi } = useKibana().services;
+ const alertIds = getManualAlertIds(caseData.comments);
const alertIdsQuery = useMemo(
() => ({
ids: {
- values: getManualAlertIds(caseData.comments),
+ values: alertIds,
},
}),
- [caseData.comments]
+ [alertIds]
);
- const alertRegistrationContexts = useMemo(
- () => getRegistrationContextFromAlerts(caseData.comments),
- [caseData.comments]
+ const { isLoading: isLoadingAlertFeatureIds, data: alertData } = useGetFeatureIds(
+ alertIds,
+ caseData.owner !== SECURITY_SOLUTION_OWNER
);
- const { isLoading: isLoadingAlertFeatureIds, data: alertFeatureIds } =
- useGetFeatureIds(alertRegistrationContexts);
-
const configId =
- caseData.owner === SECURITY_SOLUTION_OWNER ? `${caseData.owner}-case` : caseData.owner;
+ caseData.owner === SECURITY_SOLUTION_OWNER
+ ? `${caseData.owner}-case`
+ : !isLoadingAlertFeatureIds
+ ? triggersActionsUi.alertsTableConfigurationRegistry.getAlertConfigIdPerRuleTypes(
+ alertData?.ruleTypeIds ?? []
+ )
+ : '';
- const alertStateProps = {
- alertsTableConfigurationRegistry: triggersActionsUi.alertsTableConfigurationRegistry,
- configurationId: configId,
- id: `case-details-alerts-${caseData.owner}`,
- featureIds: alertFeatureIds ?? [],
- query: alertIdsQuery,
- showAlertStatusWithFlapping: caseData.owner !== SECURITY_SOLUTION_OWNER,
- };
+ const alertStateProps = useMemo(
+ () => ({
+ alertsTableConfigurationRegistry: triggersActionsUi.alertsTableConfigurationRegistry,
+ configurationId: configId,
+ id: `case-details-alerts-${caseData.owner}`,
+ featureIds: (caseData.owner === SECURITY_SOLUTION_OWNER
+ ? [AlertConsumers.SIEM]
+ : alertData?.featureIds ?? []) as ValidFeatureId[],
+ query: alertIdsQuery,
+ showAlertStatusWithFlapping: caseData.owner !== SECURITY_SOLUTION_OWNER,
+ }),
+ [
+ triggersActionsUi.alertsTableConfigurationRegistry,
+ configId,
+ caseData.owner,
+ alertData?.featureIds,
+ alertIdsQuery,
+ ]
+ );
if (alertIdsQuery.ids.values.length === 0) {
return (
diff --git a/x-pack/plugins/cases/public/components/case_view/components/helpers.test.ts b/x-pack/plugins/cases/public/components/case_view/components/helpers.test.ts
index fba878ef1061a..b9905635f3944 100644
--- a/x-pack/plugins/cases/public/components/case_view/components/helpers.test.ts
+++ b/x-pack/plugins/cases/public/components/case_view/components/helpers.test.ts
@@ -6,7 +6,7 @@
*/
import { alertComment } from '../../../containers/mock';
-import { getManualAlertIds, getRegistrationContextFromAlerts } from './helpers';
+import { getManualAlertIds } from './helpers';
const comment = {
...alertComment,
@@ -24,62 +24,7 @@ const comment3 = {
alertId: ['nested1', 'nested2', 'nested3'],
};
-const commentSiemSignal = {
- ...alertComment,
- alertId: 'alert-id-siem',
- index: '.siem-signals-default-000008',
-};
-
-const commentIsBad = {
- ...alertComment,
- alertId: 'alert-id-bad',
- index: 'bad-siem-signals-default-000008',
-};
-
-const multipleIndices = {
- ...alertComment,
- alertId: ['test-id-1', 'test-id-2', 'test-id-3', 'test-id-4', 'test-id-5', 'test-id-6'],
- index: [
- '.internal.alerts-security.alerts-default-000001',
- '.internal.alerts-observability.logs.alerts-default-000001',
- '.internal.alerts-observability.uptime.alerts-default-000001',
- '.internal.alerts-observability.metrics.alerts-default-000001',
- '.internal.alerts-observability.apm.alerts-space2-000001',
- '.internal.alerts-observability.logs.alerts-space1-000001',
- ],
-};
-
describe('Case view helpers', () => {
- describe('getRegistrationContextFromAlerts', () => {
- it('returns the correct registration context', () => {
- const result = getRegistrationContextFromAlerts([comment, comment2, multipleIndices]);
- expect(result).toEqual([
- 'matchme',
- 'another',
- 'security',
- 'observability.logs',
- 'observability.uptime',
- 'observability.metrics',
- 'observability.apm',
- ]);
- });
-
- it('dedupes contexts', () => {
- const result = getRegistrationContextFromAlerts([comment, comment]);
- expect(result).toEqual(['matchme']);
- });
-
- it('returns the correct registration when find a .siem-signals* index', () => {
- const result = getRegistrationContextFromAlerts([commentSiemSignal, comment2]);
- expect(result).toEqual(['security', 'another']);
- });
-
- it('returns empty when the index is not formatted as expected', () => {
- const result = getRegistrationContextFromAlerts([commentIsBad]);
- expect(result).toEqual([]);
- });
- });
-
describe('getManualAlertIds', () => {
it('returns the alert ids', () => {
const result = getManualAlertIds([comment, comment2]);
diff --git a/x-pack/plugins/cases/public/components/case_view/components/helpers.ts b/x-pack/plugins/cases/public/components/case_view/components/helpers.ts
index d393054e30f78..73778f0348f06 100644
--- a/x-pack/plugins/cases/public/components/case_view/components/helpers.ts
+++ b/x-pack/plugins/cases/public/components/case_view/components/helpers.ts
@@ -19,36 +19,3 @@ export const getManualAlertIds = (comments: AttachmentUI[]): string[] => {
}, new Set());
return Array.from(dedupeAlerts);
};
-
-export const getRegistrationContextFromAlerts = (comments: AttachmentUI[]): string[] => {
- const dedupeRegistrationContext = comments.reduce(
- (registrationContexts, comment: AttachmentUI) => {
- if (comment.type === AttachmentType.alert) {
- const indices = Array.isArray(comment.index) ? comment.index : [comment.index];
- indices.forEach((index) => {
- // That's legacy code, we created some index alias so everything should work as expected
- if (index.startsWith('.siem-signals')) {
- registrationContexts.add('security');
- } else {
- const registrationContext = getRegistrationContextFromIndex(index);
- if (registrationContext) {
- registrationContexts.add(registrationContext);
- }
- }
- });
- return registrationContexts;
- }
- return registrationContexts;
- },
- new Set()
- );
- return Array.from(dedupeRegistrationContext);
-};
-
-export const getRegistrationContextFromIndex = (indexName: string): string | null => {
- const found = indexName.match(/\.alerts-(.*?).alerts/);
- if (found && found.length > 1) {
- return `${found[1]}`;
- }
- return null;
-};
diff --git a/x-pack/plugins/cases/public/components/connectors/resilient/case_fields_preview.test.tsx b/x-pack/plugins/cases/public/components/connectors/resilient/case_fields_preview.test.tsx
index dd6b9f08c0d09..f4ac54c1eb925 100644
--- a/x-pack/plugins/cases/public/components/connectors/resilient/case_fields_preview.test.tsx
+++ b/x-pack/plugins/cases/public/components/connectors/resilient/case_fields_preview.test.tsx
@@ -81,7 +81,7 @@ describe('Jira Fields: Preview', () => {
const getByText = createQueryWithMarkup(screen.getByText);
- expect(getByText('Incident Types: Malware, Denial of Service')).toBeInTheDocument();
+ expect(getByText('Incident types: Malware, Denial of Service')).toBeInTheDocument();
expect(getByText('Severity: Medium')).toBeInTheDocument();
});
});
diff --git a/x-pack/plugins/cases/public/components/connectors/resilient/translations.ts b/x-pack/plugins/cases/public/components/connectors/resilient/translations.ts
index 1b63a5098e92a..132234f0d2d57 100644
--- a/x-pack/plugins/cases/public/components/connectors/resilient/translations.ts
+++ b/x-pack/plugins/cases/public/components/connectors/resilient/translations.ts
@@ -31,7 +31,7 @@ export const INCIDENT_TYPES_PLACEHOLDER = i18n.translate(
export const INCIDENT_TYPES_LABEL = i18n.translate(
'xpack.cases.connectors.resilient.incidentTypesLabel',
{
- defaultMessage: 'Incident Types',
+ defaultMessage: 'Incident types',
}
);
diff --git a/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.test.tsx b/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.test.tsx
index 2e26b0f03ea82..3fea0295740d2 100644
--- a/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.test.tsx
+++ b/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.test.tsx
@@ -9,6 +9,7 @@ import React from 'react';
import type { ReactWrapper } from 'enzyme';
import { mount } from 'enzyme';
import { UserActionShowAlert } from './show_alert';
+import { useCaseViewNavigation, useCaseViewParams } from '../../../common/navigation';
const props = {
id: 'action-id',
@@ -17,12 +18,25 @@ const props = {
onShowAlertDetails: jest.fn(),
};
+jest.mock('../../../common/lib/kibana');
+jest.mock('../../../common/navigation/hooks');
+
+const useCaseViewParamsMock = useCaseViewParams as jest.Mock;
+const useCaseViewNavigationMock = useCaseViewNavigation as jest.Mock;
+
describe('UserActionShowAlert ', () => {
let wrapper: ReactWrapper;
const onShowAlertDetails = jest.fn();
+ const navigateToCaseView = jest.fn();
beforeAll(() => {
wrapper = mount();
+ useCaseViewParamsMock.mockReturnValue({ detailName: 'case-id' });
+ useCaseViewNavigationMock.mockReturnValue({ navigateToCaseView });
+ });
+
+ beforeEach(() => {
+ jest.clearAllMocks();
});
it('it renders', async () => {
@@ -31,8 +45,16 @@ describe('UserActionShowAlert ', () => {
).toBeTruthy();
});
- it('it calls onClick', async () => {
+ it('it calls onShowAlertDetails onClick when it is defined', async () => {
wrapper.find('button[data-test-subj="comment-action-show-alert-action-id"]').simulate('click');
expect(onShowAlertDetails).toHaveBeenCalledWith('alert-id', 'alert-index');
+ expect(navigateToCaseView).toBeCalledTimes(0);
+ });
+
+ it('it calls navigateToCaseView onClick when onShowAlertDetails is undefined', async () => {
+ wrapper = mount();
+ wrapper.find('button[data-test-subj="comment-action-show-alert-action-id"]').simulate('click');
+ expect(navigateToCaseView).toHaveBeenCalledWith({ detailName: 'case-id', tabId: 'alerts' });
+ expect(onShowAlertDetails).toBeCalledTimes(0);
});
});
diff --git a/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.tsx b/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.tsx
index 48a6bff3fd557..a0a5c7d24bec9 100644
--- a/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.tsx
+++ b/x-pack/plugins/cases/public/components/user_actions/comment/show_alert.tsx
@@ -8,12 +8,14 @@
import React, { memo, useCallback } from 'react';
import { EuiToolTip, EuiButtonIcon } from '@elastic/eui';
import * as i18n from '../translations';
+import { useCaseViewNavigation, useCaseViewParams } from '../../../common/navigation';
+import { CASE_VIEW_PAGE_TABS } from '../../../../common/types';
interface UserActionShowAlertProps {
id: string;
alertId: string;
index: string;
- onShowAlertDetails: (alertId: string, index: string) => void;
+ onShowAlertDetails?: (alertId: string, index: string) => void;
}
const UserActionShowAlertComponent = ({
@@ -22,10 +24,16 @@ const UserActionShowAlertComponent = ({
index,
onShowAlertDetails,
}: UserActionShowAlertProps) => {
- const onClick = useCallback(
- () => onShowAlertDetails(alertId, index),
- [alertId, index, onShowAlertDetails]
- );
+ const { navigateToCaseView } = useCaseViewNavigation();
+ const { detailName } = useCaseViewParams();
+
+ const onClick = useCallback(() => {
+ if (onShowAlertDetails) {
+ onShowAlertDetails(alertId, index);
+ } else {
+ navigateToCaseView({ detailName, tabId: CASE_VIEW_PAGE_TABS.ALERTS });
+ }
+ }, [alertId, detailName, index, navigateToCaseView, onShowAlertDetails]);
return (
{i18n.SHOW_ALERT_TOOLTIP}
}>
diff --git a/x-pack/plugins/cases/public/components/user_actions/types.ts b/x-pack/plugins/cases/public/components/user_actions/types.ts
index 8e1377c4f0f28..cafbedf9c2cb5 100644
--- a/x-pack/plugins/cases/public/components/user_actions/types.ts
+++ b/x-pack/plugins/cases/public/components/user_actions/types.ts
@@ -36,7 +36,7 @@ export interface UserActionTreeProps {
getRuleDetailsHref?: RuleDetailsNavigation['href'];
actionsNavigation?: ActionsNavigation;
onRuleDetailsClick?: RuleDetailsNavigation['onClick'];
- onShowAlertDetails: (alertId: string, index: string) => void;
+ onShowAlertDetails?: (alertId: string, index: string) => void;
onUpdateField: ({ key, value, onSuccess, onError }: OnUpdateFields) => void;
statusActionButton: JSX.Element | null;
useFetchAlertData: UseFetchAlertData;
@@ -76,7 +76,7 @@ export interface UserActionBuilderArgs {
handleSaveComment: ({ id, version }: { id: string; version: string }, content: string) => void;
handleDeleteComment: (id: string, successToasterTitle: string) => void;
handleManageQuote: (quote: string) => void;
- onShowAlertDetails: (alertId: string, index: string) => void;
+ onShowAlertDetails?: (alertId: string, index: string) => void;
getRuleDetailsHref?: RuleDetailsNavigation['href'];
onRuleDetailsClick?: RuleDetailsNavigation['onClick'];
}
diff --git a/x-pack/plugins/cases/public/containers/api.test.tsx b/x-pack/plugins/cases/public/containers/api.test.tsx
index 5b00fabbbbf50..e2e0897d75ada 100644
--- a/x-pack/plugins/cases/public/containers/api.test.tsx
+++ b/x-pack/plugins/cases/public/containers/api.test.tsx
@@ -84,7 +84,8 @@ const mockKibanaServices = KibanaServices.get as jest.Mock;
jest.mock('../common/lib/kibana');
const fetchMock = jest.fn();
-mockKibanaServices.mockReturnValue({ http: { fetch: fetchMock } });
+const postMock = jest.fn();
+mockKibanaServices.mockReturnValue({ http: { fetch: fetchMock, post: postMock } });
describe('Cases API', () => {
describe('deleteCases', () => {
@@ -1051,22 +1052,43 @@ describe('Cases API', () => {
describe('getFeatureIds', () => {
beforeEach(() => {
- fetchMock.mockClear();
- fetchMock.mockResolvedValue(['siem', 'observability']);
+ postMock.mockClear();
+ postMock.mockResolvedValue({
+ consumer: {
+ buckets: [{ key: 'observability', doc_count: 1 }],
+ },
+ producer: {
+ buckets: [],
+ },
+ ruleTypeIds: {
+ buckets: [{ key: 'apm.threshold', doc_count: 1 }],
+ },
+ });
});
it('should be called with correct check url, method, signal', async () => {
const resp = await getFeatureIds({
- query: { registrationContext: ['security', 'observability.logs'] },
+ query: { ids: { values: ['alert_id_1', 'alert_id_2'] } },
signal: abortCtrl.signal,
});
- expect(fetchMock).toHaveBeenCalledWith(`${BASE_RAC_ALERTS_API_PATH}/_feature_ids`, {
- query: { registrationContext: ['security', 'observability.logs'] },
+ expect(postMock).toHaveBeenCalledWith(`${BASE_RAC_ALERTS_API_PATH}/find`, {
+ body: '{"aggs":{"consumer":{"terms":{"field":"kibana.alert.rule.consumer","size":100}},"producer":{"terms":{"field":"kibana.alert.rule.producer","size":100}},"ruleTypeIds":{"terms":{"field":"kibana.alert.rule.rule_type_id","size":100}}},"query":{"ids":{"values":["alert_id_1","alert_id_2"]}}}',
+ method: 'POST',
signal: abortCtrl.signal,
});
- expect(resp).toEqual(['siem', 'observability']);
+ expect(resp).toEqual({
+ consumer: {
+ buckets: [{ key: 'observability', doc_count: 1 }],
+ },
+ producer: {
+ buckets: [],
+ },
+ ruleTypeIds: {
+ buckets: [{ key: 'apm.threshold', doc_count: 1 }],
+ },
+ });
});
});
diff --git a/x-pack/plugins/cases/public/containers/api.ts b/x-pack/plugins/cases/public/containers/api.ts
index c15b41cb458a7..eeeffc6f4e424 100644
--- a/x-pack/plugins/cases/public/containers/api.ts
+++ b/x-pack/plugins/cases/public/containers/api.ts
@@ -5,7 +5,7 @@
* 2.0.
*/
-import type { ValidFeatureId } from '@kbn/rule-data-utils';
+import { ALERT_RULE_CONSUMER, ALERT_RULE_PRODUCER, ALERT_RULE_TYPE_ID } from '@kbn/rule-data-utils';
import { BASE_RAC_ALERTS_API_PATH } from '@kbn/rule-registry-plugin/common/constants';
import type { User } from '../../common/types/domain';
import { AttachmentType } from '../../common/types/domain';
@@ -73,6 +73,7 @@ import {
import type {
ActionLicense,
CaseUI,
+ FeatureIdsResponse,
SingleCaseMetrics,
SingleCaseMetricsFeature,
UserActionUI,
@@ -511,16 +512,40 @@ export const getFeatureIds = async ({
query,
signal,
}: {
- query: { registrationContext: string[] };
+ query: {
+ ids: {
+ values: string[];
+ };
+ };
signal?: AbortSignal;
-}): Promise<ValidFeatureId[]> => {
- return KibanaServices.get().http.fetch(
- `${BASE_RAC_ALERTS_API_PATH}/_feature_ids`,
- {
- signal,
+}): Promise<FeatureIdsResponse> => {
+ return KibanaServices.get().http.post(`${BASE_RAC_ALERTS_API_PATH}/find`, {
+ method: 'POST',
+ body: JSON.stringify({
+ aggs: {
+ consumer: {
+ terms: {
+ field: ALERT_RULE_CONSUMER,
+ size: 100,
+ },
+ },
+ producer: {
+ terms: {
+ field: ALERT_RULE_PRODUCER,
+ size: 100,
+ },
+ },
+ ruleTypeIds: {
+ terms: {
+ field: ALERT_RULE_TYPE_ID,
+ size: 100,
+ },
+ },
+ },
query,
- }
- );
+ }),
+ signal,
+ });
};
export const getCaseConnectors = async (
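
The rewritten getFeatureIds above no longer calls a dedicated `_feature_ids` endpoint; it posts three terms aggregations (consumer, producer, rule type id) to the alerts find route and lets the caller derive feature IDs from the buckets. For reference, a sketch of how that request body is assembled in isolation, using the same `@kbn/rule-data-utils` field constants; the helper name is illustrative:

import {
  ALERT_RULE_CONSUMER,
  ALERT_RULE_PRODUCER,
  ALERT_RULE_TYPE_ID,
} from '@kbn/rule-data-utils';

// Builds the same aggregation body that getFeatureIds posts to the alerts find route,
// scoped to the alert IDs attached to the case.
const buildFeatureIdsRequestBody = (alertIds: string[]): string =>
  JSON.stringify({
    aggs: {
      consumer: { terms: { field: ALERT_RULE_CONSUMER, size: 100 } },
      producer: { terms: { field: ALERT_RULE_PRODUCER, size: 100 } },
      ruleTypeIds: { terms: { field: ALERT_RULE_TYPE_ID, size: 100 } },
    },
    query: { ids: { values: alertIds } },
  });
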
diff --git a/x-pack/plugins/cases/public/containers/constants.ts b/x-pack/plugins/cases/public/containers/constants.ts
index 224ea2c8bd04c..76d95a8bd0375 100644
--- a/x-pack/plugins/cases/public/containers/constants.ts
+++ b/x-pack/plugins/cases/public/containers/constants.ts
@@ -46,8 +46,8 @@ export const casesQueriesKeys = {
license: () => [...casesQueriesKeys.connectors, 'license'] as const,
tags: () => [...casesQueriesKeys.all, 'tags'] as const,
categories: () => [...casesQueriesKeys.all, 'categories'] as const,
- alertFeatureIds: (alertRegistrationContexts: string[]) =>
- [...casesQueriesKeys.alerts, 'features', alertRegistrationContexts] as const,
+ alertFeatureIds: (alertIds: string[]) =>
+ [...casesQueriesKeys.alerts, 'features', alertIds] as const,
configuration: (params: unknown) => [...casesQueriesKeys.all, 'configuration', params] as const,
};
diff --git a/x-pack/plugins/cases/public/containers/types.ts b/x-pack/plugins/cases/public/containers/types.ts
index 62a5f9299498e..d23d18c6e7896 100644
--- a/x-pack/plugins/cases/public/containers/types.ts
+++ b/x-pack/plugins/cases/public/containers/types.ts
@@ -5,4 +5,21 @@
* 2.0.
*/
+import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+
export * from '../../common/ui';
+
+export type FeatureIdsResponse = estypes.SearchResponse<
+ unknown,
+ {
+ consumer: {
+ buckets: Array<{ key: string; doc_count: number }>;
+ };
+ producer: {
+ buckets: Array<{ key: string; doc_count: number }>;
+ };
+ ruleTypeIds: {
+ buckets: Array<{ key: string; doc_count: number }>;
+ };
+ }
+>;
diff --git a/x-pack/plugins/cases/public/containers/use_get_feature_ids.test.tsx b/x-pack/plugins/cases/public/containers/use_get_feature_ids.test.tsx
index 298abb5d133e7..9446007e367d5 100644
--- a/x-pack/plugins/cases/public/containers/use_get_feature_ids.test.tsx
+++ b/x-pack/plugins/cases/public/containers/use_get_feature_ids.test.tsx
@@ -32,7 +32,7 @@ describe('useGetFeaturesIds', () => {
it('returns the features ids correctly', async () => {
const spy = jest.spyOn(api, 'getFeatureIds').mockRejectedValue([]);
- const { waitForNextUpdate } = renderHook(() => useGetFeatureIds(['context1']), {
+ const { waitForNextUpdate } = renderHook(() => useGetFeatureIds(['alert-id-1'], true), {
wrapper: appMockRender.AppWrapper,
});
@@ -40,12 +40,26 @@ describe('useGetFeaturesIds', () => {
await waitFor(() => {
expect(spy).toHaveBeenCalledWith({
- query: { registrationContext: ['context1'] },
+ query: {
+ ids: {
+ values: ['alert-id-1'],
+ },
+ },
signal: expect.any(AbortSignal),
});
});
});
+ it('never calls the API when disabled', async () => {
+ const spyMock = jest.spyOn(api, 'getFeatureIds');
+
+ renderHook(() => useGetFeatureIds(['alert-id-1'], false), {
+ wrapper: appMockRender.AppWrapper,
+ });
+
+ expect(spyMock).toHaveBeenCalledTimes(0);
+ });
+
it('shows a toast error when the api return an error', async () => {
(useToasts as jest.Mock).mockReturnValue({ addError });
@@ -53,7 +67,7 @@ describe('useGetFeaturesIds', () => {
.spyOn(api, 'getFeatureIds')
.mockRejectedValue(new Error('Something went wrong'));
- const { waitForNextUpdate } = renderHook(() => useGetFeatureIds(['context1']), {
+ const { waitForNextUpdate } = renderHook(() => useGetFeatureIds(['alert-id-1'], true), {
wrapper: appMockRender.AppWrapper,
});
@@ -61,7 +75,11 @@ describe('useGetFeaturesIds', () => {
await waitFor(() => {
expect(spy).toHaveBeenCalledWith({
- query: { registrationContext: ['context1'] },
+ query: {
+ ids: {
+ values: ['alert-id-1'],
+ },
+ },
signal: expect.any(AbortSignal),
});
expect(addError).toHaveBeenCalled();
diff --git a/x-pack/plugins/cases/public/containers/use_get_feature_ids.tsx b/x-pack/plugins/cases/public/containers/use_get_feature_ids.tsx
index 2c4df495b33d1..07d52edfd319a 100644
--- a/x-pack/plugins/cases/public/containers/use_get_feature_ids.tsx
+++ b/x-pack/plugins/cases/public/containers/use_get_feature_ids.tsx
@@ -6,28 +6,75 @@
*/
import { useQuery } from '@tanstack/react-query';
-import type { ValidFeatureId } from '@kbn/rule-data-utils';
+import { isValidFeatureId } from '@kbn/rule-data-utils';
+import { useMemo } from 'react';
import type { ServerError } from '../types';
import { useCasesToast } from '../common/use_cases_toast';
import * as i18n from './translations';
import { getFeatureIds } from './api';
import { casesQueriesKeys } from './constants';
+import type { FeatureIdsResponse } from './types';
-export const useGetFeatureIds = (alertRegistrationContexts: string[]) => {
- const { showErrorToast } = useCasesToast();
+interface UseGetFeatureIdsResponse {
+ featureIds: string[];
+ ruleTypeIds: string[];
+}
+
+const transformResponseToFeatureIds = (data: FeatureIdsResponse): UseGetFeatureIdsResponse => {
+ const localFeatureIds = new Set<string>();
+ data?.aggregations?.consumer?.buckets?.forEach(
+ ({ key, doc_count: docCount }: { key: string; doc_count: number }) => {
+ if (docCount > 0 && isValidFeatureId(key)) {
+ localFeatureIds.add(key);
+ }
+ }
+ );
+ data?.aggregations?.producer?.buckets?.forEach(
+ ({ key, doc_count: docCount }: { key: string; doc_count: number }) => {
+ if (docCount > 0 && isValidFeatureId(key)) {
+ localFeatureIds.add(key);
+ }
+ }
+ );
+ const ruleTypeIds =
+ data?.aggregations?.ruleTypeIds?.buckets
+ ?.filter(({ doc_count: docCount }: { doc_count: number }) => docCount > 0)
+ .map(({ key }: { key: string }) => key) ?? [];
- return useQuery(
- casesQueriesKeys.alertFeatureIds(alertRegistrationContexts),
+ return { featureIds: [...localFeatureIds], ruleTypeIds };
+};
+
+export const useGetFeatureIds = (alertIds: string[], enabled: boolean) => {
+ const { showErrorToast } = useCasesToast();
+ const { data, isInitialLoading, isLoading } = useQuery<
+ FeatureIdsResponse,
+ ServerError,
+ UseGetFeatureIdsResponse
+ >(
+ casesQueriesKeys.alertFeatureIds(alertIds),
({ signal }) => {
- const query = { registrationContext: alertRegistrationContexts };
- return getFeatureIds({ query, signal });
+ return getFeatureIds({
+ query: {
+ ids: {
+ values: alertIds,
+ },
+ },
+ signal,
+ });
},
{
+ select: transformResponseToFeatureIds,
+ enabled,
onError: (error: ServerError) => {
showErrorToast(error, { title: i18n.ERROR_TITLE });
},
}
);
+
+ return useMemo(
+ () => ({ data, isLoading: (isInitialLoading || isLoading) && enabled }),
+ [data, enabled, isInitialLoading, isLoading]
+ );
};
export type UseGetFeatureIds = typeof useGetFeatureIds;
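
The reworked useGetFeatureIds hook leans on two react-query options: enabled, so the request is skipped entirely (for example for Security Solution cases, which always use the SIEM feature ID), and select, which reshapes the raw aggregation response into { featureIds, ruleTypeIds } before components see it. A minimal sketch of that combination outside the Cases code, with placeholder names (fetchAggregations, useConsumers):

import { useQuery } from '@tanstack/react-query';

interface RawResponse {
  aggregations?: { consumer?: { buckets?: Array<{ key: string; doc_count: number }> } };
}

// Placeholder fetcher; in the real hook this is getFeatureIds() with the ids query.
declare function fetchAggregations(ids: string[], signal?: AbortSignal): Promise<RawResponse>;

export const useConsumers = (ids: string[], enabled: boolean) =>
  useQuery<RawResponse, Error, string[]>(
    ['consumers', ids],
    ({ signal }) => fetchAggregations(ids, signal),
    {
      enabled, // nothing is fetched while false, mirroring the "never calls the API when disabled" test
      // select reshapes the cached raw response for consumers of the hook
      select: (data) =>
        (data.aggregations?.consumer?.buckets ?? [])
          .filter((bucket) => bucket.doc_count > 0)
          .map((bucket) => bucket.key),
    }
  );
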
diff --git a/x-pack/plugins/cases/server/common/models/case_with_comments.test.ts b/x-pack/plugins/cases/server/common/models/case_with_comments.test.ts
index 3a194645c3fca..f55c1c78b1108 100644
--- a/x-pack/plugins/cases/server/common/models/case_with_comments.test.ts
+++ b/x-pack/plugins/cases/server/common/models/case_with_comments.test.ts
@@ -6,11 +6,15 @@
*/
import type { AlertAttachmentAttributes } from '../../../common/types/domain';
+import { AttachmentType } from '../../../common/types/domain';
import type { SavedObject } from '@kbn/core-saved-objects-api-server';
import { createCasesClientMockArgs } from '../../client/mocks';
import { alertComment, comment, mockCaseComments, mockCases, multipleAlert } from '../../mocks';
import { CaseCommentModel } from './case_with_comments';
-import { MAX_PERSISTABLE_STATE_AND_EXTERNAL_REFERENCES } from '../../../common/constants';
+import {
+ MAX_PERSISTABLE_STATE_AND_EXTERNAL_REFERENCES,
+ SECURITY_SOLUTION_OWNER,
+} from '../../../common/constants';
import {
commentExternalReference,
commentFileExternalReference,
@@ -25,6 +29,7 @@ describe('CaseCommentModel', () => {
clientArgs.services.caseService.getCase.mockResolvedValue(theCase);
clientArgs.services.caseService.patchCase.mockResolvedValue(theCase);
clientArgs.services.attachmentService.create.mockResolvedValue(mockCaseComments[0]);
+ clientArgs.services.attachmentService.update.mockResolvedValue(mockCaseComments[0]);
clientArgs.services.attachmentService.bulkCreate.mockResolvedValue({
saved_objects: mockCaseComments,
});
@@ -274,6 +279,18 @@ describe('CaseCommentModel', () => {
expect(clientArgs.services.attachmentService.create).not.toHaveBeenCalled();
});
+ it('partially updates the case', async () => {
+ await model.createComment({
+ id: 'comment-1',
+ commentReq: comment,
+ createdDate,
+ });
+
+ const args = clientArgs.services.caseService.patchCase.mock.calls[0][0];
+
+ expect(args.version).toBeUndefined();
+ });
+
describe('validation', () => {
clientArgs.services.attachmentService.countPersistableStateAndExternalReferenceAttachments.mockResolvedValue(
MAX_PERSISTABLE_STATE_AND_EXTERNAL_REFERENCES
@@ -579,6 +596,21 @@ describe('CaseCommentModel', () => {
expect(multipleAlertsCall.attributes.index).toEqual(['test-index-3', 'test-index-5']);
});
+ it('partially updates the case', async () => {
+ await model.bulkCreate({
+ attachments: [
+ {
+ id: 'comment-1',
+ ...comment,
+ },
+ ],
+ });
+
+ const args = clientArgs.services.caseService.patchCase.mock.calls[0][0];
+
+ expect(args.version).toBeUndefined();
+ });
+
describe('validation', () => {
clientArgs.services.attachmentService.countPersistableStateAndExternalReferenceAttachments.mockResolvedValue(
MAX_PERSISTABLE_STATE_AND_EXTERNAL_REFERENCES
@@ -619,4 +651,24 @@ describe('CaseCommentModel', () => {
});
});
});
+
+ describe('updateComment', () => {
+ it('partially updates the case', async () => {
+ await model.updateComment({
+ updateRequest: {
+ id: 'comment-id',
+ version: 'comment-version',
+ type: AttachmentType.user,
+ comment: 'my updated comment',
+ owner: SECURITY_SOLUTION_OWNER,
+ },
+ updatedAt: createdDate,
+ owner: SECURITY_SOLUTION_OWNER,
+ });
+
+ const args = clientArgs.services.caseService.patchCase.mock.calls[0][0];
+
+ expect(args.version).toBeUndefined();
+ });
+ });
});
diff --git a/x-pack/plugins/cases/server/common/models/case_with_comments.ts b/x-pack/plugins/cases/server/common/models/case_with_comments.ts
index e1b89d7af791e..a1eb5cbfdb8b5 100644
--- a/x-pack/plugins/cases/server/common/models/case_with_comments.ts
+++ b/x-pack/plugins/cases/server/common/models/case_with_comments.ts
@@ -129,7 +129,7 @@ export class CaseCommentModel {
},
options,
}),
- this.updateCaseUserAndDateSkipRefresh(updatedAt),
+ this.partialUpdateCaseUserAndDateSkipRefresh(updatedAt),
]);
await commentableCase.createUpdateCommentUserAction(comment, updateRequest, owner);
@@ -144,11 +144,11 @@ export class CaseCommentModel {
}
}
- private async updateCaseUserAndDateSkipRefresh(date: string) {
- return this.updateCaseUserAndDate(date, false);
+ private async partialUpdateCaseUserAndDateSkipRefresh(date: string) {
+ return this.partialUpdateCaseUserAndDate(date, false);
}
- private async updateCaseUserAndDate(
+ private async partialUpdateCaseUserAndDate(
date: string,
refresh: RefreshSetting
): Promise {
@@ -160,7 +160,6 @@ export class CaseCommentModel {
updated_at: date,
updated_by: { ...this.params.user },
},
- version: this.caseInfo.version,
refresh,
});
@@ -242,7 +241,7 @@ export class CaseCommentModel {
id,
refresh: false,
}),
- this.updateCaseUserAndDateSkipRefresh(createdDate),
+ this.partialUpdateCaseUserAndDateSkipRefresh(createdDate),
]);
await Promise.all([
@@ -502,7 +501,7 @@ export class CaseCommentModel {
}),
refresh: false,
}),
- this.updateCaseUserAndDateSkipRefresh(new Date().toISOString()),
+ this.partialUpdateCaseUserAndDateSkipRefresh(new Date().toISOString()),
]);
const savedObjectsWithoutErrors = newlyCreatedAttachments.saved_objects.filter(
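
The switch from updateCaseUserAndDate to partialUpdateCaseUserAndDate above comes down to dropping `version` from the patch, so bumping the case's updated_at/updated_by no longer fails with a version conflict when several attachments land concurrently. A sketch of the same idea against the plain saved objects client; the 'cases' type and attribute names here are illustrative, not the Cases service API:

import type { SavedObjectsClientContract } from '@kbn/core/server';

// Omitting `version` makes this a conflict-tolerant partial update: the attribute changes are
// applied against whatever the current document version is, instead of the write being rejected
// because another request already bumped the case.
async function touchCase(
  soClient: SavedObjectsClientContract,
  caseId: string,
  updatedAt: string,
  user: { username: string | null }
) {
  return soClient.update('cases', caseId, { updated_at: updatedAt, updated_by: user }, {
    refresh: false, // matches the skip-refresh variant used when attachments are created in bulk
  });
}
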
diff --git a/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts b/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts
index ed43cc40649c6..2ed37837f5100 100644
--- a/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts
+++ b/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts
@@ -254,7 +254,7 @@ describe('user action migrations', () => {
const log = context.log as jest.Mocked;
expect(log.error.mock.calls[0]).toMatchInlineSnapshot(`
Array [
- "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token a in JSON at position 1",
+ "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Expected property name or '}' in JSON at position 1",
Object {
"migrations": Object {
"userAction": Object {
@@ -448,7 +448,7 @@ describe('user action migrations', () => {
const log = context.log as jest.Mocked;
expect(log.error.mock.calls[0]).toMatchInlineSnapshot(`
Array [
- "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token b in JSON at position 1",
+ "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Expected property name or '}' in JSON at position 1",
Object {
"migrations": Object {
"userAction": Object {
@@ -644,7 +644,7 @@ describe('user action migrations', () => {
const log = context.log as jest.Mocked;
expect(log.error.mock.calls[0]).toMatchInlineSnapshot(`
Array [
- "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token e in JSON at position 1",
+ "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token 'e', \\"new json value\\" is not valid JSON",
Object {
"migrations": Object {
"userAction": Object {
diff --git a/x-pack/plugins/cloud_security_posture/common/constants.ts b/x-pack/plugins/cloud_security_posture/common/constants.ts
index 7f4f8c796f4c1..77e2dd38f326d 100644
--- a/x-pack/plugins/cloud_security_posture/common/constants.ts
+++ b/x-pack/plugins/cloud_security_posture/common/constants.ts
@@ -30,6 +30,10 @@ export const CSP_BENCHMARK_RULES_BULK_ACTION_ROUTE_PATH =
'/internal/cloud_security_posture/rules/_bulk_action';
export const CSP_BENCHMARK_RULES_BULK_ACTION_API_CURRENT_VERSION = '1';
+export const CSP_GET_BENCHMARK_RULES_STATE_ROUTE_PATH =
+ '/internal/cloud_security_posture/rules/_get_states';
+export const CSP_GET_BENCHMARK_RULES_STATE_API_CURRENT_VERSION = '1';
+
export const GET_DETECTION_RULE_ALERTS_STATUS_PATH =
'/internal/cloud_security_posture/detection_engine_rules/alerts/_status';
export const DETECTION_RULE_ALERTS_STATUS_API_CURRENT_VERSION = '1';
diff --git a/x-pack/plugins/cloud_security_posture/common/types/index.ts b/x-pack/plugins/cloud_security_posture/common/types/index.ts
index d6e804c33f017..e53f34d5cf919 100644
--- a/x-pack/plugins/cloud_security_posture/common/types/index.ts
+++ b/x-pack/plugins/cloud_security_posture/common/types/index.ts
@@ -8,6 +8,7 @@
export * as rulesV1 from './rules/v1';
export * as rulesV2 from './rules/v2';
export * as rulesV3 from './rules/v3';
+export * as rulesV4 from './rules/v4';
export * as benchmarkV1 from './benchmarks/v1';
export * as benchmarkV2 from './benchmarks/v2';
@@ -22,4 +23,5 @@ export type {
BenchmarkScore,
Benchmark,
GetBenchmarkResponse,
+ BenchmarkRuleSelectParams,
} from './latest';
diff --git a/x-pack/plugins/cloud_security_posture/common/types/latest.ts b/x-pack/plugins/cloud_security_posture/common/types/latest.ts
index 9951c7a418ecc..73d86e76db250 100644
--- a/x-pack/plugins/cloud_security_posture/common/types/latest.ts
+++ b/x-pack/plugins/cloud_security_posture/common/types/latest.ts
@@ -5,5 +5,5 @@
* 2.0.
*/
-export * from './rules/v3';
+export * from './rules/v4';
export * from './benchmarks/v2';
diff --git a/x-pack/plugins/cloud_security_posture/common/types/rules/v3.ts b/x-pack/plugins/cloud_security_posture/common/types/rules/v3.ts
index cef3e445b91a8..c2b70352aef9f 100644
--- a/x-pack/plugins/cloud_security_posture/common/types/rules/v3.ts
+++ b/x-pack/plugins/cloud_security_posture/common/types/rules/v3.ts
@@ -6,7 +6,7 @@
*/
import { schema, TypeOf } from '@kbn/config-schema';
-
+import type { SavedObjectsUpdateResponse } from '@kbn/core-saved-objects-api-server';
import { CSPM_POLICY_TEMPLATE, KSPM_POLICY_TEMPLATE } from '../../constants';
const DEFAULT_BENCHMARK_RULES_PER_PAGE = 25;
@@ -107,9 +107,14 @@ export const findCspBenchmarkRuleRequestSchema = schema.object({
* benchmark id
*/
benchmarkId: schema.maybe(
- schema.oneOf([schema.literal('cis_k8s'), schema.literal('cis_eks'), schema.literal('cis_aws')])
+ schema.oneOf([
+ schema.literal('cis_k8s'),
+ schema.literal('cis_eks'),
+ schema.literal('cis_aws'),
+ schema.literal('cis_azure'),
+ schema.literal('cis_gcp'),
+ ])
),
-
/**
* package_policy_id
*/
@@ -130,8 +135,11 @@ export interface FindCspBenchmarkRuleResponse {
perPage: number;
}
-export const cspBenchmarkRules = schema.arrayOf(
+export type PageUrlParams = Record<'policyId' | 'packagePolicyId', string>;
+
+export const rulesToUpdate = schema.arrayOf(
schema.object({
+ rule_id: schema.string(),
benchmark_id: schema.string(),
benchmark_version: schema.string(),
rule_number: schema.string(),
@@ -140,10 +148,10 @@ export const cspBenchmarkRules = schema.arrayOf(
export const cspBenchmarkRulesBulkActionRequestSchema = schema.object({
action: schema.oneOf([schema.literal('mute'), schema.literal('unmute')]),
- rules: cspBenchmarkRules,
+ rules: rulesToUpdate,
});
-export type CspBenchmarkRules = TypeOf<typeof cspBenchmarkRules>;
+export type RulesToUpdate = TypeOf<typeof rulesToUpdate>;
export type CspBenchmarkRulesBulkActionRequestSchema = TypeOf<
typeof cspBenchmarkRulesBulkActionRequestSchema
@@ -153,6 +161,10 @@ const rulesStates = schema.recordOf(
schema.string(),
schema.object({
muted: schema.boolean(),
+ benchmark_id: schema.string(),
+ benchmark_version: schema.string(),
+ rule_number: schema.string(),
+ rule_id: schema.string(),
})
);
@@ -162,3 +174,8 @@ export const cspSettingsSchema = schema.object({
export type CspBenchmarkRulesStates = TypeOf<typeof rulesStates>;
export type CspSettings = TypeOf<typeof cspSettingsSchema>;
+
+export interface BulkActionBenchmarkRulesResponse {
+ newCspSettings: SavedObjectsUpdateResponse;
+ disabledRulesCounter: number;
+}
diff --git a/x-pack/plugins/cloud_security_posture/common/types/rules/v4.ts b/x-pack/plugins/cloud_security_posture/common/types/rules/v4.ts
new file mode 100644
index 0000000000000..1ee8c584be770
--- /dev/null
+++ b/x-pack/plugins/cloud_security_posture/common/types/rules/v4.ts
@@ -0,0 +1,115 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { schema, TypeOf } from '@kbn/config-schema';
+import { BenchmarksCisId } from '../latest';
+
+export type {
+ cspBenchmarkRuleMetadataSchema,
+ CspBenchmarkRuleMetadata,
+ cspBenchmarkRuleSchema,
+ CspBenchmarkRule,
+ FindCspBenchmarkRuleResponse,
+ CspSettings,
+ CspBenchmarkRulesStates,
+} from './v3';
+
+const DEFAULT_BENCHMARK_RULES_PER_PAGE = 25;
+
+export const findCspBenchmarkRuleRequestSchema = schema.object({
+ /**
+ * An Elasticsearch simple_query_string
+ */
+ search: schema.maybe(schema.string()),
+
+ /**
+ * The page of objects to return
+ */
+ page: schema.number({ defaultValue: 1, min: 1 }),
+
+ /**
+ * The number of objects to include in each page
+ */
+ perPage: schema.number({ defaultValue: DEFAULT_BENCHMARK_RULES_PER_PAGE, min: 0 }),
+
+ /**
+ * Fields to retrieve from CspBenchmarkRule saved object
+ */
+ fields: schema.maybe(schema.arrayOf(schema.string())),
+
+ /**
+ * The fields to perform the parsed query against.
+ * Valid fields are those mapped to 'text' in cspBenchmarkRuleSavedObjectMapping
+ */
+ searchFields: schema.arrayOf(
+ schema.oneOf([schema.literal('metadata.name.text'), schema.literal('metadata.section.text')]),
+ { defaultValue: ['metadata.name.text'] }
+ ),
+
+ /**
+ * Sort Field
+ */
+ sortField: schema.oneOf(
+ [
+ schema.literal('metadata.name'),
+ schema.literal('metadata.section'),
+ schema.literal('metadata.id'),
+ schema.literal('metadata.version'),
+ schema.literal('metadata.benchmark.id'),
+ schema.literal('metadata.benchmark.name'),
+ schema.literal('metadata.benchmark.posture_type'),
+ schema.literal('metadata.benchmark.version'),
+ schema.literal('metadata.benchmark.rule_number'),
+ ],
+ {
+ defaultValue: 'metadata.benchmark.rule_number',
+ }
+ ),
+
+ /**
+ * The order to sort by
+ */
+ sortOrder: schema.oneOf([schema.literal('asc'), schema.literal('desc')], {
+ defaultValue: 'asc',
+ }),
+
+ /**
+ * benchmark id
+ */
+ benchmarkId: schema.maybe(
+ schema.oneOf([
+ schema.literal('cis_k8s'),
+ schema.literal('cis_eks'),
+ schema.literal('cis_aws'),
+ schema.literal('cis_azure'),
+ schema.literal('cis_gcp'),
+ ])
+ ),
+
+ /**
+ * benchmark version
+ */
+ benchmarkVersion: schema.maybe(schema.string()),
+
+ /**
+ * rule section
+ */
+ section: schema.maybe(schema.string()),
+ ruleNumber: schema.maybe(schema.string()),
+});
+
+export type FindCspBenchmarkRuleRequest = TypeOf<typeof findCspBenchmarkRuleRequestSchema>;
+
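+/** Optional filters used to narrow benchmark rule queries by CIS section and/or rule number. */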
+export interface BenchmarkRuleSelectParams {
+ section?: string;
+ ruleNumber?: string;
+}
+
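+/** Route params for the benchmark rules page, now addressed by benchmark id and version (see benchmarksNavigation). */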
+export interface PageUrlParams {
+ benchmarkId: BenchmarksCisId;
+ benchmarkVersion: string;
+}
diff --git a/x-pack/plugins/cloud_security_posture/common/types_old.ts b/x-pack/plugins/cloud_security_posture/common/types_old.ts
index d3706c51469f8..617f9dce122e8 100644
--- a/x-pack/plugins/cloud_security_posture/common/types_old.ts
+++ b/x-pack/plugins/cloud_security_posture/common/types_old.ts
@@ -181,7 +181,7 @@ export interface CnvmStatistics {
highCount: number | undefined;
mediumCount: number | undefined;
resourcesScanned: number | undefined;
- cloudRegions: number | undefined;
+ cloudAccounts: number | undefined;
}
export interface CnvmDashboardData {
diff --git a/x-pack/plugins/cloud_security_posture/common/utils/detection_rules.test.ts b/x-pack/plugins/cloud_security_posture/common/utils/detection_rules.test.ts
new file mode 100644
index 0000000000000..f2a35944f0825
--- /dev/null
+++ b/x-pack/plugins/cloud_security_posture/common/utils/detection_rules.test.ts
@@ -0,0 +1,113 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { CspBenchmarkRuleMetadata } from '../types';
+import {
+ convertRuleTagsToKQL,
+ generateBenchmarkRuleTags,
+ getFindingsDetectionRuleSearchTags,
+} from './detection_rules';
+
+describe('Detection rules utils', () => {
+ it('should convert tags to KQL format', () => {
+ const inputTags = ['tag1', 'tag2', 'tag3'];
+
+ const result = convertRuleTagsToKQL(inputTags);
+
+ const expectedKQL = 'alert.attributes.tags:("tag1" AND "tag2" AND "tag3")';
+ expect(result).toBe(expectedKQL);
+ });
+
+ it('Should convert an empty tag list to KQL format', () => {
+ const inputTags = [] as string[];
+
+ const result = convertRuleTagsToKQL(inputTags);
+
+ const expectedKQL = 'alert.attributes.tags:()';
+ expect(result).toBe(expectedKQL);
+ });
+
+ it('Should generate search tags for a CSP benchmark rule', () => {
+ const cspBenchmarkRule = {
+ benchmark: {
+ id: 'cis_gcp',
+ rule_number: '1.1',
+ },
+ } as unknown as CspBenchmarkRuleMetadata;
+
+ const result = getFindingsDetectionRuleSearchTags(cspBenchmarkRule);
+
+ const expectedTags = ['CIS', 'GCP', 'CIS GCP 1.1'];
+ expect(result).toEqual(expectedTags);
+ });
+
+ it('Should handle undefined benchmark object gracefully', () => {
+ const cspBenchmarkRule = { benchmark: {} } as any;
+ const expectedTags: string[] = [];
+ const result = getFindingsDetectionRuleSearchTags(cspBenchmarkRule);
+ expect(result).toEqual(expectedTags);
+ });
+
+ it('Should handle undefined rule number gracefully', () => {
+ const cspBenchmarkRule = {
+ benchmark: {
+ id: 'cis_gcp',
+ },
+ } as unknown as CspBenchmarkRuleMetadata;
+ const result = getFindingsDetectionRuleSearchTags(cspBenchmarkRule);
+ const expectedTags = ['CIS', 'GCP', 'CIS GCP'];
+ expect(result).toEqual(expectedTags);
+ });
+
+ it('Should generate tags for a CSPM benchmark rule', () => {
+ const cspBenchmarkRule = {
+ benchmark: {
+ id: 'cis_gcp',
+ rule_number: '1.1',
+ posture_type: 'cspm',
+ },
+ } as unknown as CspBenchmarkRuleMetadata;
+
+ const result = generateBenchmarkRuleTags(cspBenchmarkRule);
+
+ const expectedTags = [
+ 'Cloud Security',
+ 'Use Case: Configuration Audit',
+ 'CIS',
+ 'GCP',
+ 'CIS GCP 1.1',
+ 'CSPM',
+ 'Data Source: CSPM',
+ 'Domain: Cloud',
+ ];
+ expect(result).toEqual(expectedTags);
+ });
+
+ it('Should generate tags for a KSPM benchmark rule', () => {
+ const cspBenchmarkRule = {
+ benchmark: {
+ id: 'cis_gcp',
+ rule_number: '1.1',
+ posture_type: 'kspm',
+ },
+ } as unknown as CspBenchmarkRuleMetadata;
+
+ const result = generateBenchmarkRuleTags(cspBenchmarkRule);
+
+ const expectedTags = [
+ 'Cloud Security',
+ 'Use Case: Configuration Audit',
+ 'CIS',
+ 'GCP',
+ 'CIS GCP 1.1',
+ 'KSPM',
+ 'Data Source: KSPM',
+ 'Domain: Container',
+ ];
+ expect(result).toEqual(expectedTags);
+ });
+});
diff --git a/x-pack/plugins/cloud_security_posture/common/utils/detection_rules.ts b/x-pack/plugins/cloud_security_posture/common/utils/detection_rules.ts
new file mode 100644
index 0000000000000..42ea7561286c1
--- /dev/null
+++ b/x-pack/plugins/cloud_security_posture/common/utils/detection_rules.ts
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { CspBenchmarkRuleMetadata } from '../types/latest';
+
+const CSP_RULE_TAG = 'Cloud Security';
+const CSP_RULE_TAG_USE_CASE = 'Use Case: Configuration Audit';
+const CSP_RULE_TAG_DATA_SOURCE_PREFIX = 'Data Source: ';
+
+const STATIC_RULE_TAGS = [CSP_RULE_TAG, CSP_RULE_TAG_USE_CASE];
+
+export const convertRuleTagsToKQL = (tags: string[]): string => {
+ const TAGS_FIELD = 'alert.attributes.tags';
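+ // Join the tags with AND so only detection rules carrying every tag match, e.g. alert.attributes.tags:("CIS" AND "GCP" AND "CIS GCP 1.1")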
+ return `${TAGS_FIELD}:(${tags.map((tag) => `"${tag}"`).join(' AND ')})`;
+};
+
+/*
+ * Returns an array of CspFinding tags that can be used to search and filter a detection rule
+ */
+export const getFindingsDetectionRuleSearchTags = (
+ cspBenchmarkRule: CspBenchmarkRuleMetadata
+): string[] => {
+ if (!cspBenchmarkRule.benchmark || !cspBenchmarkRule.benchmark.id) {
+ // Return an empty array if benchmark ID is undefined
+ return [];
+ }
+
+ // ex: cis_gcp to ['CIS', 'GCP']
+ const benchmarkIdTags = cspBenchmarkRule.benchmark.id.split('_').map((tag) => tag.toUpperCase());
+
+ // ex: 'CIS GCP 1.1'
+ const benchmarkRuleNumberTag = cspBenchmarkRule.benchmark.rule_number
+ ? `${cspBenchmarkRule.benchmark.id.replace('_', ' ').toUpperCase()} ${
+ cspBenchmarkRule.benchmark.rule_number
+ }`
+ : cspBenchmarkRule.benchmark.id.replace('_', ' ').toUpperCase();
+
+ return benchmarkIdTags.concat([benchmarkRuleNumberTag]);
+};
+
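+/*
+ * Builds the full tag list for a benchmark detection rule: the static Cloud Security tags,
+ * the benchmark search tags, the posture type (CSPM/KSPM) plus its 'Data Source:' tag, and a
+ * 'Domain:' tag derived from the posture type (Cloud for CSPM, Container otherwise).
+ */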
+export const generateBenchmarkRuleTags = (rule: CspBenchmarkRuleMetadata) => {
+ return [STATIC_RULE_TAGS]
+ .concat(getFindingsDetectionRuleSearchTags(rule))
+ .concat(
+ rule.benchmark.posture_type
+ ? [
+ rule.benchmark.posture_type.toUpperCase(),
+ `${CSP_RULE_TAG_DATA_SOURCE_PREFIX}${rule.benchmark.posture_type.toUpperCase()}`,
+ ]
+ : []
+ )
+ .concat(rule.benchmark.posture_type === 'cspm' ? ['Domain: Cloud'] : ['Domain: Container'])
+ .flat();
+};
diff --git a/x-pack/plugins/cloud_security_posture/common/utils/helpers.ts b/x-pack/plugins/cloud_security_posture/common/utils/helpers.ts
index 2f7706705da45..3c70b3a7964b9 100644
--- a/x-pack/plugins/cloud_security_posture/common/utils/helpers.ts
+++ b/x-pack/plugins/cloud_security_posture/common/utils/helpers.ts
@@ -31,7 +31,7 @@ import type {
AzureCredentialsType,
RuleSection,
} from '../types_old';
-import type { BenchmarksCisId } from '../types/latest';
+import type { BenchmarkRuleSelectParams, BenchmarksCisId } from '../types/latest';
/**
* @example
@@ -187,7 +187,6 @@ export const getBenchmarkCisName = (benchmarkId: BenchmarksCisId) => {
case 'cis_gcp':
return 'CIS GCP';
}
- return null;
};
export const getBenchmarkApplicableTo = (benchmarkId: BenchmarksCisId) => {
@@ -203,5 +202,19 @@ export const getBenchmarkApplicableTo = (benchmarkId: BenchmarksCisId) => {
case 'cis_gcp':
return 'Google Cloud Provider';
}
- return null;
+};
+
+export const getBenchmarkFilterQuery = (
+ id: BenchmarkId,
+ version?: string,
+ selectParams?: BenchmarkRuleSelectParams
+): string => {
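+ // Always filter on benchmark id and version; append the optional section and rule-number clauses only when provided.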
+ const baseQuery = `${CSP_BENCHMARK_RULE_SAVED_OBJECT_TYPE}.attributes.metadata.benchmark.id:${id} AND ${CSP_BENCHMARK_RULE_SAVED_OBJECT_TYPE}.attributes.metadata.benchmark.version:"v${version}"`;
+ const sectionQuery = selectParams?.section
+ ? ` AND ${CSP_BENCHMARK_RULE_SAVED_OBJECT_TYPE}.attributes.metadata.section: "${selectParams.section}"`
+ : '';
+ const ruleNumberQuery = selectParams?.ruleNumber
+ ? ` AND ${CSP_BENCHMARK_RULE_SAVED_OBJECT_TYPE}.attributes.metadata.benchmark.rule_number: "${selectParams.ruleNumber}"`
+ : '';
+ return baseQuery + sectionQuery + ruleNumberQuery;
};
diff --git a/x-pack/plugins/cloud_security_posture/kibana.jsonc b/x-pack/plugins/cloud_security_posture/kibana.jsonc
index 9237ed70104ad..56ea8549629ac 100644
--- a/x-pack/plugins/cloud_security_posture/kibana.jsonc
+++ b/x-pack/plugins/cloud_security_posture/kibana.jsonc
@@ -21,7 +21,8 @@
"cloud",
"licensing",
"share",
- "kibanaUtils"
+ "kibanaUtils",
+ "alerting"
],
"optionalPlugins": ["usageCollection"],
"requiredBundles": ["kibanaReact", "usageCollection"]
diff --git a/x-pack/plugins/cloud_security_posture/public/common/api/use_fetch_detection_rules_by_tags.ts b/x-pack/plugins/cloud_security_posture/public/common/api/use_fetch_detection_rules_by_tags.ts
index 309698f4219d9..dfd6f13e38692 100644
--- a/x-pack/plugins/cloud_security_posture/public/common/api/use_fetch_detection_rules_by_tags.ts
+++ b/x-pack/plugins/cloud_security_posture/public/common/api/use_fetch_detection_rules_by_tags.ts
@@ -11,6 +11,7 @@ import { useQuery } from '@tanstack/react-query';
import { DETECTION_RULE_RULES_API_CURRENT_VERSION } from '../../../common/constants';
import { RuleResponse } from '../types';
import { DETECTION_ENGINE_RULES_KEY } from '../constants';
+import { convertRuleTagsToKQL } from '../../../common/utils/detection_rules';
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
@@ -26,16 +27,10 @@ export interface FetchRulesResponse {
data: RuleResponse[];
}
-export const TAGS_FIELD = 'alert.attributes.tags';
-
const DETECTION_ENGINE_URL = '/api/detection_engine' as const;
const DETECTION_ENGINE_RULES_URL = `${DETECTION_ENGINE_URL}/rules` as const;
export const DETECTION_ENGINE_RULES_URL_FIND = `${DETECTION_ENGINE_RULES_URL}/_find` as const;
-export function convertRuleTagsToKQL(tags: string[]): string {
- return `${TAGS_FIELD}:(${tags.map((tag) => `"${tag}"`).join(' AND ')})`;
-}
-
export const useFetchDetectionRulesByTags = (tags: string[]) => {
const { http } = useKibana().services;
diff --git a/x-pack/plugins/cloud_security_posture/public/common/api/use_latest_findings_data_view.ts b/x-pack/plugins/cloud_security_posture/public/common/api/use_latest_findings_data_view.ts
index 86b9692cbfc43..c8e98703cdbf0 100644
--- a/x-pack/plugins/cloud_security_posture/public/common/api/use_latest_findings_data_view.ts
+++ b/x-pack/plugins/cloud_security_posture/public/common/api/use_latest_findings_data_view.ts
@@ -62,15 +62,19 @@ export const useLatestFindingsDataView = (dataView: string) => {
}
if (dataView === LATEST_FINDINGS_INDEX_PATTERN) {
+ let shouldUpdate = false;
Object.entries(cloudSecurityFieldLabels).forEach(([field, label]) => {
if (
!dataViewObj.getFieldAttrs()[field]?.customLabel ||
dataViewObj.getFieldAttrs()[field]?.customLabel === field
) {
dataViewObj.setFieldCustomLabel(field, label);
+ shouldUpdate = true;
}
});
- await dataViews.updateSavedObject(dataViewObj);
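+ // Persist the data view only when at least one custom label actually changed, avoiding a redundant saved object update on every load.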
+ if (shouldUpdate) {
+ await dataViews.updateSavedObject(dataViewObj);
+ }
}
return dataViewObj;
diff --git a/x-pack/plugins/cloud_security_posture/public/common/constants.ts b/x-pack/plugins/cloud_security_posture/public/common/constants.ts
index 404ea0f036017..833f941c95292 100644
--- a/x-pack/plugins/cloud_security_posture/public/common/constants.ts
+++ b/x-pack/plugins/cloud_security_posture/public/common/constants.ts
@@ -49,6 +49,8 @@ export const LOCAL_STORAGE_DASHBOARD_BENCHMARK_SORT_KEY =
'cloudPosture:complianceDashboard:benchmarkSort';
export const LOCAL_STORAGE_FINDINGS_LAST_SELECTED_TAB_KEY = 'cloudPosture:findings:lastSelectedTab';
+export const SESSION_STORAGE_FIELDS_MODAL_SHOW_SELECTED = 'cloudPosture:fieldsModal:showSelected';
+
export type CloudPostureIntegrations = Record<
CloudSecurityPolicyTemplate,
CloudPostureIntegrationProps
diff --git a/x-pack/plugins/cloud_security_posture/public/common/hooks/use_cloud_posture_table/use_cloud_posture_table.ts b/x-pack/plugins/cloud_security_posture/public/common/hooks/use_cloud_posture_table/use_cloud_posture_table.ts
index 0becb56e6ec22..d06e29a95e46d 100644
--- a/x-pack/plugins/cloud_security_posture/public/common/hooks/use_cloud_posture_table/use_cloud_posture_table.ts
+++ b/x-pack/plugins/cloud_security_posture/public/common/hooks/use_cloud_posture_table/use_cloud_posture_table.ts
@@ -40,9 +40,9 @@ export interface CloudPostureTableResult {
getRowsFromPages: (data: Array<{ page: DataTableRecord[] }> | undefined) => DataTableRecord[];
}
-/*
- Hook for managing common table state and methods for Cloud Posture
-*/
+/**
+ * @deprecated will be replaced by useCloudPostureDataTable
+ */
export const useCloudPostureTable = ({
defaultQuery = getDefaultQuery,
dataView,
diff --git a/x-pack/plugins/cloud_security_posture/public/common/navigation/constants.ts b/x-pack/plugins/cloud_security_posture/public/common/navigation/constants.ts
index 6182a0b47f54e..674a28f34e97c 100644
--- a/x-pack/plugins/cloud_security_posture/public/common/navigation/constants.ts
+++ b/x-pack/plugins/cloud_security_posture/public/common/navigation/constants.ts
@@ -63,7 +63,7 @@ export const cloudPosturePages: Record = {
export const benchmarksNavigation: Record = {
rules: {
name: NAV_ITEMS_NAMES.RULES,
- path: `${CLOUD_SECURITY_POSTURE_BASE_PATH}/benchmarks/:packagePolicyId/:policyId/rules`,
+ path: `${CLOUD_SECURITY_POSTURE_BASE_PATH}/benchmarks/:benchmarkId/:benchmarkVersion/rules`,
id: 'cloud_security_posture-benchmarks-rules',
},
};
diff --git a/x-pack/plugins/cloud_security_posture/public/common/types.ts b/x-pack/plugins/cloud_security_posture/public/common/types.ts
index a4c26643293fd..ac483445407e4 100644
--- a/x-pack/plugins/cloud_security_posture/public/common/types.ts
+++ b/x-pack/plugins/cloud_security_posture/public/common/types.ts
@@ -22,6 +22,8 @@ export interface FindingsBaseURLQuery {
export interface FindingsBaseProps {
dataView: DataView;
+ dataViewRefetch?: () => void;
+ dataViewIsRefetching?: boolean;
}
export interface FindingsBaseESQueryConfig {
diff --git a/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/additional_controls.tsx b/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/additional_controls.tsx
index ff411d2dcd9e0..ea9b276339f6a 100644
--- a/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/additional_controls.tsx
+++ b/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/additional_controls.tsx
@@ -4,13 +4,14 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
-import React, { useState } from 'react';
-import { i18n } from '@kbn/i18n';
+import React from 'react';
import { EuiButtonEmpty, EuiFlexItem } from '@elastic/eui';
import { type DataView } from '@kbn/data-views-plugin/common';
-import { FieldsSelectorModal } from './fields_selector';
+import { FormattedMessage } from '@kbn/i18n-react';
+import { FieldsSelectorModal, useFieldsModal } from './fields_selector';
import { useStyles } from './use_styles';
import { getAbbreviatedNumber } from '../../common/utils/get_abbreviated_number';
+import { CSP_FIELDS_SELECTOR_OPEN_BUTTON } from '../test_subjects';
const GroupSelectorWrapper: React.FC = ({ children }) => {
const styles = useStyles();
@@ -30,6 +31,7 @@ export const AdditionalControls = ({
onAddColumn,
onRemoveColumn,
groupSelectorComponent,
+ onResetColumns,
}: {
total: number;
title: string;
@@ -38,11 +40,10 @@ export const AdditionalControls = ({
onAddColumn: (column: string) => void;
onRemoveColumn: (column: string) => void;
groupSelectorComponent?: JSX.Element;
+ onResetColumns: () => void;
}) => {
- const [isFieldSelectorModalVisible, setIsFieldSelectorModalVisible] = useState(false);
-
- const closeModal = () => setIsFieldSelectorModalVisible(false);
- const showModal = () => setIsFieldSelectorModalVisible(true);
+ const { isFieldSelectorModalVisible, closeFieldsSelectorModal, openFieldsSelectorModal } =
+ useFieldsModal();
return (
<>
@@ -50,9 +51,10 @@ export const AdditionalControls = ({
)}
@@ -62,13 +64,12 @@ export const AdditionalControls = ({
- {i18n.translate('xpack.csp.dataTable.fields', {
- defaultMessage: 'Fields',
- })}
+ <FormattedMessage id="xpack.csp.dataTable.fields" defaultMessage="Fields" />
{groupSelectorComponent && (
diff --git a/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.test.tsx b/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.test.tsx
new file mode 100644
index 0000000000000..7ddbe28a7da07
--- /dev/null
+++ b/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.test.tsx
@@ -0,0 +1,102 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { render } from '@testing-library/react';
+import React from 'react';
+import { TestProvider } from '../../test/test_provider';
+import { CloudSecurityDataTable, CloudSecurityDataTableProps } from './cloud_security_data_table';
+
+const mockDataView = {
+ fields: {
+ getAll: () => [
+ { id: 'field1', name: 'field1', customLabel: 'Label 1', visualizable: true },
+ { id: 'field2', name: 'field2', customLabel: 'Label 2', visualizable: true },
+ ],
+ getByName: (name: string) => ({ id: name }),
+ },
+ getFieldByName: (name: string) => ({ id: name }),
+ getFormatterForField: (name: string) => ({
+ convert: (value: string) => value,
+ }),
+} as any;
+
+const mockDefaultColumns = [{ id: 'field1' }, { id: 'field2' }];
+
+const mockCloudPostureDataTable = {
+ setUrlQuery: jest.fn(),
+ columnsLocalStorageKey: 'test',
+ filters: [],
+ onSort: jest.fn(),
+ sort: [],
+ query: {},
+ queryError: undefined,
+ pageIndex: 0,
+ urlQuery: {},
+ setTableOptions: jest.fn(),
+ handleUpdateQuery: jest.fn(),
+ pageSize: 10,
+ setPageSize: jest.fn(),
+ onChangeItemsPerPage: jest.fn(),
+ onChangePage: jest.fn(),
+ onResetFilters: jest.fn(),
+ getRowsFromPages: jest.fn(),
+} as any;
+
+const renderDataTable = (props: Partial<CloudSecurityDataTableProps> = {}) => {
+ const defaultProps: CloudSecurityDataTableProps = {
+ dataView: mockDataView,
+ isLoading: false,
+ defaultColumns: mockDefaultColumns,
+ rows: [],
+ total: 0,
+ flyoutComponent: () => <></>,
+ cloudPostureDataTable: mockCloudPostureDataTable,
+ loadMore: jest.fn(),
+ title: 'Test Table',
+ };
+
+ return render(
+ <TestProvider>
+ <CloudSecurityDataTable {...defaultProps} {...props} />
+ </TestProvider>
+ );
+};
+
+describe('CloudSecurityDataTable', () => {
+ it('renders loading state', () => {
+ const { getByTestId } = renderDataTable({ isLoading: true });
+ expect(getByTestId('unifiedDataTableLoading')).toBeInTheDocument();
+ });
+
+ it('renders empty state when no rows are present', () => {
+ const { getByTestId } = renderDataTable();
+ expect(getByTestId('csp:empty-state')).toBeInTheDocument();
+ });
+
+ it('renders data table with rows', async () => {
+ const mockRows = [
+ {
+ id: '1',
+ raw: {
+ field1: 'Label 1',
+ field2: 'Label 2',
+ },
+ flattened: {
+ field1: 'Label 1',
+ field2: 'Label 2',
+ },
+ },
+ ] as any;
+ const { getByTestId, getByText } = renderDataTable({
+ rows: mockRows,
+ total: mockRows.length,
+ });
+
+ expect(getByTestId('discoverDocTable')).toBeInTheDocument();
+ expect(getByText('Label 1')).toBeInTheDocument();
+ expect(getByText('Label 2')).toBeInTheDocument();
+ });
+});
diff --git a/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.tsx b/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.tsx
index 50e81a0a0c7ec..3f0c3da73a986 100644
--- a/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.tsx
+++ b/x-pack/plugins/cloud_security_posture/public/components/cloud_security_data_table/cloud_security_data_table.tsx
@@ -17,7 +17,7 @@ import { generateFilters } from '@kbn/data-plugin/public';
import { DocViewFilterFn } from '@kbn/unified-doc-viewer/types';
import useLocalStorage from 'react-use/lib/useLocalStorage';
import { useKibana } from '../../common/hooks/use_kibana';
-import { CloudPostureTableResult } from '../../common/hooks/use_cloud_posture_table';
+import { CloudPostureDataTableResult } from '../../common/hooks/use_cloud_posture_data_table';
import { EmptyState } from '../empty_state';
import { MAX_FINDINGS_TO_LOAD } from '../../common/constants';
import { useStyles } from './use_styles';
@@ -40,7 +40,7 @@ const useNewFieldsApi = true;
// Hide Checkbox, enable open details Flyout
const controlColumnIds = ['openDetails'];
-interface CloudSecurityDataGridProps {
+export interface CloudSecurityDataTableProps {
dataView: DataView;
isLoading: boolean;
defaultColumns: CloudSecurityDefaultColumn[];
@@ -52,10 +52,10 @@ interface CloudSecurityDataGridProps {
*/
flyoutComponent: (hit: DataTableRecord, onCloseFlyout: () => void) => JSX.Element;
/**
- * This is the object that contains all the data and functions from the useCloudPostureTable hook.
+ * This is the object that contains all the data and functions from the useCloudPostureDataTable hook.
* This is also used to manage the table state from the parent component.
*/
- cloudPostureTable: CloudPostureTableResult;
+ cloudPostureDataTable: CloudPostureDataTableResult;
title: string;
/**
* This is a function that returns a map of column ids to custom cell renderers.
@@ -78,6 +78,16 @@ interface CloudSecurityDataGridProps {
* Height override for the data grid.
*/
height?: number;
+ /**
+ * Callback function invoked when a DataView field is edited.
+ * Required to enable editing of the field in the data grid.
+ */
+ dataViewRefetch?: () => void;
+ /**
+ * Flag to indicate if the data view is refetching.
+ * Required to smooth re-rendering of the DataTable columns.
+ */
+ dataViewIsRefetching?: boolean;
}
export const CloudSecurityDataTable = ({
@@ -87,14 +97,16 @@ export const CloudSecurityDataTable = ({
rows,
total,
flyoutComponent,
- cloudPostureTable,
+ cloudPostureDataTable,
loadMore,
title,
customCellRenderer,
groupSelectorComponent,
height,
+ dataViewRefetch,
+ dataViewIsRefetching,
...rest
-}: CloudSecurityDataGridProps) => {
+}: CloudSecurityDataTableProps) => {
const {
columnsLocalStorageKey,
pageSize,
@@ -104,7 +116,7 @@ export const CloudSecurityDataTable = ({
onResetFilters,
filters,
sort,
- } = cloudPostureTable;
+ } = cloudPostureDataTable;
const [columns, setColumns] = useLocalStorage(
columnsLocalStorageKey,
@@ -208,6 +220,10 @@ export const CloudSecurityDataTable = ({
return customCellRenderer(rows);
}, [customCellRenderer, rows]);
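+ // Reset the locally stored column selection back to the table's default columns.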
+ const onResetColumns = () => {
+ setColumns(defaultColumns.map((c) => c.id));
+ };
+
if (!isLoading && !rows.length) {
return <EmptyState onResetFilters={onResetFilters} />;
}
@@ -221,6 +237,7 @@ export const CloudSecurityDataTable = ({
onAddColumn={onAddColumn}
onRemoveColumn={onRemoveColumn}
groupSelectorComponent={groupSelectorComponent}
+ onResetColumns={onResetColumns}
/>
);
@@ -237,6 +254,9 @@ export const CloudSecurityDataTable = ({
opacity: isLoading ? 1 : 0,
};
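+ // Treat a data view refetch like an initial load so the grid shows its loading state instead of re-rendering columns mid-update.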
+ const loadingState =
+ isLoading || dataViewIsRefetching ? DataLoadingState.loading : DataLoadingState.loaded;
+
return (