diff --git a/.ci/Jenkinsfile_coverage b/.ci/Jenkinsfile_coverage index 6b8dc31bab34e..f2a58e7b6a7ac 100644 --- a/.ci/Jenkinsfile_coverage +++ b/.ci/Jenkinsfile_coverage @@ -44,7 +44,7 @@ kibanaPipeline(timeoutMinutes: 180) { 'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10), ]), ]) - workers.base(name: 'coverage-worker', label: 'tests-l', ramDisk: false, bootstrapped: false) { + workers.base(name: 'coverage-worker', size: 'l', ramDisk: false, bootstrapped: false) { kibanaPipeline.downloadCoverageArtifacts() kibanaPipeline.bash( ''' diff --git a/.ci/Jenkinsfile_visual_baseline b/.ci/Jenkinsfile_visual_baseline index 5c13ccccd9c6f..815c1345bbb68 100644 --- a/.ci/Jenkinsfile_visual_baseline +++ b/.ci/Jenkinsfile_visual_baseline @@ -7,12 +7,12 @@ kibanaPipeline(timeoutMinutes: 120) { catchError { parallel([ 'oss-visualRegression': { - workers.ci(name: 'oss-visualRegression', label: 'linux && immutable', ramDisk: false) { + workers.ci(name: 'oss-visualRegression', size: 's', ramDisk: false) { kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1) } }, 'xpack-visualRegression': { - workers.ci(name: 'xpack-visualRegression', label: 'linux && immutable', ramDisk: false) { + workers.ci(name: 'xpack-visualRegression', size: 's', ramDisk: false) { kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1) } }, diff --git a/.ci/es-snapshots/Jenkinsfile_build_es b/.ci/es-snapshots/Jenkinsfile_build_es index a00bcb3bbc946..a3470cd750738 100644 --- a/.ci/es-snapshots/Jenkinsfile_build_es +++ b/.ci/es-snapshots/Jenkinsfile_build_es @@ -25,7 +25,7 @@ def PROMOTE_WITHOUT_VERIFY = !!params.PROMOTE_WITHOUT_VERIFICATION timeout(time: 120, unit: 'MINUTES') { timestamps { ansiColor('xterm') { - node('linux && immutable') { + node(workers.label('s')) { catchErrors { def VERSION def SNAPSHOT_ID diff --git a/.ci/es-snapshots/Jenkinsfile_verify_es b/.ci/es-snapshots/Jenkinsfile_verify_es index ce472a404c053..ade79f27e10e9 100644 --- a/.ci/es-snapshots/Jenkinsfile_verify_es +++ b/.ci/es-snapshots/Jenkinsfile_verify_es @@ -61,7 +61,7 @@ kibanaPipeline(timeoutMinutes: 120) { } def promoteSnapshot(snapshotVersion, snapshotId) { - node('linux && immutable') { + node(workers.label('s')) { esSnapshots.promote(snapshotVersion, snapshotId) } } diff --git a/docs/infrastructure/index.asciidoc b/docs/infrastructure/index.asciidoc index 60695c0e3f1cf..416e95a8941ce 100644 --- a/docs/infrastructure/index.asciidoc +++ b/docs/infrastructure/index.asciidoc @@ -21,6 +21,8 @@ You can optionally save these views and add them to {kibana-ref}/dashboard.html[ * Seamlessly switch to view the corresponding logs, application traces or uptime information for a component. +* Create alerts based on metric thresholds for one or more components. + To get started, you need to <>. Then you can <>. [role="screenshot"] diff --git a/docs/infrastructure/metrics-explorer.asciidoc b/docs/infrastructure/metrics-explorer.asciidoc index d47581ffe720a..793f09ea83b4f 100644 --- a/docs/infrastructure/metrics-explorer.asciidoc +++ b/docs/infrastructure/metrics-explorer.asciidoc @@ -20,6 +20,7 @@ By default that is set to `@timestamp`. * The interval for the X Axis is set to `auto`. The bucket size is determined by the time range. * To use *Open in Visualize* you need access to the Visualize app. +* To use *Create alert* you need to {kibana-ref}/alerting-getting-started.html#alerting-setup-prerequisites[set up alerting]. 
[float] [[metrics-explorer-tutorial]] @@ -67,4 +68,8 @@ Choose a graph, click the *Actions* dropdown and select *Open In Visualize*. This opens the graph in {kibana-ref}/TSVB.html[TSVB]. From here you can save the graph and add it to a dashboard as usual. +9. You can also create an alert based on the metrics in a graph. +Choose a graph, click the *Actions* dropdown and select *Create alert*. +This opens the {kibana-ref}/defining-alerts.html[alert flyout] prefilled with metrics from the chart. + Who's the Metrics Explorer now? You are! diff --git a/package.json b/package.json index fd6f6ac140247..e807cd4d95198 100644 --- a/package.json +++ b/package.json @@ -239,6 +239,7 @@ "react-monaco-editor": "~0.27.0", "react-redux": "^7.1.3", "react-resize-detector": "^4.2.0", + "react-router": "^5.1.2", "react-router-dom": "^5.1.2", "react-sizeme": "^2.3.6", "react-use": "^13.27.0", diff --git a/packages/kbn-optimizer/src/worker/webpack.config.ts b/packages/kbn-optimizer/src/worker/webpack.config.ts index 9337daf419bfa..a3a11783cd82a 100644 --- a/packages/kbn-optimizer/src/worker/webpack.config.ts +++ b/packages/kbn-optimizer/src/worker/webpack.config.ts @@ -27,7 +27,7 @@ import TerserPlugin from 'terser-webpack-plugin'; import webpackMerge from 'webpack-merge'; // @ts-ignore import { CleanWebpackPlugin } from 'clean-webpack-plugin'; -import * as SharedDeps from '@kbn/ui-shared-deps'; +import * as UiSharedDeps from '@kbn/ui-shared-deps'; import { Bundle, WorkerConfig, parseDirPath, DisallowedSyntaxPlugin } from '../common'; @@ -73,7 +73,7 @@ export function getWebpackConfig(bundle: Bundle, worker: WorkerConfig) { }, externals: { - ...SharedDeps.externals, + ...UiSharedDeps.externals, }, plugins: [new CleanWebpackPlugin(), new DisallowedSyntaxPlugin()], diff --git a/packages/kbn-ui-framework/package.json b/packages/kbn-ui-framework/package.json index bcebdf591d6f0..5ea031595d1d4 100644 --- a/packages/kbn-ui-framework/package.json +++ b/packages/kbn-ui-framework/package.json @@ -38,7 +38,7 @@ "brace": "0.11.1", "chalk": "^2.4.2", "chokidar": "3.2.1", - "core-js": "^3.2.1", + "core-js": "^3.6.4", "css-loader": "^3.4.2", "expose-loader": "^0.7.5", "file-loader": "^4.2.0", diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js index 5028c6efdb40e..f19271de8ad27 100644 --- a/packages/kbn-ui-shared-deps/entry.js +++ b/packages/kbn-ui-shared-deps/entry.js @@ -17,31 +17,40 @@ * under the License.
*/ -// import global polyfills before everything else require('./polyfills'); // must load before angular export const Jquery = require('jquery'); window.$ = window.jQuery = Jquery; -export const Angular = require('angular'); -export const ElasticCharts = require('@elastic/charts'); -export const ElasticEui = require('@elastic/eui'); -export const ElasticEuiLibServices = require('@elastic/eui/lib/services'); -export const ElasticEuiLightTheme = require('@elastic/eui/dist/eui_theme_light.json'); -export const ElasticEuiDarkTheme = require('@elastic/eui/dist/eui_theme_dark.json'); +// stateful deps export const KbnI18n = require('@kbn/i18n'); export const KbnI18nAngular = require('@kbn/i18n/angular'); export const KbnI18nReact = require('@kbn/i18n/react'); +export const Angular = require('angular'); export const Moment = require('moment'); export const MomentTimezone = require('moment-timezone/moment-timezone'); +export const Monaco = require('./monaco.ts'); +export const MonacoBare = require('monaco-editor/esm/vs/editor/editor.api'); export const React = require('react'); export const ReactDom = require('react-dom'); +export const ReactDomServer = require('react-dom/server'); export const ReactIntl = require('react-intl'); export const ReactRouter = require('react-router'); // eslint-disable-line export const ReactRouterDom = require('react-router-dom'); -export const Monaco = require('./monaco.ts'); -export const MonacoBare = require('monaco-editor/esm/vs/editor/editor.api'); -// load timezone data into moment-timezone Moment.tz.load(require('moment-timezone/data/packed/latest.json')); + +// big deps which are locked to a single version +export const Rxjs = require('rxjs'); +export const RxjsOperators = require('rxjs/operators'); +export const ElasticCharts = require('@elastic/charts'); +export const ElasticEui = require('@elastic/eui'); +export const ElasticEuiLibServices = require('@elastic/eui/lib/services'); +export const ElasticEuiLibServicesFormat = require('@elastic/eui/lib/services/format'); +export const ElasticEuiLightTheme = require('@elastic/eui/dist/eui_theme_light.json'); +export const ElasticEuiDarkTheme = require('@elastic/eui/dist/eui_theme_dark.json'); +export const ElasticEuiChartsTheme = require('@elastic/eui/dist/eui_charts_theme'); + +// massive deps that we should really get rid of or reduce in size substantially +export const ElasticsearchBrowser = require('elasticsearch-browser/elasticsearch.js'); diff --git a/packages/kbn-ui-shared-deps/index.d.ts b/packages/kbn-ui-shared-deps/index.d.ts index 7ee96050a1248..dec519da69641 100644 --- a/packages/kbn-ui-shared-deps/index.d.ts +++ b/packages/kbn-ui-shared-deps/index.d.ts @@ -25,7 +25,12 @@ export const distDir: string; /** * Filename of the main bundle file in the distributable directory */ -export const distFilename: string; +export const jsFilename: string; + +/** + * Filename of files that must be loaded before the jsFilename + */ +export const jsDepFilenames: string[]; /** * Filename of the unthemed css file in the distributable directory diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js index d1bb93ddecd0a..666ec7a46ff06 100644 --- a/packages/kbn-ui-shared-deps/index.js +++ b/packages/kbn-ui-shared-deps/index.js @@ -20,17 +20,14 @@ const Path = require('path'); exports.distDir = Path.resolve(__dirname, 'target'); -exports.distFilename = 'kbn-ui-shared-deps.js'; +exports.jsDepFilenames = ['kbn-ui-shared-deps.@elastic.js']; +exports.jsFilename = 'kbn-ui-shared-deps.js'; 
exports.baseCssDistFilename = 'kbn-ui-shared-deps.css'; exports.lightCssDistFilename = 'kbn-ui-shared-deps.light.css'; exports.darkCssDistFilename = 'kbn-ui-shared-deps.dark.css'; exports.externals = { + // stateful deps angular: '__kbnSharedDeps__.Angular', - '@elastic/charts': '__kbnSharedDeps__.ElasticCharts', - '@elastic/eui': '__kbnSharedDeps__.ElasticEui', - '@elastic/eui/lib/services': '__kbnSharedDeps__.ElasticEuiLibServices', - '@elastic/eui/dist/eui_theme_light.json': '__kbnSharedDeps__.ElasticEuiLightTheme', - '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.ElasticEuiDarkTheme', '@kbn/i18n': '__kbnSharedDeps__.KbnI18n', '@kbn/i18n/angular': '__kbnSharedDeps__.KbnI18nAngular', '@kbn/i18n/react': '__kbnSharedDeps__.KbnI18nReact', @@ -39,10 +36,31 @@ exports.externals = { 'moment-timezone': '__kbnSharedDeps__.MomentTimezone', react: '__kbnSharedDeps__.React', 'react-dom': '__kbnSharedDeps__.ReactDom', + 'react-dom/server': '__kbnSharedDeps__.ReactDomServer', 'react-intl': '__kbnSharedDeps__.ReactIntl', 'react-router': '__kbnSharedDeps__.ReactRouter', 'react-router-dom': '__kbnSharedDeps__.ReactRouterDom', '@kbn/ui-shared-deps/monaco': '__kbnSharedDeps__.Monaco', // this is how plugins/consumers from npm load monaco 'monaco-editor/esm/vs/editor/editor.api': '__kbnSharedDeps__.MonacoBare', + + /** + * big deps which are locked to a single version + */ + rxjs: '__kbnSharedDeps__.Rxjs', + 'rxjs/operators': '__kbnSharedDeps__.RxjsOperators', + '@elastic/charts': '__kbnSharedDeps__.ElasticCharts', + '@elastic/eui': '__kbnSharedDeps__.ElasticEui', + '@elastic/eui/lib/services': '__kbnSharedDeps__.ElasticEuiLibServices', + '@elastic/eui/lib/services/format': '__kbnSharedDeps__.ElasticEuiLibServicesFormat', + '@elastic/eui/dist/eui_charts_theme': '__kbnSharedDeps__.ElasticEuiChartsTheme', + '@elastic/eui/dist/eui_theme_light.json': '__kbnSharedDeps__.ElasticEuiLightTheme', + '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.ElasticEuiDarkTheme', + + /** + * massive deps that we should really get rid of or reduce in size substantially + */ + elasticsearch: '__kbnSharedDeps__.ElasticsearchBrowser', + 'elasticsearch-browser': '__kbnSharedDeps__.ElasticsearchBrowser', + 'elasticsearch-browser/elasticsearch': '__kbnSharedDeps__.ElasticsearchBrowser', }; diff --git a/packages/kbn-ui-shared-deps/package.json b/packages/kbn-ui-shared-deps/package.json index c76e909d2adbc..e2823f23d0431 100644 --- a/packages/kbn-ui-shared-deps/package.json +++ b/packages/kbn-ui-shared-deps/package.json @@ -1,37 +1,41 @@ { "name": "@kbn/ui-shared-deps", "version": "1.0.0", - "license": "Apache-2.0", "private": true, + "license": "Apache-2.0", "scripts": { "build": "node scripts/build", "kbn:bootstrap": "node scripts/build --dev", "kbn:watch": "node scripts/build --watch" }, - "devDependencies": { + "dependencies": { "@elastic/charts": "^18.1.1", - "abortcontroller-polyfill": "^1.4.0", "@elastic/eui": "21.0.1", - "@kbn/babel-preset": "1.0.0", - "@kbn/dev-utils": "1.0.0", "@kbn/i18n": "1.0.0", - "@yarnpkg/lockfile": "^1.1.0", + "abortcontroller-polyfill": "^1.4.0", "angular": "^1.7.9", - "core-js": "^3.2.1", - "css-loader": "^3.4.2", + "core-js": "^3.6.4", "custom-event-polyfill": "^0.3.0", - "del": "^5.1.0", + "elasticsearch-browser": "^16.7.0", "jquery": "^3.4.1", - "mini-css-extract-plugin": "0.8.0", "moment": "^2.24.0", "moment-timezone": "^0.5.27", + "monaco-editor": "~0.17.0", "react": "^16.12.0", "react-dom": "^16.12.0", "react-intl": "^2.8.0", - "read-pkg": "^5.2.0", + "react-router": 
"^5.1.2", + "react-router-dom": "^5.1.2", "regenerator-runtime": "^0.13.3", + "rxjs": "^6.5.3", "symbol-observable": "^1.2.0", - "webpack": "^4.41.5", "whatwg-fetch": "^3.0.0" + }, + "devDependencies": { + "@kbn/babel-preset": "1.0.0", + "@kbn/dev-utils": "1.0.0", + "css-loader": "^3.4.2", + "del": "^5.1.0", + "webpack": "^4.41.5" } } diff --git a/packages/kbn-ui-shared-deps/webpack.config.js b/packages/kbn-ui-shared-deps/webpack.config.js index dc6e7ae33dbec..a875274544905 100644 --- a/packages/kbn-ui-shared-deps/webpack.config.js +++ b/packages/kbn-ui-shared-deps/webpack.config.js @@ -23,19 +23,19 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const { REPO_ROOT } = require('@kbn/dev-utils'); const webpack = require('webpack'); -const SharedDeps = require('./index'); +const UiSharedDeps = require('./index'); const MOMENT_SRC = require.resolve('moment/min/moment-with-locales.js'); exports.getWebpackConfig = ({ dev = false } = {}) => ({ mode: dev ? 'development' : 'production', entry: { - [SharedDeps.distFilename.replace(/\.js$/, '')]: './entry.js', - [SharedDeps.darkCssDistFilename.replace(/\.css$/, '')]: [ + 'kbn-ui-shared-deps': './entry.js', + 'kbn-ui-shared-deps.dark': [ '@elastic/eui/dist/eui_theme_dark.css', '@elastic/charts/dist/theme_only_dark.css', ], - [SharedDeps.lightCssDistFilename.replace(/\.css$/, '')]: [ + 'kbn-ui-shared-deps.light': [ '@elastic/eui/dist/eui_theme_light.css', '@elastic/charts/dist/theme_only_light.css', ], @@ -43,7 +43,7 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({ context: __dirname, devtool: dev ? '#cheap-source-map' : false, output: { - path: SharedDeps.distDir, + path: UiSharedDeps.distDir, filename: '[name].js', sourceMapFilename: '[file].map', publicPath: '__REPLACE_WITH_PUBLIC_PATH__', @@ -81,6 +81,16 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({ optimization: { noEmitOnErrors: true, + splitChunks: { + cacheGroups: { + 'kbn-ui-shared-deps.@elastic': { + name: 'kbn-ui-shared-deps.@elastic', + test: m => m.resource && m.resource.includes('@elastic'), + chunks: 'all', + enforce: true, + }, + }, + }, }, performance: { diff --git a/packages/kbn-ui-shared-deps/yarn.lock b/packages/kbn-ui-shared-deps/yarn.lock new file mode 120000 index 0000000000000..3f82ebc9cdbae --- /dev/null +++ b/packages/kbn-ui-shared-deps/yarn.lock @@ -0,0 +1 @@ +../../yarn.lock \ No newline at end of file diff --git a/src/legacy/ui/ui_render/bootstrap/template.js.hbs b/src/legacy/ui/ui_render/bootstrap/template.js.hbs index 106dbcd9f8ab2..ad4aa97d8ea7a 100644 --- a/src/legacy/ui/ui_render/bootstrap/template.js.hbs +++ b/src/legacy/ui/ui_render/bootstrap/template.js.hbs @@ -76,24 +76,33 @@ if (window.__kbnStrictCsp__ && window.__kbnCspNotEnforced__) { load({ deps: [ + {{#each sharedJsDepFilenames}} + '{{../regularBundlePath}}/kbn-ui-shared-deps/{{this}}', + {{/each}} + ], + urls: [ { deps: [ - '{{dllBundlePath}}/vendors_runtime.bundle.dll.js' + '{{regularBundlePath}}/kbn-ui-shared-deps/{{sharedJsFilename}}', + { + deps: [ + '{{dllBundlePath}}/vendors_runtime.bundle.dll.js' + ], + urls: [ + {{#each dllJsChunks}} + '{{this}}', + {{/each}} + ] + }, + '{{regularBundlePath}}/commons.bundle.js', ], urls: [ - {{#each dllJsChunks}} + '{{regularBundlePath}}/{{appId}}.bundle.js', + {{#each styleSheetPaths}} '{{this}}', {{/each}} ] - }, - '{{regularBundlePath}}/kbn-ui-shared-deps/{{sharedDepsFilename}}', - '{{regularBundlePath}}/commons.bundle.js', - ], - urls: [ - '{{regularBundlePath}}/{{appId}}.bundle.js', - {{#each styleSheetPaths}} - 
'{{this}}', - {{/each}}, + } ] }); }; diff --git a/src/legacy/ui/ui_render/ui_render_mixin.js b/src/legacy/ui/ui_render/ui_render_mixin.js index 99560b0bf653f..0912d8683fc48 100644 --- a/src/legacy/ui/ui_render/ui_render_mixin.js +++ b/src/legacy/ui/ui_render/ui_render_mixin.js @@ -135,7 +135,8 @@ export function uiRenderMixin(kbnServer, server, config) { dllBundlePath, dllJsChunks, styleSheetPaths, - sharedDepsFilename: UiSharedDeps.distFilename, + sharedJsFilename: UiSharedDeps.jsFilename, + sharedJsDepFilenames: UiSharedDeps.jsDepFilenames, darkMode, }, }); diff --git a/src/plugins/advanced_settings/public/management_app/index.tsx b/src/plugins/advanced_settings/public/management_app/index.tsx deleted file mode 100644 index 53b8f9983aa27..0000000000000 --- a/src/plugins/advanced_settings/public/management_app/index.tsx +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import React from 'react'; -import ReactDOM from 'react-dom'; -import { HashRouter, Switch, Route } from 'react-router-dom'; -import { i18n } from '@kbn/i18n'; -import { I18nProvider } from '@kbn/i18n/react'; -import { AdvancedSettings } from './advanced_settings'; -import { ManagementSetup } from '../../../management/public'; -import { StartServicesAccessor } from '../../../../core/public'; -import { ComponentRegistry } from '../types'; - -const title = i18n.translate('advancedSettings.advancedSettingsLabel', { - defaultMessage: 'Advanced Settings', -}); -const crumb = [{ text: title }]; - -const readOnlyBadge = { - text: i18n.translate('advancedSettings.badge.readOnly.text', { - defaultMessage: 'Read only', - }), - tooltip: i18n.translate('advancedSettings.badge.readOnly.tooltip', { - defaultMessage: 'Unable to save advanced settings', - }), - iconType: 'glasses', -}; - -export async function registerAdvSettingsMgmntApp({ - management, - getStartServices, - componentRegistry, -}: { - management: ManagementSetup; - getStartServices: StartServicesAccessor; - componentRegistry: ComponentRegistry['start']; -}) { - const kibanaSection = management.sections.getSection('kibana'); - if (!kibanaSection) { - throw new Error('`kibana` management section not found.'); - } - - const advancedSettingsManagementApp = kibanaSection.registerApp({ - id: 'settings', - title, - order: 20, - async mount(params) { - params.setBreadcrumbs(crumb); - const [ - { uiSettings, notifications, docLinks, application, chrome }, - ] = await getStartServices(); - - const canSave = application.capabilities.advancedSettings.save as boolean; - - if (!canSave) { - chrome.setBadge(readOnlyBadge); - } - - ReactDOM.render( - - - - - - - - - , - params.element - ); - return () => { - ReactDOM.unmountComponentAtNode(params.element); - }; - }, - }); - const [{ application }] = await 
getStartServices(); - if (!application.capabilities.management.kibana.settings) { - advancedSettingsManagementApp.disable(); - } -} diff --git a/src/plugins/advanced_settings/public/management_app/mount_management_section.tsx b/src/plugins/advanced_settings/public/management_app/mount_management_section.tsx new file mode 100644 index 0000000000000..df44ea45e9d01 --- /dev/null +++ b/src/plugins/advanced_settings/public/management_app/mount_management_section.tsx @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React from 'react'; +import ReactDOM from 'react-dom'; +import { HashRouter, Switch, Route } from 'react-router-dom'; + +import { i18n } from '@kbn/i18n'; +import { I18nProvider } from '@kbn/i18n/react'; +import { StartServicesAccessor } from 'src/core/public'; + +import { AdvancedSettings } from './advanced_settings'; +import { ManagementAppMountParams } from '../../../management/public'; +import { ComponentRegistry } from '../types'; + +const title = i18n.translate('advancedSettings.advancedSettingsLabel', { + defaultMessage: 'Advanced Settings', +}); +const crumb = [{ text: title }]; + +const readOnlyBadge = { + text: i18n.translate('advancedSettings.badge.readOnly.text', { + defaultMessage: 'Read only', + }), + tooltip: i18n.translate('advancedSettings.badge.readOnly.tooltip', { + defaultMessage: 'Unable to save advanced settings', + }), + iconType: 'glasses', +}; + +export async function mountManagementSection( + getStartServices: StartServicesAccessor, + params: ManagementAppMountParams, + componentRegistry: ComponentRegistry['start'] +) { + params.setBreadcrumbs(crumb); + const [{ uiSettings, notifications, docLinks, application, chrome }] = await getStartServices(); + + const canSave = application.capabilities.advancedSettings.save as boolean; + + if (!canSave) { + chrome.setBadge(readOnlyBadge); + } + + ReactDOM.render( + + + + + + + + + , + params.element + ); + return () => { + ReactDOM.unmountComponentAtNode(params.element); + }; +} diff --git a/src/plugins/advanced_settings/public/plugin.ts b/src/plugins/advanced_settings/public/plugin.ts index e9472fbdee0e6..04eeff1e1f3ce 100644 --- a/src/plugins/advanced_settings/public/plugin.ts +++ b/src/plugins/advanced_settings/public/plugin.ts @@ -16,21 +16,37 @@ * specific language governing permissions and limitations * under the License. 
*/ - +import { i18n } from '@kbn/i18n'; import { CoreSetup, CoreStart, Plugin } from 'kibana/public'; +import { ManagementApp } from '../../management/public'; import { ComponentRegistry } from './component_registry'; import { AdvancedSettingsSetup, AdvancedSettingsStart, AdvancedSettingsPluginSetup } from './types'; -import { registerAdvSettingsMgmntApp } from './management_app'; const component = new ComponentRegistry(); +const title = i18n.translate('advancedSettings.advancedSettingsLabel', { + defaultMessage: 'Advanced Settings', +}); + export class AdvancedSettingsPlugin implements Plugin { + private managementApp?: ManagementApp; public setup(core: CoreSetup, { management }: AdvancedSettingsPluginSetup) { - registerAdvSettingsMgmntApp({ - management, - getStartServices: core.getStartServices, - componentRegistry: component.start, + const kibanaSection = management.sections.getSection('kibana'); + if (!kibanaSection) { + throw new Error('`kibana` management section not found.'); + } + + this.managementApp = kibanaSection.registerApp({ + id: 'settings', + title, + order: 20, + async mount(params) { + const { mountManagementSection } = await import( + './management_app/mount_management_section' + ); + return mountManagementSection(core.getStartServices, params, component.start); + }, }); return { @@ -39,6 +55,10 @@ export class AdvancedSettingsPlugin } public start(core: CoreStart) { + if (!core.application.capabilities.management.kibana.settings) { + this.managementApp!.disable(); + } + return { component: component.start, }; diff --git a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts index 0c02b02a25af0..ef6eaa196b06a 100644 --- a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts +++ b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts @@ -46,6 +46,10 @@ describe('parseInterval', () => { validateDuration(parseInterval('5m'), 'm', 5); }); + test('should correctly parse 500m interval', () => { + validateDuration(parseInterval('500m'), 'm', 500); + }); + test('should correctly parse 250ms interval', () => { validateDuration(parseInterval('250ms'), 'ms', 250); }); diff --git a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts index ef1d89e400b72..857c8594720ee 100644 --- a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts +++ b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts @@ -49,6 +49,13 @@ export function parseInterval(interval: string): moment.Duration | null { u => Math.abs(duration.as(u)) >= 1 ) as unitOfTime.Base; + // however if we do this the other way around it will also fail + // going from 500m to hours will result in a non-terminating number (dividing 500/60 = 8.33...) + // so we can only do this if we are changing to smaller units + if (dateMath.units.indexOf(selectedUnit as any) < dateMath.units.indexOf(unit as any)) { + return duration; + } + return moment.duration(duration.as(selectedUnit), selectedUnit); } catch (e) { return null; diff --git a/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts b/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts index 5626fc80bb749..dc8321aa07004 100644 --- a/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts +++
b/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts @@ -17,25 +17,6 @@ * under the License. */ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - import { ValidationFunc } from '../../hook_form_lib'; import { isJSON } from '../../../validators/string'; import { ERROR_CODE } from './types'; diff --git a/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap b/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap index 57cbe0f17498f..c1dc560b4353f 100644 --- a/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap +++ b/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap @@ -104,6 +104,7 @@ exports[`apmUiEnabled 1`] = ` { footer={ diff --git a/src/plugins/home/public/application/components/feature_directory.js b/src/plugins/home/public/application/components/feature_directory.js index 2e979bf589975..7d827b1ca9229 100644 --- a/src/plugins/home/public/application/components/feature_directory.js +++ b/src/plugins/home/public/application/components/feature_directory.js @@ -89,6 +89,7 @@ export class FeatureDirectory extends React.Component { renderTabs = () => { return this.tabs.map((tab, index) => ( this.onSelectedTabChanged(tab.id)} isSelected={tab.id === this.state.selectedTabId} key={index} diff --git a/src/plugins/home/public/application/components/home.js b/src/plugins/home/public/application/components/home.js index 77cde6a574aec..5263dc06e96fc 100644 --- a/src/plugins/home/public/application/components/home.js +++ b/src/plugins/home/public/application/components/home.js @@ -203,7 +203,7 @@ export class Home extends Component {

- + `http://localhost:5610/bundles/kbn-ui-shared-deps/${chunkFilename}` + ), + `http://localhost:5610/bundles/kbn-ui-shared-deps/${UiSharedDeps.jsFilename}`, + 'http://localhost:5610/built_assets/dlls/vendors_runtime.bundle.dll.js', ...DllCompiler.getRawDllConfig().chunks.map( chunk => `http://localhost:5610/built_assets/dlls/vendors${chunk}.bundle.dll.js` diff --git a/test/accessibility/apps/management.ts b/test/accessibility/apps/management.ts index ac2921ed063f5..9e75250403d6b 100644 --- a/test/accessibility/apps/management.ts +++ b/test/accessibility/apps/management.ts @@ -35,7 +35,8 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { // await PageObjects.common.navigateToApp('settings'); // }); - describe('Management', () => { + // FLAKY: https://github.com/elastic/kibana/issues/60470 + describe.skip('Management', () => { before(async () => { await esArchiver.load('discover'); await esArchiver.loadIfNeeded('logstash_functional'); diff --git a/test/functional/apps/discover/_doc_navigation.js b/test/functional/apps/discover/_doc_navigation.js index f0a7844b29987..08e0cb0b8d23a 100644 --- a/test/functional/apps/discover/_doc_navigation.js +++ b/test/functional/apps/discover/_doc_navigation.js @@ -31,7 +31,8 @@ export default function({ getService, getPageObjects }) { const PageObjects = getPageObjects(['common', 'discover', 'timePicker']); const esArchiver = getService('esArchiver'); - describe('doc link in discover', function contextSize() { + // FLAKY: https://github.com/elastic/kibana/issues/62281 + describe.skip('doc link in discover', function contextSize() { this.tags('smoke'); before(async function() { await esArchiver.loadIfNeeded('logstash_functional'); diff --git a/test/functional/apps/discover/_field_visualize.ts b/test/functional/apps/discover/_field_visualize.ts index 24f4ba592324c..f8f290b259b7e 100644 --- a/test/functional/apps/discover/_field_visualize.ts +++ b/test/functional/apps/discover/_field_visualize.ts @@ -32,8 +32,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { defaultIndex: 'logstash-*', }; - // FLAKY: https://github.com/elastic/kibana/issues/61714 - describe.skip('discover field visualize button', () => { + describe('discover field visualize button', () => { before(async function() { log.debug('load kibana index with default index pattern'); await esArchiver.load('discover'); @@ -50,7 +49,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { }); it('should visualize a field in area chart', async () => { - await PageObjects.discover.clickFieldListItem('phpmemory'); + await PageObjects.discover.findFieldByName('phpmemory'); log.debug('visualize a phpmemory field'); await PageObjects.discover.clickFieldListItemVisualize('phpmemory'); await PageObjects.header.waitUntilLoadingHasFinished(); @@ -83,7 +82,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { it('should preserve app filters in visualize', async () => { await filterBar.addFilter('bytes', 'is between', '3500', '4000'); - await PageObjects.discover.clickFieldListItem('geo.src'); + await PageObjects.discover.findFieldByName('geo.src'); log.debug('visualize a geo.src field with filter applied'); await PageObjects.discover.clickFieldListItemVisualize('geo.src'); await PageObjects.header.waitUntilLoadingHasFinished(); @@ -119,7 +118,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { it('should preserve query in visualize', async () => { await 
queryBar.setQuery('machine.os : ios'); await queryBar.submitQuery(); - await PageObjects.discover.clickFieldListItem('geo.dest'); + await PageObjects.discover.findFieldByName('geo.dest'); log.debug('visualize a geo.dest field with query applied'); await PageObjects.discover.clickFieldListItemVisualize('geo.dest'); await PageObjects.header.waitUntilLoadingHasFinished(); diff --git a/test/functional/page_objects/discover_page.ts b/test/functional/page_objects/discover_page.ts index 10652ce3ec4b2..2377c32a80b5b 100644 --- a/test/functional/page_objects/discover_page.ts +++ b/test/functional/page_objects/discover_page.ts @@ -40,6 +40,11 @@ export function DiscoverPageProvider({ getService, getPageObjects }: FtrProvider return await el.getVisibleText(); } + public async findFieldByName(name: string) { + const fieldSearch = await testSubjects.find('fieldFilterSearchInput'); + await fieldSearch.type(name); + } + public async saveSearch(searchName: string) { log.debug('saveSearch'); await this.clickSaveSearchButton(); @@ -239,10 +244,16 @@ export function DiscoverPageProvider({ getService, getPageObjects }: FtrProvider await testSubjects.click(`fieldToggle-${field}`); } - public async clickFieldListItemVisualize(field: string) { - return await retry.try(async () => { - await testSubjects.click(`fieldVisualize-${field}`); - }); + public async clickFieldListItemVisualize(fieldName: string) { + const field = await testSubjects.find(`field-${fieldName}-showDetails`); + const isActive = await field.elementHasClass('dscSidebarItem--active'); + + if (!isActive) { + // expand the field to show the "Visualize" button + await field.click(); + } + + await testSubjects.click(`fieldVisualize-${fieldName}`); } public async expectFieldListItemVisualize(field: string) { diff --git a/test/functional/page_objects/home_page.ts b/test/functional/page_objects/home_page.ts index 6225b4e3aca62..6fdc306e39192 100644 --- a/test/functional/page_objects/home_page.ts +++ b/test/functional/page_objects/home_page.ts @@ -79,6 +79,39 @@ export function HomePageProvider({ getService, getPageObjects }: FtrProviderCont await testSubjects.click(`launchSampleDataSet${id}`); } + async clickAllKibanaPlugins() { + await testSubjects.click('allPlugins'); + } + + async clickVisualizeExplorePlugins() { + await testSubjects.click('tab-data'); + } + + async clickAdminPlugin() { + await testSubjects.click('tab-admin'); + } + + async clickOnConsole() { + await testSubjects.click('homeSynopsisLinkconsole'); + } + async clickOnLogo() { + await testSubjects.click('logo'); + } + + async ClickOnLogsData() { + await testSubjects.click('logsData'); + } + + // clicks on Active MQ logs + async clickOnLogsTutorial() { + await testSubjects.click('homeSynopsisLinkactivemq logs'); + } + + // clicks on cloud tutorial link + async clickOnCloudTutorial() { + await testSubjects.click('onCloudTutorial'); + } + async loadSavedObjects() { await retry.try(async () => { await testSubjects.click('loadSavedObjects'); diff --git a/vars/workers.groovy b/vars/workers.groovy index c5638f2624fe5..1c55c676d9425 100644 --- a/vars/workers.groovy +++ b/vars/workers.groovy @@ -1,23 +1,38 @@ // "Workers" in this file will spin up an instance, do some setup etc depending on the configuration, and then execute some work that you define // e.g. 
workers.base(name: 'my-worker') { sh "echo 'ready to execute some kibana scripts'" } +def label(size) { + switch(size) { + case 's': + return 'linux && immutable' + case 'l': + return 'tests-l' + case 'xl': + return 'tests-xl' + case 'xxl': + return 'tests-xxl' + } + + error "unknown size '${size}'" +} + /* The base worker that all of the others use. Will clone the scm (assumed to be kibana), and run kibana bootstrap processes by default. Parameters: - label - gobld/agent label to use, e.g. 'linux && immutable' + size - size of worker label to use, e.g. 's' or 'xl' ramDisk - Should the workspace be mounted in memory? Default: true bootstrapped - If true, download kibana dependencies, run kbn bootstrap, etc. Default: true name - Name of the worker for display purposes, filenames, etc. scm - Jenkins scm configuration for checking out code. Use `null` to disable checkout. Default: inherited from job */ def base(Map params, Closure closure) { - def config = [label: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params - if (!config.label) { - error "You must specify an agent label, such as 'tests-xl' or 'linux && immutable', when using workers.base()" + def config = [size: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params + if (!config.size) { + error "You must specify an agent size, such as 'xl' or 's', when using workers.base()" } - node(config.label) { + node(label(config.size)) { agentInfo.print() if (config.ramDisk) { @@ -88,7 +103,7 @@ def ci(Map params, Closure closure) { // Worker for running the current intake jobs. Just runs a single script after bootstrap. def intake(jobName, String script) { return { - ci(name: jobName, label: 'linux && immutable', ramDisk: false) { + ci(name: jobName, size: 's', ramDisk: false) { withEnv(["JOB=${jobName}"]) { runbld(script, "Execute ${jobName}") } @@ -99,7 +114,7 @@ def intake(jobName, String script) { // Worker for running functional tests. Runs a setup process (e.g. the kibana build) then executes a map of closures in parallel (e.g. one for each ciGroup) def functional(name, Closure setup, Map processes) { return { - parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, label: 'tests-xl') + parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, size: 'xl') } } @@ -111,12 +126,12 @@ def functional(name, Closure setup, Map processes) { setup: Closure to execute after the agent is bootstrapped, before starting the parallel work processes: Map of closures that will execute in parallel after setup. Each closure is passed a unique number. delayBetweenProcesses: Number of seconds to wait between starting the parallel processes. Useful to spread the load of heavy init processes, e.g. Elasticsearch starting up. Default: 0 - label: gobld/agent label to use, e.g. 'linux && immutable'. Default: 'tests-xl', a 32 CPU machine used for running many functional test suites in parallel + size: size of worker label to use, e.g. 
's' or 'xl' */ def parallelProcesses(Map params) { - def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, label: 'tests-xl'] + params + def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, size: 'xl'] + params - ci(label: config.label, name: config.name) { + ci(size: config.size, name: config.name) { config.setup() def nextProcessNumber = 1 diff --git a/webpackShims/elasticsearch-browser.js b/webpackShims/elasticsearch-browser.js deleted file mode 100644 index a4373dcdfe1d1..0000000000000 --- a/webpackShims/elasticsearch-browser.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -require('angular'); -module.exports = require('elasticsearch-browser/elasticsearch.angular.js'); diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts index 949264fcc9fdb..b0083eb4f87e2 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts @@ -57,7 +57,7 @@ export const WorkpadExport = compose( ({ workpad, pageCount, kibana }: Props & WithKibanaProps): ComponentProps => ({ getExportUrl: type => { if (type === 'pdf') { - const pdfUrl = getPdfUrl(workpad, { pageCount }, kibana.services.http.basePath.prepend); + const pdfUrl = getPdfUrl(workpad, { pageCount }, kibana.services.http.basePath); return getAbsoluteUrl(pdfUrl); } @@ -78,7 +78,7 @@ export const WorkpadExport = compose( onExport: type => { switch (type) { case 'pdf': - return createPdf(workpad, { pageCount }, kibana.services.http.basePath.prepend) + return createPdf(workpad, { pageCount }, kibana.services.http.basePath) .then(({ data }: { data: { job: { id: string } } }) => { notify.info(strings.getExportPDFMessage(), { title: strings.getExportPDFTitle(workpad.name), diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts index ceaf82c1c07d6..6c7d7ddd0a793 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts @@ -9,29 +9,34 @@ jest.mock('../../../../common/lib/fetch'); import { getPdfUrl, createPdf } from './utils'; import { workpads } from '../../../../__tests__/fixtures/workpads'; import { fetch } from '../../../../common/lib/fetch'; +import { IBasePath } from 'kibana/public'; -const addBasePath = jest.fn().mockImplementation(s => `basepath/${s}`); 
+const basePath = ({ + prepend: jest.fn().mockImplementation(s => `basepath/s/spacey/${s}`), + get: () => 'basepath/s/spacey', + serverBasePath: `basepath`, +} as unknown) as IBasePath; const workpad = workpads[0]; test('getPdfUrl returns the correct url', () => { - const url = getPdfUrl(workpad, { pageCount: 2 }, addBasePath); + const url = getPdfUrl(workpad, { pageCount: 2 }, basePath); expect(url).toMatchInlineSnapshot( - `"basepath//api/reporting/generate/printablePdf?jobParams=(browserTimezone:America%2FPhoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas%20workpad',relativeUrls:!(%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F1,%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F2),title:'base%20workpad')"` + `"basepath/s/spacey//api/reporting/generate/printablePdf?jobParams=(browserTimezone:America%2FPhoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas%20workpad',relativeUrls:!(%2Fs%2Fspacey%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F1,%2Fs%2Fspacey%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F2),title:'base%20workpad')"` ); }); test('createPdf posts to create the pdf', () => { - createPdf(workpad, { pageCount: 2 }, addBasePath); + createPdf(workpad, { pageCount: 2 }, basePath); expect(fetch.post).toBeCalled(); const args = (fetch.post as jest.MockedFunction).mock.calls[0]; - expect(args[0]).toMatchInlineSnapshot(`"basepath//api/reporting/generate/printablePdf"`); + expect(args[0]).toMatchInlineSnapshot(`"basepath/s/spacey//api/reporting/generate/printablePdf"`); expect(args[1]).toMatchInlineSnapshot(` Object { - "jobParams": "(browserTimezone:America/Phoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas workpad',relativeUrls:!(/app/canvas#/export/workpad/pdf/base-workpad/page/1,/app/canvas#/export/workpad/pdf/base-workpad/page/2),title:'base workpad')", + "jobParams": "(browserTimezone:America/Phoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas workpad',relativeUrls:!(/s/spacey/app/canvas#/export/workpad/pdf/base-workpad/page/1,/s/spacey/app/canvas#/export/workpad/pdf/base-workpad/page/2),title:'base workpad')", } `); }); diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts index 5adbf4ce66c13..dc99c0687f388 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts @@ -6,6 +6,7 @@ import rison from 'rison-node'; // @ts-ignore Untyped local. 
+import { IBasePath } from 'kibana/public'; import { fetch } from '../../../../common/lib/fetch'; import { CanvasWorkpad } from '../../../../types'; import { url } from '../../../../../../../../src/plugins/kibana_utils/public'; @@ -17,9 +18,7 @@ interface PageCount { pageCount: number; } -type AddBasePath = (url: string) => string; - -type Arguments = [CanvasWorkpad, PageCount, AddBasePath]; +type Arguments = [CanvasWorkpad, PageCount, IBasePath]; interface PdfUrlData { createPdfUri: string; @@ -29,10 +28,11 @@ interface PdfUrlData { function getPdfUrlParts( { id, name: title, width, height }: CanvasWorkpad, { pageCount }: PageCount, - addBasePath: (path: string) => string + basePath: IBasePath ): PdfUrlData { - const reportingEntry = addBasePath('/api/reporting/generate'); - const canvasEntry = '/app/canvas#'; + const reportingEntry = basePath.prepend('/api/reporting/generate'); + const urlPrefix = basePath.get().replace(basePath.serverBasePath, ''); // for Spaces prefix, which is included in basePath.get() + const canvasEntry = `${urlPrefix}/app/canvas#`; // The viewport in Reporting by specifying the dimensions. In order for things to work, // we need a viewport that will include all of the pages in the workpad. The viewport diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js index bf57306df5697..eadaf42ca694d 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js @@ -99,6 +99,7 @@ export class ColorMapSelect extends Component { ); } else @@ -108,6 +109,7 @@ export class ColorMapSelect extends Component { field={this.props.styleProperty.getField()} getValueSuggestions={this.props.styleProperty.getValueSuggestions} onChange={this._onCustomColorMapChange} + swatches={this.props.swatches} /> ); diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js index 059543d705fc7..20fd97a229352 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js @@ -8,61 +8,8 @@ import _ from 'lodash'; import React from 'react'; import { removeRow, isColorInvalid } from './color_stops_utils'; import { i18n } from '@kbn/i18n'; -import { EuiButtonIcon, EuiColorPicker, EuiFlexGroup, EuiFlexItem, EuiFormRow } from '@elastic/eui'; - -function getColorStopRow({ index, errors, stopInput, onColorChange, color, deleteButton, onAdd }) { - const colorPickerButtons = ( -
- {deleteButton} - -
- ); - return ( - - - - {stopInput} - - - - - - - ); -} - -export function getDeleteButton(onRemove) { - return ( - - ); -} +import { EuiButtonIcon, EuiFlexGroup, EuiFlexItem, EuiFormRow } from '@elastic/eui'; +import { MbValidatedColorPicker } from './mb_validated_color_picker'; export const ColorStops = ({ onChange, @@ -72,6 +19,7 @@ export const ColorStops = ({ renderStopInput, addNewRow, canDeleteStop, + swatches, }) => { function getStopInput(stop, index) { const onStopChange = newStopValue => { @@ -134,10 +82,56 @@ export const ColorStops = ({ isInvalid: isStopsInvalid(newColorStops), }); }; - deleteButton = getDeleteButton(onRemove); + deleteButton = ( + + ); } - return getColorStopRow({ index, errors, stopInput, onColorChange, color, deleteButton, onAdd }); + const colorPickerButtons = ( +
+ {deleteButton} + +
+ ); + return ( + + + + {stopInput} + + + + + + + ); }); return
{rows}
; diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js index edf230b0a945c..0656173e5c411 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js @@ -27,6 +27,7 @@ export const ColorStopsCategorical = ({ field, onChange, getValueSuggestions, + swatches, }) => { const getStopError = (stop, index) => { let count = 0; @@ -81,6 +82,7 @@ export const ColorStopsCategorical = ({ renderStopInput={renderStopInput} canDeleteStop={canDeleteStop} addNewRow={addCategoricalRow} + swatches={swatches} /> ); }; diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js index 0f6a0583d3dbc..4e2d07b9dfea0 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js @@ -20,6 +20,7 @@ import { i18n } from '@kbn/i18n'; export const ColorStopsOrdinal = ({ colorStops = [{ stop: 0, color: DEFAULT_CUSTOM_COLOR }], onChange, + swatches, }) => { const getStopError = (stop, index) => { let error; @@ -69,6 +70,7 @@ export const ColorStopsOrdinal = ({ renderStopInput={renderStopInput} canDeleteStop={canDeleteStop} addNewRow={addOrdinalRow} + swatches={swatches} /> ); }; diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js index 5e8f720fcc5e3..460e7379920c4 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js @@ -18,6 +18,7 @@ export function DynamicColorForm({ onDynamicStyleChange, staticDynamicSelect, styleProperty, + swatches, }) { const styleOptions = styleProperty.getOptions(); @@ -101,6 +102,7 @@ export function DynamicColorForm({ useCustomColorMap={_.get(styleOptions, 'useCustomColorRamp', false)} styleProperty={styleProperty} showColorMapTypeToggle={showColorMapTypeToggle} + swatches={swatches} /> ); } else if (styleProperty.isCategorical()) { @@ -118,6 +120,7 @@ export function DynamicColorForm({ useCustomColorMap={_.get(styleOptions, 'useCustomColorPalette', false)} styleProperty={styleProperty} showColorMapTypeToggle={showColorMapTypeToggle} + swatches={swatches} /> ); } diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/mb_validated_color_picker.tsx b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/mb_validated_color_picker.tsx new file mode 100644 index 0000000000000..b4fad6690b9ac --- /dev/null +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/mb_validated_color_picker.tsx @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React, { Component } from 'react'; +import { isValidHex, EuiColorPicker, EuiFormControlLayoutProps } from '@elastic/eui'; + +export const RGBA_0000 = 'rgba(0,0,0,0)'; + +interface Props { + onChange: (color: string) => void; + color: string; + swatches?: string[]; + append?: EuiFormControlLayoutProps['append']; +} + +interface State { + colorInputValue: string; +} + +// EuiColorPicker treats '' or invalid colors as transparent. +// Mapbox logs errors for '' or invalid colors. +// MbValidatedColorPicker is a wrapper around EuiColorPicker that reconciles the behavior difference +// between the two by returning a Mapbox safe RGBA_0000 for '' or invalid colors +// while keeping invalid state local so EuiColorPicker's input properly handles text input. +export class MbValidatedColorPicker extends Component { + state = { + colorInputValue: this.props.color === RGBA_0000 ? '' : this.props.color, + }; + + _onColorChange = (color: string) => { + // reflect all user input, whether valid or not + this.setState({ colorInputValue: color }); + // Only surface mapbox valid input to caller + this.props.onChange(isValidHex(color) ? color : RGBA_0000); + }; + + render() { + return ( + + ); + } +} diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js index ab1634a53a966..a295556ee3126 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js @@ -5,7 +5,8 @@ */ import React from 'react'; -import { EuiColorPicker, EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; +import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; +import { MbValidatedColorPicker } from './mb_validated_color_picker'; export function StaticColorForm({ onStaticStyleChange, @@ -23,11 +24,10 @@ export function StaticColorForm({ {staticDynamicSelect} - diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js index 146bc40aa8531..e671f00b78381 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js @@ -18,11 +18,10 @@ import { EuiTextColor, } from '@elastic/eui'; import { Category } from '../components/legend/category'; -import { COLOR_MAP_TYPE } from '../../../../../common/constants'; +import { COLOR_MAP_TYPE, RGBA_0000 } from '../../../../../common/constants'; import { isCategoricalStopsInvalid } from '../components/color/color_stops_utils'; const EMPTY_STOPS = { stops: [], defaultColor: null }; -const RGBA_0000 = 'rgba(0,0,0,0)'; export class DynamicColorProperty extends DynamicStyleProperty { syncCircleColorWithMb(mbLayerId, mbMap, alpha) { diff --git a/x-pack/legacy/plugins/reporting/common/constants.ts b/x-pack/legacy/plugins/reporting/common/constants.ts index 1746345879192..8f7a06ba9f8e9 100644 --- a/x-pack/legacy/plugins/reporting/common/constants.ts +++ b/x-pack/legacy/plugins/reporting/common/constants.ts @@ -27,6 +27,9 @@ export const WHITELISTED_JOB_CONTENT_TYPES = [ 'image/png', ]; +// See: +// https://github.com/chromium/chromium/blob/3611052c055897e5ebbc5b73ea295092e0c20141/services/network/public/cpp/header_util_unittest.cc#L50 +// For a 
list of headers that chromium doesn't like export const KBN_SCREENSHOT_HEADER_BLACKLIST = [ 'accept-encoding', 'connection', @@ -38,8 +41,14 @@ export const KBN_SCREENSHOT_HEADER_BLACKLIST = [ // only for a single transport-level connection, and shouldn't // be stored by caches or forwarded by proxies. 'transfer-encoding', + 'trailer', + 'te', + 'upgrade', + 'keep-alive', ]; +export const KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN = ['proxy-']; + export const UI_SETTINGS_CUSTOM_PDF_LOGO = 'xpackReporting:customPdfLogo'; /** diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts index 468caf93ec5dd..9085fb3cbc876 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts @@ -5,33 +5,27 @@ */ import { cryptoFactory } from '../../../server/lib/crypto'; -import { createMockServer } from '../../../test_helpers'; import { Logger } from '../../../types'; import { decryptJobHeaders } from './decrypt_job_headers'; -let mockServer: any; -beforeEach(() => { - mockServer = createMockServer(''); -}); - -const encryptHeaders = async (headers: Record) => { - const crypto = cryptoFactory(mockServer); +const encryptHeaders = async (encryptionKey: string, headers: Record) => { + const crypto = cryptoFactory(encryptionKey); return await crypto.encrypt(headers); }; describe('headers', () => { test(`fails if it can't decrypt headers`, async () => { - await expect( + const getDecryptedHeaders = () => decryptJobHeaders({ + encryptionKey: 'abcsecretsauce', job: { headers: 'Q53+9A+zf+Xe+ceR/uB/aR/Sw/8e+M+qR+WiG+8z+EY+mo+HiU/zQL+Xn', }, logger: ({ error: jest.fn(), } as unknown) as Logger, - server: mockServer, - }) - ).rejects.toMatchInlineSnapshot( + }); + await expect(getDecryptedHeaders()).rejects.toMatchInlineSnapshot( `[Error: Failed to decrypt report job data. Please ensure that xpack.reporting.encryptionKey is set and re-generate this report. 
Error: Invalid IV length]` ); }); @@ -42,15 +36,15 @@ describe('headers', () => { baz: 'quix', }; - const encryptedHeaders = await encryptHeaders(headers); + const encryptedHeaders = await encryptHeaders('abcsecretsauce', headers); const decryptedHeaders = await decryptJobHeaders({ + encryptionKey: 'abcsecretsauce', job: { title: 'cool-job-bro', type: 'csv', headers: encryptedHeaders, }, logger: {} as Logger, - server: mockServer, }); expect(decryptedHeaders).toEqual(headers); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts index 436b2c2dab1ad..6f415d7ee5ea9 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts @@ -6,7 +6,7 @@ import { i18n } from '@kbn/i18n'; import { cryptoFactory } from '../../../server/lib/crypto'; -import { CryptoFactory, ServerFacade, Logger } from '../../../types'; +import { CryptoFactory, Logger } from '../../../types'; interface HasEncryptedHeaders { headers?: string; @@ -17,15 +17,15 @@ export const decryptJobHeaders = async < JobParamsType, JobDocPayloadType extends HasEncryptedHeaders >({ - server, + encryptionKey, job, logger, }: { - server: ServerFacade; + encryptionKey?: string; job: JobDocPayloadType; logger: Logger; }): Promise> => { - const crypto: CryptoFactory = cryptoFactory(server); + const crypto: CryptoFactory = cryptoFactory(encryptionKey); try { const decryptedHeaders: Record = await crypto.decrypt(job.headers); return decryptedHeaders; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts index eedb742ad7597..5f5fc94eee830 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts @@ -4,27 +4,32 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { createMockReportingCore, createMockServer } from '../../../test_helpers'; -import { ReportingCore } from '../../../server'; +import sinon from 'sinon'; +import { createMockReportingCore } from '../../../test_helpers'; +import { ReportingConfig, ReportingCore } from '../../../server/types'; import { JobDocPayload } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getConditionalHeaders, getCustomLogo } from './index'; +let mockConfig: ReportingConfig; let mockReportingPlugin: ReportingCore; -let mockServer: any; + +const getMockConfig = (mockConfigGet: sinon.SinonStub) => ({ + get: mockConfigGet, + kbnConfig: { get: mockConfigGet }, +}); + beforeEach(async () => { - mockReportingPlugin = await createMockReportingCore(); - mockServer = createMockServer(''); + const mockConfigGet = sinon + .stub() + .withArgs('kibanaServer', 'hostname') + .returns('custom-hostname'); + mockConfig = getMockConfig(mockConfigGet); + mockReportingPlugin = await createMockReportingCore(mockConfig); }); describe('conditions', () => { test(`uses hostname from reporting config if set`, async () => { - const settings: any = { - 'xpack.reporting.kibanaServer.hostname': 'custom-hostname', - }; - - mockServer = createMockServer({ settings }); - const permittedHeaders = { foo: 'bar', baz: 'quix', @@ -33,121 +38,20 @@ describe('conditions', () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); expect(conditionalHeaders.conditions.hostname).toEqual( - mockServer.config().get('xpack.reporting.kibanaServer.hostname') + mockConfig.get('kibanaServer', 'hostname') ); - }); - - test(`uses hostname from server.config if reporting config not set`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.hostname).toEqual(mockServer.config().get('server.host')); - }); - - test(`uses port from reporting config if set`, async () => { - const settings = { - 'xpack.reporting.kibanaServer.port': 443, - }; - - mockServer = createMockServer({ settings }); - - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.port).toEqual( - mockServer.config().get('xpack.reporting.kibanaServer.port') + expect(conditionalHeaders.conditions.port).toEqual(mockConfig.get('kibanaServer', 'port')); + expect(conditionalHeaders.conditions.protocol).toEqual( + mockConfig.get('kibanaServer', 'protocol') ); - }); - - test(`uses port from server if reporting config not set`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.port).toEqual(mockServer.config().get('server.port')); - }); - - test(`uses basePath from server config`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - 
expect(conditionalHeaders.conditions.basePath).toEqual( - mockServer.config().get('server.basePath') + mockConfig.kbnConfig.get('server', 'basePath') ); }); - - test(`uses protocol from reporting config if set`, async () => { - const settings = { - 'xpack.reporting.kibanaServer.protocol': 'https', - }; - - mockServer = createMockServer({ settings }); - - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.protocol).toEqual( - mockServer.config().get('xpack.reporting.kibanaServer.protocol') - ); - }); - - test(`uses protocol from server.info`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.protocol).toEqual(mockServer.info.protocol); - }); }); test('uses basePath from job when creating saved object service', async () => { @@ -161,14 +65,14 @@ test('uses basePath from job when creating saved object service', async () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); const jobBasePath = '/sbp/s/marketing'; await getCustomLogo({ reporting: mockReportingPlugin, job: { basePath: jobBasePath } as JobDocPayloadPDF, conditionalHeaders, - server: mockServer, + config: mockConfig, }); const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; @@ -179,6 +83,11 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav const mockGetSavedObjectsClient = jest.fn(); mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; + const mockConfigGet = sinon.stub(); + mockConfigGet.withArgs('kibanaServer', 'hostname').returns('localhost'); + mockConfigGet.withArgs('server', 'basePath').returns('/sbp'); + mockConfig = getMockConfig(mockConfigGet); + const permittedHeaders = { foo: 'bar', baz: 'quix', @@ -186,14 +95,14 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); await getCustomLogo({ reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, - server: mockServer, + config: mockConfig, }); const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; @@ -225,19 +134,26 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav describe('config formatting', () => { test(`lowercases server.host`, async () => { - mockServer = createMockServer({ settings: { 'server.host': 'COOL-HOSTNAME' } }); + const mockConfigGet = sinon + .stub() + .withArgs('server', 'host') + .returns('COOL-HOSTNAME'); + mockConfig = getMockConfig(mockConfigGet); + const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: {}, - server: mockServer, + config: mockConfig, }); expect(conditionalHeaders.conditions.hostname).toEqual('cool-hostname'); }); - test(`lowercases xpack.reporting.kibanaServer.hostname`, async () => { - mockServer = createMockServer({ - settings: { 'xpack.reporting.kibanaServer.hostname': 'GREAT-HOSTNAME' }, - }); + 
test(`lowercases kibanaServer.hostname`, async () => { + const mockConfigGet = sinon + .stub() + .withArgs('kibanaServer', 'hostname') + .returns('GREAT-HOSTNAME'); + mockConfig = getMockConfig(mockConfigGet); const conditionalHeaders = await getConditionalHeaders({ job: { title: 'cool-job-bro', @@ -249,7 +165,7 @@ describe('config formatting', () => { }, }, filteredHeaders: {}, - server: mockServer, + config: mockConfig, }); expect(conditionalHeaders.conditions.hostname).toEqual('great-hostname'); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts index 975060a8052f0..bd7999d697ca9 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts @@ -3,29 +3,31 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { ConditionalHeaders, ServerFacade } from '../../../types'; + +import { ReportingConfig } from '../../../server/types'; +import { ConditionalHeaders } from '../../../types'; export const getConditionalHeaders = ({ - server, + config, job, filteredHeaders, }: { - server: ServerFacade; + config: ReportingConfig; job: JobDocPayloadType; filteredHeaders: Record; }) => { - const config = server.config(); + const { kbnConfig } = config; const [hostname, port, basePath, protocol] = [ - config.get('xpack.reporting.kibanaServer.hostname') || config.get('server.host'), - config.get('xpack.reporting.kibanaServer.port') || config.get('server.port'), - config.get('server.basePath'), - config.get('xpack.reporting.kibanaServer.protocol') || server.info.protocol, + config.get('kibanaServer', 'hostname'), + config.get('kibanaServer', 'port'), + kbnConfig.get('server', 'basePath'), + config.get('kibanaServer', 'protocol'), ] as [string, number, string, string]; const conditionalHeaders: ConditionalHeaders = { headers: filteredHeaders, conditions: { - hostname: hostname.toLowerCase(), + hostname: hostname ? 
hostname.toLowerCase() : hostname, port, basePath, protocol, diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts index fa53f474dfba7..2cbde69c81316 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts @@ -5,16 +5,18 @@ */ import { ReportingCore } from '../../../server'; -import { createMockReportingCore, createMockServer } from '../../../test_helpers'; -import { ServerFacade } from '../../../types'; +import { createMockReportingCore } from '../../../test_helpers'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getConditionalHeaders, getCustomLogo } from './index'; +const mockConfigGet = jest.fn().mockImplementation((key: string) => { + return 'localhost'; +}); +const mockConfig = { get: mockConfigGet, kbnConfig: { get: mockConfigGet } }; + let mockReportingPlugin: ReportingCore; -let mockServer: ServerFacade; beforeEach(async () => { - mockReportingPlugin = await createMockReportingCore(); - mockServer = createMockServer(''); + mockReportingPlugin = await createMockReportingCore(mockConfig); }); test(`gets logo from uiSettings`, async () => { @@ -37,14 +39,14 @@ test(`gets logo from uiSettings`, async () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayloadPDF, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); const { logo } = await getCustomLogo({ reporting: mockReportingPlugin, + config: mockConfig, job: {} as JobDocPayloadPDF, conditionalHeaders, - server: mockServer, }); expect(mockGet).toBeCalledWith('xpackReporting:customPdfLogo'); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts index 7af5edab41ab7..a13f992e7867c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts @@ -5,23 +5,22 @@ */ import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../common/constants'; -import { ReportingCore } from '../../../server'; -import { ConditionalHeaders, ServerFacade } from '../../../types'; +import { ReportingConfig, ReportingCore } from '../../../server/types'; +import { ConditionalHeaders } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only export const getCustomLogo = async ({ reporting, - server, + config, job, conditionalHeaders, }: { reporting: ReportingCore; - server: ServerFacade; + config: ReportingConfig; job: JobDocPayloadPDF; conditionalHeaders: ConditionalHeaders; }) => { - const serverBasePath: string = server.config().get('server.basePath'); - + const serverBasePath: string = config.kbnConfig.get('server', 'basePath'); const fakeRequest: any = { headers: conditionalHeaders.headers, // This is used by the spaces SavedObjectClientWrapper to determine the existing space. 
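The reporting changes in this patch replace the legacy server.config().get('xpack.reporting.*') lookups with a two-level ReportingConfig accessor: config.get(...) resolves reporting-scoped keys such as ('kibanaServer', 'hostname'), while config.kbnConfig.get(...) resolves Kibana core keys such as ('server', 'basePath'). The short TypeScript sketch below illustrates that accessor shape and a test double in the spirit of the getMockConfig helpers used in these test files; the names ReportingConfigLike, ConfigValue, and makeMockConfig are illustrative and not part of this PR, and the dotted-key lookup table mirrors the one set up in get_full_urls.test.ts.

    // Sketch only: the config surface the refactored reporting code reads from.
    type ConfigValue = string | number | object | undefined;

    interface ReportingConfigLike {
      get: (...keys: string[]) => ConfigValue; // reporting keys, e.g. ('kibanaServer', 'hostname')
      kbnConfig: { get: (...keys: string[]) => ConfigValue }; // core keys, e.g. ('server', 'basePath')
    }

    // Hypothetical test helper: both accessors share one dotted-key lookup table.
    const makeMockConfig = (values: Record<string, ConfigValue>): ReportingConfigLike => {
      const get = (...keys: string[]) => values[keys.join('.')];
      return { get, kbnConfig: { get } };
    };

    const mockConfig = makeMockConfig({
      'kibanaServer.hostname': 'localhost',
      'kibanaServer.port': 5601,
      'kibanaServer.protocol': 'http',
      'server.basePath': '/sbp',
    });

    mockConfig.get('kibanaServer', 'hostname'); // 'localhost'
    mockConfig.kbnConfig.get('server', 'basePath'); // '/sbp'

Reading the values this way matches how getConditionalHeaders and getFullUrls consume the config after the refactor, which is why a single stub backing both get functions is enough for the tests.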
diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts index 27e772195f726..5f55617724ff6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts @@ -4,29 +4,41 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createMockServer } from '../../../test_helpers'; -import { ServerFacade } from '../../../types'; +import { ReportingConfig } from '../../../server'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getFullUrls } from './get_full_urls'; interface FullUrlsOpts { job: JobDocPayloadPNG & JobDocPayloadPDF; - server: ServerFacade; - conditionalHeaders: any; + config: ReportingConfig; } -let mockServer: any; +let mockConfig: ReportingConfig; +const getMockConfig = (mockConfigGet: jest.Mock) => { + return { + get: mockConfigGet, + kbnConfig: { get: mockConfigGet }, + }; +}; + beforeEach(() => { - mockServer = createMockServer(''); + const reportingConfig: Record = { + 'kibanaServer.hostname': 'localhost', + 'kibanaServer.port': 5601, + 'kibanaServer.protocol': 'http', + 'server.basePath': '/sbp', + }; + const mockConfigGet = jest.fn().mockImplementation((...keys: string[]) => { + return reportingConfig[keys.join('.') as string]; + }); + mockConfig = getMockConfig(mockConfigGet); }); +const getMockJob = (base: object) => base as JobDocPayloadPNG & JobDocPayloadPDF; + test(`fails if no URL is passed`, async () => { - const fn = () => - getFullUrls({ - job: {}, - server: mockServer, - } as FullUrlsOpts); + const fn = () => getFullUrls({ job: getMockJob({}), config: mockConfig } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"No valid URL fields found in Job Params! 
Expected \`job.relativeUrl: string\` or \`job.relativeUrls: string[]\`"` ); @@ -37,8 +49,8 @@ test(`fails if URLs are file-protocols for PNGs`, async () => { const relativeUrl = 'file://etc/passwd/#/something'; const fn = () => getFullUrls({ - job: { relativeUrl, forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl, forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"` @@ -51,8 +63,8 @@ test(`fails if URLs are absolute for PNGs`, async () => { 'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something'; const fn = () => getFullUrls({ - job: { relativeUrl, forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl, forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"` @@ -64,11 +76,11 @@ test(`fails if URLs are file-protocols for PDF`, async () => { const relativeUrl = 'file://etc/passwd/#/something'; const fn = () => getFullUrls({ - job: { + job: getMockJob({ relativeUrls: [relativeUrl], forceNow, - }, - server: mockServer, + }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"` @@ -81,11 +93,11 @@ test(`fails if URLs are absolute for PDF`, async () => { 'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something'; const fn = () => getFullUrls({ - job: { + job: getMockJob({ relativeUrls: [relativeUrl], forceNow, - }, - server: mockServer, + }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"` @@ -102,8 +114,8 @@ test(`fails if any URLs are absolute or file's for PDF`, async () => { const fn = () => getFullUrls({ - job: { relativeUrls, forceNow }, - server: mockServer, + job: getMockJob({ relativeUrls, forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something file://etc/passwd/#/something"` @@ -113,8 +125,8 @@ test(`fails if any URLs are absolute or file's for PDF`, async () => { test(`fails if URL does not route to a visualization`, async () => { const fn = () => getFullUrls({ - job: { relativeUrl: '/app/phoney' }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/phoney' }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"No valid hash in the URL! 
A hash is expected for the application to route to the intended visualization."` @@ -124,8 +136,8 @@ test(`fails if URL does not route to a visualization`, async () => { test(`adds forceNow to hash's query, if it exists`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: { relativeUrl: '/app/kibana#/something', forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/kibana#/something', forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(urls[0]).toEqual( @@ -137,8 +149,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: { relativeUrl: '/app/kibana#/something?_g=something', forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/kibana#/something?_g=something', forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(urls[0]).toEqual( @@ -148,8 +160,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => { test(`doesn't append forceNow query to url, if it doesn't exists`, async () => { const urls = await getFullUrls({ - job: { relativeUrl: '/app/kibana#/something' }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/kibana#/something' }), + config: mockConfig, } as FullUrlsOpts); expect(urls[0]).toEqual('http://localhost:5601/sbp/app/kibana#/something'); @@ -158,7 +170,7 @@ test(`doesn't append forceNow query to url, if it doesn't exists`, async () => { test(`adds forceNow to each of multiple urls`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: { + job: getMockJob({ relativeUrls: [ '/app/kibana#/something_aaa', '/app/kibana#/something_bbb', @@ -166,8 +178,8 @@ test(`adds forceNow to each of multiple urls`, async () => { '/app/kibana#/something_ddd', ], forceNow, - }, - server: mockServer, + }), + config: mockConfig, } as FullUrlsOpts); expect(urls).toEqual([ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts index ca64d8632dbfe..c4b6f31019fdf 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts @@ -12,7 +12,7 @@ import { } from 'url'; import { getAbsoluteUrlFactory } from '../../../common/get_absolute_url'; import { validateUrls } from '../../../common/validate_urls'; -import { ServerFacade } from '../../../types'; +import { ReportingConfig } from '../../../server/types'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; @@ -24,19 +24,23 @@ function isPdfJob(job: JobDocPayloadPNG | JobDocPayloadPDF): job is JobDocPayloa } export function getFullUrls({ - server, + config, job, }: { - server: ServerFacade; + config: ReportingConfig; job: JobDocPayloadPDF | JobDocPayloadPNG; }) { - const config = server.config(); - + const [basePath, protocol, hostname, port] = [ + config.kbnConfig.get('server', 'basePath'), + config.get('kibanaServer', 'protocol'), + config.get('kibanaServer', 'hostname'), + config.get('kibanaServer', 'port'), + ] as string[]; const getAbsoluteUrl = getAbsoluteUrlFactory({ - defaultBasePath: config.get('server.basePath'), - protocol: config.get('xpack.reporting.kibanaServer.protocol') || server.info.protocol, - hostname: config.get('xpack.reporting.kibanaServer.hostname') || 
config.get('server.host'), - port: config.get('xpack.reporting.kibanaServer.port') || config.get('server.port'), + defaultBasePath: basePath, + protocol, + hostname, + port, }); // PDF and PNG job params put in the url differently diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts index f446369fec78c..abf5784dacff9 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts @@ -19,6 +19,9 @@ test(`omits blacklisted headers`, async () => { 'content-type': '', host: '', 'transfer-encoding': '', + 'proxy-connection': 'bananas', + 'proxy-authorization': 'some-base64-encoded-thing', + trailer: 's are for trucks', }; const filteredHeaders = await omitBlacklistedHeaders({ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts index cbebd6bc21b0e..2fbfd868674f6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts @@ -4,7 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ import { omit } from 'lodash'; -import { KBN_SCREENSHOT_HEADER_BLACKLIST } from '../../../common/constants'; +import { + KBN_SCREENSHOT_HEADER_BLACKLIST, + KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN, +} from '../../../common/constants'; export const omitBlacklistedHeaders = ({ job, @@ -15,7 +18,12 @@ export const omitBlacklistedHeaders = ({ }) => { const filteredHeaders: Record = omit( decryptedHeaders, - KBN_SCREENSHOT_HEADER_BLACKLIST + (_value, header: string) => + header && + (KBN_SCREENSHOT_HEADER_BLACKLIST.includes(header) || + KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN.some(pattern => + header?.startsWith(pattern) + )) ); return filteredHeaders; }; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts index 0cb83352d4606..07fceb603e451 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts @@ -3,17 +3,18 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { ServerFacade } from '../../../types'; + +import { CaptureConfig } from '../../../server/types'; import { LayoutTypes } from '../constants'; import { Layout, LayoutParams } from './layout'; import { PreserveLayout } from './preserve_layout'; import { PrintLayout } from './print_layout'; -export function createLayout(server: ServerFacade, layoutParams?: LayoutParams): Layout { +export function createLayout(captureConfig: CaptureConfig, layoutParams?: LayoutParams): Layout { if (layoutParams && layoutParams.id === LayoutTypes.PRESERVE_LAYOUT) { return new PreserveLayout(layoutParams.dimensions); } // this is the default because some jobs won't have anything specified - return new PrintLayout(server); + return new PrintLayout(captureConfig); } diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts index 6007c2960057a..f6974379253fb 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts @@ -3,14 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + import path from 'path'; import { EvaluateFn, SerializableOrJSHandle } from 'puppeteer'; -import { LevelLogger } from '../../../server/lib'; import { HeadlessChromiumDriver } from '../../../server/browsers'; -import { ServerFacade } from '../../../types'; +import { LevelLogger } from '../../../server/lib'; +import { CaptureConfig } from '../../../server/types'; import { LayoutTypes } from '../constants'; import { getDefaultLayoutSelectors, Layout, LayoutSelectorDictionary, Size } from './layout'; -import { CaptureConfig } from './types'; export class PrintLayout extends Layout { public readonly selectors: LayoutSelectorDictionary = { @@ -20,9 +20,9 @@ export class PrintLayout extends Layout { public readonly groupCount = 2; private captureConfig: CaptureConfig; - constructor(server: ServerFacade) { + constructor(captureConfig: CaptureConfig) { super(LayoutTypes.PRINT); - this.captureConfig = server.config().get('xpack.reporting.capture'); + this.captureConfig = captureConfig; } public getCssOverridesPath() { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/types.d.ts deleted file mode 100644 index ccfa82ca0ae53..0000000000000 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/types.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { Size } from './layout'; - -export interface CaptureConfig { - zoom: number; - viewport: Size; -} diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts index 16eb433e8a75e..57d025890d3e2 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts @@ -7,17 +7,16 @@ import { i18n } from '@kbn/i18n'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; -import { ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_GETNUMBEROFITEMS, CONTEXT_READMETADATA } from './constants'; export const getNumberOfItems = async ( - server: ServerFacade, + captureConfig: CaptureConfig, browser: HeadlessBrowser, layout: LayoutInstance, logger: LevelLogger ): Promise => { - const config = server.config(); const { renderComplete: renderCompleteSelector, itemsCountAttribute } = layout.selectors; let itemsCount: number; @@ -33,7 +32,7 @@ export const getNumberOfItems = async ( // we have to use this hint to wait for all of them await browser.waitForSelector( `${renderCompleteSelector},[${itemsCountAttribute}]`, - { timeout: config.get('xpack.reporting.capture.timeouts.waitForElements') }, + { timeout: captureConfig.timeouts.waitForElements }, { context: CONTEXT_READMETADATA }, logger ); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts index 13d07bcdd6baf..75ac3dca4ffa0 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts @@ -19,12 +19,9 @@ import * as Rx from 'rxjs'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { loggingServiceMock } from '../../../../../../../../src/core/server/mocks'; import { LevelLogger } from '../../../../server/lib'; -import { - createMockBrowserDriverFactory, - createMockLayoutInstance, - createMockServer, -} from '../../../../test_helpers'; +import { createMockBrowserDriverFactory, createMockLayoutInstance } from '../../../../test_helpers'; import { ConditionalHeaders, HeadlessChromiumDriver } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; import { screenshotsObservableFactory } from './observable'; import { ElementsPositionAndAttribute } from './types'; @@ -34,8 +31,8 @@ import { ElementsPositionAndAttribute } from './types'; const mockLogger = jest.fn(loggingServiceMock.create); const logger = new LevelLogger(mockLogger()); -const __LEGACY = createMockServer({ settings: { 'xpack.reporting.capture': { loadDelay: 13 } } }); -const mockLayout = createMockLayoutInstance(__LEGACY); +const mockConfig = { timeouts: { openUrl: 13 } } as CaptureConfig; +const mockLayout = createMockLayoutInstance(mockConfig); /* * Tests @@ -48,7 +45,7 @@ describe('Screenshot Observable Pipeline', () => { }); it('pipelines a single url into screenshot and timeRange', async () => { - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const 
getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const result = await getScreenshots$({ logger, urls: ['/welcome/home/start/index.htm'], @@ -86,7 +83,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const result = await getScreenshots$({ logger, urls: ['/welcome/home/start/index2.htm', '/welcome/home/start/index.php3?page=./home.php'], @@ -136,7 +133,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const getScreenshot = async () => { return await getScreenshots$({ logger, @@ -197,7 +194,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const getScreenshot = async () => { return await getScreenshots$({ logger, diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts index 44c04c763f840..53a11c18abd79 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts @@ -6,24 +6,22 @@ import * as Rx from 'rxjs'; import { catchError, concatMap, first, mergeMap, take, takeUntil, toArray } from 'rxjs/operators'; -import { CaptureConfig, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; +import { HeadlessChromiumDriverFactory } from '../../../../types'; import { getElementPositionAndAttributes } from './get_element_position_data'; import { getNumberOfItems } from './get_number_of_items'; import { getScreenshots } from './get_screenshots'; import { getTimeRange } from './get_time_range'; +import { injectCustomCss } from './inject_css'; import { openUrl } from './open_url'; import { ScreenSetupData, ScreenshotObservableOpts, ScreenshotResults } from './types'; import { waitForRenderComplete } from './wait_for_render'; import { waitForVisualizations } from './wait_for_visualizations'; -import { injectCustomCss } from './inject_css'; export function screenshotsObservableFactory( - server: ServerFacade, + captureConfig: CaptureConfig, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const config = server.config(); - const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); - return function screenshotsObservable({ logger, urls, @@ -41,13 +39,13 @@ export function screenshotsObservableFactory( mergeMap(({ driver, exit$ }) => { const setup$: Rx.Observable = Rx.of(1).pipe( takeUntil(exit$), - mergeMap(() => openUrl(server, driver, url, conditionalHeaders, logger)), - mergeMap(() => getNumberOfItems(server, driver, layout, logger)), + mergeMap(() => openUrl(captureConfig, driver, url, conditionalHeaders, logger)), + mergeMap(() => getNumberOfItems(captureConfig, driver, layout, logger)), mergeMap(async itemsCount => { const viewport = layout.getViewport(itemsCount); await Promise.all([ driver.setViewport(viewport, logger), - 
waitForVisualizations(server, driver, itemsCount, layout, logger), + waitForVisualizations(captureConfig, driver, itemsCount, layout, logger), ]); }), mergeMap(async () => { @@ -60,7 +58,7 @@ export function screenshotsObservableFactory( await layout.positionElements(driver, logger); } - await waitForRenderComplete(driver, layout, captureConfig, logger); + await waitForRenderComplete(captureConfig, driver, layout, logger); }), mergeMap(async () => { return await Promise.all([ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts index fbae1f91a7a6a..a484dfb243563 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts @@ -5,27 +5,26 @@ */ import { i18n } from '@kbn/i18n'; -import { ConditionalHeaders, ServerFacade } from '../../../../types'; -import { LevelLogger } from '../../../../server/lib'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; +import { LevelLogger } from '../../../../server/lib'; +import { CaptureConfig } from '../../../../server/types'; +import { ConditionalHeaders } from '../../../../types'; import { PAGELOAD_SELECTOR } from '../../constants'; export const openUrl = async ( - server: ServerFacade, + captureConfig: CaptureConfig, browser: HeadlessBrowser, url: string, conditionalHeaders: ConditionalHeaders, logger: LevelLogger ): Promise => { - const config = server.config(); - try { await browser.open( url, { conditionalHeaders, waitForSelector: PAGELOAD_SELECTOR, - timeout: config.get('xpack.reporting.capture.timeouts.openUrl'), + timeout: captureConfig.timeouts.openUrl, }, logger ); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts index ab81a952f345c..76613c2d631d6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts @@ -4,8 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElementPosition, ConditionalHeaders } from '../../../../types'; import { LevelLogger } from '../../../../server/lib'; +import { ConditionalHeaders, ElementPosition } from '../../../../types'; import { LayoutInstance } from '../../layouts/layout'; export interface ScreenshotObservableOpts { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts index 2f6dc2829dfd8..069896c8d9e90 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts @@ -5,16 +5,16 @@ */ import { i18n } from '@kbn/i18n'; -import { CaptureConfig } from '../../../../types'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; +import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_WAITFORRENDER } from './constants'; export const waitForRenderComplete = async ( + captureConfig: CaptureConfig, browser: HeadlessBrowser, layout: LayoutInstance, - captureConfig: CaptureConfig, logger: LevelLogger ) => { logger.debug( diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts index 93ad40026dff8..7960e1552e559 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts @@ -5,9 +5,9 @@ */ import { i18n } from '@kbn/i18n'; -import { ServerFacade } from '../../../../types'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; +import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_WAITFORELEMENTSTOBEINDOM } from './constants'; @@ -23,13 +23,12 @@ const getCompletedItemsCount = ({ renderCompleteSelector }: SelectorArgs) => { * 3. 
Wait for the render complete event to be fired once for each item */ export const waitForVisualizations = async ( - server: ServerFacade, + captureConfig: CaptureConfig, browser: HeadlessBrowser, itemsCount: number, layout: LayoutInstance, logger: LevelLogger ): Promise => { - const config = server.config(); const { renderComplete: renderCompleteSelector } = layout.selectors; logger.debug( @@ -45,7 +44,7 @@ export const waitForVisualizations = async ( fn: getCompletedItemsCount, args: [{ renderCompleteSelector }], toEqual: itemsCount, - timeout: config.get('xpack.reporting.capture.timeouts.renderComplete'), + timeout: captureConfig.timeouts.renderComplete, }, { context: CONTEXT_WAITFORELEMENTSTOBEINDOM }, logger diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts index 7ea67277015ab..0e704a041452a 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts @@ -11,14 +11,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, - ServerFacade, } from '../../../types'; import { JobParamsDiscoverCsv } from '../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); return async function createJob( jobParams: JobParamsDiscoverCsv, diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index f12916b734dbf..93dbe598b367c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -11,8 +11,8 @@ import { CancellationToken } from '../../../common/cancellation_token'; import { fieldFormats } from '../../../../../../../src/plugins/data/server'; import { createMockReportingCore } from '../../../test_helpers'; import { LevelLogger } from '../../../server/lib/level_logger'; -import { executeJobFactory } from './execute_job'; import { setFieldFormats } from '../../../server/services'; +import { executeJobFactory } from './execute_job'; const delay = ms => new Promise(resolve => setTimeout(() => resolve(), ms)); @@ -36,11 +36,12 @@ describe('CSV Execute Job', function() { let defaultElasticsearchResponse; let encryptedHeaders; - let cancellationToken; - let mockReportingPlugin; - let mockServer; let clusterStub; + let configGetStub; + let mockReportingConfig; + let mockReportingPlugin; let callAsCurrentUserStub; + let cancellationToken; const mockElasticsearch = { dataClient: { @@ -57,8 +58,16 @@ describe('CSV Execute Job', function() { }); beforeEach(async function() { - mockReportingPlugin = await createMockReportingCore(); - mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; + configGetStub = sinon.stub(); + configGetStub.withArgs('encryptionKey').returns(encryptionKey); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(1024 * 1000); // 1mB + configGetStub.withArgs('csv', 'scroll').returns({}); + mockReportingConfig = { get: configGetStub, kbnConfig: { get: configGetStub } }; + + mockReportingPlugin = await createMockReportingCore(mockReportingConfig); + 
mockReportingPlugin.getUiSettingsServiceFactory = () => Promise.resolve(mockUiSettingsClient); + mockReportingPlugin.getElasticsearchService = () => Promise.resolve(mockElasticsearch); + cancellationToken = new CancellationToken(); defaultElasticsearchResponse = { @@ -75,7 +84,6 @@ describe('CSV Execute Job', function() { .stub(clusterStub, 'callAsCurrentUser') .resolves(defaultElasticsearchResponse); - const configGetStub = sinon.stub(); mockUiSettingsClient.get.withArgs('csv:separator').returns(','); mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); @@ -93,36 +101,11 @@ describe('CSV Execute Job', function() { return fieldFormatsRegistry; }, }); - - mockServer = { - config: function() { - return { - get: configGetStub, - }; - }, - }; - mockServer - .config() - .get.withArgs('xpack.reporting.encryptionKey') - .returns(encryptionKey); - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(1024 * 1000); // 1mB - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({}); }); describe('basic Elasticsearch call behavior', function() { it('should decrypt encrypted headers and pass to callAsCurrentUser', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -138,12 +121,7 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const job = { headers: encryptedHeaders, fields: [], @@ -170,12 +148,7 @@ describe('CSV Execute Job', function() { _scroll_id: scrollId, }); callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -189,12 +162,7 @@ describe('CSV Execute Job', function() { }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -224,12 +192,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -264,12 +227,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: 
encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -297,12 +255,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -321,10 +274,7 @@ describe('CSV Execute Job', function() { describe('Cells with formula values', () => { it('returns `csv_contains_formulas` when cells contain formulas', async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(true); + configGetStub.withArgs('csv', 'checkForFormulas').returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], @@ -332,12 +282,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -354,10 +299,7 @@ describe('CSV Execute Job', function() { }); it('returns warnings when headings contain formulas', async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(true); + configGetStub.withArgs('csv', 'checkForFormulas').returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }], @@ -365,12 +307,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -387,10 +324,7 @@ describe('CSV Execute Job', function() { }); it('returns no warnings when cells have no formulas', async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(true); + configGetStub.withArgs('csv', 'checkForFormulas').returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -398,12 +332,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -420,10 +349,7 @@ describe('CSV Execute Job', function() { }); it('returns no warnings when configured not to', async () => { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(false); + configGetStub.withArgs('csv', 'checkForFormulas').returns(false); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], @@ -431,12 +357,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { 
headers: encryptedHeaders, fields: ['one', 'two'], @@ -456,12 +377,7 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { callAsCurrentUserStub.rejects(new Error()); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -480,12 +396,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); callAsCurrentUserStub.onSecondCall().rejects(new Error()); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -506,12 +417,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -532,12 +438,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -565,12 +466,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -598,12 +494,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -639,12 +530,7 @@ describe('CSV Execute Job', function() { }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -659,12 +545,7 @@ describe('CSV Execute Job', function() { }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -678,12 +559,7 @@ describe('CSV Execute Job', function() { }); it('should call clearScroll if it got a scrollId', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = 
await executeJobFactory(mockReportingPlugin, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -701,12 +577,7 @@ describe('CSV Execute Job', function() { describe('csv content', function() { it('should write column headers to output, even if there are no results', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -718,12 +589,7 @@ describe('CSV Execute Job', function() { it('should use custom uiSettings csv:separator for header', async function() { mockUiSettingsClient.get.withArgs('csv:separator').returns(';'); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -735,12 +601,7 @@ describe('CSV Execute Job', function() { it('should escape column headers if uiSettings csv:quoteValues is true', async function() { mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -752,12 +613,7 @@ describe('CSV Execute Job', function() { it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -768,12 +624,7 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], @@ -793,12 +644,7 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -819,12 +665,7 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -852,12 
+693,7 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -897,17 +733,9 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(1); - - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(1); + + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -935,17 +763,9 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(9); - - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); + + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -973,10 +793,7 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(9); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -985,12 +802,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1020,10 +832,7 @@ describe('CSV Execute Job', function() { beforeEach(async function() { mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(18); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(18); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -1032,12 +841,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1065,10 +869,7 @@ describe('CSV Execute Job', function() { describe('scroll settings', function() { it('passes scroll duration to initial search call', async function() { const scrollDuration = 'test'; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({ duration: scrollDuration }); + configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration }); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -1077,12 +878,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - 
mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1099,10 +895,7 @@ describe('CSV Execute Job', function() { it('passes scroll size to initial search call', async function() { const scrollSize = 100; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({ size: scrollSize }); + configGetStub.withArgs('csv', 'scroll').returns({ size: scrollSize }); callAsCurrentUserStub.onFirstCall().resolves({ hits: { @@ -1111,12 +904,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1133,10 +921,7 @@ describe('CSV Execute Job', function() { it('passes scroll duration to subsequent scroll call', async function() { const scrollDuration = 'test'; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({ duration: scrollDuration }); + configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration }); callAsCurrentUserStub.onFirstCall().resolves({ hits: { @@ -1145,12 +930,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index 1579985891053..d78d8a8a8010d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -6,38 +6,30 @@ import { i18n } from '@kbn/i18n'; import Hapi from 'hapi'; -import { - ElasticsearchServiceSetup, - IUiSettingsClient, - KibanaRequest, -} from '../../../../../../../src/core/server'; +import { IUiSettingsClient, KibanaRequest } from '../../../../../../../src/core/server'; import { CSV_JOB_TYPE } from '../../../common/constants'; -import { ReportingCore } from '../../../server'; +import { ReportingCore } from '../../../server/core'; import { cryptoFactory } from '../../../server/lib'; import { getFieldFormats } from '../../../server/services'; -import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger } from '../../../types'; import { JobDocPayloadDiscoverCsv } from '../types'; import { fieldFormatMapFactory } from './lib/field_format_map'; import { createGenerateCsv } from './lib/generate_csv'; export const executeJobFactory: ExecuteJobFactory> = async function executeJobFactoryFn( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -) { - const crypto = cryptoFactory(server); - const config = server.config(); +>> = async function executeJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']); 
- const serverBasePath = config.get('server.basePath'); + const serverBasePath = config.kbnConfig.get('server', 'basePath'); return async function executeJob( jobId: string, job: JobDocPayloadDiscoverCsv, cancellationToken: any ) { + const elasticsearch = await reporting.getElasticsearchService(); const jobLogger = logger.clone([jobId]); const { @@ -131,9 +123,9 @@ export const executeJobFactory: ExecuteJobFactory) { const response = await request; diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts index 842330fa7c93f..529c195486bc6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts @@ -5,7 +5,8 @@ */ import { CancellationToken } from '../../common/cancellation_token'; -import { JobDocPayload, JobParamPostPayload, ConditionalHeaders, RequestFacade } from '../../types'; +import { ScrollConfig } from '../../server/types'; +import { JobDocPayload, JobParamPostPayload } from '../../types'; interface DocValueField { field: string; @@ -106,7 +107,7 @@ export interface GenerateCsvParams { quoteValues: boolean; timezone: string | null; maxSizeBytes: number; - scroll: { duration: string; size: number }; + scroll: ScrollConfig; checkForFormulas?: boolean; }; } diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index 17072d311b35f..8e0376a190267 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -5,18 +5,11 @@ */ import { notFound, notImplemented } from 'boom'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; import { cryptoFactory } from '../../../../server/lib'; -import { - CreateJobFactory, - ImmediateCreateJobFn, - Logger, - RequestFacade, - ServerFacade, -} from '../../../../types'; +import { CreateJobFactory, ImmediateCreateJobFn, Logger, RequestFacade } from '../../../../types'; import { JobDocPayloadPanelCsv, JobParamsPanelCsv, @@ -37,13 +30,9 @@ interface VisData { export const createJobFactory: CreateJobFactory> = function createJobFactoryFn( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index 6bb3e73fcfe84..afa917f17651c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -5,7 +5,6 @@ */ import { i18n } from '@kbn/i18n'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { 
CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; @@ -15,7 +14,6 @@ import { JobDocOutput, Logger, RequestFacade, - ServerFacade, } from '../../../types'; import { CsvResultFromSearch } from '../../csv/types'; import { FakeRequest, JobDocPayloadPanelCsv, JobParamsPanelCsv, SearchPanel } from '../types'; @@ -23,15 +21,11 @@ import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = async function executeJobFactoryFn( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -) { - const crypto = cryptoFactory(server); +>> = async function executeJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); - const generateCsv = createGenerateCsv(reporting, server, elasticsearch, parentLogger); + const generateCsv = createGenerateCsv(reporting, parentLogger); return async function executeJob( jobId: string | null, @@ -57,11 +51,11 @@ export const executeJobFactory: ExecuteJobFactory; const serializedEncryptedHeaders = job.headers; try { decryptedHeaders = await crypto.decrypt(serializedEncryptedHeaders); @@ -79,10 +73,7 @@ export const executeJobFactory: ExecuteJobFactory { export async function generateCsvSearch( req: RequestFacade, reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: Logger, searchPanel: SearchPanel, jobParams: JobParamsDiscoverCsv @@ -159,11 +153,12 @@ export async function generateCsvSearch( }, }; + const config = reporting.getConfig(); + const elasticsearch = await reporting.getElasticsearchService(); const { callAsCurrentUser } = elasticsearch.dataClient.asScoped( KibanaRequest.from(req.getRawRequest()) ); const callCluster = (...params: [string, object]) => callAsCurrentUser(...params); - const config = server.config(); const uiSettings = await getUiSettings(uiConfig); const generateCsvParams: GenerateCsvParams = { @@ -176,8 +171,8 @@ export async function generateCsvSearch( cancellationToken: new CancellationToken(), settings: { ...uiSettings, - maxSizeBytes: config.get('xpack.reporting.csv.maxSizeBytes'), - scroll: config.get('xpack.reporting.csv.scroll'), + maxSizeBytes: config.get('csv', 'maxSizeBytes'), + scroll: config.get('csv', 'scroll'), timezone, }, }; diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts index 6a7d5f336e238..ab14d2dd8a660 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts @@ -4,11 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { JobParamPostPayload, JobDocPayload, ServerFacade } from '../../types'; +import { JobDocPayload, JobParamPostPayload } from '../../types'; export interface FakeRequest { - headers: any; - server: ServerFacade; + headers: Record; } export interface JobParamsPostPayloadPanelCsv extends JobParamPostPayload { diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts index a6911e1f14704..1f834bde88a2d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts @@ -12,14 +12,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, - ServerFacade, } from '../../../../types'; import { JobParamsPNG } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); return async function createJob( { objectType, title, relativeUrl, browserTimezone, layout }: JobParamsPNG, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index e2e6ba1b89096..cb63e7dad2fdf 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -5,7 +5,6 @@ */ import * as Rx from 'rxjs'; -import { memoize } from 'lodash'; import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; @@ -14,63 +13,65 @@ import { LevelLogger } from '../../../../server/lib'; jest.mock('../lib/generate_png', () => ({ generatePngObservableFactory: jest.fn() })); +let mockReporting; + const cancellationToken = { on: jest.fn(), }; -let config; -let mockServer; -let mockReporting; +const mockLoggerFactory = { + get: jest.fn().mockImplementation(() => ({ + error: jest.fn(), + debug: jest.fn(), + warn: jest.fn(), + })), +}; +const getMockLogger = () => new LevelLogger(mockLoggerFactory); -beforeEach(async () => { - mockReporting = await createMockReportingCore(); +const mockEncryptionKey = 'abcabcsecuresecret'; +const encryptHeaders = async headers => { + const crypto = cryptoFactory(mockEncryptionKey); + return await crypto.encrypt(headers); +}; - config = { - 'xpack.reporting.encryptionKey': 'testencryptionkey', +beforeEach(async () => { + const kbnConfig = { 'server.basePath': '/sbp', - 'server.host': 'localhost', - 'server.port': 5601, }; - mockServer = { - config: memoize(() => ({ get: jest.fn() })), - info: { - protocol: 'http', + const reportingConfig = { + encryptionKey: mockEncryptionKey, + 'kibanaServer.hostname': 'localhost', + 'kibanaServer.port': 5601, + 'kibanaServer.protocol': 'http', + }; + const mockReportingConfig = { + get: (...keys) => reportingConfig[keys.join('.')], + kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, + }; + + mockReporting = await createMockReportingCore(mockReportingConfig); + + const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), }, }; - 
mockServer.config().get.mockImplementation(key => { - return config[key]; - }); + const mockGetElasticsearch = jest.fn(); + mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); + mockReporting.getElasticsearchService = mockGetElasticsearch; generatePngObservableFactory.mockReturnValue(jest.fn()); }); afterEach(() => generatePngObservableFactory.mockReset()); -const mockElasticsearch = { - dataClient: { - asScoped: () => ({ callAsCurrentUser: jest.fn() }), - }, -}; - -const getMockLogger = () => new LevelLogger(); - -const encryptHeaders = async headers => { - const crypto = cryptoFactory(mockServer); - return await crypto.encrypt(headers); -}; - test(`passes browserTimezone to generatePng`, async () => { const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger() - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -88,15 +89,7 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger(), - { - browserDriverFactory: {}, - } - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -116,15 +109,7 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) })); - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger(), - { - browserDriverFactory: {}, - } - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index 8670f0027af89..113da92d1862f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -4,18 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PNG_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; -import { - ESQueueWorkerExecuteFn, - ExecuteJobFactory, - JobDocOutput, - Logger, - ServerFacade, -} from '../../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, JobDocOutput, Logger } from '../../../../types'; import { decryptJobHeaders, getConditionalHeaders, @@ -29,22 +22,23 @@ type QueuedPngExecutorFactory = ExecuteJobFactory = Rx.of(1).pipe( - mergeMap(() => decryptJobHeaders({ server, job, logger })), + mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), - map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), + map(filteredHeaders => getConditionalHeaders({ config, job, filteredHeaders })), mergeMap(conditionalHeaders => { - const urls = getFullUrls({ server, job }); + const urls = getFullUrls({ config, job }); const hashUrl = urls[0]; return generatePngObservable( jobLogger, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts index 88e91982adc63..a15541d99f6fb 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts @@ -7,17 +7,18 @@ import * as Rx from 'rxjs'; import { map } from 'rxjs/operators'; import { LevelLogger } from '../../../../server/lib'; -import { ConditionalHeaders, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; +import { ConditionalHeaders, HeadlessChromiumDriverFactory } from '../../../../types'; import { LayoutParams } from '../../../common/layouts/layout'; import { PreserveLayout } from '../../../common/layouts/preserve_layout'; import { screenshotsObservableFactory } from '../../../common/lib/screenshots'; import { ScreenshotResults } from '../../../common/lib/screenshots/types'; export function generatePngObservableFactory( - server: ServerFacade, + captureConfig: CaptureConfig, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory); + const screenshotsObservable = screenshotsObservableFactory(captureConfig, browserDriverFactory); return function generatePngObservable( logger: LevelLogger, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts index 656c99991e1f6..25d2d64b1029d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts @@ -12,14 +12,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, - ServerFacade, } from '../../../../types'; import { JobParamsPDF } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore) { + const config = reporting.getConfig(); + const crypto = 
cryptoFactory(config.get('encryptionKey')); return async function createJobFn( { title, relativeUrls, browserTimezone, layout, objectType }: JobParamsPDF, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index 484842ba18f2a..c6f07f8ad2d34 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -5,7 +5,6 @@ */ import * as Rx from 'rxjs'; -import { memoize } from 'lodash'; import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; @@ -14,57 +13,60 @@ import { LevelLogger } from '../../../../server/lib'; jest.mock('../lib/generate_pdf', () => ({ generatePdfObservableFactory: jest.fn() })); +let mockReporting; + const cancellationToken = { on: jest.fn(), }; -let config; -let mockServer; -let mockReporting; +const mockLoggerFactory = { + get: jest.fn().mockImplementation(() => ({ + error: jest.fn(), + debug: jest.fn(), + warn: jest.fn(), + })), +}; +const getMockLogger = () => new LevelLogger(mockLoggerFactory); -beforeEach(async () => { - mockReporting = await createMockReportingCore(); +const mockEncryptionKey = 'testencryptionkey'; +const encryptHeaders = async headers => { + const crypto = cryptoFactory(mockEncryptionKey); + return await crypto.encrypt(headers); +}; - config = { - 'xpack.reporting.encryptionKey': 'testencryptionkey', +beforeEach(async () => { + const kbnConfig = { 'server.basePath': '/sbp', - 'server.host': 'localhost', - 'server.port': 5601, }; - mockServer = { - config: memoize(() => ({ get: jest.fn() })), - info: { - protocol: 'http', + const reportingConfig = { + encryptionKey: mockEncryptionKey, + 'kibanaServer.hostname': 'localhost', + 'kibanaServer.port': 5601, + 'kibanaServer.protocol': 'http', + }; + const mockReportingConfig = { + get: (...keys) => reportingConfig[keys.join('.')], + kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, + }; + + mockReporting = await createMockReportingCore(mockReportingConfig); + + const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), }, }; - mockServer.config().get.mockImplementation(key => { - return config[key]; - }); + const mockGetElasticsearch = jest.fn(); + mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); + mockReporting.getElasticsearchService = mockGetElasticsearch; generatePdfObservableFactory.mockReturnValue(jest.fn()); }); afterEach(() => generatePdfObservableFactory.mockReset()); -const getMockLogger = () => new LevelLogger(); -const mockElasticsearch = { - dataClient: { - asScoped: () => ({ callAsCurrentUser: jest.fn() }), - }, -}; - -const encryptHeaders = async headers => { - const crypto = cryptoFactory(mockServer); - return await crypto.encrypt(headers); -}; - test(`returns content_type of application/pdf`, async () => { - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger() - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = generatePdfObservableFactory(); @@ -84,12 +86,7 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => { 
const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) })); - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger() - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts index 535c2dcd439a7..dbdccb6160a6e 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts @@ -4,18 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ElasticsearchServiceSetup } from 'kibana/server'; import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PDF_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; -import { - ESQueueWorkerExecuteFn, - ExecuteJobFactory, - JobDocOutput, - Logger, - ServerFacade, -} from '../../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, JobDocOutput, Logger } from '../../../../types'; import { decryptJobHeaders, getConditionalHeaders, @@ -30,23 +23,26 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory = Rx.of(1).pipe( - mergeMap(() => decryptJobHeaders({ server, job, logger })), + mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), - map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), - mergeMap(conditionalHeaders => getCustomLogo({ reporting, server, job, conditionalHeaders })), + map(filteredHeaders => getConditionalHeaders({ config, job, filteredHeaders })), + mergeMap(conditionalHeaders => getCustomLogo({ reporting, config, job, conditionalHeaders })), mergeMap(({ logo, conditionalHeaders }) => { - const urls = getFullUrls({ server, job }); + const urls = getFullUrls({ config, job }); const { browserTimezone, layout, title } = job; return generatePdfObservable( diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts index d78effaa1fc2f..a62b7ec7013a5 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts @@ -8,7 +8,8 @@ import { groupBy } from 'lodash'; import * as Rx from 'rxjs'; import { mergeMap } from 'rxjs/operators'; import { LevelLogger } from '../../../../server/lib'; -import { ConditionalHeaders, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; +import { ConditionalHeaders, HeadlessChromiumDriverFactory } from '../../../../types'; import { createLayout } from '../../../common/layouts'; import { LayoutInstance, LayoutParams } from '../../../common/layouts/layout'; import { screenshotsObservableFactory } from '../../../common/lib/screenshots'; @@ -27,10 +28,10 @@ const getTimeRange = (urlScreenshots: ScreenshotResults[]) => { }; export function 
generatePdfObservableFactory( - server: ServerFacade, + captureConfig: CaptureConfig, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory); + const screenshotsObservable = screenshotsObservableFactory(captureConfig, browserDriverFactory); return function generatePdfObservable( logger: LevelLogger, @@ -41,7 +42,7 @@ export function generatePdfObservableFactory( layoutParams: LayoutParams, logo?: string ): Rx.Observable<{ buffer: Buffer; warnings: string[] }> { - const layout = createLayout(server, layoutParams) as LayoutInstance; + const layout = createLayout(captureConfig, layoutParams) as LayoutInstance; const screenshots$ = screenshotsObservable({ logger, urls, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts index 0a9dcfe986ca6..e8dd3c5207d92 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts @@ -4,8 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ +import { JobDocPayload } from '../../types'; import { LayoutInstance, LayoutParams } from '../common/layouts/layout'; -import { JobDocPayload, ServerFacade, RequestFacade } from '../../types'; // Job params: structure of incoming user request data, after being parsed from RISON export interface JobParamsPDF { diff --git a/x-pack/legacy/plugins/reporting/index.ts b/x-pack/legacy/plugins/reporting/index.ts index 89e98302cddc9..a5d27d0545da1 100644 --- a/x-pack/legacy/plugins/reporting/index.ts +++ b/x-pack/legacy/plugins/reporting/index.ts @@ -12,9 +12,7 @@ import { config as reportingConfig } from './config'; import { legacyInit } from './server/legacy'; import { ReportingPluginSpecOptions } from './types'; -const kbToBase64Length = (kb: number) => { - return Math.floor((kb * 1024 * 8) / 6); -}; +const kbToBase64Length = (kb: number) => Math.floor((kb * 1024 * 8) / 6); export const reporting = (kibana: any) => { return new kibana.Plugin({ diff --git a/x-pack/legacy/plugins/reporting/log_configuration.ts b/x-pack/legacy/plugins/reporting/log_configuration.ts index b07475df6304f..7aaed2038bd52 100644 --- a/x-pack/legacy/plugins/reporting/log_configuration.ts +++ b/x-pack/legacy/plugins/reporting/log_configuration.ts @@ -6,22 +6,23 @@ import getosSync, { LinuxOs } from 'getos'; import { promisify } from 'util'; -import { ServerFacade, Logger } from './types'; +import { BROWSER_TYPE } from './common/constants'; +import { CaptureConfig } from './server/types'; +import { Logger } from './types'; const getos = promisify(getosSync); -export async function logConfiguration(server: ServerFacade, logger: Logger) { - const config = server.config(); +export async function logConfiguration(captureConfig: CaptureConfig, logger: Logger) { + const { + browser: { + type: browserType, + chromium: { disableSandbox }, + }, + } = captureConfig; - const browserType = config.get('xpack.reporting.capture.browser.type'); logger.debug(`Browser type: ${browserType}`); - - if (browserType === 'chromium') { - logger.debug( - `Chromium sandbox disabled: ${config.get( - 'xpack.reporting.capture.browser.chromium.disableSandbox' - )}` - ); + if (browserType === BROWSER_TYPE) { + logger.debug(`Chromium sandbox disabled: ${disableSandbox}`); } const os = await getos(); diff --git 
a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts index dc79a6b9db2c1..a2f7a1f3ad0da 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts @@ -4,11 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import { BrowserConfig } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; + +type ViewportConfig = CaptureConfig['viewport']; +type BrowserConfig = CaptureConfig['browser']['chromium']; interface LaunchArgs { userDataDir: BrowserConfig['userDataDir']; - viewport: BrowserConfig['viewport']; + viewport: ViewportConfig; disableSandbox: BrowserConfig['disableSandbox']; proxy: BrowserConfig['proxy']; } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts index f90f2c7aee395..cb228150efbcd 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts @@ -19,7 +19,8 @@ import { import * as Rx from 'rxjs'; import { InnerSubscriber } from 'rxjs/internal/InnerSubscriber'; import { ignoreElements, map, mergeMap, tap } from 'rxjs/operators'; -import { BrowserConfig, CaptureConfig } from '../../../../types'; +import { BROWSER_TYPE } from '../../../../common/constants'; +import { CaptureConfig } from '../../../../server/types'; import { LevelLogger as Logger } from '../../../lib/level_logger'; import { safeChildProcess } from '../../safe_child_process'; import { HeadlessChromiumDriver } from '../driver'; @@ -28,7 +29,8 @@ import { puppeteerLaunch } from '../puppeteer'; import { args } from './args'; type binaryPath = string; -type ViewportConfig = BrowserConfig['viewport']; +type BrowserConfig = CaptureConfig['browser']['chromium']; +type ViewportConfig = CaptureConfig['viewport']; export class HeadlessChromiumDriverFactory { private binaryPath: binaryPath; @@ -37,15 +39,10 @@ export class HeadlessChromiumDriverFactory { private userDataDir: string; private getChromiumArgs: (viewport: ViewportConfig) => string[]; - constructor( - binaryPath: binaryPath, - logger: Logger, - browserConfig: BrowserConfig, - captureConfig: CaptureConfig - ) { + constructor(binaryPath: binaryPath, logger: Logger, captureConfig: CaptureConfig) { this.binaryPath = binaryPath; - this.browserConfig = browserConfig; this.captureConfig = captureConfig; + this.browserConfig = captureConfig.browser.chromium; this.userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-')); this.getChromiumArgs = (viewport: ViewportConfig) => @@ -57,7 +54,7 @@ export class HeadlessChromiumDriverFactory { }); } - type = 'chromium'; + type = BROWSER_TYPE; test(logger: Logger) { const chromiumArgs = args({ @@ -153,7 +150,7 @@ export class HeadlessChromiumDriverFactory { // HeadlessChromiumDriver: object to "drive" a browser page const driver = new HeadlessChromiumDriver(page, { - inspect: this.browserConfig.inspect, + inspect: !!this.browserConfig.inspect, networkPolicy: this.captureConfig.networkPolicy, }); diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts index d32338ae3e311..5f89662c94da2 100644 --- 
a/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { BrowserConfig, CaptureConfig } from '../../../types'; +import { CaptureConfig } from '../../../server/types'; import { LevelLogger } from '../../lib'; import { HeadlessChromiumDriverFactory } from './driver_factory'; @@ -13,8 +13,7 @@ export { paths } from './paths'; export async function createDriverFactory( binaryPath: string, logger: LevelLogger, - browserConfig: BrowserConfig, captureConfig: CaptureConfig ): Promise { - return new HeadlessChromiumDriverFactory(binaryPath, logger, browserConfig, captureConfig); + return new HeadlessChromiumDriverFactory(binaryPath, logger, captureConfig); } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts index 49c6222c9f276..af3b86919dc50 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts @@ -4,24 +4,22 @@ * you may not use this file except in compliance with the Elastic License. */ +import { Logger } from '../../types'; +import { ReportingConfig } from '../types'; +import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; import { ensureBrowserDownloaded } from './download'; -import { installBrowser } from './install'; -import { ServerFacade, CaptureConfig, Logger } from '../../types'; -import { BROWSER_TYPE } from '../../common/constants'; import { chromium } from './index'; -import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; +import { installBrowser } from './install'; export async function createBrowserDriverFactory( - server: ServerFacade, + config: ReportingConfig, logger: Logger ): Promise { - const config = server.config(); - - const dataDir: string = config.get('path.data'); - const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); - const browserType = captureConfig.browser.type; + const captureConfig = config.get('capture'); + const browserConfig = captureConfig.browser.chromium; const browserAutoDownload = captureConfig.browser.autoDownload; - const browserConfig = captureConfig.browser[BROWSER_TYPE]; + const browserType = captureConfig.browser.type; + const dataDir = config.kbnConfig.get('path', 'data'); if (browserConfig.disableSandbox) { logger.warning(`Enabling the Chromium sandbox provides an additional layer of protection.`); @@ -32,7 +30,7 @@ export async function createBrowserDriverFactory( try { const { binaryPath } = await installBrowser(logger, chromium, dataDir); - return chromium.createDriverFactory(binaryPath, logger, browserConfig, captureConfig); + return chromium.createDriverFactory(binaryPath, logger, captureConfig); } catch (error) { if (error.cause && ['EACCES', 'EEXIST'].includes(error.cause.code)) { logger.error( diff --git a/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts b/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts index 73186966e3d2f..3697c4b86ce3c 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts @@ -4,16 +4,15 @@ * you may not use this file except in compliance with the 
Elastic License. */ -import { resolve as resolvePath } from 'path'; import { existsSync } from 'fs'; - +import { resolve as resolvePath } from 'path'; +import { BROWSER_TYPE } from '../../../common/constants'; import { chromium } from '../index'; -import { BrowserDownload, BrowserType } from '../types'; - +import { BrowserDownload } from '../types'; import { md5 } from './checksum'; -import { asyncMap } from './util'; -import { download } from './download'; import { clean } from './clean'; +import { download } from './download'; +import { asyncMap } from './util'; /** * Check for the downloaded archive of each requested browser type and @@ -21,7 +20,7 @@ import { clean } from './clean'; * @param {String} browserType * @return {Promise} */ -export async function ensureBrowserDownloaded(browserType: BrowserType) { +export async function ensureBrowserDownloaded(browserType = BROWSER_TYPE) { await ensureDownloaded([chromium]); } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts b/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts index b36345c08bfee..9714c5965a5db 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts @@ -6,12 +6,7 @@ import * as _ from 'lodash'; import { parse } from 'url'; - -interface FirewallRule { - allow: boolean; - host?: string; - protocol?: string; -} +import { NetworkPolicyRule } from '../../types'; const isHostMatch = (actualHost: string, ruleHost: string) => { const hostParts = actualHost.split('.').reverse(); @@ -20,7 +15,7 @@ const isHostMatch = (actualHost: string, ruleHost: string) => { return _.every(ruleParts, (part, idx) => part === hostParts[idx]); }; -export const allowRequest = (url: string, rules: FirewallRule[]) => { +export const allowRequest = (url: string, rules: NetworkPolicyRule[]) => { const parsed = parse(url); if (!rules.length) { diff --git a/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts b/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts index 0c480fc82752b..f096073ec2f5f 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts @@ -4,8 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ -export type BrowserType = 'chromium'; - export interface BrowserDownload { paths: { archivesPath: string; diff --git a/x-pack/legacy/plugins/reporting/server/config/index.ts b/x-pack/legacy/plugins/reporting/server/config/index.ts new file mode 100644 index 0000000000000..623d3c2015f3b --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/config/index.ts @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { Legacy } from 'kibana'; +import { CoreSetup } from 'src/core/server'; +import { i18n } from '@kbn/i18n'; +import crypto from 'crypto'; +import { get } from 'lodash'; +import { NetworkPolicy } from '../../types'; + +// make config.get() aware of the value type it returns +interface Config<BaseType> { + get<Key1 extends keyof BaseType>(key1: Key1): BaseType[Key1]; + get<Key1 extends keyof BaseType, Key2 extends keyof BaseType[Key1]>( + key1: Key1, + key2: Key2 + ): BaseType[Key1][Key2]; + get< + Key1 extends keyof BaseType, + Key2 extends keyof BaseType[Key1], + Key3 extends keyof BaseType[Key1][Key2] + >( + key1: Key1, + key2: Key2, + key3: Key3 + ): BaseType[Key1][Key2][Key3]; + get< + Key1 extends keyof BaseType, + Key2 extends keyof BaseType[Key1], + Key3 extends keyof BaseType[Key1][Key2], + Key4 extends keyof BaseType[Key1][Key2][Key3] + >( + key1: Key1, + key2: Key2, + key3: Key3, + key4: Key4 + ): BaseType[Key1][Key2][Key3][Key4]; +} + +interface KbnServerConfigType { + path: { data: string }; + server: { + basePath: string; + host: string; + name: string; + port: number; + protocol: string; + uuid: string; + }; +} + +export interface ReportingConfig extends Config<ReportingConfigType> { + kbnConfig: Config<KbnServerConfigType>; +} + +type BrowserType = 'chromium'; + +interface BrowserConfig { + inspect: boolean; + userDataDir: string; + viewport: { width: number; height: number }; + disableSandbox: boolean; + proxy: { + enabled: boolean; + server?: string; + bypass?: string[]; + }; +} + +interface CaptureConfig { + browser: { + type: BrowserType; + autoDownload: boolean; + chromium: BrowserConfig; + }; + maxAttempts: number; + networkPolicy: NetworkPolicy; + loadDelay: number; + timeouts: { + openUrl: number; + waitForElements: number; + renderComplete: number; + }; + viewport: any; + zoom: any; +} + +interface QueueConfig { + indexInterval: string; + pollEnabled: boolean; + pollInterval: number; + pollIntervalErrorMultiplier: number; + timeout: number; +} + +interface ScrollConfig { + duration: string; + size: number; +} + +export interface ReportingConfigType { + capture: CaptureConfig; + csv: { + scroll: ScrollConfig; + enablePanelActionDownload: boolean; + checkForFormulas: boolean; + maxSizeBytes: number; + }; + encryptionKey: string; + kibanaServer: any; + index: string; + queue: QueueConfig; + roles: any; +} + +const addConfigDefaults = ( + server: Legacy.Server, + core: CoreSetup, + baseConfig: ReportingConfigType +) => { + // encryption key + let encryptionKey = baseConfig.encryptionKey; + if (encryptionKey === undefined) { + server.log( + ['reporting', 'config', 'warning'], + i18n.translate('xpack.reporting.selfCheckEncryptionKey.warning', { + defaultMessage: + `Generating a random key for {setting}. To prevent pending reports ` + + `from failing on restart, please set {setting} in kibana.yml`, + values: { + setting: 'xpack.reporting.encryptionKey', + }, + }) + ); + encryptionKey = crypto.randomBytes(16).toString('hex'); + } + + const { kibanaServer: reportingServer } = baseConfig; + const serverInfo = core.http.getServerInfo(); + + // kibanaServer.hostname, default to server.host, don't allow "0" + let kibanaServerHostname = reportingServer.hostname ? reportingServer.hostname : serverInfo.host; + if (kibanaServerHostname === '0') { + server.log( + ['reporting', 'config', 'warning'], + i18n.translate('xpack.reporting.selfCheckHostname.warning', { + defaultMessage: + `Found 'server.host: "0"' in settings. This is incompatible with Reporting. ` + + `To enable Reporting to work, '{setting}: 0.0.0.0' is being automatically added to the configuration. 
` + + `You can change to 'server.host: 0.0.0.0' or add '{setting}: 0.0.0.0' in kibana.yml to prevent this message.`, + values: { + setting: 'xpack.reporting.kibanaServer.hostname', + }, + }) + ); + kibanaServerHostname = '0.0.0.0'; + } + + // kibanaServer.port, default to server.port + const kibanaServerPort = reportingServer.port + ? reportingServer.port + : serverInfo.port; // prettier-ignore + + // kibanaServer.protocol, default to server.protocol + const kibanaServerProtocol = reportingServer.protocol + ? reportingServer.protocol + : serverInfo.protocol; + + return { + ...baseConfig, + encryptionKey, + kibanaServer: { + hostname: kibanaServerHostname, + port: kibanaServerPort, + protocol: kibanaServerProtocol, + }, + }; +}; + +export const buildConfig = ( + core: CoreSetup, + server: Legacy.Server, + reportingConfig: ReportingConfigType +): ReportingConfig => { + const config = server.config(); + const { http } = core; + const serverInfo = http.getServerInfo(); + + const kbnConfig = { + path: { + data: config.get('path.data'), + }, + server: { + basePath: core.http.basePath.serverBasePath, + host: serverInfo.host, + name: serverInfo.name, + port: serverInfo.port, + uuid: core.uuid.getInstanceUuid(), + protocol: serverInfo.protocol, + }, + }; + + // spreading arguments as an array allows the return type to be known by the compiler + reportingConfig = addConfigDefaults(server, core, reportingConfig); + return { + get: (...keys: string[]) => get(reportingConfig, keys.join('.'), null), + kbnConfig: { + get: (...keys: string[]) => get(kbnConfig, keys.join('.'), null), + }, + }; +}; diff --git a/x-pack/legacy/plugins/reporting/server/core.ts b/x-pack/legacy/plugins/reporting/server/core.ts index 4506d41e4f5c3..9be61d091b00e 100644 --- a/x-pack/legacy/plugins/reporting/server/core.ts +++ b/x-pack/legacy/plugins/reporting/server/core.ts @@ -7,6 +7,7 @@ import * as Rx from 'rxjs'; import { first, mapTo } from 'rxjs/operators'; import { + ElasticsearchServiceSetup, IUiSettingsClient, KibanaRequest, SavedObjectsClient, @@ -19,20 +20,24 @@ import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; import { PLUGIN_ID } from '../common/constants'; import { EnqueueJobFn, ESQueueInstance, ReportingPluginSpecOptions, ServerFacade } from '../types'; import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory'; +import { ReportingConfig, ReportingConfigType } from './config'; import { checkLicenseFactory, getExportTypesRegistry, LevelLogger } from './lib'; import { registerRoutes } from './routes'; import { ReportingSetupDeps } from './types'; interface ReportingInternalSetup { browserDriverFactory: HeadlessChromiumDriverFactory; + elasticsearch: ElasticsearchServiceSetup; } interface ReportingInternalStart { + enqueueJob: EnqueueJobFn; + esqueue: ESQueueInstance; savedObjects: SavedObjectsServiceStart; uiSettings: UiSettingsServiceStart; - esqueue: ESQueueInstance; - enqueueJob: EnqueueJobFn; } +export { ReportingConfig, ReportingConfigType }; + export class ReportingCore { private pluginSetupDeps?: ReportingInternalSetup; private pluginStartDeps?: ReportingInternalStart; @@ -40,7 +45,7 @@ export class ReportingCore { private readonly pluginStart$ = new Rx.ReplaySubject(); private exportTypesRegistry = getExportTypesRegistry(); - constructor(private logger: LevelLogger) {} + constructor(private logger: LevelLogger, private config: ReportingConfig) {} legacySetup( xpackMainPlugin: XPackMainPlugin, @@ -48,14 +53,18 @@ export class ReportingCore { __LEGACY: ServerFacade, 
plugins: ReportingSetupDeps ) { + // legacy plugin status mirrorPluginStatus(xpackMainPlugin, reporting); + + // legacy license check const checkLicense = checkLicenseFactory(this.exportTypesRegistry); (xpackMainPlugin as any).status.once('green', () => { // Register a function that is called whenever the xpack info changes, // to re-compute the license check results for this plugin xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense); }); - // Reporting routes + + // legacy routes registerRoutes(this, __LEGACY, plugins, this.logger); } @@ -90,23 +99,31 @@ export class ReportingCore { return (await this.getPluginSetupDeps()).browserDriverFactory; } + public getConfig(): ReportingConfig { + return this.config; + } + /* - * Kibana core module dependencies + * Outside dependencies */ - private async getPluginSetupDeps() { + private async getPluginSetupDeps(): Promise { if (this.pluginSetupDeps) { return this.pluginSetupDeps; } return await this.pluginSetup$.pipe(first()).toPromise(); } - private async getPluginStartDeps() { + private async getPluginStartDeps(): Promise { if (this.pluginStartDeps) { return this.pluginStartDeps; } return await this.pluginStart$.pipe(first()).toPromise(); } + public async getElasticsearchService(): Promise { + return (await this.getPluginSetupDeps()).elasticsearch; + } + public async getSavedObjectsClient(fakeRequest: KibanaRequest): Promise { const { savedObjects } = await this.getPluginStartDeps(); return savedObjects.getScopedClient(fakeRequest) as SavedObjectsClient; diff --git a/x-pack/legacy/plugins/reporting/server/index.ts b/x-pack/legacy/plugins/reporting/server/index.ts index 24e2a954415d9..c564963e363cc 100644 --- a/x-pack/legacy/plugins/reporting/server/index.ts +++ b/x-pack/legacy/plugins/reporting/server/index.ts @@ -6,10 +6,11 @@ import { PluginInitializerContext } from 'src/core/server'; import { ReportingPlugin as Plugin } from './plugin'; +import { ReportingConfig, ReportingCore } from './core'; -export const plugin = (context: PluginInitializerContext) => { - return new Plugin(context); +export const plugin = (context: PluginInitializerContext, config: ReportingConfig) => { + return new Plugin(context, config); }; -export { ReportingCore } from './core'; export { ReportingPlugin } from './plugin'; +export { ReportingConfig, ReportingCore }; diff --git a/x-pack/legacy/plugins/reporting/server/legacy.ts b/x-pack/legacy/plugins/reporting/server/legacy.ts index 336ff5f4d2ee7..679b42aca6de5 100644 --- a/x-pack/legacy/plugins/reporting/server/legacy.ts +++ b/x-pack/legacy/plugins/reporting/server/legacy.ts @@ -3,10 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ + import { Legacy } from 'kibana'; import { PluginInitializerContext } from 'src/core/server'; import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { ReportingPluginSpecOptions } from '../types'; +import { buildConfig } from './config'; import { plugin } from './index'; import { LegacySetup, ReportingStartDeps } from './types'; @@ -14,24 +16,31 @@ const buildLegacyDependencies = ( server: Legacy.Server, reportingPlugin: ReportingPluginSpecOptions ): LegacySetup => ({ - config: server.config, - info: server.info, route: server.route.bind(server), + config: server.config, plugins: { - elasticsearch: server.plugins.elasticsearch, xpack_main: server.plugins.xpack_main, reporting: reportingPlugin, }, }); +/* + * Starts the New Platform instance of Reporting using legacy dependencies + */ export const legacyInit = async ( server: Legacy.Server, - reportingPlugin: ReportingPluginSpecOptions + reportingLegacyPlugin: ReportingPluginSpecOptions ) => { - const coreSetup = server.newPlatform.setup.core; - const pluginInstance = plugin(server.newPlatform.coreContext as PluginInitializerContext); + const { core: coreSetup } = server.newPlatform.setup; + const legacyConfig = server.config(); + const reportingConfig = buildConfig(coreSetup, server, legacyConfig.get('xpack.reporting')); - const __LEGACY = buildLegacyDependencies(server, reportingPlugin); + const __LEGACY = buildLegacyDependencies(server, reportingLegacyPlugin); + + const pluginInstance = plugin( + server.newPlatform.coreContext as PluginInitializerContext, + reportingConfig + ); await pluginInstance.setup(coreSetup, { elasticsearch: coreSetup.elasticsearch, security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, @@ -42,7 +51,6 @@ export const legacyInit = async ( // Schedule to call the "start" hook only after start dependencies are ready coreSetup.getStartServices().then(([core, plugins]) => pluginInstance.start(core, { - elasticsearch: coreSetup.elasticsearch, data: (plugins as ReportingStartDeps).data, __LEGACY, }) diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index d593e4625cdf4..8230ee889ae05 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -4,22 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; -import { ESQueueInstance, ServerFacade, QueueConfig, Logger } from '../../types'; +import { ESQueueInstance, Logger } from '../../types'; import { ReportingCore } from '../core'; +import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed +import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; -import { createWorkerFactory } from './create_worker'; -import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed export async function createQueueFactory( reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: Logger ): Promise { - const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue'); - const index = server.config().get('xpack.reporting.index'); + const config = reporting.getConfig(); + const queueConfig = config.get('queue'); + const index = config.get('index'); + const elasticsearch = await reporting.getElasticsearchService(); const queueOptions = { interval: queueConfig.indexInterval, @@ -33,7 +32,7 @@ export async function createQueueFactory( if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(reporting, server, elasticsearch, logger); + const createWorker = createWorkerFactory(reporting, logger); await createWorker(queue); } else { logger.info( diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index d4d913243e18d..ad8db3201844e 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -4,11 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; import * as sinon from 'sinon'; -import { ReportingCore } from '../../server'; +import { ReportingConfig, ReportingCore } from '../../server/types'; import { createMockReportingCore } from '../../test_helpers'; -import { ServerFacade } from '../../types'; import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; @@ -17,21 +15,15 @@ import { ClientMock } from './esqueue/__tests__/fixtures/legacy_elasticsearch'; import { ExportTypesRegistry } from './export_types_registry'; const configGetStub = sinon.stub(); -configGetStub.withArgs('xpack.reporting.queue').returns({ +configGetStub.withArgs('queue').returns({ pollInterval: 3300, pollIntervalErrorMultiplier: 10, }); -configGetStub.withArgs('server.name').returns('test-server-123'); -configGetStub.withArgs('server.uuid').returns('g9ymiujthvy6v8yrh7567g6fwzgzftzfr'); +configGetStub.withArgs('server', 'name').returns('test-server-123'); +configGetStub.withArgs('server', 'uuid').returns('g9ymiujthvy6v8yrh7567g6fwzgzftzfr'); const executeJobFactoryStub = sinon.stub(); - -const getMockServer = (): ServerFacade => { - return ({ - config: () => ({ get: configGetStub }), - } as unknown) as ServerFacade; -}; -const getMockLogger = jest.fn(); +const getMockLogger = sinon.stub(); const getMockExportTypesRegistry = ( exportTypes: any[] = [{ executeJobFactory: executeJobFactoryStub }] @@ -41,25 +33,22 @@ const getMockExportTypesRegistry = ( } as ExportTypesRegistry); describe('Create Worker', () => { + let mockReporting: ReportingCore; + let mockConfig: ReportingConfig; let queue: Esqueue; let client: ClientMock; - let mockReporting: ReportingCore; beforeEach(async () => { - mockReporting = await createMockReportingCore(); + mockConfig = { get: configGetStub, kbnConfig: { get: configGetStub } }; + mockReporting = await createMockReportingCore(mockConfig); + mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); client = new ClientMock(); queue = new Esqueue('reporting-queue', { client }); executeJobFactoryStub.reset(); }); test('Creates a single Esqueue worker for Reporting', async () => { - mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); - const createWorker = createWorkerFactory( - mockReporting, - getMockServer(), - {} as ElasticsearchServiceSetup, - getMockLogger() - ); + const createWorker = createWorkerFactory(mockReporting, getMockLogger()); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); await createWorker(queue); @@ -91,12 +80,7 @@ Object { { executeJobFactory: executeJobFactoryStub }, ]); mockReporting.getExportTypesRegistry = () => exportTypesRegistry; - const createWorker = createWorkerFactory( - mockReporting, - getMockServer(), - {} as ElasticsearchServiceSetup, - getMockLogger() - ); + const createWorker = createWorkerFactory(mockReporting, getMockLogger()); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); await createWorker(queue); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index 3567712367608..16b8fbdb30fdd 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
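The stubbed config in create_worker.test.ts suggests a reusable test helper along these lines. This is a sketch only, assuming sinon as in the spec above; createMockConfigSketch is not an existing helper in the repo.

import * as sinon from 'sinon';

// Builds a ReportingConfig-shaped stub whose multi-key lookups resolve against
// dotted keys, e.g. get('server', 'name') -> values['server.name'].
export function createMockConfigSketch(values: Record<string, unknown> = {}) {
  const get = sinon.stub();
  get.callsFake((...keys: string[]) => values[keys.join('.')]);
  return { get, kbnConfig: { get } };
}

// Usage in a spec (hypothetical values):
//   const mockConfig = createMockConfigSketch({ 'queue.pollInterval': 3300, 'server.name': 'test-server-123' });
//   const mockReporting = await createMockReportingCore(mockConfig);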
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; import { CancellationToken } from '../../common/cancellation_token'; import { PLUGIN_ID } from '../../common/constants'; +import { ReportingCore } from '../../server/types'; import { ESQueueInstance, ESQueueWorkerExecuteFn, @@ -15,25 +15,18 @@ import { JobDocPayload, JobSource, Logger, - QueueConfig, RequestFacade, - ServerFacade, } from '../../types'; -import { ReportingCore } from '../core'; // @ts-ignore untyped dependency import { events as esqueueEvents } from './esqueue'; -export function createWorkerFactory( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - logger: Logger -) { +export function createWorkerFactory(reporting: ReportingCore, logger: Logger) { type JobDocPayloadType = JobDocPayload; - const config = server.config(); - const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); - const kibanaName: string = config.get('server.name'); - const kibanaId: string = config.get('server.uuid'); + + const config = reporting.getConfig(); + const queueConfig = config.get('queue'); + const kibanaName = config.kbnConfig.get('server', 'name'); + const kibanaId = config.kbnConfig.get('server', 'uuid'); // Once more document types are added, this will need to be passed in return async function createWorker(queue: ESQueueInstance) { @@ -44,15 +37,14 @@ export function createWorkerFactory( > = new Map(); for (const exportType of reporting.getExportTypesRegistry().getAll() as Array< - ExportTypeDefinition + ExportTypeDefinition< + JobParamsType, + unknown, + unknown, + ImmediateExecuteFn | ESQueueWorkerExecuteFn + > >) { - // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = await exportType.executeJobFactory( - reporting, - server, - elasticsearch, - logger - ); + const jobExecutor = await exportType.executeJobFactory(reporting, logger); // FIXME: does not "need" to be async jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/crypto.ts b/x-pack/legacy/plugins/reporting/server/lib/crypto.ts index dbc01fc947f8b..97876529ecfa7 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/crypto.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/crypto.ts @@ -5,12 +5,7 @@ */ import nodeCrypto from '@elastic/node-crypto'; -import { oncePerServer } from './once_per_server'; -import { ServerFacade } from '../../types'; -function cryptoFn(server: ServerFacade) { - const encryptionKey = server.config().get('xpack.reporting.encryptionKey'); +export function cryptoFactory(encryptionKey: string | undefined) { return nodeCrypto({ encryptionKey }); } - -export const cryptoFactory = oncePerServer(cryptoFn); diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index c215bdc398904..5a062a693b468 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -5,22 +5,18 @@ */ import { get } from 'lodash'; -import { ElasticsearchServiceSetup } from 'kibana/server'; -// @ts-ignore -import { events as esqueueEvents } from './esqueue'; import { + ConditionalHeaders, EnqueueJobFn, ESQueueCreateJobFn, ImmediateCreateJobFn, Job, - ServerFacade, - RequestFacade, Logger, - CaptureConfig, - QueueConfig, - ConditionalHeaders, + RequestFacade, } from '../../types'; import { ReportingCore } from '../core'; +// @ts-ignore +import { 
events as esqueueEvents } from './esqueue'; interface ConfirmedJob { id: string; @@ -29,18 +25,13 @@ interface ConfirmedJob { _primary_term: number; } -export function enqueueJobFactory( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -): EnqueueJobFn { +export function enqueueJobFactory(reporting: ReportingCore, parentLogger: Logger): EnqueueJobFn { const logger = parentLogger.clone(['queue-job']); - const config = server.config(); - const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); + const config = reporting.getConfig(); + const captureConfig = config.get('capture'); + const queueConfig = config.get('queue'); const browserType = captureConfig.browser.type; const maxAttempts = captureConfig.maxAttempts; - const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); return async function enqueueJob( exportTypeId: string, @@ -58,13 +49,7 @@ export function enqueueJobFactory( throw new Error(`Export type ${exportTypeId} does not exist in the registry!`); } - // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = exportType.createJobFactory( - reporting, - server, - elasticsearch, - logger - ) as CreateJobFn; + const createJob = exportType.createJobFactory(reporting, logger) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index 49d5c568c3981..5e73fe77ecb79 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -6,10 +6,10 @@ import { Legacy } from 'kibana'; import { KibanaRequest } from '../../../../../../src/core/server'; -import { ServerFacade } from '../../types'; +import { Logger } from '../../types'; import { ReportingSetupDeps } from '../types'; -export function getUserFactory(server: ServerFacade, security: ReportingSetupDeps['security']) { +export function getUserFactory(security: ReportingSetupDeps['security'], logger: Logger) { /* * Legacy.Request because this is called from routing middleware */ diff --git a/x-pack/legacy/plugins/reporting/server/lib/index.ts b/x-pack/legacy/plugins/reporting/server/lib/index.ts index 0a2db749cb954..f5ccbe493a91f 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/index.ts @@ -4,11 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
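After this change the create/execute/enqueue factories share the same two-argument shape, pulling their dependencies from ReportingCore instead of a ServerFacade. A condensed sketch follows; all type and function names here are placeholders for the real exports in server/types and server/lib.

interface LoggerSketch {
  clone(tags: string[]): LoggerSketch;
  info(msg: string): void;
}

interface CoreSketch {
  getConfig(): { get(...keys: string[]): any };
}

function enqueueJobFactorySketch(reporting: CoreSketch, parentLogger: LoggerSketch) {
  const logger = parentLogger.clone(['queue-job']);
  const captureConfig = reporting.getConfig().get('capture');

  return async function enqueueJobSketch(exportTypeId: string, jobParams: object) {
    logger.info(`Queuing "${exportTypeId}" job for the ${captureConfig.browser.type} browser`);
    // ...build the job payload via the export type's createJobFactory(reporting, logger)
    //    and index it, as enqueue_job.ts does above.
    return { exportTypeId, jobParams };
  };
}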
*/ -export { getExportTypesRegistry } from './export_types_registry'; export { checkLicenseFactory } from './check_license'; -export { LevelLogger } from './level_logger'; -export { cryptoFactory } from './crypto'; -export { oncePerServer } from './once_per_server'; -export { runValidations } from './validate'; export { createQueueFactory } from './create_queue'; +export { cryptoFactory } from './crypto'; export { enqueueJobFactory } from './enqueue_job'; +export { getExportTypesRegistry } from './export_types_registry'; +export { LevelLogger } from './level_logger'; +export { runValidations } from './validate'; diff --git a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts index c01e6377b039e..0affc111c1368 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts @@ -9,7 +9,8 @@ import Boom from 'boom'; import { errors as elasticsearchErrors } from 'elasticsearch'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; -import { JobSource, ServerFacade } from '../../types'; +import { JobSource } from '../../types'; +import { ReportingConfig } from '../types'; const esErrors = elasticsearchErrors as Record; const defaultSize = 10; @@ -39,8 +40,11 @@ interface CountAggResult { count: number; } -export function jobsQueryFactory(server: ServerFacade, elasticsearch: ElasticsearchServiceSetup) { - const index = server.config().get('xpack.reporting.index'); +export function jobsQueryFactory( + config: ReportingConfig, + elasticsearch: ElasticsearchServiceSetup +) { + const index = config.get('index'); const { callAsInternalUser } = elasticsearch.adminClient; function getUsername(user: any) { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js deleted file mode 100644 index 10980f702d849..0000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { validateEncryptionKey } from '../validate_encryption_key'; - -describe('Reporting: Validate config', () => { - const logger = { - warning: sinon.spy(), - }; - - beforeEach(() => { - logger.warning.resetHistory(); - }); - - [undefined, null].forEach(value => { - it(`should log a warning and set xpack.reporting.encryptionKey if encryptionKey is ${value}`, () => { - const config = { - get: sinon.stub().returns(value), - set: sinon.stub(), - }; - - expect(() => validateEncryptionKey({ config: () => config }, logger)).not.to.throwError(); - - sinon.assert.calledWith(config.set, 'xpack.reporting.encryptionKey'); - sinon.assert.calledWithMatch(logger.warning, /Generating a random key/); - sinon.assert.calledWithMatch(logger.warning, /please set xpack.reporting.encryptionKey/); - }); - }); -}); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts deleted file mode 100644 index 04f998fd3e5a5..0000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { ServerFacade } from '../../../../types'; -import { validateServerHost } from '../validate_server_host'; - -const configKey = 'xpack.reporting.kibanaServer.hostname'; - -describe('Reporting: Validate server host setting', () => { - it(`should log a warning and set ${configKey} if server.host is "0"`, () => { - const getStub = sinon.stub(); - getStub.withArgs('server.host').returns('0'); - getStub.withArgs(configKey).returns(undefined); - const config = { - get: getStub, - set: sinon.stub(), - }; - - expect(() => - validateServerHost(({ config: () => config } as unknown) as ServerFacade) - ).to.throwError(); - - sinon.assert.calledWith(config.set, configKey); - }); -}); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts index 0fdbd858b8e3c..85d9f727d7fa7 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts @@ -6,25 +6,22 @@ import { i18n } from '@kbn/i18n'; import { ElasticsearchServiceSetup } from 'kibana/server'; -import { Logger, ServerFacade } from '../../../types'; +import { Logger } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; +import { ReportingConfig } from '../../types'; import { validateBrowser } from './validate_browser'; -import { validateEncryptionKey } from './validate_encryption_key'; import { validateMaxContentLength } from './validate_max_content_length'; -import { validateServerHost } from './validate_server_host'; export async function runValidations( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup, browserFactory: HeadlessChromiumDriverFactory, logger: Logger ) { try { await Promise.all([ - validateBrowser(server, browserFactory, logger), - validateEncryptionKey(server, logger), - validateMaxContentLength(server, elasticsearch, logger), - 
validateServerHost(server), + validateBrowser(browserFactory, logger), + validateMaxContentLength(config, elasticsearch, logger), ]); logger.debug( i18n.translate('xpack.reporting.selfCheck.ok', { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts index 89c49123e85bf..d6512d5eb718b 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts @@ -3,9 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + import { Browser } from 'puppeteer'; import { BROWSER_TYPE } from '../../../common/constants'; -import { ServerFacade, Logger } from '../../../types'; +import { Logger } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; /* @@ -13,7 +14,6 @@ import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_fa * to the locally running Kibana instance. */ export const validateBrowser = async ( - server: ServerFacade, browserFactory: HeadlessChromiumDriverFactory, logger: Logger ) => { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts deleted file mode 100644 index e0af94cbdc29c..0000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { i18n } from '@kbn/i18n'; -import crypto from 'crypto'; -import { ServerFacade, Logger } from '../../../types'; - -export function validateEncryptionKey(serverFacade: ServerFacade, logger: Logger) { - const config = serverFacade.config(); - - const encryptionKey = config.get('xpack.reporting.encryptionKey'); - if (encryptionKey == null) { - // TODO this should simply throw an error and let the handler conver it to a warning mesasge. See validateServerHost. - logger.warning( - i18n.translate('xpack.reporting.selfCheckEncryptionKey.warning', { - defaultMessage: - `Generating a random key for {setting}. 
To prevent pending reports ` + - `from failing on restart, please set {setting} in kibana.yml`, - values: { - setting: 'xpack.reporting.encryptionKey', - }, - }) - ); - - // @ts-ignore: No set() method on KibanaConfig, just get() and has() - config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex')); // update config in memory to contain a usable encryption key - } -} diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js index 942dcaf842696..2551fd48b91f3 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js @@ -32,11 +32,7 @@ describe('Reporting: Validate Max Content Length', () => { }); it('should log warning messages when reporting has a higher max-size than elasticsearch', async () => { - const server = { - config: () => ({ - get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES), - }), - }; + const config = { get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES) }; const elasticsearch = { dataClient: { callAsInternalUser: () => ({ @@ -49,7 +45,7 @@ describe('Reporting: Validate Max Content Length', () => { }, }; - await validateMaxContentLength(server, elasticsearch, logger); + await validateMaxContentLength(config, elasticsearch, logger); sinon.assert.calledWithMatch( logger.warning, @@ -70,14 +66,10 @@ describe('Reporting: Validate Max Content Length', () => { }); it('should do nothing when reporting has the same max-size as elasticsearch', async () => { - const server = { - config: () => ({ - get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES), - }), - }; + const config = { get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES) }; expect( - async () => await validateMaxContentLength(server, elasticsearch, logger.warning) + async () => await validateMaxContentLength(config, elasticsearch, logger.warning) ).not.toThrow(); sinon.assert.notCalled(logger.warning); }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts index ce4a5b93e7431..a20905ba093d4 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts @@ -7,17 +7,17 @@ import numeral from '@elastic/numeral'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { defaults, get } from 'lodash'; -import { Logger, ServerFacade } from '../../../types'; +import { Logger } from '../../../types'; +import { ReportingConfig } from '../../types'; -const KIBANA_MAX_SIZE_BYTES_PATH = 'xpack.reporting.csv.maxSizeBytes'; +const KIBANA_MAX_SIZE_BYTES_PATH = 'csv.maxSizeBytes'; const ES_MAX_SIZE_BYTES_PATH = 'http.max_content_length'; export async function validateMaxContentLength( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup, logger: Logger ) { - const config = server.config(); const { callAsInternalUser } = elasticsearch.dataClient; const elasticClusterSettingsResponse = await callAsInternalUser('cluster.getSettings', { @@ -28,13 +28,13 @@ export async function validateMaxContentLength( const elasticSearchMaxContent = get(elasticClusterSettings, 'http.max_content_length', '100mb'); const elasticSearchMaxContentBytes = 
numeral().unformat(elasticSearchMaxContent.toUpperCase()); - const kibanaMaxContentBytes: number = config.get(KIBANA_MAX_SIZE_BYTES_PATH); + const kibanaMaxContentBytes = config.get('csv', 'maxSizeBytes'); if (kibanaMaxContentBytes > elasticSearchMaxContentBytes) { // TODO this should simply throw an error and let the handler conver it to a warning mesasge. See validateServerHost. logger.warning( - `${KIBANA_MAX_SIZE_BYTES_PATH} (${kibanaMaxContentBytes}) is higher than ElasticSearch's ${ES_MAX_SIZE_BYTES_PATH} (${elasticSearchMaxContentBytes}). ` + - `Please set ${ES_MAX_SIZE_BYTES_PATH} in ElasticSearch to match, or lower your ${KIBANA_MAX_SIZE_BYTES_PATH} in Kibana to avoid this warning.` + `xpack.reporting.${KIBANA_MAX_SIZE_BYTES_PATH} (${kibanaMaxContentBytes}) is higher than ElasticSearch's ${ES_MAX_SIZE_BYTES_PATH} (${elasticSearchMaxContentBytes}). ` + + `Please set ${ES_MAX_SIZE_BYTES_PATH} in ElasticSearch to match, or lower your xpack.reporting.${KIBANA_MAX_SIZE_BYTES_PATH} in Kibana to avoid this warning.` ); } } diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts deleted file mode 100644 index f4f4d61246b6a..0000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { ServerFacade } from '../../../types'; - -const configKey = 'xpack.reporting.kibanaServer.hostname'; - -export function validateServerHost(serverFacade: ServerFacade) { - const config = serverFacade.config(); - - const serverHost = config.get('server.host'); - const reportingKibanaHostName = config.get(configKey); - - if (!reportingKibanaHostName && serverHost === '0') { - // @ts-ignore: No set() method on KibanaConfig, just get() and has() - config.set(configKey, '0.0.0.0'); // update config in memory to allow Reporting to work - - throw new Error( - `Found 'server.host: "0"' in settings. This is incompatible with Reporting. ` + - `To enable Reporting to work, '${configKey}: 0.0.0.0' is being automatically to the configuration. 
` + - `You can change to 'server.host: 0.0.0.0' or add '${configKey}: 0.0.0.0' in kibana.yml to prevent this message.` - ); - } -} diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index 4f24cc16b2277..c9ed2e81c6792 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -7,7 +7,7 @@ import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from 'src/core/server'; import { logConfiguration } from '../log_configuration'; import { createBrowserDriverFactory } from './browsers'; -import { ReportingCore } from './core'; +import { ReportingCore, ReportingConfig } from './core'; import { createQueueFactory, enqueueJobFactory, LevelLogger, runValidations } from './lib'; import { setFieldFormats } from './services'; import { ReportingSetup, ReportingSetupDeps, ReportingStart, ReportingStartDeps } from './types'; @@ -17,38 +17,40 @@ import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; export class ReportingPlugin implements Plugin { + private config: ReportingConfig; private logger: LevelLogger; private reportingCore: ReportingCore; - constructor(context: PluginInitializerContext) { + constructor(context: PluginInitializerContext, config: ReportingConfig) { + this.config = config; this.logger = new LevelLogger(context.logger.get('reporting')); - this.reportingCore = new ReportingCore(this.logger); + this.reportingCore = new ReportingCore(this.logger, this.config); } public async setup(core: CoreSetup, plugins: ReportingSetupDeps) { - const { elasticsearch, usageCollection, __LEGACY } = plugins; + const { config } = this; + const { elasticsearch, __LEGACY } = plugins; - const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, this.logger); // required for validations :( - runValidations(__LEGACY, elasticsearch, browserDriverFactory, this.logger); // this must run early, as it sets up config defaults + const browserDriverFactory = await createBrowserDriverFactory(config, this.logger); // required for validations :( + runValidations(config, elasticsearch, browserDriverFactory, this.logger); const { xpack_main: xpackMainLegacy, reporting: reportingLegacy } = __LEGACY.plugins; this.reportingCore.legacySetup(xpackMainLegacy, reportingLegacy, __LEGACY, plugins); // Register a function with server to manage the collection of usage stats - registerReportingUsageCollector(this.reportingCore, __LEGACY, usageCollection); + registerReportingUsageCollector(this.reportingCore, plugins); // regsister setup internals - this.reportingCore.pluginSetup({ browserDriverFactory }); + this.reportingCore.pluginSetup({ browserDriverFactory, elasticsearch }); return {}; } public async start(core: CoreStart, plugins: ReportingStartDeps) { const { reportingCore, logger } = this; - const { elasticsearch, __LEGACY } = plugins; - const esqueue = await createQueueFactory(reportingCore, __LEGACY, elasticsearch, logger); - const enqueueJob = enqueueJobFactory(reportingCore, __LEGACY, elasticsearch, logger); + const esqueue = await createQueueFactory(reportingCore, logger); + const enqueueJob = enqueueJobFactory(reportingCore, logger); this.reportingCore.pluginStart({ savedObjects: core.savedObjects, @@ -58,7 +60,8 @@ export class ReportingPlugin }); setFieldFormats(plugins.data.fieldFormats); - logConfiguration(__LEGACY, this.logger); + + logConfiguration(this.config.get('capture'), this.logger); return {}; } diff --git 
a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts index 56622617586f7..6b4f5dbd9203a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts @@ -10,7 +10,7 @@ import { Legacy } from 'kibana'; import rison from 'rison-node'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { GetRouteConfigFactoryFn, @@ -22,15 +22,17 @@ import { HandlerErrorFunction, HandlerFunction } from './types'; const BASE_GENERATE = `${API_BASE_URL}/generate`; export function registerGenerateFromJobParams( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, handler: HandlerFunction, handleError: HandlerErrorFunction, logger: Logger ) { + const config = reporting.getConfig(); const getRouteConfig = () => { const getOriginalRouteConfig: GetRouteConfigFactoryFn = getRouteConfigFactoryReportingPre( - server, + config, plugins, logger ); diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts index 415b6b7d64366..830953d532243 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts @@ -9,7 +9,7 @@ import { get } from 'lodash'; import { API_BASE_GENERATE_V1, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../common/constants'; import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types'; @@ -24,13 +24,15 @@ import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types * - local (transient) changes the user made to the saved object */ export function registerGenerateCsvFromSavedObject( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, handleRoute: HandlerFunction, handleRouteError: HandlerErrorFunction, logger: Logger ) { - const routeOptions = getRouteOptionsCsv(server, plugins, logger); + const config = reporting.getConfig(); + const routeOptions = getRouteOptionsCsv(config, plugins, logger); server.route({ path: `${API_BASE_GENERATE_V1}/csv/saved-object/{savedObjectType}:{savedObjectId}`, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index 5d17fa2e82b8c..519e49f56c377 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -16,7 +16,7 @@ import { ResponseFacade, ServerFacade, } from '../../types'; -import { ReportingSetupDeps, ReportingCore } from 
'../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; @@ -35,8 +35,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( plugins: ReportingSetupDeps, parentLogger: Logger ) { - const routeOptions = getRouteOptionsCsv(server, plugins, parentLogger); - const { elasticsearch } = plugins; + const config = reporting.getConfig(); + const routeOptions = getRouteOptionsCsv(config, plugins, parentLogger); /* * CSV export with the `immediate` option does not queue a job with Reporting's ESQueue to run the job async. Instead, this does: @@ -51,15 +51,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( const request = makeRequestFacade(legacyRequest); const logger = parentLogger.clone(['savedobject-csv']); const jobParams = getJobParamsFromRequest(request, { isImmediate: true }); - - /* TODO these functions should be made available in the export types registry: - * - * const { createJobFn, executeJobFn } = exportTypesRegistry.getById(CSV_FROM_SAVEDOBJECT_JOB_TYPE) - * - * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here - */ - const createJobFn = createJobFactory(reporting, server, elasticsearch, logger); - const executeJobFn = await executeJobFactory(reporting, server, elasticsearch, logger); + const createJobFn = createJobFactory(reporting, logger); + const executeJobFn = await executeJobFactory(reporting, logger); // FIXME: does not "need" to be async const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( jobParams, request.headers, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts index 54d9671692c5d..8e54feac3c8a6 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts @@ -7,7 +7,7 @@ import Hapi from 'hapi'; import { createMockReportingCore } from '../../test_helpers'; import { Logger, ServerFacade } from '../../types'; -import { ReportingCore, ReportingSetupDeps } from '../../server/types'; +import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; jest.mock('./lib/authorized_user_pre_routing', () => ({ authorizedUserPreRoutingFactory: () => () => ({}), @@ -22,6 +22,8 @@ import { registerJobGenerationRoutes } from './generation'; let mockServer: Hapi.Server; let mockReportingPlugin: ReportingCore; +let mockReportingConfig: ReportingConfig; + const mockLogger = ({ error: jest.fn(), debug: jest.fn(), @@ -33,8 +35,9 @@ beforeEach(async () => { port: 8080, routes: { log: { collect: true } }, }); - mockServer.config = () => ({ get: jest.fn(), has: jest.fn() }); - mockReportingPlugin = await createMockReportingCore(); + + mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; + mockReportingPlugin = await createMockReportingCore(mockReportingConfig); mockReportingPlugin.getEnqueueJob = async () => jest.fn().mockImplementation(() => ({ toJSON: () => '{ "job": "data" }' })); }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 096ba84b63d1a..1c6129313db4b 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -9,7 +9,7 @@ import { errors as 
elasticsearchErrors } from 'elasticsearch'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps, ReportingCore } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { registerGenerateFromJobParams } from './generate_from_jobparams'; import { registerGenerateCsvFromSavedObject } from './generate_from_savedobject'; import { registerGenerateCsvFromSavedObjectImmediate } from './generate_from_savedobject_immediate'; @@ -23,8 +23,9 @@ export function registerJobGenerationRoutes( plugins: ReportingSetupDeps, logger: Logger ) { - const config = server.config(); - const DOWNLOAD_BASE_URL = config.get('server.basePath') + `${API_BASE_URL}/jobs/download`; + const config = reporting.getConfig(); + const downloadBaseUrl = + config.kbnConfig.get('server', 'basePath') + `${API_BASE_URL}/jobs/download`; /* * Generates enqueued job details to use in responses @@ -47,7 +48,7 @@ export function registerJobGenerationRoutes( return h .response({ - path: `${DOWNLOAD_BASE_URL}/${jobJson.id}`, + path: `${downloadBaseUrl}/${jobJson.id}`, job: jobJson, }) .type('application/json'); @@ -66,11 +67,11 @@ export function registerJobGenerationRoutes( return err; } - registerGenerateFromJobParams(server, plugins, handler, handleError, logger); + registerGenerateFromJobParams(reporting, server, plugins, handler, handleError, logger); // Register beta panel-action download-related API's - if (config.get('xpack.reporting.csv.enablePanelActionDownload')) { - registerGenerateCsvFromSavedObject(server, plugins, handler, handleError, logger); + if (config.get('csv', 'enablePanelActionDownload')) { + registerGenerateCsvFromSavedObject(reporting, server, plugins, handler, handleError, logger); registerGenerateCsvFromSavedObjectImmediate(reporting, server, plugins, logger); } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js index 071b401d2321b..9f0de844df369 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js @@ -5,7 +5,6 @@ */ import Hapi from 'hapi'; -import { memoize } from 'lodash'; import { createMockReportingCore } from '../../test_helpers'; import { ExportTypesRegistry } from '../lib/export_types_registry'; @@ -23,6 +22,7 @@ import { registerJobInfoRoutes } from './jobs'; let mockServer; let exportTypesRegistry; let mockReportingPlugin; +let mockReportingConfig; const mockLogger = { error: jest.fn(), debug: jest.fn(), @@ -30,7 +30,6 @@ const mockLogger = { beforeEach(async () => { mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } }); - mockServer.config = memoize(() => ({ get: jest.fn() })); exportTypesRegistry = new ExportTypesRegistry(); exportTypesRegistry.register({ id: 'unencoded', @@ -43,7 +42,9 @@ beforeEach(async () => { jobContentEncoding: 'base64', jobContentExtension: 'pdf', }); - mockReportingPlugin = await createMockReportingCore(); + + mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; + mockReportingPlugin = await createMockReportingCore(mockReportingConfig); mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry; }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index b9aa75e0ddd00..f6f98b2377db6 100644 --- 
a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -17,7 +17,7 @@ import { ServerFacade, } from '../../types'; import { jobsQueryFactory } from '../lib/jobs_query'; -import { ReportingSetupDeps, ReportingCore } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { deleteJobResponseHandlerFactory, downloadJobResponseHandlerFactory, @@ -41,9 +41,10 @@ export function registerJobInfoRoutes( plugins: ReportingSetupDeps, logger: Logger ) { + const config = reporting.getConfig(); const { elasticsearch } = plugins; - const jobsQuery = jobsQueryFactory(server, elasticsearch); - const getRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); + const jobsQuery = jobsQueryFactory(config, elasticsearch); + const getRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); // list jobs in the queue, paginated server.route({ @@ -141,8 +142,8 @@ export function registerJobInfoRoutes( // trigger a download of the output from a job const exportTypesRegistry = reporting.getExportTypesRegistry(); - const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(server, plugins, logger); - const downloadResponseHandler = downloadJobResponseHandlerFactory(server, elasticsearch, exportTypesRegistry); // prettier-ignore + const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(config, plugins, logger); + const downloadResponseHandler = downloadJobResponseHandlerFactory(config, elasticsearch, exportTypesRegistry); // prettier-ignore server.route({ path: `${MAIN_ENTRY}/download/{docId}`, method: 'GET', @@ -181,8 +182,8 @@ export function registerJobInfoRoutes( }); // allow a report to be deleted - const getRouteConfigDelete = getRouteConfigFactoryDeletePre(server, plugins, logger); - const deleteResponseHandler = deleteJobResponseHandlerFactory(server, elasticsearch); + const getRouteConfigDelete = getRouteConfigFactoryDeletePre(config, plugins, logger); + const deleteResponseHandler = deleteJobResponseHandlerFactory(config, elasticsearch); server.route({ path: `${MAIN_ENTRY}/delete/{docId}`, method: 'DELETE', diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js index 3460d22592e3d..b5d6ae59ce5dd 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js @@ -7,56 +7,48 @@ import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; describe('authorized_user_pre_routing', function() { - // the getClientShield is using `once` which forces us to use a constant mock - // which makes testing anything that is dependent on `oncePerServer` confusing. 
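The reworked test below mirrors how authorized_user_pre_routing.ts now reads its role settings. Roughly, and as a sketch only (isAuthorizedSketch is an illustrative name, and the superuser role string is assumed from the spec descriptions):

function isAuthorizedSketch(
  config: { get(...keys: string[]): string[] | undefined },
  userRoles: string[]
): boolean {
  // superuser is always allowed; additional roles come from `xpack.reporting.roles.allow`,
  // read through the new config object as config.get('roles', 'allow').
  const allowedRoles = ['superuser', ...(config.get('roles', 'allow') || [])];
  return userRoles.some(role => allowedRoles.includes(role));
}

// isAuthorizedSketch({ get: () => ['.reporting_user'] }, ['.reporting_user']); // true
// isAuthorizedSketch({ get: () => ['.reporting_user'] }, ['something_else']);  // false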
- // so createMockServer reuses the same 'instance' of the server and overwrites - // the properties to contain different values - const createMockServer = (function() { - const getUserStub = jest.fn(); - let mockConfig; - - const mockServer = { - expose() {}, - config() { - return { - get(key) { - return mockConfig[key]; - }, - }; - }, - log: function() {}, - plugins: { - xpack_main: {}, - security: { getUser: getUserStub }, - }, + const createMockConfig = (mockConfig = {}) => { + return { + get: (...keys) => mockConfig[keys.join('.')], + kbnConfig: { get: (...keys) => mockConfig[keys.join('.')] }, }; + }; + const createMockPlugins = (function() { + const getUserStub = jest.fn(); return function({ securityEnabled = true, xpackInfoUndefined = false, xpackInfoAvailable = true, + getCurrentUser = undefined, user = undefined, - config = {}, }) { - mockConfig = config; - - mockServer.plugins.xpack_main = { - info: !xpackInfoUndefined && { - isAvailable: () => xpackInfoAvailable, - feature(featureName) { - if (featureName === 'security') { - return { - isEnabled: () => securityEnabled, - isAvailable: () => xpackInfoAvailable, - }; + getUserStub.mockReset(); + getUserStub.mockResolvedValue(user); + return { + security: securityEnabled + ? { + authc: { getCurrentUser }, } + : null, + __LEGACY: { + plugins: { + xpack_main: { + info: !xpackInfoUndefined && { + isAvailable: () => xpackInfoAvailable, + feature(featureName) { + if (featureName === 'security') { + return { + isEnabled: () => securityEnabled, + isAvailable: () => xpackInfoAvailable, + }; + } + }, + }, + }, }, }, }; - - getUserStub.mockReset(); - getUserStub.mockResolvedValue(user); - return mockServer; }; })(); @@ -75,10 +67,6 @@ describe('authorized_user_pre_routing', function() { raw: { req: mockRequestRaw }, }); - const getMockPlugins = pluginSet => { - return pluginSet || { security: null }; - }; - const getMockLogger = () => ({ warn: jest.fn(), error: msg => { @@ -87,11 +75,9 @@ describe('authorized_user_pre_routing', function() { }); it('should return with boom notFound when xpackInfo is undefined', async function() { - const mockServer = createMockServer({ xpackInfoUndefined: true }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, - getMockPlugins(), + createMockConfig(), + createMockPlugins({ xpackInfoUndefined: true }), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -100,11 +86,9 @@ describe('authorized_user_pre_routing', function() { }); it(`should return with boom notFound when xpackInfo isn't available`, async function() { - const mockServer = createMockServer({ xpackInfoAvailable: false }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, - getMockPlugins(), + createMockConfig(), + createMockPlugins({ xpackInfoAvailable: false }), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -113,11 +97,9 @@ describe('authorized_user_pre_routing', function() { }); it('should return with null user when security is disabled in Elasticsearch', async function() { - const mockServer = createMockServer({ securityEnabled: false }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, - getMockPlugins(), + createMockConfig(), + createMockPlugins({ securityEnabled: false }), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -125,16 +107,14 @@ describe('authorized_user_pre_routing', function() { }); it('should return with boom 
unauthenticated when security is enabled but no authenticated user', async function() { - const mockServer = createMockServer({ + const mockPlugins = createMockPlugins({ user: null, config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, }); - const mockPlugins = getMockPlugins({ - security: { authc: { getCurrentUser: () => null } }, - }); + mockPlugins.security = { authc: { getCurrentUser: () => null } }; const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + createMockConfig(), mockPlugins, getMockLogger() ); @@ -144,16 +124,14 @@ describe('authorized_user_pre_routing', function() { }); it(`should return with boom forbidden when security is enabled but user doesn't have allowed role`, async function() { - const mockServer = createMockServer({ + const mockConfig = createMockConfig({ 'roles.allow': ['.reporting_user'] }); + const mockPlugins = createMockPlugins({ user: { roles: [] }, - config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, - }); - const mockPlugins = getMockPlugins({ - security: { authc: { getCurrentUser: () => ({ roles: ['something_else'] }) } }, + getCurrentUser: () => ({ roles: ['something_else'] }), }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + mockConfig, mockPlugins, getMockLogger() ); @@ -164,18 +142,14 @@ describe('authorized_user_pre_routing', function() { it('should return with user when security is enabled and user has explicitly allowed role', async function() { const user = { roles: ['.reporting_user', 'something_else'] }; - const mockServer = createMockServer({ + const mockConfig = createMockConfig({ 'roles.allow': ['.reporting_user'] }); + const mockPlugins = createMockPlugins({ user, - config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, - }); - const mockPlugins = getMockPlugins({ - security: { - authc: { getCurrentUser: () => ({ roles: ['.reporting_user', 'something_else'] }) }, - }, + getCurrentUser: () => ({ roles: ['.reporting_user', 'something_else'] }), }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + mockConfig, mockPlugins, getMockLogger() ); @@ -185,16 +159,13 @@ describe('authorized_user_pre_routing', function() { it('should return with user when security is enabled and user has superuser role', async function() { const user = { roles: ['superuser', 'something_else'] }; - const mockServer = createMockServer({ - user, - config: { 'xpack.reporting.roles.allow': [] }, - }); - const mockPlugins = getMockPlugins({ - security: { authc: { getCurrentUser: () => ({ roles: ['superuser', 'something_else'] }) } }, + const mockConfig = createMockConfig({ 'roles.allow': [] }); + const mockPlugins = createMockPlugins({ + getCurrentUser: () => ({ roles: ['superuser', 'something_else'] }), }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + mockConfig, mockPlugins, getMockLogger() ); diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts index c5f8c78016f61..1ca28ca62a7f2 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts @@ -7,7 +7,8 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; import { AuthenticatedUser } from '../../../../../../plugins/security/server'; -import { Logger, ServerFacade } from '../../../types'; +import { ReportingConfig 
} from '../../../server'; +import { Logger } from '../../../types'; import { getUserFactory } from '../../lib/get_user'; import { ReportingSetupDeps } from '../../types'; @@ -18,16 +19,14 @@ export type PreRoutingFunction = ( ) => Promise | AuthenticatedUser | null>; export const authorizedUserPreRoutingFactory = function authorizedUserPreRoutingFn( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ) { - const getUser = getUserFactory(server, plugins.security); - const config = server.config(); + const getUser = getUserFactory(plugins.security, logger); + const { info: xpackInfo } = plugins.__LEGACY.plugins.xpack_main; return async function authorizedUserPreRouting(request: Legacy.Request) { - const xpackInfo = server.plugins.xpack_main.info; - if (!xpackInfo || !xpackInfo.isAvailable()) { logger.warn('Unable to authorize user before xpack info is available.', [ 'authorizedUserPreRouting', @@ -46,10 +45,7 @@ export const authorizedUserPreRoutingFactory = function authorizedUserPreRouting return Boom.unauthorized(`Sorry, you aren't authenticated`); } - const authorizedRoles = [ - superuserRole, - ...(config.get('xpack.reporting.roles.allow') as string[]), - ]; + const authorizedRoles = [superuserRole, ...(config.get('roles', 'allow') as string[])]; if (!user.roles.find(role => authorizedRoles.includes(role))) { return Boom.forbidden(`Sorry, you don't have access to Reporting`); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts index fb3944ea33552..aef37754681ec 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts @@ -8,13 +8,7 @@ import contentDisposition from 'content-disposition'; import * as _ from 'lodash'; import { CSV_JOB_TYPE } from '../../../common/constants'; -import { - ExportTypeDefinition, - ExportTypesRegistry, - JobDocOutput, - JobSource, - ServerFacade, -} from '../../../types'; +import { ExportTypeDefinition, ExportTypesRegistry, JobDocOutput, JobSource } from '../../../types'; interface ICustomHeaders { [x: string]: any; @@ -22,9 +16,15 @@ interface ICustomHeaders { type ExportTypeType = ExportTypeDefinition; +interface ErrorFromPayload { + message: string; + reason: string | null; +} + +// A camelCase version of JobDocOutput interface Payload { statusCode: number; - content: any; + content: string | Buffer | ErrorFromPayload; contentType: string; headers: Record; } @@ -48,20 +48,17 @@ const getReportingHeaders = (output: JobDocOutput, exportType: ExportTypeType) = return metaDataHeaders; }; -export function getDocumentPayloadFactory( - server: ServerFacade, - exportTypesRegistry: ExportTypesRegistry -) { - function encodeContent(content: string | null, exportType: ExportTypeType) { +export function getDocumentPayloadFactory(exportTypesRegistry: ExportTypesRegistry) { + function encodeContent(content: string | null, exportType: ExportTypeType): Buffer | string { switch (exportType.jobContentEncoding) { case 'base64': - return content ? Buffer.from(content, 'base64') : content; // Buffer.from rejects null + return content ? Buffer.from(content, 'base64') : ''; // convert null to empty string default: - return content; + return content ? 
content : ''; // convert null to empty string } } - function getCompleted(output: JobDocOutput, jobType: string, title: string) { + function getCompleted(output: JobDocOutput, jobType: string, title: string): Payload { const exportType = exportTypesRegistry.get((item: ExportTypeType) => item.jobType === jobType); const filename = getTitle(exportType, title); const headers = getReportingHeaders(output, exportType); @@ -77,7 +74,7 @@ export function getDocumentPayloadFactory( }; } - function getFailure(output: JobDocOutput) { + function getFailure(output: JobDocOutput): Payload { return { statusCode: 500, content: { diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts index 30627d5b23230..e7e7c866db96a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts @@ -5,11 +5,12 @@ */ import Boom from 'boom'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { ResponseToolkit } from 'hapi'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { WHITELISTED_JOB_CONTENT_TYPES } from '../../../common/constants'; -import { ExportTypesRegistry, ServerFacade } from '../../../types'; +import { ExportTypesRegistry } from '../../../types'; import { jobsQueryFactory } from '../../lib/jobs_query'; +import { ReportingConfig } from '../../types'; import { getDocumentPayloadFactory } from './get_document_payload'; interface JobResponseHandlerParams { @@ -21,12 +22,12 @@ interface JobResponseHandlerOpts { } export function downloadJobResponseHandlerFactory( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup, exportTypesRegistry: ExportTypesRegistry ) { - const jobsQuery = jobsQueryFactory(server, elasticsearch); - const getDocumentPayload = getDocumentPayloadFactory(server, exportTypesRegistry); + const jobsQuery = jobsQueryFactory(config, elasticsearch); + const getDocumentPayload = getDocumentPayloadFactory(exportTypesRegistry); return function jobResponseHandler( validJobTypes: string[], @@ -70,10 +71,10 @@ export function downloadJobResponseHandlerFactory( } export function deleteJobResponseHandlerFactory( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup ) { - const jobsQuery = jobsQueryFactory(server, elasticsearch); + const jobsQuery = jobsQueryFactory(config, elasticsearch); return async function deleteJobResponseHander( validJobTypes: string[], diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index 9e618ff1fe40a..8a79566aafae2 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -6,17 +6,17 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; -import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../types'; +import { Logger } from '../../../types'; +import { ReportingConfig, ReportingSetupDeps } from '../../types'; export type GetReportingFeatureIdFn = (request: Legacy.Request) => string; export const reportingFeaturePreRoutingFactory = function reportingFeaturePreRoutingFn( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: 
Logger ) { - const xpackMainPlugin = server.plugins.xpack_main; + const xpackMainPlugin = plugins.__LEGACY.plugins.xpack_main; const pluginId = 'reporting'; // License checking and enable/disable logic diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index 3d275d34e2f7d..06f7efaa9dcbb 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -6,8 +6,8 @@ import Joi from 'joi'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; -import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../types'; +import { Logger } from '../../../types'; +import { ReportingConfig, ReportingSetupDeps } from '../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; import { GetReportingFeatureIdFn, @@ -29,12 +29,12 @@ export type GetRouteConfigFactoryFn = ( ) => RouteConfigFactory; export function getRouteConfigFactoryReportingPre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, plugins, logger); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, plugins, logger); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(config, plugins, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(config, plugins, logger); return (getFeatureId?: GetReportingFeatureIdFn): RouteConfigFactory => { const preRouting: any[] = [{ method: authorizedUserPreRouting, assign: 'user' }]; @@ -50,11 +50,11 @@ export function getRouteConfigFactoryReportingPre( } export function getRouteOptionsCsv( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ) { - const getRouteConfig = getRouteConfigFactoryReportingPre(server, plugins, logger); + const getRouteConfig = getRouteConfigFactoryReportingPre(config, plugins, logger); return { ...getRouteConfig(() => CSV_FROM_SAVEDOBJECT_JOB_TYPE), validate: { @@ -75,12 +75,12 @@ export function getRouteOptionsCsv( } export function getRouteConfigFactoryManagementPre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, plugins, logger); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, plugins, logger); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(config, plugins, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(config, plugins, logger); const managementPreRouting = reportingFeaturePreRouting(() => 'management'); return (): RouteConfigFactory => { @@ -99,11 +99,11 @@ export function getRouteConfigFactoryManagementPre( // Additionally, the range-request doesn't alleviate any performance issues on the server as the entire // download is loaded into memory. 
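All of the route config factories in this file now take the same trio of arguments (config, plugins, logger). A minimal wiring sketch, mirroring registerJobInfoRoutes in jobs.ts above; wireManagementRouteSketch is an illustrative name and the declared signature is a simplification of the real one.

declare function getRouteConfigFactoryManagementPre(
  config: unknown,
  plugins: unknown,
  logger: unknown
): () => object;

function wireManagementRouteSketch(
  reporting: { getConfig(): unknown },
  plugins: unknown,
  logger: unknown
) {
  // Factories receive the ReportingConfig from reporting.getConfig() rather than a ServerFacade.
  const config = reporting.getConfig();
  const getRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger);
  return getRouteConfig(); // a Hapi route `config` object with the pre-handlers attached
}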
export function getRouteConfigFactoryDownloadPre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG, 'download'], @@ -114,11 +114,11 @@ export function getRouteConfigFactoryDownloadPre( } export function getRouteConfigFactoryDeletePre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG, 'delete'], diff --git a/x-pack/legacy/plugins/reporting/server/types.d.ts b/x-pack/legacy/plugins/reporting/server/types.d.ts index 59b7bc2020ad9..bec00688432cc 100644 --- a/x-pack/legacy/plugins/reporting/server/types.d.ts +++ b/x-pack/legacy/plugins/reporting/server/types.d.ts @@ -11,16 +11,16 @@ import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/ import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; import { ReportingPluginSpecOptions } from '../types'; +import { ReportingConfig, ReportingConfigType } from './core'; export interface ReportingSetupDeps { elasticsearch: ElasticsearchServiceSetup; security: SecurityPluginSetup; - usageCollection: UsageCollectionSetup; + usageCollection?: UsageCollectionSetup; __LEGACY: LegacySetup; } export interface ReportingStartDeps { - elasticsearch: ElasticsearchServiceSetup; data: DataPluginStart; __LEGACY: LegacySetup; } @@ -31,9 +31,7 @@ export type ReportingStart = object; export interface LegacySetup { config: Legacy.Server['config']; - info: Legacy.Server['info']; plugins: { - elasticsearch: Legacy.Server['plugins']['elasticsearch']; xpack_main: XPackMainPlugin & { status?: any; }; @@ -42,4 +40,7 @@ export interface LegacySetup { route: Legacy.Server['route']; } -export { ReportingCore } from './core'; +export { ReportingConfig, ReportingConfigType, ReportingCore } from './core'; + +export type CaptureConfig = ReportingConfigType['capture']; +export type ScrollConfig = ReportingConfigType['csv']['scroll']; diff --git a/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts b/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts index bd2d0cb835a79..e9523d9e70202 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts @@ -5,7 +5,11 @@ */ import { get } from 'lodash'; -import { ServerFacade, ExportTypesRegistry, ESCallCluster } from '../../types'; +import { XPackMainPlugin } from '../../../xpack_main/server/xpack_main'; +import { ESCallCluster, ExportTypesRegistry } from '../../types'; +import { ReportingConfig } from '../types'; +import { decorateRangeStats } from './decorate_range_stats'; +import { getExportTypesHandler } from './get_export_type_handler'; import { AggregationBuckets, AggregationResults, @@ -15,8 +19,8 @@ import { RangeAggregationResults, RangeStats, } from './types'; -import { decorateRangeStats } from 
'./decorate_range_stats'; -import { getExportTypesHandler } from './get_export_type_handler'; + +type XPackInfo = XPackMainPlugin['info']; const JOB_TYPES_KEY = 'jobTypes'; const JOB_TYPES_FIELD = 'jobtype'; @@ -79,10 +83,7 @@ type RangeStatSets = Partial< last7Days: RangeStats; } >; -async function handleResponse( - server: ServerFacade, - response: AggregationResults -): Promise { +async function handleResponse(response: AggregationResults): Promise { const buckets = get(response, 'aggregations.ranges.buckets'); if (!buckets) { return {}; @@ -101,12 +102,12 @@ async function handleResponse( } export async function getReportingUsage( - server: ServerFacade, + config: ReportingConfig, + xpackMainInfo: XPackInfo, callCluster: ESCallCluster, exportTypesRegistry: ExportTypesRegistry ) { - const config = server.config(); - const reportingIndex = config.get('xpack.reporting.index'); + const reportingIndex = config.get('index'); const params = { index: `${reportingIndex}-*`, @@ -140,15 +141,16 @@ export async function getReportingUsage( }; return callCluster('search', params) - .then((response: AggregationResults) => handleResponse(server, response)) + .then((response: AggregationResults) => handleResponse(response)) .then((usage: RangeStatSets) => { // Allow this to explicitly throw an exception if/when this config is deprecated, // because we shouldn't collect browserType in that case! - const browserType = config.get('xpack.reporting.capture.browser.type'); + const browserType = config.get('capture', 'browser', 'type'); - const xpackInfo = server.plugins.xpack_main.info; const exportTypesHandler = getExportTypesHandler(exportTypesRegistry); - const availability = exportTypesHandler.getAvailability(xpackInfo) as FeatureAvailabilityMap; + const availability = exportTypesHandler.getAvailability( + xpackMainInfo + ) as FeatureAvailabilityMap; const { lastDay, last7Days, ...all } = usage; diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js index a6d753f9b107a..929109e66914d 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js @@ -24,62 +24,60 @@ function getMockUsageCollection() { makeUsageCollector: options => { return new MockUsageCollector(this, options); }, + registerCollector: sinon.stub(), }; } -function getServerMock(customization) { - const getLicenseCheckResults = sinon.stub().returns({}); - const defaultServerMock = { - plugins: { - security: { - isAuthenticated: sinon.stub().returns(true), - }, - xpack_main: { - info: { - isAvailable: sinon.stub().returns(true), - feature: () => ({ - getLicenseCheckResults, - }), - license: { - isOneOf: sinon.stub().returns(false), - getType: sinon.stub().returns('platinum'), - }, - toJSON: () => ({ b: 1 }), - }, +function getPluginsMock( + { license, usageCollection = getMockUsageCollection() } = { license: 'platinum' } +) { + const mockXpackMain = { + info: { + isAvailable: sinon.stub().returns(true), + feature: () => ({ + getLicenseCheckResults: sinon.stub(), + }), + license: { + isOneOf: sinon.stub().returns(false), + getType: sinon.stub().returns(license), }, + toJSON: () => ({ b: 1 }), }, - log: () => {}, - config: () => ({ - get: key => { - if (key === 'xpack.reporting.enabled') { - return true; - } else if (key === 'xpack.reporting.index') { - return '.reporting-index'; - } + }; + return { + 
usageCollection, + __LEGACY: { + plugins: { + xpack_main: mockXpackMain, }, - }), + }, }; - return Object.assign(defaultServerMock, customization); } +const getMockReportingConfig = () => ({ + get: () => {}, + kbnConfig: { get: () => '' }, +}); const getResponseMock = (customization = {}) => customization; describe('license checks', () => { + let mockConfig; + beforeAll(async () => { + mockConfig = getMockReportingConfig(); + }); + describe('with a basic license', () => { let usageStats; beforeAll(async () => { - const serverWithBasicLicenseMock = getServerMock(); - serverWithBasicLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('basic'); + const plugins = getPluginsMock({ license: 'basic' }); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithBasicLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -98,18 +96,15 @@ describe('license checks', () => { describe('with no license', () => { let usageStats; beforeAll(async () => { - const serverWithNoLicenseMock = getServerMock(); - serverWithNoLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('none'); + const plugins = getPluginsMock({ license: 'none' }); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithNoLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -128,18 +123,15 @@ describe('license checks', () => { describe('with platinum license', () => { let usageStats; beforeAll(async () => { - const serverWithPlatinumLicenseMock = getServerMock(); - serverWithPlatinumLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('platinum'); + const plugins = getPluginsMock({ license: 'platinum' }); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithPlatinumLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -158,18 +150,15 @@ describe('license checks', () => { describe('with no usage data', () => { let usageStats; beforeAll(async () => { - const serverWithBasicLicenseMock = getServerMock(); - serverWithBasicLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('basic'); + const plugins = getPluginsMock({ 
license: 'basic' }); const callClusterMock = jest.fn(() => Promise.resolve({})); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithBasicLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -183,21 +172,15 @@ describe('license checks', () => { }); describe('data modeling', () => { - let getReportingUsage; - beforeAll(async () => { - const usageCollection = getMockUsageCollection(); - const serverWithPlatinumLicenseMock = getServerMock(); - serverWithPlatinumLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('platinum'); - ({ fetch: getReportingUsage } = getReportingUsageCollector( - serverWithPlatinumLicenseMock, - usageCollection, - exportTypesRegistry - )); - }); - test('with normal looking usage data', async () => { + const mockConfig = getMockReportingConfig(); + const plugins = getPluginsMock(); + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, + exportTypesRegistry + ); const callClusterMock = jest.fn(() => Promise.resolve( getResponseMock({ @@ -320,7 +303,7 @@ describe('data modeling', () => { ) ); - const usageStats = await getReportingUsage(callClusterMock); + const usageStats = await fetch(callClusterMock); expect(usageStats).toMatchInlineSnapshot(` Object { "PNG": Object { @@ -415,20 +398,16 @@ describe('data modeling', () => { }); describe('Ready for collection observable', () => { - let mockReporting; - - beforeEach(async () => { - mockReporting = await createMockReportingCore(); - }); - test('converts observable to promise', async () => { - const serverWithBasicLicenseMock = getServerMock(); + const mockConfig = getMockReportingConfig(); + const mockReporting = await createMockReportingCore(mockConfig); + + const usageCollection = getMockUsageCollection(); const makeCollectorSpy = sinon.spy(); - const usageCollection = { - makeUsageCollector: makeCollectorSpy, - registerCollector: sinon.stub(), - }; - registerReportingUsageCollector(mockReporting, serverWithBasicLicenseMock, usageCollection); + usageCollection.makeUsageCollector = makeCollectorSpy; + + const plugins = getPluginsMock({ usageCollection }); + registerReportingUsageCollector(mockReporting, plugins); const [args] = makeCollectorSpy.firstCall.args; expect(args).toMatchInlineSnapshot(` diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts index 14202530fb6c7..8f9d65c200dad 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts @@ -5,29 +5,32 @@ */ import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { XPackMainPlugin } from '../../../xpack_main/server/xpack_main'; import { KIBANA_REPORTING_TYPE } from '../../common/constants'; -import { ReportingCore } from '../../server'; -import { ESCallCluster, ExportTypesRegistry, ServerFacade } from '../../types'; +import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../../server/types'; +import { 
ESCallCluster, ExportTypesRegistry } from '../../types'; import { getReportingUsage } from './get_reporting_usage'; import { RangeStats } from './types'; +type XPackInfo = XPackMainPlugin['info']; + // places the reporting data as kibana stats const METATYPE = 'kibana_stats'; /* - * @param {Object} server * @return {Object} kibana usage stats type collection object */ export function getReportingUsageCollector( - server: ServerFacade, + config: ReportingConfig, usageCollection: UsageCollectionSetup, + xpackMainInfo: XPackInfo, exportTypesRegistry: ExportTypesRegistry, isReady: () => Promise ) { return usageCollection.makeUsageCollector({ type: KIBANA_REPORTING_TYPE, fetch: (callCluster: ESCallCluster) => - getReportingUsage(server, callCluster, exportTypesRegistry), + getReportingUsage(config, xpackMainInfo, callCluster, exportTypesRegistry), isReady, /* @@ -52,17 +55,23 @@ export function getReportingUsageCollector( export function registerReportingUsageCollector( reporting: ReportingCore, - server: ServerFacade, - usageCollection: UsageCollectionSetup + plugins: ReportingSetupDeps ) { + if (!plugins.usageCollection) { + return; + } + const xpackMainInfo = plugins.__LEGACY.plugins.xpack_main.info; + const exportTypesRegistry = reporting.getExportTypesRegistry(); const collectionIsReady = reporting.pluginHasStarted.bind(reporting); + const config = reporting.getConfig(); const collector = getReportingUsageCollector( - server, - usageCollection, + config, + plugins.usageCollection, + xpackMainInfo, exportTypesRegistry, collectionIsReady ); - usageCollection.registerCollector(collector); + plugins.usageCollection.registerCollector(collector); } diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts index 883276d43e27e..930aa7601b8cb 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts @@ -10,7 +10,8 @@ import * as contexts from '../export_types/common/lib/screenshots/constants'; import { ElementsPositionAndAttribute } from '../export_types/common/lib/screenshots/types'; import { HeadlessChromiumDriver, HeadlessChromiumDriverFactory } from '../server/browsers'; import { createDriverFactory } from '../server/browsers/chromium'; -import { BrowserConfig, CaptureConfig, Logger } from '../types'; +import { CaptureConfig } from '../server/types'; +import { Logger } from '../types'; interface CreateMockBrowserDriverFactoryOpts { evaluate: jest.Mock, any[]>; @@ -93,24 +94,34 @@ export const createMockBrowserDriverFactory = async ( logger: Logger, opts: Partial ): Promise => { - const browserConfig = { - inspect: true, - userDataDir: '/usr/data/dir', - viewport: { width: 12, height: 12 }, - disableSandbox: false, - proxy: { enabled: false }, - } as BrowserConfig; + const captureConfig = { + timeouts: { openUrl: 30000, waitForElements: 30000, renderComplete: 30000 }, + browser: { + type: 'chromium', + chromium: { + inspect: false, + disableSandbox: false, + userDataDir: '/usr/data/dir', + viewport: { width: 12, height: 12 }, + proxy: { enabled: false, server: undefined, bypass: undefined }, + }, + autoDownload: false, + inspect: true, + userDataDir: '/usr/data/dir', + viewport: { width: 12, height: 12 }, + disableSandbox: false, + proxy: { enabled: false, server: undefined, bypass: undefined }, + maxScreenshotDimension: undefined, + }, + networkPolicy: { 
enabled: true, rules: [] }, + viewport: { width: 800, height: 600 }, + loadDelay: 2000, + zoom: 1, + maxAttempts: 1, + } as CaptureConfig; const binaryPath = '/usr/local/share/common/secure/'; - const captureConfig = { networkPolicy: {}, timeouts: {} } as CaptureConfig; - - const mockBrowserDriverFactory = await createDriverFactory( - binaryPath, - logger, - browserConfig, - captureConfig - ); - + const mockBrowserDriverFactory = await createDriverFactory(binaryPath, logger, captureConfig); const mockPage = {} as Page; const mockBrowserDriver = new HeadlessChromiumDriver(mockPage, { inspect: true, diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts index 0250e6c0a9afd..be60b56dcc0c1 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createLayout } from '../export_types/common/layouts'; import { LayoutTypes } from '../export_types/common/constants'; +import { createLayout } from '../export_types/common/layouts'; import { LayoutInstance } from '../export_types/common/layouts/layout'; -import { ServerFacade } from '../types'; +import { CaptureConfig } from '../server/types'; -export const createMockLayoutInstance = (__LEGACY: ServerFacade) => { - const mockLayout = createLayout(__LEGACY, { +export const createMockLayoutInstance = (captureConfig: CaptureConfig) => { + const mockLayout = createLayout(captureConfig, { id: LayoutTypes.PRESERVE_LAYOUT, dimensions: { height: 12, width: 12 }, }) as LayoutInstance; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts index 2cd129d47b3f9..34ff91d1972a0 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts @@ -16,24 +16,26 @@ jest.mock('../log_configuration'); import { EventEmitter } from 'events'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { coreMock } from 'src/core/server/mocks'; -import { ReportingPlugin, ReportingCore } from '../server'; +import { ReportingPlugin, ReportingCore, ReportingConfig } from '../server'; import { ReportingSetupDeps, ReportingStartDeps } from '../server/types'; -export const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => ({ - elasticsearch: setupMock.elasticsearch, - security: setupMock.security, - usageCollection: {} as any, - __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, -}); +const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => { + return { + elasticsearch: setupMock.elasticsearch, + security: setupMock.security, + usageCollection: {} as any, + __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, + }; +}; export const createMockStartDeps = (startMock?: any): ReportingStartDeps => ({ data: startMock.data, - elasticsearch: startMock.elasticsearch, __LEGACY: {} as any, }); -const createMockReportingPlugin = async (config = {}): Promise => { - const plugin = new ReportingPlugin(coreMock.createPluginInitializerContext(config)); +const createMockReportingPlugin = async (config: ReportingConfig): Promise => { + config = config || {}; + const plugin = new 
ReportingPlugin(coreMock.createPluginInitializerContext(config), config); const setupMock = coreMock.createSetup(); const coreStartMock = coreMock.createStart(); const startMock = { @@ -47,7 +49,8 @@ const createMockReportingPlugin = async (config = {}): Promise return plugin; }; -export const createMockReportingCore = async (config = {}): Promise => { +export const createMockReportingCore = async (config: ReportingConfig): Promise => { + config = config || {}; const plugin = await createMockReportingPlugin(config); return plugin.getReportingCore(); }; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts index bb7851ba036a9..531e1dcaf84e0 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts @@ -3,36 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { memoize } from 'lodash'; -import { ServerFacade } from '../types'; - -export const createMockServer = ({ settings = {} }: any): ServerFacade => { - const mockServer = { - config: memoize(() => ({ get: jest.fn() })), - info: { - protocol: 'http', - }, - plugins: { - elasticsearch: { - getCluster: memoize(() => { - return { - callWithRequest: jest.fn(), - }; - }), - }, - }, - }; - const defaultSettings: any = { - 'xpack.reporting.encryptionKey': 'testencryptionkey', - 'server.basePath': '/sbp', - 'server.host': 'localhost', - 'server.port': 5601, - 'xpack.reporting.kibanaServer': {}, - }; - mockServer.config().get.mockImplementation((key: any) => { - return key in settings ? settings[key] : defaultSettings[key]; - }); +import { ServerFacade } from '../types'; - return (mockServer as unknown) as ServerFacade; +export const createMockServer = (): ServerFacade => { + const mockServer = {}; + return mockServer as any; }; diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index 238079ba92a29..09d53278941c9 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -7,14 +7,11 @@ import { EventEmitter } from 'events'; import { ResponseObject } from 'hapi'; import { Legacy } from 'kibana'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { CallCluster } from '../../../../src/legacy/core_plugins/elasticsearch'; import { CancellationToken } from './common/cancellation_token'; -import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; -import { BrowserType } from './server/browsers/types'; -import { LevelLogger } from './server/lib/level_logger'; import { ReportingCore } from './server/core'; -import { LegacySetup, ReportingStartDeps, ReportingSetup, ReportingStart } from './server/types'; +import { LevelLogger } from './server/lib/level_logger'; +import { LegacySetup } from './server/types'; export type Job = EventEmitter & { id: string; @@ -25,8 +22,8 @@ export type Job = EventEmitter & { export interface NetworkPolicyRule { allow: boolean; - protocol: string; - host: string; + protocol?: string; + host?: string; } export interface NetworkPolicy { @@ -93,51 +90,6 @@ export type ReportingResponseToolkit = Legacy.ResponseToolkit; export type ESCallCluster = CallCluster; -/* - * Reporting Config - */ - -export interface CaptureConfig { - browser: { - type: BrowserType; - autoDownload: boolean; - 
chromium: BrowserConfig; - }; - maxAttempts: number; - networkPolicy: NetworkPolicy; - loadDelay: number; - timeouts: { - openUrl: number; - waitForElements: number; - renderComplet: number; - }; -} - -export interface BrowserConfig { - inspect: boolean; - userDataDir: string; - viewport: { width: number; height: number }; - disableSandbox: boolean; - proxy: { - enabled: boolean; - server: string; - bypass?: string[]; - }; -} - -export interface QueueConfig { - indexInterval: string; - pollEnabled: boolean; - pollInterval: number; - pollIntervalErrorMultiplier: number; - timeout: number; -} - -export interface ScrollConfig { - duration: string; - size: number; -} - export interface ElementPosition { boundingClientRect: { // modern browsers support x/y, but older ones don't @@ -274,16 +226,12 @@ export interface ESQueueInstance { export type CreateJobFactory = ( reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger ) => CreateJobFnType; export type ExecuteJobFactory = ( reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger -) => Promise; +) => Promise; // FIXME: does not "need" to be async export interface ExportTypeDefinition< JobParamsType, diff --git a/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts b/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts index 646132c3f88eb..f38cb2285b480 100644 --- a/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts +++ b/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts @@ -42,16 +42,15 @@ describe('Detections', () => { cy.get(NUMBER_OF_SIGNALS) .invoke('text') .then(numberOfSignals => { - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${numberOfSignals} signals`); + cy.get(SHOWING_SIGNALS).should('have.text', `Showing ${numberOfSignals} signals`); const numberOfSignalsToBeClosed = 3; selectNumberOfSignals(numberOfSignalsToBeClosed); - cy.get(SELECTED_SIGNALS) - .invoke('text') - .should('eql', `Selected ${numberOfSignalsToBeClosed} signals`); + cy.get(SELECTED_SIGNALS).should( + 'have.text', + `Selected ${numberOfSignalsToBeClosed} signals` + ); closeSignals(); waitForSignals(); @@ -59,30 +58,33 @@ describe('Detections', () => { waitForSignals(); const expectedNumberOfSignalsAfterClosing = +numberOfSignals - numberOfSignalsToBeClosed; - cy.get(NUMBER_OF_SIGNALS) - .invoke('text') - .should('eq', expectedNumberOfSignalsAfterClosing.toString()); - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${expectedNumberOfSignalsAfterClosing.toString()} signals`); + cy.get(NUMBER_OF_SIGNALS).should( + 'have.text', + expectedNumberOfSignalsAfterClosing.toString() + ); + + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${expectedNumberOfSignalsAfterClosing.toString()} signals` + ); goToClosedSignals(); waitForSignals(); - cy.get(NUMBER_OF_SIGNALS) - .invoke('text') - .should('eql', numberOfSignalsToBeClosed.toString()); - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${numberOfSignalsToBeClosed.toString()} signals`); + cy.get(NUMBER_OF_SIGNALS).should('have.text', numberOfSignalsToBeClosed.toString()); + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${numberOfSignalsToBeClosed.toString()} signals` + ); cy.get(SIGNALS).should('have.length', numberOfSignalsToBeClosed); const numberOfSignalsToBeOpened = 1; selectNumberOfSignals(numberOfSignalsToBeOpened); - cy.get(SELECTED_SIGNALS) - .invoke('text') - 
.should('eql', `Selected ${numberOfSignalsToBeOpened} signal`); + cy.get(SELECTED_SIGNALS).should( + 'have.text', + `Selected ${numberOfSignalsToBeOpened} signal` + ); openSignals(); waitForSignals(); @@ -93,15 +95,14 @@ describe('Detections', () => { waitForSignals(); const expectedNumberOfClosedSignalsAfterOpened = 2; - cy.get(NUMBER_OF_SIGNALS) - .invoke('text') - .should('eql', expectedNumberOfClosedSignalsAfterOpened.toString()); - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should( - 'eql', - `Showing ${expectedNumberOfClosedSignalsAfterOpened.toString()} signals` - ); + cy.get(NUMBER_OF_SIGNALS).should( + 'have.text', + expectedNumberOfClosedSignalsAfterOpened.toString() + ); + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${expectedNumberOfClosedSignalsAfterOpened.toString()} signals` + ); cy.get(SIGNALS).should('have.length', expectedNumberOfClosedSignalsAfterOpened); goToOpenedSignals(); @@ -109,13 +110,15 @@ describe('Detections', () => { const expectedNumberOfOpenedSignals = +numberOfSignals - expectedNumberOfClosedSignalsAfterOpened; - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${expectedNumberOfOpenedSignals.toString()} signals`); - - cy.get('[data-test-subj="server-side-event-count"]') - .invoke('text') - .should('eql', expectedNumberOfOpenedSignals.toString()); + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${expectedNumberOfOpenedSignals.toString()} signals` + ); + + cy.get('[data-test-subj="server-side-event-count"]').should( + 'have.text', + expectedNumberOfOpenedSignals.toString() + ); }); }); diff --git a/x-pack/legacy/plugins/siem/cypress/screens/detections.ts b/x-pack/legacy/plugins/siem/cypress/screens/detections.ts index f388ac1215d01..cb776be8d7b6b 100644 --- a/x-pack/legacy/plugins/siem/cypress/screens/detections.ts +++ b/x-pack/legacy/plugins/siem/cypress/screens/detections.ts @@ -10,7 +10,7 @@ export const LOADING_SIGNALS_PANEL = '[data-test-subj="loading-signals-panel"]'; export const MANAGE_SIGNAL_DETECTION_RULES_BTN = '[data-test-subj="manage-signal-detection-rules"]'; -export const NUMBER_OF_SIGNALS = '[data-test-subj="server-side-event-count"]'; +export const NUMBER_OF_SIGNALS = '[data-test-subj="server-side-event-count"] .euiBadge__text'; export const OPEN_CLOSE_SIGNAL_BTN = '[data-test-subj="update-signal-status-button"]'; diff --git a/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx b/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx index 05dfd561b1f5e..b00eef79ee480 100644 --- a/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx +++ b/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx @@ -7,13 +7,13 @@ import { EuiButtonEmpty, EuiCallOut, EuiPopover, EuiPopoverTitle, EuiSpacer } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; import moment from 'moment'; -import React, { useReducer, useState } from 'react'; +import React, { Dispatch, useCallback, useReducer, useState } from 'react'; import styled from 'styled-components'; import { useKibana } from '../../lib/kibana'; import { METRIC_TYPE, TELEMETRY_EVENT, track } from '../../lib/telemetry'; import { hasMlAdminPermissions } from '../ml/permissions/has_ml_admin_permissions'; -import { errorToToaster, useStateToaster } from '../toasters'; +import { errorToToaster, useStateToaster, ActionToaster } from '../toasters'; import { setupMlJob, startDatafeeds, stopDatafeeds } from './api'; import { filterJobs } from './helpers'; import { useSiemJobs } from 
'./hooks/use_siem_jobs'; @@ -22,7 +22,7 @@ import { JobsTable } from './jobs_table/jobs_table'; import { ShowingCount } from './jobs_table/showing_count'; import { PopoverDescription } from './popover_description'; import * as i18n from './translations'; -import { JobsFilters, JobSummary, SiemJob } from './types'; +import { JobsFilters, SiemJob } from './types'; import { UpgradeContents } from './upgrade_contents'; import { useMlCapabilities } from './hooks/use_ml_capabilities'; @@ -34,15 +34,10 @@ PopoverContentsDiv.displayName = 'PopoverContentsDiv'; interface State { isLoading: boolean; - jobs: JobSummary[]; refreshToggle: boolean; } -type Action = - | { type: 'refresh' } - | { type: 'loading' } - | { type: 'success'; results: JobSummary[] } - | { type: 'failure' }; +type Action = { type: 'refresh' } | { type: 'loading' } | { type: 'success' } | { type: 'failure' }; function mlPopoverReducer(state: State, action: Action): State { switch (action.type) { @@ -62,14 +57,12 @@ function mlPopoverReducer(state: State, action: Action): State { return { ...state, isLoading: false, - jobs: action.results, }; } case 'failure': { return { ...state, isLoading: false, - jobs: [], }; } default: @@ -79,7 +72,6 @@ function mlPopoverReducer(state: State, action: Action): State { const initialState: State = { isLoading: false, - jobs: [], refreshToggle: true, }; @@ -91,7 +83,7 @@ const defaultFilterProps: JobsFilters = { }; export const MlPopover = React.memo(() => { - const [{ refreshToggle }, dispatch] = useReducer(mlPopoverReducer, initialState); + const [{ isLoading, refreshToggle }, dispatch] = useReducer(mlPopoverReducer, initialState); const [isPopoverOpen, setIsPopoverOpen] = useState(false); const [filterProperties, setFilterProperties] = useState(defaultFilterProps); @@ -99,50 +91,11 @@ export const MlPopover = React.memo(() => { const [, dispatchToaster] = useStateToaster(); const capabilities = useMlCapabilities(); const docLinks = useKibana().services.docLinks; - - // Enable/Disable Job & Datafeed -- passed to JobsTable for use as callback on JobSwitch - const enableDatafeed = async (job: SiemJob, latestTimestampMs: number, enable: boolean) => { - submitTelemetry(job, enable); - - if (!job.isInstalled) { - try { - await setupMlJob({ - configTemplate: job.moduleId, - indexPatternName: job.defaultIndexPattern, - jobIdErrorFilter: [job.id], - groups: job.groups, - }); - } catch (error) { - errorToToaster({ title: i18n.CREATE_JOB_FAILURE, error, dispatchToaster }); - dispatch({ type: 'refresh' }); - return; - } - } - - // Max start time for job is no more than two weeks ago to ensure job performance - const maxStartTime = moment - .utc() - .subtract(14, 'days') - .valueOf(); - - if (enable) { - const startTime = Math.max(latestTimestampMs, maxStartTime); - try { - await startDatafeeds({ datafeedIds: [`datafeed-${job.id}`], start: startTime }); - } catch (error) { - track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_ENABLE_FAILURE); - errorToToaster({ title: i18n.START_JOB_FAILURE, error, dispatchToaster }); - } - } else { - try { - await stopDatafeeds({ datafeedIds: [`datafeed-${job.id}`] }); - } catch (error) { - track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_DISABLE_FAILURE); - errorToToaster({ title: i18n.STOP_JOB_FAILURE, error, dispatchToaster }); - } - } - dispatch({ type: 'refresh' }); - }; + const handleJobStateChange = useCallback( + (job: SiemJob, latestTimestampMs: number, enable: boolean) => + enableDatafeed(job, latestTimestampMs, enable, dispatch, dispatchToaster), + [dispatch, 
dispatchToaster] + ); const filteredJobs = filterJobs({ jobs: siemJobs, @@ -239,9 +192,9 @@ export const MlPopover = React.memo(() => { )} @@ -252,6 +205,59 @@ export const MlPopover = React.memo(() => { } }); +// Enable/Disable Job & Datafeed -- passed to JobsTable for use as callback on JobSwitch +const enableDatafeed = async ( + job: SiemJob, + latestTimestampMs: number, + enable: boolean, + dispatch: Dispatch, + dispatchToaster: Dispatch +) => { + submitTelemetry(job, enable); + + if (!job.isInstalled) { + dispatch({ type: 'loading' }); + try { + await setupMlJob({ + configTemplate: job.moduleId, + indexPatternName: job.defaultIndexPattern, + jobIdErrorFilter: [job.id], + groups: job.groups, + }); + dispatch({ type: 'success' }); + } catch (error) { + errorToToaster({ title: i18n.CREATE_JOB_FAILURE, error, dispatchToaster }); + dispatch({ type: 'failure' }); + dispatch({ type: 'refresh' }); + return; + } + } + + // Max start time for job is no more than two weeks ago to ensure job performance + const maxStartTime = moment + .utc() + .subtract(14, 'days') + .valueOf(); + + if (enable) { + const startTime = Math.max(latestTimestampMs, maxStartTime); + try { + await startDatafeeds({ datafeedIds: [`datafeed-${job.id}`], start: startTime }); + } catch (error) { + track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_ENABLE_FAILURE); + errorToToaster({ title: i18n.START_JOB_FAILURE, error, dispatchToaster }); + } + } else { + try { + await stopDatafeeds({ datafeedIds: [`datafeed-${job.id}`] }); + } catch (error) { + track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_DISABLE_FAILURE); + errorToToaster({ title: i18n.STOP_JOB_FAILURE, error, dispatchToaster }); + } + } + dispatch({ type: 'refresh' }); +}; + const submitTelemetry = (job: SiemJob, enabled: boolean) => { // Report type of job enabled/disabled track( diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx index 426a1ab9238dc..4d9e283bfb9cc 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx @@ -22,7 +22,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: null, hasIndexWrite: null, - hasManageApiKey: null, isAuthenticated: null, hasEncryptionKey: null, isSignalIndexExists: null, @@ -50,7 +49,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: null, hasIndexWrite: null, - hasManageApiKey: null, isAuthenticated: null, hasEncryptionKey: null, isSignalIndexExists: null, @@ -79,7 +77,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -116,7 +113,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -139,7 +135,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: false, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -161,29 +156,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: false, - hasManageApiKey: true, - isAuthenticated: true, - 
hasEncryptionKey: true, - isSignalIndexExists: true, - }) - ); - await waitForNextUpdate(); - await waitForNextUpdate(); - let resp = null; - if (result.current.createPrePackagedRules) { - resp = await result.current.createPrePackagedRules(); - } - expect(resp).toEqual(false); - }); - }); - - test('can NOT createPrePackagedRules because hasManageApiKey === false', async () => { - await act(async () => { - const { result, waitForNextUpdate } = renderHook(() => - usePrePackagedRules({ - canUserCRUD: true, - hasIndexWrite: true, - hasManageApiKey: false, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -205,7 +177,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: false, hasEncryptionKey: true, isSignalIndexExists: true, @@ -227,7 +198,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: false, isSignalIndexExists: true, @@ -249,7 +219,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: false, diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx index 0dd95bea8a0b2..44d5de10e361a 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx @@ -26,7 +26,6 @@ export interface ReturnPrePackagedRules { interface UsePrePackagedRuleProps { canUserCRUD: boolean | null; hasIndexWrite: boolean | null; - hasManageApiKey: boolean | null; isAuthenticated: boolean | null; hasEncryptionKey: boolean | null; isSignalIndexExists: boolean | null; @@ -36,7 +35,6 @@ interface UsePrePackagedRuleProps { * Hook for using to get status about pre-packaged Rules from the Detection Engine API * * @param hasIndexWrite boolean - * @param hasManageApiKey boolean * @param isAuthenticated boolean * @param hasEncryptionKey boolean * @param isSignalIndexExists boolean @@ -45,7 +43,6 @@ interface UsePrePackagedRuleProps { export const usePrePackagedRules = ({ canUserCRUD, hasIndexWrite, - hasManageApiKey, isAuthenticated, hasEncryptionKey, isSignalIndexExists, @@ -117,7 +114,6 @@ export const usePrePackagedRules = ({ if ( canUserCRUD && hasIndexWrite && - hasManageApiKey && isAuthenticated && hasEncryptionKey && isSignalIndexExists @@ -185,14 +181,7 @@ export const usePrePackagedRules = ({ isSubscribed = false; abortCtrl.abort(); }; - }, [ - canUserCRUD, - hasIndexWrite, - hasManageApiKey, - isAuthenticated, - hasEncryptionKey, - isSignalIndexExists, - ]); + }, [canUserCRUD, hasIndexWrite, isAuthenticated, hasEncryptionKey, isSignalIndexExists]); return { loading, diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx index 0d37cce1fd85c..412fc0706b151 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx @@ -16,7 +16,7 @@ type Func = (ruleId: string) => void; export type ReturnRuleStatus 
= [boolean, RuleStatus | null, Func | null]; export interface ReturnRulesStatuses { loading: boolean; - rulesStatuses: RuleStatusRowItemType[] | null; + rulesStatuses: RuleStatusRowItemType[]; } /** diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts index 37e93b1481e15..6b0c7e0078268 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts @@ -992,7 +992,6 @@ export const mockUserPrivilege: Privilege = { monitor_watcher: true, monitor_transform: true, read_ilm: true, - manage_api_key: true, manage_security: true, manage_own_api_key: false, manage_saml: true, diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts index d90f94d32001d..4e97c597546a7 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts @@ -54,7 +54,6 @@ export interface Privilege { monitor_watcher: boolean; monitor_transform: boolean; read_ilm: boolean; - manage_api_key: boolean; manage_security: boolean; manage_own_api_key: boolean; manage_saml: boolean; diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx index 2682742960442..c248223c6b81b 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx @@ -21,7 +21,6 @@ describe('usePrivilegeUser', () => { hasEncryptionKey: null, hasIndexManage: null, hasIndexWrite: null, - hasManageApiKey: null, isAuthenticated: null, loading: true, }); @@ -39,7 +38,6 @@ describe('usePrivilegeUser', () => { hasEncryptionKey: true, hasIndexManage: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, loading: false, }); @@ -61,7 +59,6 @@ describe('usePrivilegeUser', () => { hasEncryptionKey: false, hasIndexManage: false, hasIndexWrite: false, - hasManageApiKey: false, isAuthenticated: false, loading: false, }); diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx index c58e62c062fae..140dd1544b12b 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx @@ -15,7 +15,6 @@ export interface ReturnPrivilegeUser { isAuthenticated: boolean | null; hasEncryptionKey: boolean | null; hasIndexManage: boolean | null; - hasManageApiKey: boolean | null; hasIndexWrite: boolean | null; } /** @@ -27,17 +26,12 @@ export const usePrivilegeUser = (): ReturnPrivilegeUser => { const [privilegeUser, setPrivilegeUser] = useState< Pick< ReturnPrivilegeUser, - | 'isAuthenticated' - | 'hasEncryptionKey' - | 'hasIndexManage' - | 'hasManageApiKey' - | 'hasIndexWrite' + 'isAuthenticated' | 'hasEncryptionKey' | 'hasIndexManage' | 'hasIndexWrite' > >({ isAuthenticated: null, hasEncryptionKey: null, hasIndexManage: null, - 
hasManageApiKey: null, hasIndexWrite: null, }); const [, dispatchToaster] = useStateToaster(); @@ -65,10 +59,6 @@ export const usePrivilegeUser = (): ReturnPrivilegeUser => { privilege.index[indexName].create_doc || privilege.index[indexName].index || privilege.index[indexName].write, - hasManageApiKey: - privilege.cluster.manage_security || - privilege.cluster.manage_api_key || - privilege.cluster.manage_own_api_key, }); } } @@ -78,7 +68,6 @@ export const usePrivilegeUser = (): ReturnPrivilegeUser => { isAuthenticated: false, hasEncryptionKey: false, hasIndexManage: false, - hasManageApiKey: false, hasIndexWrite: false, }); errorToToaster({ title: i18n.PRIVILEGE_FETCH_FAILURE, error, dispatchToaster }); diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx index a96913f2ad541..9e45371fb6058 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx @@ -15,7 +15,6 @@ export interface State { canUserCRUD: boolean | null; hasIndexManage: boolean | null; hasIndexWrite: boolean | null; - hasManageApiKey: boolean | null; isSignalIndexExists: boolean | null; isAuthenticated: boolean | null; hasEncryptionKey: boolean | null; @@ -27,7 +26,6 @@ const initialState: State = { canUserCRUD: null, hasIndexManage: null, hasIndexWrite: null, - hasManageApiKey: null, isSignalIndexExists: null, isAuthenticated: null, hasEncryptionKey: null, @@ -37,10 +35,6 @@ const initialState: State = { export type Action = | { type: 'updateLoading'; loading: boolean } - | { - type: 'updateHasManageApiKey'; - hasManageApiKey: boolean | null; - } | { type: 'updateHasIndexManage'; hasIndexManage: boolean | null; @@ -90,12 +84,6 @@ export const userInfoReducer = (state: State, action: Action): State => { hasIndexWrite: action.hasIndexWrite, }; } - case 'updateHasManageApiKey': { - return { - ...state, - hasManageApiKey: action.hasManageApiKey, - }; - } case 'updateIsSignalIndexExists': { return { ...state, @@ -151,7 +139,6 @@ export const useUserInfo = (): State => { canUserCRUD, hasIndexManage, hasIndexWrite, - hasManageApiKey, isSignalIndexExists, isAuthenticated, hasEncryptionKey, @@ -166,7 +153,6 @@ export const useUserInfo = (): State => { hasEncryptionKey: isApiEncryptionKey, hasIndexManage: hasApiIndexManage, hasIndexWrite: hasApiIndexWrite, - hasManageApiKey: hasApiManageApiKey, } = usePrivilegeUser(); const { loading: indexNameLoading, @@ -197,12 +183,6 @@ export const useUserInfo = (): State => { } }, [loading, hasIndexWrite, hasApiIndexWrite]); - useEffect(() => { - if (!loading && hasManageApiKey !== hasApiManageApiKey && hasApiManageApiKey != null) { - dispatch({ type: 'updateHasManageApiKey', hasManageApiKey: hasApiManageApiKey }); - } - }, [loading, hasManageApiKey, hasApiManageApiKey]); - useEffect(() => { if ( !loading && @@ -258,7 +238,6 @@ export const useUserInfo = (): State => { canUserCRUD, hasIndexManage, hasIndexWrite, - hasManageApiKey, signalIndexName, }; }; diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx index 5157bd81403e2..9a84d33ab5fdf 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx +++ 
b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx @@ -81,8 +81,8 @@ export type RuleStatusRowItemType = RuleStatus & { name: string; id: string; }; -type RulesColumns = EuiBasicTableColumn | EuiTableActionsColumnType; -type RulesStatusesColumns = EuiBasicTableColumn; +export type RulesColumns = EuiBasicTableColumn | EuiTableActionsColumnType; +export type RulesStatusesColumns = EuiBasicTableColumn; interface GetColumns { dispatch: React.Dispatch; diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx index 1a98272546440..ccdfd1ed1be38 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx @@ -31,7 +31,7 @@ import { Loader } from '../../../../components/loader'; import { Panel } from '../../../../components/panel'; import { PrePackagedRulesPrompt } from '../components/pre_packaged_rules/load_empty_prompt'; import { GenericDownloader } from '../../../../components/generic_downloader'; -import { AllRulesTables } from '../components/all_rules_tables'; +import { AllRulesTables, SortingType } from '../components/all_rules_tables'; import { getPrePackagedRuleStatus } from '../helpers'; import * as i18n from '../translations'; import { EuiBasicTableOnChange } from '../types'; @@ -128,7 +128,7 @@ export const AllRules = React.memo( }); const sorting = useMemo( - () => ({ sort: { field: 'enabled', direction: filterOptions.sortOrder } }), + (): SortingType => ({ sort: { field: 'enabled', direction: filterOptions.sortOrder } }), [filterOptions.sortOrder] ); @@ -330,7 +330,7 @@ export const AllRules = React.memo( euiBasicTableSelectionProps={euiBasicTableSelectionProps} hasNoPermissions={hasNoPermissions} monitoringColumns={monitoringColumns} - paginationMemo={paginationMemo} + pagination={paginationMemo} rules={rules} rulesColumns={rulesColumns} rulesStatuses={rulesStatuses} diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx index 0fd07f30a00b6..31aaa426e4f3b 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx @@ -4,30 +4,59 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { EuiBasicTable, EuiTab, EuiTabs, EuiEmptyPrompt } from '@elastic/eui'; +import { + EuiBasicTable, + EuiBasicTableColumn, + EuiTab, + EuiTabs, + EuiEmptyPrompt, + Direction, + EuiTableSelectionType, +} from '@elastic/eui'; import React, { useMemo, memo, useState } from 'react'; import styled from 'styled-components'; +import { EuiBasicTableOnChange } from '../../types'; import * as i18n from '../../translations'; -import { RuleStatusRowItemType } from '../../../../../pages/detection_engine/rules/all/columns'; -import { Rules } from '../../../../../containers/detection_engine/rules'; +import { + RulesColumns, + RuleStatusRowItemType, +} from '../../../../../pages/detection_engine/rules/all/columns'; +import { Rule, Rules } from '../../../../../containers/detection_engine/rules'; // EuiBasicTable gave me a hard time with adding the ref attributes, so I went the easy way // after a few hours of fighting with TypeScript !!!! I lost :( // eslint-disable-next-line @typescript-eslint/no-explicit-any const MyEuiBasicTable = styled(EuiBasicTable as any)`` as any; +export interface SortingType { + sort: { + field: 'enabled'; + direction: Direction; + }; +} + interface AllRulesTablesProps { - euiBasicTableSelectionProps: unknown; + euiBasicTableSelectionProps: EuiTableSelectionType; hasNoPermissions: boolean; - monitoringColumns: unknown; - paginationMemo: unknown; + monitoringColumns: Array>; + pagination: { + pageIndex: number; + pageSize: number; + totalItemCount: number; + pageSizeOptions: number[]; + }; rules: Rules; - rulesColumns: unknown; - rulesStatuses: RuleStatusRowItemType[] | null; - sorting: unknown; - tableOnChangeCallback: unknown; - tableRef?: unknown; + rulesColumns: RulesColumns[]; + rulesStatuses: RuleStatusRowItemType[]; + sorting: { + sort: { + field: 'enabled'; + direction: Direction; + }; + }; + tableOnChangeCallback: ({ page, sort }: EuiBasicTableOnChange) => void; + tableRef?: React.MutableRefObject; } enum AllRulesTabs { @@ -52,7 +81,7 @@ const AllRulesTablesComponent: React.FC = ({ euiBasicTableSelectionProps, hasNoPermissions, monitoringColumns, - paginationMemo, + pagination, rules, rulesColumns, rulesStatuses, @@ -95,7 +124,7 @@ items={rules ?? []} noItemsMessage={emptyPrompt} onChange={tableOnChangeCallback} - pagination={paginationMemo} + pagination={pagination} ref={tableRef} sorting={sorting} selection={hasNoPermissions ?
undefined : euiBasicTableSelectionProps} @@ -110,7 +139,7 @@ const AllRulesTablesComponent: React.FC = ({ items={rulesStatuses} noItemsMessage={emptyPrompt} onChange={tableOnChangeCallback} - pagination={paginationMemo} + pagination={pagination} sorting={sorting} /> )} diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap index 65a606604d4a7..1bee36ed9e185 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap @@ -1,6 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`RuleActionsOverflow renders correctly against snapshot 1`] = ` +exports[`RuleActionsOverflow snapshots renders correctly against snapshot 1`] = ` } closePopover={[Function]} + data-test-subj="rules-details-popover" display="inlineBlock" hasArrow={true} id="ruleActionsOverflow" @@ -27,24 +29,28 @@ exports[`RuleActionsOverflow renders correctly against snapshot 1`] = ` panelPaddingSize="none" > Duplicate rule… , Export rule , ({ }), })); +jest.mock('../../all/actions', () => ({ + deleteRulesAction: jest.fn(), + duplicateRulesAction: jest.fn(), +})); + describe('RuleActionsOverflow', () => { - test('renders correctly against snapshot', () => { - const wrapper = shallow( - - ); - expect(wrapper).toMatchSnapshot(); + describe('snapshots', () => { + test('renders correctly against snapshot', () => { + const wrapper = shallow( + + ); + expect(wrapper).toMatchSnapshot(); + }); + }); + + describe('rules details menu panel', () => { + test('there is at least one item when there is a rule within the rules-details-menu-panel', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + const items: unknown[] = wrapper + .find('[data-test-subj="rules-details-menu-panel"]') + .first() + .prop('items'); + + expect(items.length).toBeGreaterThan(0); + }); + + test('items are empty when there is a null rule within the rules-details-menu-panel', () => { + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-menu-panel"]') + .first() + .prop('items') + ).toEqual([]); + }); + + test('items are empty when there is an undefined rule within the rules-details-menu-panel', () => { + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-menu-panel"]') + .first() + .prop('items') + ).toEqual([]); + }); + + test('it opens the popover when rules-details-popover-button-icon is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(true); + }); + }); + + describe('rules details pop over button icon', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked when the 
user does not have permission', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + }); + + describe('rules details duplicate rule', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked and the user does not have permission', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect(wrapper.find('[data-test-subj="rules-details-delete-rule"] button').exists()).toEqual( + false + ); + }); + + test('it opens the popover when rules-details-popover-button-icon is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(true); + }); + + test('it closes the popover when rules-details-duplicate-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-duplicate-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + + test('it calls duplicateRulesAction when rules-details-duplicate-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-duplicate-rule"] button').simulate('click'); + wrapper.update(); + expect(duplicateRulesAction).toHaveBeenCalled(); + }); + + test('it calls duplicateRulesAction with the rule and rule.id when rules-details-duplicate-rule is clicked', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-duplicate-rule"] button').simulate('click'); + wrapper.update(); + expect(duplicateRulesAction).toHaveBeenCalledWith( + [rule], + [rule.id], + expect.anything(), + expect.anything() + ); + }); + }); + + describe('rules details export rule', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked and the user does not have permission', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect(wrapper.find('[data-test-subj="rules-details-export-rule"] button').exists()).toEqual( + false + ); + }); + + test('it closes the popover when rules-details-export-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + + test('it 
sets the rule.rule_id on the generic downloader when rules-details-export-rule is clicked', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper.find('[data-test-subj="rules-details-generic-downloader"]').prop('ids') + ).toEqual([rule.rule_id]); + }); + + test('it does not close the pop over on rules-details-export-rule when the rule is an immutable rule and the user does a click', () => { + const rule = mockRule('id'); + rule.immutable = true; + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(true); + }); + + test('it does not set the rule.rule_id on rules-details-export-rule when the rule is an immutable rule', () => { + const rule = mockRule('id'); + rule.immutable = true; + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper.find('[data-test-subj="rules-details-generic-downloader"]').prop('ids') + ).toEqual([]); + }); + }); + + describe('rules details delete rule', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked and the user does not have permission', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect(wrapper.find('[data-test-subj="rules-details-delete-rule"] button').exists()).toEqual( + false + ); + }); + + test('it closes the popover when rules-details-delete-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-delete-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + + test('it calls deleteRulesAction when rules-details-delete-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-delete-rule"] button').simulate('click'); + wrapper.update(); + expect(deleteRulesAction).toHaveBeenCalled(); + }); + + test('it calls deleteRulesAction with the rule.id when rules-details-delete-rule is clicked', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-delete-rule"] button').simulate('click'); + wrapper.update(); + expect(deleteRulesAction).toHaveBeenCalledWith( + [rule.id], + expect.anything(), + expect.anything(), + expect.anything() + ); + }); }); }); diff --git 
a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx index e1ca84ed8cc64..a7ce0c85ffdcf 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx @@ -62,8 +62,9 @@ const RuleActionsOverflowComponent = ({ ? [ { setIsPopoverOpen(false); await duplicateRulesAction([rule], [rule.id], noop, dispatchToaster); @@ -73,11 +74,12 @@ const RuleActionsOverflowComponent = ({ , { setIsPopoverOpen(false); - setRulesToExport([rule.id]); + setRulesToExport([rule.rule_id]); }} > {i18nActions.EXPORT_RULE} @@ -86,6 +88,7 @@ const RuleActionsOverflowComponent = ({ key={i18nActions.DELETE_RULE} icon="trash" disabled={userHasNoPermissions} + data-test-subj="rules-details-delete-rule" onClick={async () => { setIsPopoverOpen(false); await deleteRulesAction([rule.id], noop, dispatchToaster, onRuleDeletedCallback); @@ -109,6 +112,7 @@ const RuleActionsOverflowComponent = ({ iconType="boxesHorizontal" aria-label={i18n.ALL_ACTIONS} isDisabled={userHasNoPermissions} + data-test-subj="rules-details-popover-button-icon" onClick={handlePopoverOpen} /> @@ -124,15 +128,17 @@ const RuleActionsOverflowComponent = ({ closePopover={() => setIsPopoverOpen(false)} id="ruleActionsOverflow" isOpen={isPopoverOpen} + data-test-subj="rules-details-popover" ownFocus={true} panelPaddingSize="none" > - + { displaySuccessToast( i18nActions.SUCCESSFULLY_EXPORTED_RULES(exportCount), diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx index 0335216672915..2686bb47925b6 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx @@ -24,7 +24,7 @@ import { StepScheduleRule } from '../components/step_schedule_rule'; import { StepRuleActions } from '../components/step_rule_actions'; import { DetectionEngineHeaderPage } from '../../components/detection_engine_header_page'; import * as RuleI18n from '../translations'; -import { redirectToDetections, getActionMessageParams } from '../helpers'; +import { redirectToDetections, getActionMessageParams, userHasNoPermissions } from '../helpers'; import { AboutStepRule, DefineStepRule, @@ -85,7 +85,6 @@ const CreateRulePageComponent: React.FC = () => { isAuthenticated, hasEncryptionKey, canUserCRUD, - hasManageApiKey, } = useUserInfo(); const [, dispatchToaster] = useStateToaster(); const [openAccordionId, setOpenAccordionId] = useState(RuleStep.defineRule); @@ -117,8 +116,6 @@ const CreateRulePageComponent: React.FC = () => { getActionMessageParams((stepsData.current['define-rule'].data as DefineStepRule).ruleType), [stepsData.current['define-rule'].data] ); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? 
!canUserCRUD || !hasManageApiKey : false; const setStepData = useCallback( (step: RuleStep, data: unknown, isValid: boolean) => { @@ -274,7 +271,7 @@ const CreateRulePageComponent: React.FC = () => { if (redirectToDetections(isSignalIndexExists, isAuthenticated, hasEncryptionKey)) { return ; - } else if (userHasNoPermissions) { + } else if (userHasNoPermissions(canUserCRUD)) { return ; } diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx index b8e2310ef0614..cb4d88a8bb539 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx @@ -53,7 +53,7 @@ import * as detectionI18n from '../../translations'; import { ReadOnlyCallOut } from '../components/read_only_callout'; import { RuleSwitch } from '../components/rule_switch'; import { StepPanel } from '../components/step_panel'; -import { getStepsData, redirectToDetections } from '../helpers'; +import { getStepsData, redirectToDetections, userHasNoPermissions } from '../helpers'; import * as ruleI18n from '../translations'; import * as i18n from './translations'; import { GlobalTime } from '../../../../containers/global_time'; @@ -96,7 +96,6 @@ const RuleDetailsPageComponent: FC = ({ isAuthenticated, hasEncryptionKey, canUserCRUD, - hasManageApiKey, hasIndexWrite, signalIndexName, } = useUserInfo(); @@ -115,8 +114,6 @@ const RuleDetailsPageComponent: FC = ({ scheduleRuleData: null, }; const [lastSignals] = useSignalInfo({ ruleId }); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? !canUserCRUD || !hasManageApiKey : false; const title = isLoading === true || rule === null ? : rule.name; const subTitle = useMemo( @@ -227,7 +224,7 @@ const RuleDetailsPageComponent: FC = ({ return ( <> {hasIndexWrite != null && !hasIndexWrite && } - {userHasNoPermissions && } + {userHasNoPermissions(canUserCRUD) && } {({ indicesExist, indexPattern }) => { return indicesExistOrDataTemporarilyUnavailable(indicesExist) ? ( @@ -264,7 +261,7 @@ const RuleDetailsPageComponent: FC = ({ = ({ {ruleI18n.EDIT_RULE_SETTINGS} @@ -285,7 +282,7 @@ const RuleDetailsPageComponent: FC = ({ diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx index 60d6158987a1d..c42e7b902cd5c 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx @@ -33,7 +33,12 @@ import { StepDefineRule } from '../components/step_define_rule'; import { StepScheduleRule } from '../components/step_schedule_rule'; import { StepRuleActions } from '../components/step_rule_actions'; import { formatRule } from '../create/helpers'; -import { getStepsData, redirectToDetections, getActionMessageParams } from '../helpers'; +import { + getStepsData, + redirectToDetections, + getActionMessageParams, + userHasNoPermissions, +} from '../helpers'; import * as ruleI18n from '../translations'; import { RuleStep, @@ -69,14 +74,10 @@ const EditRulePageComponent: FC = () => { isAuthenticated, hasEncryptionKey, canUserCRUD, - hasManageApiKey, } = useUserInfo(); const { detailName: ruleId } = useParams(); const [loading, rule] = useRule(ruleId); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? 
!canUserCRUD || !hasManageApiKey : false; - const [initForm, setInitForm] = useState(false); const [myAboutRuleForm, setMyAboutRuleForm] = useState({ data: null, @@ -346,7 +347,7 @@ const EditRulePageComponent: FC = () => { if (redirectToDetections(isSignalIndexExists, isAuthenticated, hasEncryptionKey)) { return ; - } else if (userHasNoPermissions) { + } else if (userHasNoPermissions(canUserCRUD)) { return ; } diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx index 522464d585cca..443dbd2c93a35 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx @@ -14,6 +14,7 @@ import { getHumanizedDuration, getModifiedAboutDetailsData, determineDetailsValue, + userHasNoPermissions, } from './helpers'; import { mockRuleWithEverything, mockRule } from './all/__mocks__/mock'; import { esFilters } from '../../../../../../../../src/plugins/data/public'; @@ -337,4 +338,27 @@ describe('rule helpers', () => { expect(result).toEqual(aboutRuleDetailsData); }); }); + + describe('userHasNoPermissions', () => { + test("returns false when user's CRUD operations are null", () => { + const result: boolean = userHasNoPermissions(null); + const userHasNoPermissionsExpectedResult = false; + + expect(result).toEqual(userHasNoPermissionsExpectedResult); + }); + + test('returns true when user cannot CRUD', () => { + const result: boolean = userHasNoPermissions(false); + const userHasNoPermissionsExpectedResult = true; + + expect(result).toEqual(userHasNoPermissionsExpectedResult); + }); + + test('returns false when user can CRUD', () => { + const result: boolean = userHasNoPermissions(true); + const userHasNoPermissionsExpectedResult = false; + + expect(result).toEqual(userHasNoPermissionsExpectedResult); + }); + }); }); diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx index b6afba527ccdc..db1f2298b5ea7 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx @@ -267,3 +267,7 @@ export const getActionMessageParams = memoizeOne((ruleType: RuleType | undefined ...actionMessageRuleParams.map(param => `context.rule.${param}`), ]; }); + +// typed as null not undefined as the initial state for this value is null. +export const userHasNoPermissions = (canUserCRUD: boolean | null): boolean => + canUserCRUD != null ? 
!canUserCRUD : false; diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx index 2b93ec8b10112..8831bc77691fa 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx @@ -23,7 +23,7 @@ import { AllRules } from './all'; import { ImportDataModal } from '../../../components/import_data_modal'; import { ReadOnlyCallOut } from './components/read_only_callout'; import { UpdatePrePackagedRulesCallOut } from './components/pre_packaged_rules/update_callout'; -import { getPrePackagedRuleStatus, redirectToDetections } from './helpers'; +import { getPrePackagedRuleStatus, redirectToDetections, userHasNoPermissions } from './helpers'; import * as i18n from './translations'; type Func = (refreshPrePackagedRule?: boolean) => void; @@ -38,7 +38,6 @@ const RulesPageComponent: React.FC = () => { hasEncryptionKey, canUserCRUD, hasIndexWrite, - hasManageApiKey, } = useUserInfo(); const { createPrePackagedRules, @@ -52,7 +51,6 @@ const RulesPageComponent: React.FC = () => { } = usePrePackagedRules({ canUserCRUD, hasIndexWrite, - hasManageApiKey, isSignalIndexExists, isAuthenticated, hasEncryptionKey, @@ -63,9 +61,6 @@ const RulesPageComponent: React.FC = () => { rulesNotUpdated ); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? !canUserCRUD || !hasManageApiKey : false; - const handleRefreshRules = useCallback(async () => { if (refreshRulesData.current != null) { refreshRulesData.current(true); @@ -95,7 +90,7 @@ const RulesPageComponent: React.FC = () => { return ( <> - {userHasNoPermissions && } + {userHasNoPermissions(canUserCRUD) && } setShowImportModal(false)} @@ -125,7 +120,7 @@ const RulesPageComponent: React.FC = () => { {i18n.LOAD_PREPACKAGED_RULES} @@ -138,7 +133,7 @@ const RulesPageComponent: React.FC = () => { data-test-subj="reloadPrebuiltRulesBtn" iconType="plusInCircle" isLoading={loadingCreatePrePackagedRules} - isDisabled={userHasNoPermissions || loading} + isDisabled={userHasNoPermissions(canUserCRUD) || loading} onClick={handleCreatePrePackagedRules} > {i18n.RELOAD_MISSING_PREPACKAGED_RULES(rulesNotInstalled ?? 
0)} @@ -148,7 +143,7 @@ const RulesPageComponent: React.FC = () => { { setShowImportModal(true); }} @@ -162,7 +157,7 @@ const RulesPageComponent: React.FC = () => { fill href={getCreateRuleUrl()} iconType="plusInCircle" - isDisabled={userHasNoPermissions || loading} + isDisabled={userHasNoPermissions(canUserCRUD) || loading} > {i18n.ADD_NEW_RULE} @@ -180,7 +175,7 @@ const RulesPageComponent: React.FC = () => { createPrePackagedRules={createPrePackagedRules} loading={loading || prePackagedRuleLoading} loadingCreatePrePackagedRules={loadingCreatePrePackagedRules} - hasNoPermissions={userHasNoPermissions} + hasNoPermissions={userHasNoPermissions(canUserCRUD)} refetchPrePackagedRulesStatus={handleRefetchPrePackagedRulesStatus} rulesCustomInstalled={rulesCustomInstalled} rulesInstalled={rulesInstalled} diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts index 8e4b5ce3c9924..bdbb6ff7d1052 100644 --- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts +++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts @@ -28,25 +28,23 @@ describe('buildRuleMessageFactory', () => { expect(message).toEqual(expect.stringContaining('signals index: "index"')); }); - it('joins message parts with newlines', () => { + it('joins message parts with spaces', () => { const buildMessage = buildRuleMessageFactory(factoryParams); const message = buildMessage('my message'); - const messageParts = message.split('\n'); - expect(messageParts).toContain('my message'); - expect(messageParts).toContain('name: "name"'); - expect(messageParts).toContain('id: "id"'); - expect(messageParts).toContain('rule id: "ruleId"'); - expect(messageParts).toContain('signals index: "index"'); + expect(message).toEqual(expect.stringContaining('my message ')); + expect(message).toEqual(expect.stringContaining(' name: "name" ')); + expect(message).toEqual(expect.stringContaining(' id: "id" ')); + expect(message).toEqual(expect.stringContaining(' rule id: "ruleId" ')); + expect(message).toEqual(expect.stringContaining(' signals index: "index"')); }); - it('joins multiple arguments with newlines', () => { + it('joins multiple arguments with spaces', () => { const buildMessage = buildRuleMessageFactory(factoryParams); const message = buildMessage('my message', 'here is more'); - const messageParts = message.split('\n'); - expect(messageParts).toContain('my message'); - expect(messageParts).toContain('here is more'); + expect(message).toEqual(expect.stringContaining('my message ')); + expect(message).toEqual(expect.stringContaining(' here is more')); }); it('defaults the rule ID if not provided ', () => { diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts index d5f9d332bbcdd..cc97a1f8a9f0b 100644 --- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts +++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts @@ -24,4 +24,4 @@ export const buildRuleMessageFactory = ({ `id: "${id}"`, `rule id: "${ruleId ?? 
'(unknown rule id)'}"`, `signals index: "${index}"`, - ].join('\n'); + ].join(' '); diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts index 91905722fbca3..246701e94c99a 100644 --- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts +++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts @@ -55,6 +55,7 @@ export const signalRulesAlertType = ({ index, filters, language, + maxSignals, meta, machineLearningJobId, outputIndex, @@ -63,6 +64,14 @@ export const signalRulesAlertType = ({ to, type, } = params; + const searchAfterSize = Math.min(maxSignals, DEFAULT_SEARCH_AFTER_PAGE_SIZE); + let hasError: boolean = false; + let result: SearchAfterAndBulkCreateReturnType = { + success: false, + bulkCreateTimes: [], + searchAfterTimes: [], + lastLookBackDate: null, + }; const ruleStatusClient = ruleStatusSavedObjectsClientFactory(services.savedObjectsClient); const ruleStatusService = await ruleStatusServiceFactory({ alertId, @@ -104,17 +113,10 @@ export const signalRulesAlertType = ({ ); logger.warn(gapMessage); + hasError = true; await ruleStatusService.error(gapMessage, { gap: gapString }); } - const searchAfterSize = Math.min(params.maxSignals, DEFAULT_SEARCH_AFTER_PAGE_SIZE); - let result: SearchAfterAndBulkCreateReturnType = { - success: false, - bulkCreateTimes: [], - searchAfterTimes: [], - lastLookBackDate: null, - }; - try { if (isMlRule(type)) { if (ml == null) { @@ -126,7 +128,7 @@ export const signalRulesAlertType = ({ 'Machine learning rule is missing job id and/or anomaly threshold:', `job id: "${machineLearningJobId}"`, `anomaly threshold: "${anomalyThreshold}"`, - ].join('\n') + ].join(' ') ); } @@ -143,6 +145,7 @@ export const signalRulesAlertType = ({ `datafeed status: "${jobSummary?.datafeedState}"` ); logger.warn(errorMessage); + hasError = true; await ruleStatusService.error(errorMessage); } @@ -270,11 +273,13 @@ export const signalRulesAlertType = ({ } logger.debug(buildRuleMessage('[+] Signal Rule execution completed.')); - await ruleStatusService.success('succeeded', { - bulkCreateTimeDurations: result.bulkCreateTimes, - searchAfterTimeDurations: result.searchAfterTimes, - lastLookBackDate: result.lastLookBackDate?.toISOString(), - }); + if (!hasError) { + await ruleStatusService.success('succeeded', { + bulkCreateTimeDurations: result.bulkCreateTimes, + searchAfterTimeDurations: result.searchAfterTimes, + lastLookBackDate: result.lastLookBackDate?.toISOString(), + }); + } } else { const errorMessage = buildRuleMessage( 'Bulk Indexing of signals failed. Check logs for further details.' 
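The net effect of the signal_rule_alert_type.ts changes above is that a rule execution which has already recorded an error status (for a detected execution gap or an unhealthy ML job) no longer overwrites that status with "succeeded" at the end of the run. A simplified sketch of that control flow, with the surrounding try/catch and the ML-specific checks omitted, and with `gapMessage` standing in for the gap detection shown in the hunk:

async function reportRuleStatus(
  ruleStatusService: {
    error(message: string, attributes?: object): Promise<void>;
    success(message: string, attributes?: object): Promise<void>;
  },
  gapMessage: string | null,
  result: {
    success: boolean;
    bulkCreateTimes: string[];
    searchAfterTimes: string[];
    lastLookBackDate: Date | null;
  }
) {
  // Track whether any error status was written during this run.
  let hasError = false;

  if (gapMessage != null) {
    hasError = true;
    await ruleStatusService.error(gapMessage);
  }

  // ... the search-after / bulk-create work runs here and fills `result` ...

  if (result.success && !hasError) {
    // Only record success when no error status was written earlier in the run.
    await ruleStatusService.success('succeeded', {
      bulkCreateTimeDurations: result.bulkCreateTimes,
      searchAfterTimeDurations: result.searchAfterTimes,
      lastLookBackDate: result.lastLookBackDate?.toISOString(),
    });
  }
}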
diff --git a/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts b/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts index 5596d0c70f5ea..f69a715f9b2c9 100644 --- a/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts +++ b/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts @@ -127,7 +127,7 @@ export const saveNotes = ( existingNoteIds?: string[], newNotes?: NoteResult[] ) => { - return ( + return Promise.all( newNotes?.map(note => { const newNote: SavedNote = { eventId: note.eventId, diff --git a/x-pack/package.json b/x-pack/package.json index bbab1a96f52f4..24b23256bf18e 100644 --- a/x-pack/package.json +++ b/x-pack/package.json @@ -315,6 +315,7 @@ "react-portal": "^3.2.0", "react-redux": "^7.1.3", "react-reverse-portal": "^1.0.4", + "react-router": "^5.1.2", "react-router-dom": "^5.1.2", "react-shortcuts": "^2.0.0", "react-sticky": "^6.0.3", diff --git a/x-pack/plugins/endpoint/common/generate_data.test.ts b/x-pack/plugins/endpoint/common/generate_data.test.ts index dfb906c7af606..88e1c66ea3e82 100644 --- a/x-pack/plugins/endpoint/common/generate_data.test.ts +++ b/x-pack/plugins/endpoint/common/generate_data.test.ts @@ -86,7 +86,7 @@ describe('data generator', () => { let events: Event[]; beforeEach(() => { - events = generator.generateAlertEventAncestry(3); + events = generator.createAlertEventAncestry(3); }); it('with n-1 process events', () => { @@ -153,7 +153,7 @@ describe('data generator', () => { const timestamp = new Date().getTime(); const root = generator.generateEvent({ timestamp }); const generations = 2; - const events = [root, ...generator.generateDescendantsTree(root, generations)]; + const events = [root, ...generator.descendantsTreeGenerator(root, generations)]; const rootNode = buildResolverTree(events); const visitedEvents = countResolverEvents(rootNode, generations); expect(visitedEvents).toEqual(events.length); @@ -162,7 +162,7 @@ describe('data generator', () => { it('creates full resolver tree', () => { const alertAncestors = 3; const generations = 2; - const events = generator.generateFullResolverTree(alertAncestors, generations); + const events = [...generator.fullResolverTreeGenerator(alertAncestors, generations)]; const rootNode = buildResolverTree(events); const visitedEvents = countResolverEvents(rootNode, alertAncestors + generations); expect(visitedEvents).toEqual(events.length); diff --git a/x-pack/plugins/endpoint/common/generate_data.ts b/x-pack/plugins/endpoint/common/generate_data.ts index 430ba1d422b96..0ec105129b7ac 100644 --- a/x-pack/plugins/endpoint/common/generate_data.ts +++ b/x-pack/plugins/endpoint/common/generate_data.ts @@ -100,19 +100,30 @@ interface HostInfo { }; } +interface NodeState { + event: Event; + childrenCreated: number; + maxChildren: number; +} + export class EndpointDocGenerator { commonInfo: HostInfo; random: seedrandom.prng; - constructor(seed = Math.random().toString()) { - this.random = seedrandom(seed); + constructor(seed: string | seedrandom.prng = Math.random().toString()) { + if (typeof seed === 'string') { + this.random = seedrandom(seed); + } else { + this.random = seed; + } this.commonInfo = this.createHostData(); } - // This function will create new values for all the host fields, so documents from a different host can be created - // This provides a convenient way to make documents from multiple hosts that are all tied to a single seed value - public randomizeHostData() { - this.commonInfo = 
this.createHostData(); + /** + * Creates new random IP addresses for the host to simulate new DHCP assignment + */ + public updateHostData() { + this.commonInfo.host.ip = this.randomArray(3, () => this.randomIP()); } private createHostData(): HostInfo { @@ -139,6 +150,10 @@ export class EndpointDocGenerator { }; } + /** + * Creates a host metadata document + * @param ts - Timestamp to put in the event + */ public generateHostMetadata(ts = new Date().getTime()): HostMetadata { return { '@timestamp': ts, @@ -149,6 +164,12 @@ export class EndpointDocGenerator { }; } + /** + * Creates an alert from the simulated host represented by this EndpointDocGenerator + * @param ts - Timestamp to put in the event + * @param entityID - entityID of the originating process + * @param parentEntityID - optional entityID of the parent process, if it exists + */ public generateAlert( ts = new Date().getTime(), entityID = this.randomString(10), @@ -183,7 +204,7 @@ export class EndpointDocGenerator { trusted: false, subject_name: 'bad signer', }, - malware_classifier: { + malware_classification: { identifier: 'endpointpe', score: 1, threshold: 0.66, @@ -241,7 +262,7 @@ export class EndpointDocGenerator { sha1: 'ca85243c0af6a6471bdaa560685c51eefd6dbc0d', sha256: '8ad40c90a611d36eb8f9eb24fa04f7dbca713db383ff55a03aa0f382e92061a2', }, - malware_classifier: { + malware_classification: { identifier: 'Whitelisted', score: 0, threshold: 0, @@ -255,6 +276,10 @@ export class EndpointDocGenerator { }; } + /** + * Creates an event, customized by the options parameter + * @param options - Allows event field values to be specified + */ public generateEvent(options: EventOptions = {}): EndpointEvent { return { '@timestamp': options.timestamp ? options.timestamp : new Date().getTime(), @@ -277,17 +302,31 @@ export class EndpointDocGenerator { }; } - public generateFullResolverTree( + /** + * Generator function that creates the full set of events needed to render resolver. + * The number of nodes grows exponentially with the number of generations and children per node. + * Each node is logically a process, and will have 1 or more process events associated with it. 
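Two of the EndpointDocGenerator changes above work together in the data-generation script later in this diff: the constructor now also accepts an existing seedrandom prng, so several per-host generators can share a single seed, and updateHostData() only refreshes the host IP addresses between metadata documents. A minimal usage sketch, with the import path and the indexing step left illustrative:

import seedrandom from 'seedrandom';
import { EndpointDocGenerator } from './generate_data'; // import path illustrative

// One shared, seeded prng keeps the whole multi-host data set reproducible from a
// single seed string, while each host still gets distinct (but deterministic) values.
const random = seedrandom('demo-seed');
const hostGenerators = [new EndpointDocGenerator(random), new EndpointDocGenerator(random)];

for (const generator of hostGenerators) {
  // Simulate a new DHCP lease before emitting another metadata document; the rest
  // of the host identity stays stable across documents for the same host.
  generator.updateHostData();
  const metadataDoc = generator.generateHostMetadata();
  // ...index metadataDoc into the metadata index...
}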
+ * @param alertAncestors - number of ancestor generations to create relative to the alert + * @param childGenerations - number of child generations to create relative to the alert + * @param maxChildrenPerNode - maximum number of children for any given node in the tree + * @param relatedEventsPerNode - number of related events (file, registry, etc) to create for each process event in the tree + * @param percentNodesWithRelated - percent of nodes which should have related events + * @param percentChildrenTerminated - percent of nodes which will have process termination events + */ + public *fullResolverTreeGenerator( alertAncestors?: number, childGenerations?: number, maxChildrenPerNode?: number, relatedEventsPerNode?: number, percentNodesWithRelated?: number, percentChildrenTerminated?: number - ): Event[] { - const ancestry = this.generateAlertEventAncestry(alertAncestors); + ) { + const ancestry = this.createAlertEventAncestry(alertAncestors); + for (let i = 0; i < ancestry.length; i++) { + yield ancestry[i]; + } // ancestry will always have at least 2 elements, and the second to last element will be the process associated with the alert - const descendants = this.generateDescendantsTree( + yield* this.descendantsTreeGenerator( ancestry[ancestry.length - 2], childGenerations, maxChildrenPerNode, @@ -295,10 +334,13 @@ export class EndpointDocGenerator { percentNodesWithRelated, percentChildrenTerminated ); - return ancestry.concat(descendants); } - public generateAlertEventAncestry(alertAncestors = 3): Event[] { + /** + * Creates an alert event and associated process ancestry. The alert event will always be the last event in the return array. + * @param alertAncestors - number of ancestor generations to create + */ + public createAlertEventAncestry(alertAncestors = 3): Event[] { const events = []; const startDate = new Date().getTime(); const root = this.generateEvent({ timestamp: startDate + 1000 }); @@ -321,75 +363,93 @@ export class EndpointDocGenerator { return events; } - public generateDescendantsTree( + /** + * Creates the child generations of a process. The number of returned events grows exponentially with generations and maxChildrenPerNode. + * @param root - The process event to use as the root node of the tree + * @param generations - number of child generations to create. The root node is not counted as a generation. 
+ * @param maxChildrenPerNode - maximum number of children for any given node in the tree + * @param relatedEventsPerNode - number of related events (file, registry, etc) to create for each process event in the tree + * @param percentNodesWithRelated - percent of nodes which should have related events + * @param percentChildrenTerminated - percent of nodes which will have process termination events + */ + public *descendantsTreeGenerator( root: Event, generations = 2, maxChildrenPerNode = 2, relatedEventsPerNode = 3, percentNodesWithRelated = 100, percentChildrenTerminated = 100 - ): Event[] { - let events: Event[] = []; - let parents = [root]; + ) { + const rootState: NodeState = { + event: root, + childrenCreated: 0, + maxChildren: this.randomN(maxChildrenPerNode + 1), + }; + const lineage: NodeState[] = [rootState]; let timestamp = root['@timestamp']; - for (let i = 0; i < generations; i++) { - const newParents: EndpointEvent[] = []; - parents.forEach(element => { - const numChildren = this.randomN(maxChildrenPerNode + 1); - for (let j = 0; j < numChildren; j++) { - timestamp = timestamp + 1000; - const child = this.generateEvent({ - timestamp, - parentEntityID: element.process.entity_id, - }); - newParents.push(child); - } + while (lineage.length > 0) { + const currentState = lineage[lineage.length - 1]; + // If we get to a state node and it has made all the children, move back up a level + if ( + currentState.childrenCreated === currentState.maxChildren || + lineage.length === generations + 1 + ) { + lineage.pop(); + continue; + } + // Otherwise, add a child and any nodes associated with it + currentState.childrenCreated++; + timestamp = timestamp + 1000; + const child = this.generateEvent({ + timestamp, + parentEntityID: currentState.event.process.entity_id, }); - events = events.concat(newParents); - parents = newParents; - } - const terminationEvents: EndpointEvent[] = []; - let relatedEvents: EndpointEvent[] = []; - events.forEach(element => { + lineage.push({ + event: child, + childrenCreated: 0, + maxChildren: this.randomN(maxChildrenPerNode + 1), + }); + yield child; + let processDuration: number = 6 * 3600; if (this.randomN(100) < percentChildrenTerminated) { - timestamp = timestamp + 1000; - terminationEvents.push( - this.generateEvent({ - timestamp, - entityID: element.process.entity_id, - parentEntityID: element.process.parent?.entity_id, - eventCategory: 'process', - eventType: 'end', - }) - ); + processDuration = this.randomN(1000000); // This lets termination events be up to 1 million seconds after the creation event (~11 days) + yield this.generateEvent({ + timestamp: timestamp + processDuration * 1000, + entityID: child.process.entity_id, + parentEntityID: child.process.parent?.entity_id, + eventCategory: 'process', + eventType: 'end', + }); } if (this.randomN(100) < percentNodesWithRelated) { - relatedEvents = relatedEvents.concat( - this.generateRelatedEvents(element, relatedEventsPerNode) - ); + yield* this.relatedEventsGenerator(child, relatedEventsPerNode, processDuration); } - }); - events = events.concat(terminationEvents); - events = events.concat(relatedEvents); - return events; + } } - public generateRelatedEvents(node: Event, numRelatedEvents = 10): EndpointEvent[] { - const ts = node['@timestamp'] + 1000; - const relatedEvents: EndpointEvent[] = []; + /** + * Creates related events for a process event + * @param node - process event to relate events to by entityID + * @param numRelatedEvents - number of related events to generate + * @param processDuration 
- maximum number of seconds after process event that related event timestamp can be + */ + public *relatedEventsGenerator( + node: Event, + numRelatedEvents = 10, + processDuration: number = 6 * 3600 + ) { for (let i = 0; i < numRelatedEvents; i++) { const eventInfo = this.randomChoice(OTHER_EVENT_CATEGORIES); - relatedEvents.push( - this.generateEvent({ - timestamp: ts, - entityID: node.process.entity_id, - parentEntityID: node.process.parent?.entity_id, - eventCategory: eventInfo.category, - eventType: eventInfo.creationType, - }) - ); + + const ts = node['@timestamp'] + this.randomN(processDuration) * 1000; + yield this.generateEvent({ + timestamp: ts, + entityID: node.process.entity_id, + parentEntityID: node.process.parent?.entity_id, + eventCategory: eventInfo.category, + eventType: eventInfo.creationType, + }); } - return relatedEvents; } private randomN(n: number): number { diff --git a/x-pack/plugins/endpoint/common/types.ts b/x-pack/plugins/endpoint/common/types.ts index 565f47e7a0d6f..e8e1281a88925 100644 --- a/x-pack/plugins/endpoint/common/types.ts +++ b/x-pack/plugins/endpoint/common/types.ts @@ -113,7 +113,7 @@ export interface HashFields { sha1: string; sha256: string; } -export interface MalwareClassifierFields { +export interface MalwareClassificationFields { identifier: string; score: number; threshold: number; @@ -142,7 +142,7 @@ export interface DllFields { }; compile_time: number; hash: HashFields; - malware_classifier: MalwareClassifierFields; + malware_classification: MalwareClassificationFields; mapped_address: number; mapped_size: number; path: string; @@ -194,7 +194,7 @@ export type AlertEvent = Immutable<{ executable: string; sid?: string; start: number; - malware_classifier?: MalwareClassifierFields; + malware_classification?: MalwareClassificationFields; token: { domain: string; type: string; @@ -224,7 +224,7 @@ export type AlertEvent = Immutable<{ trusted: boolean; subject_name: string; }; - malware_classifier: MalwareClassifierFields; + malware_classification: MalwareClassificationFields; temp_file_path: string; }; host: HostFields; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx index 0183e9663bb44..79cb61693056c 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx @@ -40,7 +40,7 @@ export const GeneralAccordion = memo(({ alertData }: { alertData: Immutable { } else if (columnId === 'archived') { return null; } else if (columnId === 'malware_score') { - return row.file.malware_classifier.score; + return row.file.malware_classification.score; } return null; }; diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts index 0860c9c62aca4..a26f43e1f8cc0 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts @@ -45,7 +45,11 @@ interface AppRequestedResolverData { } /** - * When the user switches the active descendent of the Resolver. + * When the user switches the "active descendant" of the Resolver. + * The "active descendant" (from the point of view of the parent element) + * corresponds to the "current" child element. 
"active" or "current" here meaning + * the element that is focused on by the user's interactions with the UI, but + * not necessarily "selected" (see UserSelectedResolverNode below) */ interface UserFocusedOnResolverNode { readonly type: 'userFocusedOnResolverNode'; @@ -57,10 +61,27 @@ interface UserFocusedOnResolverNode { }; } +/** + * When the user "selects" a node in the Resolver + * "Selected" refers to the state of being the element that the + * user most recently "picked" (by e.g. pressing a button corresponding + * to the element in a list) as opposed to "active" or "current" (see UserFocusedOnResolverNode above). + */ +interface UserSelectedResolverNode { + readonly type: 'userSelectedResolverNode'; + readonly payload: { + /** + * Used to identify the process node that the user selected + */ + readonly nodeId: string; + }; +} + export type ResolverAction = | CameraAction | DataAction | UserBroughtProcessIntoView | UserChangedSelectedEvent | AppRequestedResolverData - | UserFocusedOnResolverNode; + | UserFocusedOnResolverNode + | UserSelectedResolverNode; diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts index 1c66a998a4c22..82206d77f8349 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts @@ -4,18 +4,44 @@ * you may not use this file except in compliance with the Elastic License. */ import { Reducer, combineReducers } from 'redux'; +import { htmlIdGenerator } from '@elastic/eui'; import { animateProcessIntoView } from './methods'; import { cameraReducer } from './camera/reducer'; import { dataReducer } from './data/reducer'; import { ResolverState, ResolverAction, ResolverUIState } from '../types'; +import { uniquePidForProcess } from '../models/process_event'; + +/** + * Despite the name "generator", this function is entirely determinant + * (i.e. it will return the same html id given the same prefix 'resolverNode' + * and nodeId) + */ +const resolverNodeIdGenerator = htmlIdGenerator('resolverNode'); const uiReducer: Reducer = ( - uiState = { activeDescendentId: null }, + uiState = { activeDescendantId: null, selectedDescendantId: null }, action ) => { if (action.type === 'userFocusedOnResolverNode') { return { - activeDescendentId: action.payload.nodeId, + ...uiState, + activeDescendantId: action.payload.nodeId, + }; + } else if (action.type === 'userSelectedResolverNode') { + return { + ...uiState, + selectedDescendantId: action.payload.nodeId, + }; + } else if (action.type === 'userBroughtProcessIntoView') { + /** + * This action has a process payload (instead of a processId), so we use + * `uniquePidForProcess` and `resolverNodeIdGenerator` to resolve the determinant + * html id of the node being brought into view. 
+ */ + const processNodeId = resolverNodeIdGenerator(uniquePidForProcess(action.payload.process)); + return { + ...uiState, + activeDescendantId: processNodeId, }; } else { return uiState; diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts index 37482916496e7..e8ae3d08e5cb6 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts @@ -6,6 +6,7 @@ import * as cameraSelectors from './camera/selectors'; import * as dataSelectors from './data/selectors'; +import * as uiSelectors from './ui/selectors'; import { ResolverState } from '../types'; /** @@ -59,6 +60,22 @@ export const processAdjacencies = composeSelectors( dataSelectors.processAdjacencies ); +/** + * Returns the id of the "current" tree node (fake-focused) + */ +export const uiActiveDescendantId = composeSelectors( + uiStateSelector, + uiSelectors.activeDescendantId +); + +/** + * Returns the id of the "selected" tree node (the node that is currently "pressed" and possibly controlling other popups / components) + */ +export const uiSelectedDescendantId = composeSelectors( + uiStateSelector, + uiSelectors.selectedDescendantId +); + /** * Returns the camera state from within ResolverState */ @@ -73,6 +90,13 @@ function dataStateSelector(state: ResolverState) { return state.data; } +/** + * Returns the ui state from within ResolverState + */ +function uiStateSelector(state: ResolverState) { + return state.ui; +} + /** * Whether or not the resolver is pending fetching data */ diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/ui/selectors.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/ui/selectors.ts new file mode 100644 index 0000000000000..196e834c406b3 --- /dev/null +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/ui/selectors.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { createSelector } from 'reselect'; +import { ResolverUIState } from '../../types'; + +/** + * id of the "current" tree node (fake-focused) + */ +export const activeDescendantId = createSelector( + (uiState: ResolverUIState) => uiState, + /* eslint-disable no-shadow */ + ({ activeDescendantId }) => { + return activeDescendantId; + } +); + +/** + * id of the currently "selected" tree node + */ +export const selectedDescendantId = createSelector( + (uiState: ResolverUIState) => uiState, + /* eslint-disable no-shadow */ + ({ selectedDescendantId }) => { + return selectedDescendantId; + } +); diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts index 674553aba0937..d370bda0d1842 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts @@ -37,7 +37,11 @@ export interface ResolverUIState { /** * The ID attribute of the resolver's aria-activedescendent. */ - readonly activeDescendentId: string | null; + readonly activeDescendantId: string | null; + /** + * The ID attribute of the resolver's currently selected descendant. 
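The new action, reducer branches, selectors, and ResolverUIState fields above implement the ARIA active-descendant pattern: activeDescendantId tracks the fake-focused node (mirrored as aria-activedescendant on the tree and aria-current on the node, see process_event_dot.tsx further down), while selectedDescendantId tracks the node the user most recently clicked (mirrored as aria-selected). A condensed sketch of how a node component might consume this state; the hook name and import path are illustrative, and the real wiring lives in the view components later in this diff:

import { useSelector } from 'react-redux';
import * as selectors from '../store/selectors'; // import path illustrative

// Derives a node's ARIA attributes and event handlers from the new UI state.
function useResolverNodeAria(nodeId: string, dispatch: (action: unknown) => void) {
  const activeDescendantId = useSelector(selectors.uiActiveDescendantId);
  const selectedDescendantId = useSelector(selectors.uiSelectedDescendantId);

  return {
    // "current": the fake-focused node, driven by userFocusedOnResolverNode.
    'aria-current': nodeId === activeDescendantId ? ('true' as const) : undefined,
    // "selected": the node the user most recently picked, driven by userSelectedResolverNode.
    'aria-selected': nodeId === selectedDescendantId ? ('true' as const) : undefined,
    onFocus: () => dispatch({ type: 'userFocusedOnResolverNode', payload: { nodeId } }),
    onClick: () => dispatch({ type: 'userSelectedResolverNode', payload: { nodeId } }),
  };
}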
+ */ + readonly selectedDescendantId: string | null; } /** diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx b/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx index 911cda1be6517..8ee9bfafc630e 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx @@ -193,6 +193,7 @@ export const SymbolIds = { runningTriggerCube: idGenerator('runningTriggerCube'), terminatedProcessCube: idGenerator('terminatedCube'), terminatedTriggerCube: idGenerator('terminatedTriggerCube'), + processCubeActiveBacking: idGenerator('activeBacking'), }; /** @@ -393,6 +394,15 @@ const SymbolsAndShapes = memo(() => ( /> + + resolver active backing + + )); diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx b/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx index 58ce9b963de5d..36155ece57a9c 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx @@ -59,6 +59,7 @@ export const Resolver = styled( const { projectionMatrix, ref, onMouseDown } = useCamera(); const isLoading = useSelector(selectors.isLoading); + const activeDescendantId = useSelector(selectors.uiActiveDescendantId); useLayoutEffect(() => { dispatch({ @@ -66,6 +67,7 @@ export const Resolver = styled( payload: { selectedEvent }, }); }, [dispatch, selectedEvent]); + return (
{isLoading ? ( @@ -79,6 +81,7 @@ export const Resolver = styled( ref={ref} role="tree" tabIndex={0} + aria-activedescendant={activeDescendantId || undefined} > {edgeLineSegments.map(([startPosition, endPosition], index) => ( ({ left: `${left}px`, @@ -143,6 +148,9 @@ export const ProcessEventDot = styled( const labelId = useMemo(() => resolverNodeIdGenerator(), [resolverNodeIdGenerator]); const descriptionId = useMemo(() => resolverNodeIdGenerator(), [resolverNodeIdGenerator]); + const isActiveDescendant = nodeId === activeDescendantId; + const isSelectedDescendant = nodeId === selectedDescendantId; + const dispatch = useResolverDispatch(); const handleFocus = useCallback( @@ -153,16 +161,24 @@ export const ProcessEventDot = styled( nodeId, }, }); - focusEvent.currentTarget.setAttribute('aria-current', 'true'); }, [dispatch, nodeId] ); - const handleClick = useCallback(() => { - if (animationTarget.current !== null) { - animationTarget.current.beginElement(); - } - }, [animationTarget]); + const handleClick = useCallback( + (clickEvent: React.MouseEvent) => { + if (animationTarget.current !== null) { + (animationTarget.current as any).beginElement(); + } + dispatch({ + type: 'userSelectedResolverNode', + payload: { + nodeId, + }, + }); + }, + [animationTarget, dispatch, nodeId] + ); return ( @@ -179,6 +195,8 @@ export const ProcessEventDot = styled( aria-labelledby={labelId} aria-describedby={descriptionId} aria-haspopup={'true'} + aria-current={isActiveDescendant ? 'true' : undefined} + aria-selected={isSelectedDescendant ? 'true' : undefined} style={nodeViewportStyle} id={nodeId} onClick={handleClick} @@ -186,6 +204,15 @@ export const ProcessEventDot = styled( tabIndex={-1} > + + = { diff --git a/x-pack/plugins/endpoint/scripts/mapping.json b/x-pack/plugins/endpoint/scripts/mapping.json index 34c039d643517..5878e01b52a47 100644 --- a/x-pack/plugins/endpoint/scripts/mapping.json +++ b/x-pack/plugins/endpoint/scripts/mapping.json @@ -90,7 +90,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -452,7 +452,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -849,7 +849,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -1494,7 +1494,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -1687,7 +1687,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { diff --git a/x-pack/plugins/endpoint/scripts/resolver_generator.ts b/x-pack/plugins/endpoint/scripts/resolver_generator.ts index 3d11ccaad005d..aebf92eff6cb8 100644 --- a/x-pack/plugins/endpoint/scripts/resolver_generator.ts +++ b/x-pack/plugins/endpoint/scripts/resolver_generator.ts @@ -4,9 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
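The resolver_generator.ts script whose diff begins here is the main consumer of the generator functions above: rather than materializing a whole resolver tree as an array, it drains fullResolverTreeGenerator in batches and bulk-indexes each batch. A reduced sketch of that loop, with client setup and CLI argument handling omitted:

import { Client } from '@elastic/elasticsearch';
import { EndpointDocGenerator, Event } from '../common/generate_data';

// Drain the resolver-tree generator in batches of up to 1000 events so very large
// trees never have to be held in memory all at once.
async function indexResolverTree(client: Client, generator: EndpointDocGenerator, eventIndex: string) {
  let batch: Event[] = [];

  const flush = async () => {
    // Interleave bulk action lines and documents, as the script does with reduce().
    const body = batch.flatMap(doc => [{ index: { _index: eventIndex } }, doc]);
    await client.bulk({ body });
    batch = [];
  };

  for (const event of generator.fullResolverTreeGenerator()) {
    batch.push(event);
    if (batch.length === 1000) {
      await flush();
    }
  }
  if (batch.length > 0) {
    await flush();
  }
}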
*/ import * as yargs from 'yargs'; +import seedrandom from 'seedrandom'; import { Client, ClientOptions } from '@elastic/elasticsearch'; import { ResponseError } from '@elastic/elasticsearch/lib/errors'; -import { EndpointDocGenerator } from '../common/generate_data'; +import { EndpointDocGenerator, Event } from '../common/generate_data'; import { default as mapping } from './mapping.json'; main(); @@ -137,14 +138,24 @@ async function main() { // eslint-disable-next-line no-console console.log('No seed supplied, using random seed: ' + seed); } - const generator = new EndpointDocGenerator(seed); + const random = seedrandom(seed); for (let i = 0; i < argv.numHosts; i++) { - await client.index({ - index: argv.metadataIndex, - body: generator.generateHostMetadata(), - }); + const generator = new EndpointDocGenerator(random); + const timeBetweenDocs = 6 * 3600 * 1000; // 6 hours between metadata documents + const numMetadataDocs = 5; + const timestamp = new Date().getTime(); + for (let j = 0; j < numMetadataDocs; j++) { + generator.updateHostData(); + await client.index({ + index: argv.metadataIndex, + body: generator.generateHostMetadata( + timestamp - timeBetweenDocs * (numMetadataDocs - j - 1) + ), + }); + } + for (let j = 0; j < argv.alertsPerHost; j++) { - const resolverDocs = generator.generateFullResolverTree( + const resolverDocGenerator = generator.fullResolverTreeGenerator( argv.ancestors, argv.generations, argv.children, @@ -152,15 +163,23 @@ async function main() { argv.percentWithRelated, argv.percentTerminated ); - const body = resolverDocs.reduce( - (array: Array>, doc) => ( - array.push({ index: { _index: argv.eventIndex } }, doc), array - ), - [] - ); - - await client.bulk({ body }); + let result = resolverDocGenerator.next(); + while (!result.done) { + let k = 0; + const resolverDocs: Event[] = []; + while (k < 1000 && !result.done) { + resolverDocs.push(result.value); + result = resolverDocGenerator.next(); + k++; + } + const body = resolverDocs.reduce( + (array: Array>, doc) => ( + array.push({ index: { _index: argv.eventIndex } }, doc), array + ), + [] + ); + await client.bulk({ body }); + } } - generator.randomizeHostData(); } } diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts index 663017e2e47af..cc4c17c5c63a3 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts @@ -63,6 +63,10 @@ export * from './max_shingle_size_parameter'; export * from './relations_parameter'; +export * from './other_type_name_parameter'; + +export * from './other_type_json_parameter'; + export const PARAMETER_SERIALIZERS = [relationsSerializer, dynamicSerializer]; export const PARAMETER_DESERIALIZERS = [relationsDeserializer, dynamicDeserializer]; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_json_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_json_parameter.tsx new file mode 100644 index 0000000000000..64e50f711a249 --- /dev/null +++ 
b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_json_parameter.tsx @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; +import { i18n } from '@kbn/i18n'; + +import { + UseField, + JsonEditorField, + ValidationFuncArg, + fieldValidators, + FieldConfig, +} from '../../../shared_imports'; + +const { isJsonField } = fieldValidators; + +/** + * This is a special component that does not have an explicit entry in {@link PARAMETERS_DEFINITION}. + * + * We use it to store custom defined parameters in a field called "otherTypeJson". + */ + +const fieldConfig: FieldConfig = { + label: i18n.translate('xpack.idxMgmt.mappingsEditor.otherTypeJsonFieldLabel', { + defaultMessage: 'Type Parameters JSON', + }), + defaultValue: {}, + validations: [ + { + validator: isJsonField( + i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeJsonInvalidJSONErrorMessage', + { + defaultMessage: 'Invalid JSON.', + } + ) + ), + }, + { + validator: ({ value }: ValidationFuncArg) => { + const json = JSON.parse(value); + if (Array.isArray(json)) { + return { + message: i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeJsonArrayNotAllowedErrorMessage', + { + defaultMessage: 'Arrays are not allowed.', + } + ), + }; + } + }, + }, + { + validator: ({ value }: ValidationFuncArg) => { + const json = JSON.parse(value); + if (json.type) { + return { + code: 'ERR_CUSTOM_TYPE_OVERRIDDEN', + message: i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeJsonTypeFieldErrorMessage', + { + defaultMessage: 'Cannot override the "type" field.', + } + ), + }; + } + }, + }, + ], + deserializer: (value: any) => { + if (value === '') { + return value; + } + return JSON.stringify(value, null, 2); + }, + serializer: (value: string) => { + try { + return JSON.parse(value); + } catch (error) { + // swallow error and return non-parsed value; + return value; + } + }, +}; + +export const OtherTypeJsonParameter = () => ( + +); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_name_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_name_parameter.tsx new file mode 100644 index 0000000000000..6004e484323a1 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_name_parameter.tsx @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; + +import { i18n } from '@kbn/i18n'; +import { UseField, TextField, FieldConfig } from '../../../shared_imports'; +import { fieldValidators } from '../../../shared_imports'; + +const { emptyField } = fieldValidators; + +/** + * This is a special component that does not have an explicit entry in {@link PARAMETERS_DEFINITION}. 
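To summarize the intent of the two new parameter fields for the "other" data type above: the name field captures the unrecognized type string (stored in the "subType" path), and the JSON field holds any additional parameters as an object that is edited as pretty-printed text. An illustrative round trip, using a hypothetical custom parameter, followed by the validation outcomes defined above:

// deserializer: stored object -> text shown in the JSON editor
const storedParameters = { analyzer: 'standard' };           // hypothetical custom parameters
const editorText = JSON.stringify(storedParameters, null, 2);
// serializer: editor text -> object written back on save (it falls back to the raw
// string when parsing fails, so the JSON validator can report the error instead)
const savedParameters = JSON.parse(editorText);

// Validation outcomes, with the messages defined above:
//   '{ "analyzer": "standard" }'  -> accepted
//   'not json'                    -> "Invalid JSON."
//   '[1, 2, 3]'                   -> "Arrays are not allowed."
//   '{ "type": "keyword" }'       -> 'Cannot override the "type" field.'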
+ * + * We use it to store the name of types unknown to the mappings editor in the "subType" path. + */ + +const fieldConfig: FieldConfig = { + label: i18n.translate('xpack.idxMgmt.mappingsEditor.otherTypeNameFieldLabel', { + defaultMessage: 'Type Name', + }), + defaultValue: '', + validations: [ + { + validator: emptyField( + i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeNameIsRequiredErrorMessage', + { + defaultMessage: 'The type name is required.', + } + ) + ), + }, + ], +}; + +export const OtherTypeNameParameter = () => ( + +); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx index 60b025ce644ef..b41f35b983885 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx @@ -5,6 +5,7 @@ */ import React, { useEffect, useCallback } from 'react'; import classNames from 'classnames'; +import * as _ from 'lodash'; import { i18n } from '@kbn/i18n'; @@ -31,7 +32,7 @@ import { filterTypesForNonRootFields, } from '../../../../lib'; import { Field, MainType, SubType, NormalizedFields, ComboBoxOption } from '../../../../types'; -import { NameParameter, TypeParameter } from '../../field_parameters'; +import { NameParameter, TypeParameter, OtherTypeNameParameter } from '../../field_parameters'; import { getParametersFormForType } from './required_parameters_forms'; const formWrapper = (props: any) =>
; @@ -155,9 +156,9 @@ export const CreateField = React.memo(function CreateFieldComponent({ }, [form, getSubTypeMeta] ); - const renderFormFields = useCallback( ({ type }) => { + const isOtherType = type === 'other'; const { subTypeOptions, subTypeLabel } = getSubTypeMeta(type); const docLink = documentationService.getTypeDocLink(type) as string; @@ -178,7 +179,13 @@ export const CreateField = React.memo(function CreateFieldComponent({ docLink={docLink} /> - {/* Field sub type (if any) */} + {/* Other type */} + {isOtherType && ( + + + + )} + {/* Field sub type (if any) - will never be the case if we have an "other" type */} {subTypeOptions && ( {/* Documentation link */} - - - {i18n.translate( - 'xpack.idxMgmt.mappingsEditor.editField.typeDocumentation', - { - defaultMessage: '{type} documentation', - values: { - type: subTypeDefinition - ? subTypeDefinition.label - : typeDefinition.label, - }, - } - )} - - + {linkDocumentation && ( + + + {i18n.translate( + 'xpack.idxMgmt.mappingsEditor.editField.typeDocumentation', + { + defaultMessage: '{type} documentation', + values: { + type: subTypeDefinition + ? subTypeDefinition.label + : typeDefinition.label, + }, + } + )} + + + )} {/* Field path */} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx index ddb808094428d..75a083d64b6db 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx @@ -17,7 +17,7 @@ import { } from '../../../../lib'; import { TYPE_DEFINITION } from '../../../../constants'; -import { NameParameter, TypeParameter } from '../../field_parameters'; +import { NameParameter, TypeParameter, OtherTypeNameParameter } from '../../field_parameters'; import { FieldDescriptionSection } from './field_description_section'; interface Props { @@ -80,9 +80,17 @@ export const EditFieldHeaderForm = React.memo( /> - {/* Field sub type (if any) */} + {/* Other type */} + {type === 'other' && ( + + + + )} + + {/* Field sub type (if any) - will never be the case if we have an "other" type */} {hasSubType && ( + {' '} } = { shape: ShapeType, dense_vector: DenseVectorType, object: ObjectType, + other: OtherType, nested: NestedType, join: JoinType, }; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/field_types/other_type.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/field_types/other_type.tsx new file mode 100644 index 0000000000000..c403bbfb79056 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/field_types/other_type.tsx @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
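The end result of the new "other" type is easiest to see with a made-up example. The real Elasticsearch type name is entered through OtherTypeNameParameter (stored under the subType path) and any remaining parameters through the JSON editor; the serializer changes further down turn that back into an ordinary mapping. rank_feature is used here purely as an example of a type the form itself may not cover, and all values are illustrative.

// What the editor holds for an "other" field (illustrative values).
const editorFormValue = {
  name: 'page_views',
  type: 'other', // placeholder type that only exists inside the mappings editor
  subType: 'rank_feature', // the actual ES type, typed by the user
  otherTypeJson: { positive_score_impact: false },
};

// What fieldSerializer produces for the mappings request: subType becomes the type and
// the JSON parameters are spread back onto the field.
const serializedMapping = {
  page_views: {
    type: 'rank_feature',
    positive_score_impact: false,
  },
};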
+ */ +import React from 'react'; + +import { OtherTypeJsonParameter } from '../../field_parameters'; +import { BasicParametersSection } from '../edit_field'; + +export const OtherType = () => { + return ( + + + + ); +}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx index 4c1c8bc1da114..f274159bd6c30 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx @@ -16,11 +16,13 @@ import { import { i18n } from '@kbn/i18n'; import { NormalizedField, NormalizedFields } from '../../../types'; +import { getTypeLabelFromType } from '../../../lib'; import { TYPE_DEFINITION, CHILD_FIELD_INDENT_SIZE, LEFT_PADDING_SIZE_FIELD_ITEM_WRAPPER, } from '../../../constants'; + import { FieldsList } from './fields_list'; import { CreateField } from './create_field'; import { DeleteFieldProvider } from './delete_field_provider'; @@ -265,7 +267,7 @@ function FieldListItemComponent( dataType: TYPE_DEFINITION[source.type].label, }, }) - : TYPE_DEFINITION[source.type].label} + : getTypeLabelFromType(source.type)} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx index dbb8a788514bc..614b7cb56bef6 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx @@ -11,6 +11,7 @@ import { i18n } from '@kbn/i18n'; import { SearchResult } from '../../../types'; import { TYPE_DEFINITION } from '../../../constants'; import { useDispatch } from '../../../mappings_state'; +import { getTypeLabelFromType } from '../../../lib'; import { DeleteFieldProvider } from '../fields/delete_field_provider'; interface Props { @@ -115,7 +116,7 @@ export const SearchResultItem = React.memo(function FieldListItemFlatComponent({ dataType: TYPE_DEFINITION[source.type].label, }, }) - : TYPE_DEFINITION[source.type].label} + : getTypeLabelFromType(source.type)} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx index f904281181c48..4206fe8b696da 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx @@ -784,6 +784,20 @@ export const TYPE_DEFINITION: { [key in DataType]: DataTypeDefinition } = {

), }, + other: { + label: i18n.translate('xpack.idxMgmt.mappingsEditor.dataType.otherDescription', { + defaultMessage: 'Other', + }), + value: 'other', + description: () => ( +


+ ), + }, }; export const MAIN_TYPES: MainType[] = [ @@ -811,6 +825,7 @@ export const MAIN_TYPES: MainType[] = [ 'shape', 'text', 'token_count', + 'other', ]; export const MAIN_DATA_TYPE_DEFINITION: { diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx index 732449f382f93..1b9372e4b50c4 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx @@ -504,7 +504,7 @@ export const PARAMETERS_DEFINITION: { [key in ParameterName]: ParameterDefinitio fieldConfig: { defaultValue: '', type: FIELD_TYPES.NUMBER, - deserializer: (value: string | number) => +value, + deserializer: (value: string | number) => (value === '' ? value : +value), formatters: [toInt], label: i18n.translate('xpack.idxMgmt.mappingsEditor.parameters.scalingFactorLabel', { defaultMessage: 'Scaling factor', diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx index 5a277073c5f1a..618d106b0e7a1 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx @@ -185,8 +185,6 @@ const getSearchMetadata = (searchData: SearchData, fieldData: FieldData): Search const score = calculateScore(metadata); const display = getJSXdisplayFromMeta(searchData, fieldData, metadata); - // console.log(fieldData.path, score, metadata); - return { ...metadata, display, diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts index 131d886ff05d9..6b817c829251f 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts @@ -45,16 +45,19 @@ const runParametersDeserializers = (field: Field): Field => ); export const fieldSerializer: SerializerFunc = (field: Field) => { + const { otherTypeJson, ...rest } = field; + const updatedField: Field = Boolean(otherTypeJson) ? 
{ ...otherTypeJson, ...rest } : { ...rest }; + // If a subType is present, use it as type for ES - if ({}.hasOwnProperty.call(field, 'subType')) { - field.type = field.subType as DataType; - delete field.subType; + if ({}.hasOwnProperty.call(updatedField, 'subType')) { + updatedField.type = updatedField.subType as DataType; + delete updatedField.subType; } // Delete temp fields - delete (field as any).useSameAnalyzerForSearch; + delete (updatedField as any).useSameAnalyzerForSearch; - return sanitizeField(runParametersSerializers(field)); + return sanitizeField(runParametersSerializers(updatedField)); }; export const fieldDeserializer: SerializerFunc = (field: Field): Field => { @@ -70,8 +73,18 @@ export const fieldDeserializer: SerializerFunc = (field: Field): Field => field.type = type; } - (field as any).useSameAnalyzerForSearch = - {}.hasOwnProperty.call(field, 'search_analyzer') === false; + if (field.type === 'other') { + const { type, subType, name, ...otherTypeJson } = field; + /** + * For "other" type (type we don't support through a form) + * we grab all the parameters and put them in the "otherTypeJson" object + * that we will render in a JSON editor. + */ + field.otherTypeJson = otherTypeJson; + } else { + (field as any).useSameAnalyzerForSearch = + {}.hasOwnProperty.call(field, 'search_analyzer') === false; + } return runParametersDeserializers(field); }; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts index 337554ab5fa5a..cece26618ced8 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts @@ -25,6 +25,7 @@ import { PARAMETERS_DEFINITION, TYPE_NOT_ALLOWED_MULTIFIELD, TYPE_ONLY_ALLOWED_AT_ROOT_LEVEL, + TYPE_DEFINITION, } from '../constants'; import { State } from '../reducer'; @@ -71,6 +72,9 @@ export const getFieldMeta = (field: Field, isMultiField?: boolean): FieldMeta => }; }; +export const getTypeLabelFromType = (type: DataType) => + TYPE_DEFINITION[type] ? TYPE_DEFINITION[type].label : `${TYPE_DEFINITION.other.label}: ${type}`; + export const getFieldConfig = (param: ParameterName, prop?: string): FieldConfig => { if (prop !== undefined) { if ( @@ -122,7 +126,7 @@ const replaceAliasPathByAliasId = ( }; export const getMainTypeFromSubType = (subType: SubType): MainType => - SUB_TYPE_MAP_TO_MAIN[subType] as MainType; + (SUB_TYPE_MAP_TO_MAIN[subType] ?? 'other') as MainType; /** * In order to better work with the recursive pattern of the mappings `properties`, this method flatten the fields @@ -287,7 +291,9 @@ export const deNormalize = ({ rootLevelFields, byId, aliases }: NormalizedFields const { source, childFields, childFieldsName } = serializedFieldsById[id]; const { name, ...normalizedField } = source; const field: Omit = normalizedField; + to[name] = field; + if (childFields) { field[childFieldsName!] 
= {}; return deNormalizePaths(childFields, field[childFieldsName!]); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts index dbbffe5a0bd31..5b18af68ed55b 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts @@ -56,7 +56,12 @@ export type MainType = | 'date_nanos' | 'geo_point' | 'geo_shape' - | 'token_count'; + | 'token_count' + /** + * 'other' is a special type that only exists inside of MappingsEditor as a placeholder + * for undocumented field types. + */ + | 'other'; export type SubType = NumericType | RangeType; @@ -156,6 +161,10 @@ interface FieldBasic { subType?: SubType; properties?: { [key: string]: Omit }; fields?: { [key: string]: Omit }; + + // other* exist together as a holder of types that the mappings editor does not yet know about but + // enables the user to create mappings with them. + otherTypeJson?: GenericObject; } type FieldParams = { diff --git a/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx b/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx index 0909a3c2ed569..cd3ba43c3607c 100644 --- a/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx +++ b/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx @@ -89,7 +89,7 @@ export const Expressions: React.FC = props => { const defaultExpression = useMemo( () => ({ - aggType: AGGREGATION_TYPES.MAX, + aggType: AGGREGATION_TYPES.AVERAGE, comparator: '>', threshold: [], timeSize: 1, diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx index 8ba597a0d377e..de0dd75f635cf 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx @@ -3,78 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
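Read as plain functions, the serializer and deserializer changes a little further up amount to the following round trip for "other" fields. This is a simplified sketch of the diff's logic, not the module's actual exports:

interface EditorField {
  name: string;
  type: string;
  subType?: string;
  otherTypeJson?: Record<string, any>;
  [param: string]: any;
}

// Loading a mapping: for an "other" field, everything except name/type/subType is folded
// into otherTypeJson so it can be edited as raw JSON.
function deserializeOtherField(field: EditorField): EditorField {
  if (field.type !== 'other') {
    return field;
  }
  const { type, subType, name, ...otherTypeJson } = field;
  return { name, type, subType, otherTypeJson };
}

// Saving a mapping: spread otherTypeJson back onto the field and let subType (the real ES
// type name) replace the placeholder type, as the existing subType handling already did.
function serializeOtherField(field: EditorField): Record<string, any> {
  const { otherTypeJson, ...rest } = field;
  const updated: Record<string, any> = otherTypeJson ? { ...otherTypeJson, ...rest } : { ...rest };
  if (updated.subType !== undefined) {
    updated.type = updated.subType;
    delete updated.subType;
  }
  return updated;
}

Unknown types also get a readable label in the fields list via getTypeLabelFromType, which falls back to something like 'Other: rank_feature' when the type has no entry in TYPE_DEFINITION.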
*/ -import React, { useEffect, useMemo, useState } from 'react'; -import { ICON_TYPES, EuiIcon, EuiIconProps } from '@elastic/eui'; -import { PackageInfo, PackageListItem } from '../../../../common/types/models'; -import { useLinks } from '../sections/epm/hooks'; -import { epmRouteService } from '../../../../common/services'; -import { sendRequest } from '../hooks/use_request'; -import { GetInfoResponse } from '../types'; -type Package = PackageInfo | PackageListItem; +import React from 'react'; +import { EuiIcon, EuiIconProps } from '@elastic/eui'; +import { usePackageIconType, UsePackageIconType } from '../hooks'; -const CACHED_ICONS = new Map(); - -export const PackageIcon: React.FunctionComponent<{ - packageName: string; - version?: string; - icons?: Package['icons']; -} & Omit> = ({ packageName, version, icons, ...euiIconProps }) => { - const iconType = usePackageIcon(packageName, version, icons); +export const PackageIcon: React.FunctionComponent> = ({ packageName, version, icons, tryApi, ...euiIconProps }) => { + const iconType = usePackageIconType({ packageName, version, icons, tryApi }); return ; }; - -const usePackageIcon = (packageName: string, version?: string, icons?: Package['icons']) => { - const { toImage } = useLinks(); - const [iconType, setIconType] = useState(''); // FIXME: use `empty` icon during initialization - see: https://github.com/elastic/kibana/issues/60622 - const pkgKey = `${packageName}-${version ?? ''}`; - - // Generates an icon path or Eui Icon name based on an icon list from the package - // or by using the package name against logo icons from Eui - const fromInput = useMemo(() => { - return (iconList?: Package['icons']) => { - const svgIcons = iconList?.filter(iconDef => iconDef.type === 'image/svg+xml'); - const localIconSrc = Array.isArray(svgIcons) && svgIcons[0]?.src; - if (localIconSrc) { - CACHED_ICONS.set(pkgKey, toImage(localIconSrc)); - setIconType(CACHED_ICONS.get(pkgKey) as string); - return; - } - - const euiLogoIcon = ICON_TYPES.find(key => key.toLowerCase() === `logo${packageName}`); - if (euiLogoIcon) { - CACHED_ICONS.set(pkgKey, euiLogoIcon); - setIconType(euiLogoIcon); - return; - } - - CACHED_ICONS.set(pkgKey, 'package'); - setIconType('package'); - }; - }, [packageName, pkgKey, toImage]); - - useEffect(() => { - if (CACHED_ICONS.has(pkgKey)) { - setIconType(CACHED_ICONS.get(pkgKey) as string); - return; - } - - // Use API to see if package has icons defined - if (!icons && version) { - fromPackageInfo(pkgKey) - .catch(() => undefined) // ignore API errors - .then(fromInput); - } else { - fromInput(icons); - } - }, [icons, toImage, packageName, version, fromInput, pkgKey]); - - return iconType; -}; - -const fromPackageInfo = async (pkgKey: string) => { - const { data } = await sendRequest({ - path: epmRouteService.getInfoPath(pkgKey), - method: 'get', - }); - return data?.response?.icons; -}; diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts index 5e0695bd3e305..66c7333150fb7 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts @@ -9,6 +9,7 @@ export { useCore, CoreContext } from './use_core'; export { useConfig, ConfigContext } from './use_config'; export { useSetupDeps, useStartDeps, DepsContext } from './use_deps'; export { useLink } from './use_link'; +export { usePackageIconType, 
UsePackageIconType } from './use_package_icon_type'; export { usePagination, Pagination } from './use_pagination'; export { useDebounce } from './use_debounce'; export * from './use_request'; diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_package_icon_type.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_package_icon_type.ts new file mode 100644 index 0000000000000..5f231b5cc9ec9 --- /dev/null +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_package_icon_type.ts @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useEffect, useState } from 'react'; +import { ICON_TYPES } from '@elastic/eui'; +import { PackageInfo, PackageListItem } from '../../../../common/types/models'; +import { useLinks } from '../sections/epm/hooks'; +import { sendGetPackageInfoByKey } from './index'; + +type Package = PackageInfo | PackageListItem; + +export interface UsePackageIconType { + packageName: Package['name']; + version: Package['version']; + icons?: Package['icons']; + tryApi?: boolean; // should it call API to try to find missing icons? +} + +const CACHED_ICONS = new Map(); + +export const usePackageIconType = ({ + packageName, + version, + icons: paramIcons, + tryApi = false, +}: UsePackageIconType) => { + const { toImage } = useLinks(); + const [iconList, setIconList] = useState(); + const [iconType, setIconType] = useState(''); // FIXME: use `empty` icon during initialization - see: https://github.com/elastic/kibana/issues/60622 + const pkgKey = `${packageName}-${version}`; + + // Generates an icon path or Eui Icon name based on an icon list from the package + // or by using the package name against logo icons from Eui + useEffect(() => { + if (CACHED_ICONS.has(pkgKey)) { + setIconType(CACHED_ICONS.get(pkgKey) || ''); + return; + } + const svgIcons = (paramIcons || iconList)?.filter(iconDef => iconDef.type === 'image/svg+xml'); + const localIconSrc = Array.isArray(svgIcons) && svgIcons[0]?.src; + if (localIconSrc) { + CACHED_ICONS.set(pkgKey, toImage(localIconSrc)); + setIconType(CACHED_ICONS.get(pkgKey) || ''); + return; + } + + const euiLogoIcon = ICON_TYPES.find(key => key.toLowerCase() === `logo${packageName}`); + if (euiLogoIcon) { + CACHED_ICONS.set(pkgKey, euiLogoIcon); + setIconType(euiLogoIcon); + return; + } + + if (tryApi && !paramIcons && !iconList) { + sendGetPackageInfoByKey(pkgKey) + .catch(error => undefined) // Ignore API errors + .then(res => { + CACHED_ICONS.delete(pkgKey); + setIconList(res?.data?.response?.icons); + }); + } + + CACHED_ICONS.set(pkgKey, 'package'); + setIconType('package'); + }, [paramIcons, pkgKey, toImage, iconList, packageName, iconType, tryApi]); + + return iconType; +}; diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx index 0b48020c3cac1..cc7fc89ab8a80 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx +++ 
b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx @@ -130,7 +130,15 @@ export const StepSelectPackage: React.FunctionComponent<{ return { label: title || name, key: pkgkey, - prepend: , + prepend: ( + + ), checked: selectedPkgKey === pkgkey ? 'on' : undefined, }; })} diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx index 49285707457e1..87155afdc21be 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx @@ -150,6 +150,7 @@ export const DatasourcesTable: React.FunctionComponent = ({ packageName={datasource.package.name} version={datasource.package.version} size="m" + tryApi={true} /> )} diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx index 7ce386ed56f5f..684b158b5da86 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx @@ -16,7 +16,8 @@ export function IconPanel({ iconType }: { iconType: IconType }) { text-align: center; vertical-align: middle; padding: ${props => props.theme.eui.spacerSizes.xl}; - svg { + svg, + img { height: ${props => props.theme.eui.euiKeyPadMenuSize}; width: ${props => props.theme.eui.euiKeyPadMenuSize}; } diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx index 4bc90c6a0f8fd..3239d7b90e3c3 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
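Stripped of React state and the module-level cache, the lookup order implemented by usePackageIconType above is roughly the following. ICON_TYPES is the real @elastic/eui export; the helper, the IconDef shape, and the toImage signature are illustrative.

import { ICON_TYPES } from '@elastic/eui';

interface IconDef {
  src: string;
  type?: string; // e.g. 'image/svg+xml'
}

function resolvePackageIconType(
  packageName: string,
  icons: IconDef[] | undefined,
  toImage: (src: string) => string
): string {
  // 1. Prefer an SVG icon shipped with the package itself.
  const svgIcon = icons?.find(icon => icon.type === 'image/svg+xml');
  if (svgIcon) {
    return toImage(svgIcon.src);
  }
  // 2. Otherwise try a matching EUI logo icon, e.g. `logonginx` for the "nginx" package.
  const euiLogoIcon = ICON_TYPES.find(key => key.toLowerCase() === `logo${packageName}`);
  if (euiLogoIcon) {
    return euiLogoIcon;
  }
  // 3. Fall back to the generic 'package' icon.
  return 'package';
}

The hook itself additionally caches results per `${packageName}-${version}` key and, when tryApi is set and no icons were passed in, asks the package info API for an icon list before settling on the fallback.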
*/ -import { EuiPage, EuiPageBody, EuiPageProps, ICON_TYPES } from '@elastic/eui'; +import { EuiPage, EuiPageBody, EuiPageProps } from '@elastic/eui'; import React, { Fragment, useEffect, useState } from 'react'; import { useParams } from 'react-router-dom'; import styled from 'styled-components'; @@ -12,7 +12,7 @@ import { PackageInfo } from '../../../../types'; import { useSetPackageInstallStatus } from '../../hooks'; import { Content } from './content'; import { Header } from './header'; -import { sendGetPackageInfoByKey } from '../../../../hooks'; +import { sendGetPackageInfoByKey, usePackageIconType } from '../../../../hooks'; export const DEFAULT_PANEL: DetailViewPanelName = 'overview'; @@ -62,8 +62,8 @@ const FullWidthContent = styled(EuiPage)` type LayoutProps = PackageInfo & Pick & Pick; export function DetailLayout(props: LayoutProps) { - const { name, restrictWidth } = props; - const iconType = ICON_TYPES.find(key => key.toLowerCase() === `logo${name}`); + const { name: packageName, version, icons, restrictWidth } = props; + const iconType = usePackageIconType({ packageName, version, icons }); return ( diff --git a/x-pack/plugins/ingest_manager/server/services/agents/crud.ts b/x-pack/plugins/ingest_manager/server/services/agents/crud.ts index 41bd2476c99a1..ec270884e62b4 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/crud.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/crud.ts @@ -14,6 +14,7 @@ import { } from '../../constants'; import { AgentSOAttributes, Agent, AgentEventSOAttributes } from '../../types'; import { savedObjectToAgent } from './saved_objects'; +import { escapeSearchQueryPhrase } from '../saved_object'; export async function listAgents( soClient: SavedObjectsClientContract, @@ -72,14 +73,16 @@ export async function getAgentByAccessAPIKeyId( const response = await soClient.find({ type: AGENT_SAVED_OBJECT_TYPE, searchFields: ['access_api_key_id'], - search: accessAPIKeyId, + search: escapeSearchQueryPhrase(accessAPIKeyId), }); - const [agent] = response.saved_objects.map(savedObjectToAgent); if (!agent) { throw Boom.notFound('Agent not found'); } + if (agent.access_api_key_id !== accessAPIKeyId) { + throw new Error('Agent api key id is not matching'); + } if (!agent.active) { throw Boom.forbidden('Agent inactive'); } diff --git a/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts b/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts index 329945b669f8f..57362e6b4b0de 100644 --- a/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts +++ b/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts @@ -8,6 +8,7 @@ import { SavedObjectsClientContract, SavedObject, KibanaRequest } from 'src/core import { ENROLLMENT_API_KEYS_SAVED_OBJECT_TYPE } from '../../constants'; import { EnrollmentAPIKeySOAttributes, EnrollmentAPIKey } from '../../types'; import { createAPIKey } from './security'; +import { escapeSearchQueryPhrase } from '../saved_object'; export { invalidateAPIKey } from './security'; export * from './enrollment_api_key'; @@ -71,10 +72,14 @@ export async function getEnrollmentAPIKeyById( await soClient.find({ type: ENROLLMENT_API_KEYS_SAVED_OBJECT_TYPE, searchFields: ['api_key_id'], - search: apiKeyId, + search: escapeSearchQueryPhrase(apiKeyId), }) ).saved_objects.map(_savedObjectToEnrollmentApiKey); + if (enrollmentAPIKey?.api_key_id !== apiKeyId) { + throw new Error('find enrollmentKeyById returned an incorrect key'); + } + return enrollmentAPIKey; } diff --git 
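Both lookups hardened above follow the same defensive pattern: quote the identifier before handing it to the saved objects client as a search phrase, then verify the returned document really matches, because search is not an exact-match filter. A simplified sketch follows; the constant and attribute names come from the diff, while the function body is illustrative and omits the Boom errors and mapping helpers used in the real code.

import { SavedObjectsClientContract } from 'src/core/server';
import { AGENT_SAVED_OBJECT_TYPE } from '../../constants';
import { escapeSearchQueryPhrase } from '../saved_object';

async function findAgentByAccessAPIKeyId(
  soClient: SavedObjectsClientContract,
  accessAPIKeyId: string
) {
  const response = await soClient.find<{ access_api_key_id?: string }>({
    type: AGENT_SAVED_OBJECT_TYPE,
    searchFields: ['access_api_key_id'],
    search: escapeSearchQueryPhrase(accessAPIKeyId), // e.g. '-abc' becomes '"-abc"'
  });
  const [agent] = response.saved_objects;
  if (!agent) {
    throw new Error('Agent not found');
  }
  // The query above is a phrase search, not an exact filter, so double-check the match.
  if (agent.attributes.access_api_key_id !== accessAPIKeyId) {
    throw new Error('Agent api key id is not matching');
  }
  return agent;
}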
a/x-pack/plugins/ingest_manager/server/services/saved_object.test.ts b/x-pack/plugins/ingest_manager/server/services/saved_object.test.ts new file mode 100644 index 0000000000000..9eb5dccb76ac5 --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/saved_object.test.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { escapeSearchQueryPhrase } from './saved_object'; + +describe('Saved object service', () => { + describe('escapeSearchQueryPhrase', () => { + it('should return value between quotes', () => { + const res = escapeSearchQueryPhrase('-test'); + + expect(res).toEqual('"-test"'); + }); + + it('should escape quotes', () => { + const res = escapeSearchQueryPhrase('test1"test2'); + + expect(res).toEqual(`"test1\"test2"`); + }); + }); +}); diff --git a/x-pack/plugins/ingest_manager/server/services/saved_object.ts b/x-pack/plugins/ingest_manager/server/services/saved_object.ts new file mode 100644 index 0000000000000..8fe7ffcdfc896 --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/saved_object.ts @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Escape a value with double quote to use with saved object search + * Example: escapeSearchQueryPhrase('-test"toto') => '"-test\"toto""' + * @param val + */ +export function escapeSearchQueryPhrase(val: string): string { + return `"${val.replace(/["]/g, '"')}"`; +} diff --git a/x-pack/plugins/maps/common/constants.ts b/x-pack/plugins/maps/common/constants.ts index bd4406ef5ce63..f3997f741a1bf 100644 --- a/x-pack/plugins/maps/common/constants.ts +++ b/x-pack/plugins/maps/common/constants.ts @@ -213,3 +213,5 @@ export enum SCALING_TYPES { CLUSTERS = 'CLUSTERS', TOP_HITS = 'TOP_HITS', } + +export const RGBA_0000 = 'rgba(0,0,0,0)'; diff --git a/x-pack/plugins/ml/common/util/es_utils.ts b/x-pack/plugins/ml/common/util/es_utils.ts index bed7ba8bc7736..ff632a60dd516 100644 --- a/x-pack/plugins/ml/common/util/es_utils.ts +++ b/x-pack/plugins/ml/common/util/es_utils.ts @@ -26,6 +26,7 @@ function isValidIndexNameLength(indexName: string) { // https://github.com/elastic/elasticsearch/blob/master/docs/reference/indices/create-index.asciidoc export function isValidIndexName(indexName: string) { return ( + typeof indexName === 'string' && // Lowercase only indexName === indexName.toLowerCase() && // Cannot include \, /, *, ?, ", <, >, |, space character, comma, #, : diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts index d77f19c0df79d..511ebb7e1647a 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts @@ -33,6 +33,7 @@ interface OutlierAnalysis { interface Regression { dependent_variable: string; training_percent?: number; + num_top_feature_importance_values?: number; prediction_field_name?: string; } export interface RegressionAnalysis { @@ -44,6 +45,7 @@ interface Classification { dependent_variable: string; training_percent?: number; num_top_classes?: 
string; + num_top_feature_importance_values?: number; prediction_field_name?: string; } export interface ClassificationAnalysis { @@ -65,6 +67,8 @@ export const SEARCH_SIZE = 1000; export const TRAINING_PERCENT_MIN = 1; export const TRAINING_PERCENT_MAX = 100; +export const NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN = 0; + export const defaultSearchQuery = { match_all: {}, }; @@ -152,7 +156,7 @@ type AnalysisConfig = | ClassificationAnalysis | GenericAnalysis; -export const getAnalysisType = (analysis: AnalysisConfig) => { +export const getAnalysisType = (analysis: AnalysisConfig): string => { const keys = Object.keys(analysis); if (keys.length === 1) { @@ -162,7 +166,11 @@ export const getAnalysisType = (analysis: AnalysisConfig) => { return 'unknown'; }; -export const getDependentVar = (analysis: AnalysisConfig) => { +export const getDependentVar = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['dependent_variable'] + | ClassificationAnalysis['classification']['dependent_variable'] => { let depVar = ''; if (isRegressionAnalysis(analysis)) { @@ -175,7 +183,11 @@ export const getDependentVar = (analysis: AnalysisConfig) => { return depVar; }; -export const getTrainingPercent = (analysis: AnalysisConfig) => { +export const getTrainingPercent = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['training_percent'] + | ClassificationAnalysis['classification']['training_percent'] => { let trainingPercent; if (isRegressionAnalysis(analysis)) { @@ -188,7 +200,11 @@ export const getTrainingPercent = (analysis: AnalysisConfig) => { return trainingPercent; }; -export const getPredictionFieldName = (analysis: AnalysisConfig) => { +export const getPredictionFieldName = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['prediction_field_name'] + | ClassificationAnalysis['classification']['prediction_field_name'] => { // If undefined will be defaulted to dependent_variable when config is created let predictionFieldName; if (isRegressionAnalysis(analysis) && analysis.regression.prediction_field_name !== undefined) { @@ -202,6 +218,26 @@ export const getPredictionFieldName = (analysis: AnalysisConfig) => { return predictionFieldName; }; +export const getNumTopFeatureImportanceValues = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['num_top_feature_importance_values'] + | ClassificationAnalysis['classification']['num_top_feature_importance_values'] => { + let numTopFeatureImportanceValues; + if ( + isRegressionAnalysis(analysis) && + analysis.regression.num_top_feature_importance_values !== undefined + ) { + numTopFeatureImportanceValues = analysis.regression.num_top_feature_importance_values; + } else if ( + isClassificationAnalysis(analysis) && + analysis.classification.num_top_feature_importance_values !== undefined + ) { + numTopFeatureImportanceValues = analysis.classification.num_top_feature_importance_values; + } + return numTopFeatureImportanceValues; +}; + export const getPredictedFieldName = ( resultsField: string, analysis: AnalysisConfig, diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts index 59b42935a141d..92d8731959895 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts @@ -7,12 +7,13 @@ import { getNestedProperty } from '../../util/object_utils'; import { DataFrameAnalyticsConfig, + 
getNumTopFeatureImportanceValues, getPredictedFieldName, getDependentVar, getPredictionFieldName, } from './analytics'; import { Field } from '../../../../common/types/fields'; -import { ES_FIELD_TYPES } from '../../../../../../../src/plugins/data/public'; +import { ES_FIELD_TYPES, KBN_FIELD_TYPES } from '../../../../../../../src/plugins/data/public'; import { newJobCapsService } from '../../services/new_job_capabilities_service'; export type EsId = string; @@ -254,6 +255,7 @@ export const getDefaultFieldsFromJobCaps = ( const dependentVariable = getDependentVar(jobConfig.analysis); const type = newJobCapsService.getFieldById(dependentVariable)?.type; const predictionFieldName = getPredictionFieldName(jobConfig.analysis); + const numTopFeatureImportanceValues = getNumTopFeatureImportanceValues(jobConfig.analysis); // default is 'ml' const resultsField = jobConfig.dest.results_field; @@ -261,7 +263,20 @@ export const getDefaultFieldsFromJobCaps = ( const predictedField = `${resultsField}.${ predictionFieldName ? predictionFieldName : defaultPredictionField }`; - // Only need to add these first two fields if we didn't use dest index pattern to get the fields + + const featureImportanceFields = []; + + if ((numTopFeatureImportanceValues ?? 0) > 0) { + featureImportanceFields.push( + ...fields.map(d => ({ + id: `${resultsField}.feature_importance.${d.id}`, + name: `${resultsField}.feature_importance.${d.name}`, + type: KBN_FIELD_TYPES.NUMBER, + })) + ); + } + + // Only need to add these fields if we didn't use dest index pattern to get the fields const allFields: any = needsDestIndexFields === true ? [ @@ -271,16 +286,20 @@ export const getDefaultFieldsFromJobCaps = ( type: ES_FIELD_TYPES.BOOLEAN, }, { id: predictedField, name: predictedField, type }, + ...featureImportanceFields, ] : []; allFields.push(...fields); - // @ts-ignore - allFields.sort(({ name: a }, { name: b }) => sortRegressionResultsFields(a, b, jobConfig)); - - let selectedFields = allFields - .slice(0, DEFAULT_REGRESSION_COLUMNS * 2) - .filter((field: any) => field.name === predictedField || !field.name.includes('.keyword')); + allFields.sort(({ name: a }: { name: string }, { name: b }: { name: string }) => + sortRegressionResultsFields(a, b, jobConfig) + ); + + let selectedFields = allFields.filter( + (field: any) => + field.name === predictedField || + (!field.name.includes('.keyword') && !field.name.includes('.feature_importance.')) + ); if (selectedFields.length > DEFAULT_REGRESSION_COLUMNS) { selectedFields = selectedFields.slice(0, DEFAULT_REGRESSION_COLUMNS); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts index 6225bca592be3..2463da054d140 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts @@ -25,6 +25,7 @@ describe('Analytics job clone action', () => { classification: { dependent_variable: 'y', num_top_classes: 2, + num_top_feature_importance_values: 4, prediction_field_name: 'y_prediction', training_percent: 2, randomize_seed: 6233212276062807000, @@ -90,6 +91,7 @@ describe('Analytics job clone action', () => { prediction_field_name: 'stab_prediction', 
training_percent: 20, randomize_seed: -2228827740028660200, + num_top_feature_importance_values: 4, }, }, analyzed_fields: { @@ -120,6 +122,7 @@ describe('Analytics job clone action', () => { classification: { dependent_variable: 'y', num_top_classes: 2, + num_top_feature_importance_values: 4, prediction_field_name: 'y_prediction', training_percent: 2, randomize_seed: 6233212276062807000, @@ -188,6 +191,7 @@ describe('Analytics job clone action', () => { prediction_field_name: 'stab_prediction', training_percent: 20, randomize_seed: -2228827740028660200, + num_top_feature_importance_values: 4, }, }, analyzed_fields: { @@ -218,6 +222,7 @@ describe('Analytics job clone action', () => { dependent_variable: 'y', training_percent: 71, max_trees: 1500, + num_top_feature_importance_values: 4, }, }, model_memory_limit: '400mb', @@ -243,6 +248,7 @@ describe('Analytics job clone action', () => { dependent_variable: 'y', training_percent: 71, maximum_number_trees: 1500, + num_top_feature_importance_values: 4, }, }, model_memory_limit: '400mb', diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx index 3a0f98fc5acaa..eb1871c98764b 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx @@ -11,7 +11,10 @@ import { i18n } from '@kbn/i18n'; import { DeepReadonly } from '../../../../../../../common/types/common'; import { DataFrameAnalyticsConfig, isOutlierAnalysis } from '../../../../common'; import { isClassificationAnalysis, isRegressionAnalysis } from '../../../../common/analytics'; -import { CreateAnalyticsFormProps } from '../../hooks/use_create_analytics_form'; +import { + CreateAnalyticsFormProps, + DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, +} from '../../hooks/use_create_analytics_form'; import { State } from '../../hooks/use_create_analytics_form/state'; import { DataFrameAnalyticsListRow } from './common'; @@ -97,6 +100,8 @@ const getAnalyticsJobMeta = (config: CloneDataFrameAnalyticsConfig): AnalyticsJo }, num_top_feature_importance_values: { optional: true, + defaultValue: DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, + formKey: 'numTopFeatureImportanceValues', }, class_assignment_objective: { optional: true, @@ -164,6 +169,8 @@ const getAnalyticsJobMeta = (config: CloneDataFrameAnalyticsConfig): AnalyticsJo }, num_top_feature_importance_values: { optional: true, + defaultValue: DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, + formKey: 'numTopFeatureImportanceValues', }, randomize_seed: { optional: true, diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx index 044bb9f517001..e5f30a50ed8f0 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx @@ -10,6 +10,7 @@ import { 
EuiComboBox, EuiComboBoxOptionOption, EuiForm, + EuiFieldNumber, EuiFieldText, EuiFormRow, EuiLink, @@ -41,6 +42,7 @@ import { ANALYSIS_CONFIG_TYPE, DfAnalyticsExplainResponse, FieldSelectionItem, + NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN, TRAINING_PERCENT_MIN, TRAINING_PERCENT_MAX, } from '../../../../common/analytics'; @@ -83,6 +85,8 @@ export const CreateAnalyticsForm: FC = ({ actions, sta maxDistinctValuesError, modelMemoryLimit, modelMemoryLimitValidationResult, + numTopFeatureImportanceValues, + numTopFeatureImportanceValuesValid, previousJobType, previousSourceIndex, sourceIndex, @@ -645,6 +649,54 @@ export const CreateAnalyticsForm: FC = ({ actions, sta data-test-subj="mlAnalyticsCreateJobFlyoutTrainingPercentSlider" /> + {/* num_top_feature_importance_values */} + + {i18n.translate( + 'xpack.ml.dataframe.analytics.create.numTopFeatureImportanceValuesErrorText', + { + defaultMessage: + 'Invalid maximum number of feature importance values.', + } + )} + , + ] + : []), + ]} + > + setFormState({ numTopFeatureImportanceValues: +e.target.value })} + step={1} + value={numTopFeatureImportanceValues} + /> + )} merge(getInitialState(), { form: { @@ -34,7 +41,11 @@ const getMockState = ({ source: { index }, dest: { index: 'the-destination-index' }, analysis: { - classification: { dependent_variable: 'the-variable', training_percent: trainingPercent }, + classification: { + dependent_variable: 'the-variable', + num_top_feature_importance_values: numTopFeatureImportanceValues, + training_percent: trainingPercent, + }, }, model_memory_limit: modelMemoryLimit, }, @@ -173,6 +184,27 @@ describe('useCreateAnalyticsForm', () => { .isValid ).toBe(false); }); + + test('validateAdvancedEditor(): check num_top_feature_importance_values validation', () => { + // valid num_top_feature_importance_values value + expect( + validateAdvancedEditor( + getMockState({ index: 'the-source-index', numTopFeatureImportanceValues: 1 }) + ).isValid + ).toBe(true); + // invalid num_top_feature_importance_values numeric value + expect( + validateAdvancedEditor( + getMockState({ index: 'the-source-index', numTopFeatureImportanceValues: -1 }) + ).isValid + ).toBe(false); + // invalid training_percent numeric value if not an integer + expect( + validateAdvancedEditor( + getMockState({ index: 'the-source-index', numTopFeatureImportanceValues: 1.1 }) + ).isValid + ).toBe(false); + }); }); describe('validateMinMML', () => { @@ -194,3 +226,24 @@ describe('validateMinMML', () => { expect(validateMinMML((undefined as unknown) as string)('')).toEqual(null); }); }); + +describe('validateNumTopFeatureImportanceValues()', () => { + test('should not allow below 0', () => { + expect(validateNumTopFeatureImportanceValues(-1)).toBe(false); + }); + + test('should not allow strings', () => { + expect(validateNumTopFeatureImportanceValues('1')).toBe(false); + }); + + test('should not allow floats', () => { + expect(validateNumTopFeatureImportanceValues(0.1)).toBe(false); + expect(validateNumTopFeatureImportanceValues(1.1)).toBe(false); + expect(validateNumTopFeatureImportanceValues(-1.1)).toBe(false); + }); + + test('should allow 0 and higher', () => { + expect(validateNumTopFeatureImportanceValues(0)).toBe(true); + expect(validateNumTopFeatureImportanceValues(1)).toBe(true); + }); +}); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts 
b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts index 28d8afbcd88cc..ded6e50947035 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts @@ -31,10 +31,12 @@ import { } from '../../../../../../../common/constants/validation'; import { getDependentVar, + getNumTopFeatureImportanceValues, getTrainingPercent, isRegressionAnalysis, isClassificationAnalysis, ANALYSIS_CONFIG_TYPE, + NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN, TRAINING_PERCENT_MIN, TRAINING_PERCENT_MAX, } from '../../../../common/analytics'; @@ -100,6 +102,19 @@ const getSourceIndexString = (state: State) => { return ''; }; +/** + * Validates num_top_feature_importance_values. Must be an integer >= 0. + */ +export const validateNumTopFeatureImportanceValues = ( + numTopFeatureImportanceValues: any +): boolean => { + return ( + typeof numTopFeatureImportanceValues === 'number' && + numTopFeatureImportanceValues >= NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN && + Number.isInteger(numTopFeatureImportanceValues) + ); +}; + export const validateAdvancedEditor = (state: State): State => { const { jobIdEmpty, @@ -147,6 +162,7 @@ export const validateAdvancedEditor = (state: State): State => { let dependentVariableEmpty = false; let excludesValid = true; let trainingPercentValid = true; + let numTopFeatureImportanceValuesValid = true; if ( jobConfig.analysis === undefined && @@ -180,6 +196,7 @@ export const validateAdvancedEditor = (state: State): State => { if ( trainingPercent !== undefined && (isNaN(trainingPercent) || + typeof trainingPercent !== 'number' || trainingPercent < TRAINING_PERCENT_MIN || trainingPercent > TRAINING_PERCENT_MAX) ) { @@ -189,7 +206,7 @@ export const validateAdvancedEditor = (state: State): State => { error: i18n.translate( 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.trainingPercentInvalid', { - defaultMessage: 'The training percent must be a value between {min} and {max}.', + defaultMessage: 'The training percent must be a number between {min} and {max}.', values: { min: TRAINING_PERCENT_MIN, max: TRAINING_PERCENT_MAX, @@ -199,6 +216,28 @@ export const validateAdvancedEditor = (state: State): State => { message: '', }); } + + const numTopFeatureImportanceValues = getNumTopFeatureImportanceValues(jobConfig.analysis); + if (numTopFeatureImportanceValues !== undefined) { + numTopFeatureImportanceValuesValid = validateNumTopFeatureImportanceValues( + numTopFeatureImportanceValues + ); + if (numTopFeatureImportanceValuesValid === false) { + state.advancedEditorMessages.push({ + error: i18n.translate( + 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.numTopFeatureImportanceValuesInvalid', + { + defaultMessage: + 'The value for num_top_feature_importance_values must be an integer of {min} or higher.', + values: { + min: 0, + }, + } + ), + message: '', + }); + } + } } if (sourceIndexNameEmpty) { @@ -233,6 +272,17 @@ export const validateAdvancedEditor = (state: State): State => { ), message: '', }); + } else if (destinationIndexPatternTitleExists && !createIndexPattern) { + state.advancedEditorMessages.push({ + error: i18n.translate( + 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.destinationIndexNameExistsWarn', + { + defaultMessage: + 'An index with this destination index name already 
exists. Be aware that running this analytics job will modify this destination index.', + } + ), + message: '', + }); } else if (!destinationIndexNameValid) { state.advancedEditorMessages.push({ error: i18n.translate( @@ -276,6 +326,8 @@ export const validateAdvancedEditor = (state: State): State => { }); } + state.form.destinationIndexPatternTitleExists = destinationIndexPatternTitleExists; + state.isValid = maxDistinctValuesError === undefined && excludesValid && @@ -290,6 +342,7 @@ export const validateAdvancedEditor = (state: State): State => { destinationIndexNameValid && !dependentVariableEmpty && !modelMemoryLimitEmpty && + numTopFeatureImportanceValuesValid && (!destinationIndexPatternTitleExists || !createIndexPattern); return state; @@ -343,6 +396,7 @@ const validateForm = (state: State): State => { dependentVariable, maxDistinctValuesError, modelMemoryLimit, + numTopFeatureImportanceValuesValid, } = state.form; const { estimatedModelMemoryLimit } = state; @@ -368,6 +422,7 @@ const validateForm = (state: State): State => { !destinationIndexNameEmpty && destinationIndexNameValid && !dependentVariableEmpty && + numTopFeatureImportanceValuesValid && (!destinationIndexPatternTitleExists || !createIndexPattern); return state; @@ -443,6 +498,12 @@ export function reducer(state: State, action: Action): State { newFormState.sourceIndexNameValid = Object.keys(validationMessages).length === 0; } + if (action.payload.numTopFeatureImportanceValues !== undefined) { + newFormState.numTopFeatureImportanceValuesValid = validateNumTopFeatureImportanceValues( + newFormState?.numTopFeatureImportanceValues + ); + } + return state.isAdvancedEditorEnabled ? validateAdvancedEditor({ ...state, form: newFormState }) : validateForm({ ...state, form: newFormState }); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts index fe741fe9a92d4..01a39d2ef9f3b 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts @@ -25,6 +25,8 @@ export enum DEFAULT_MODEL_MEMORY_LIMIT { classification = '100mb', } +export const DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES = 2; + export type EsIndexName = string; export type DependentVariable = string; export type IndexPatternTitle = string; @@ -69,6 +71,8 @@ export interface State { modelMemoryLimit: string | undefined; modelMemoryLimitUnitValid: boolean; modelMemoryLimitValidationResult: any; + numTopFeatureImportanceValues: number | undefined; + numTopFeatureImportanceValuesValid: boolean; previousJobType: null | AnalyticsJobType; previousSourceIndex: EsIndexName | undefined; sourceIndex: EsIndexName; @@ -124,6 +128,8 @@ export const getInitialState = (): State => ({ modelMemoryLimit: undefined, modelMemoryLimitUnitValid: true, modelMemoryLimitValidationResult: null, + numTopFeatureImportanceValues: DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, + numTopFeatureImportanceValuesValid: true, previousJobType: null, previousSourceIndex: undefined, sourceIndex: '', @@ -184,6 +190,7 @@ export const getJobConfigFromFormState = ( jobConfig.analysis = { [formState.jobType]: { dependent_variable: formState.dependentVariable, + num_top_feature_importance_values: 
formState.numTopFeatureImportanceValues, training_percent: formState.trainingPercent, }, }; @@ -218,6 +225,7 @@ export function getCloneFormStateFromJobConfig( const analysisConfig = analyticsJobConfig.analysis[jobType]; resultState.dependentVariable = analysisConfig.dependent_variable; + resultState.numTopFeatureImportanceValues = analysisConfig.num_top_feature_importance_values; resultState.trainingPercent = analysisConfig.training_percent; } diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts index 44bfc0c5a472c..2478dbf7cf63d 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts @@ -47,7 +47,8 @@ export const useCreateAnalyticsForm = (): CreateAnalyticsFormProps => { const { refresh } = useRefreshAnalyticsList(); const { form, jobConfig, isAdvancedEditorEnabled } = state; - const { createIndexPattern, destinationIndex, jobId } = form; + const { createIndexPattern, jobId } = form; + let { destinationIndex } = form; const addRequestMessage = (requestMessage: FormMessage) => dispatch({ type: ACTION.ADD_REQUEST_MESSAGE, requestMessage }); @@ -90,9 +91,13 @@ export const useCreateAnalyticsForm = (): CreateAnalyticsFormProps => { resetRequestMessages(); setIsModalButtonDisabled(true); - const analyticsJobConfig = isAdvancedEditorEnabled + const analyticsJobConfig = (isAdvancedEditorEnabled ? jobConfig - : getJobConfigFromFormState(form); + : getJobConfigFromFormState(form)) as DataFrameAnalyticsConfig; + + if (isAdvancedEditorEnabled) { + destinationIndex = analyticsJobConfig.dest.index; + } try { await ml.dataFrameAnalytics.createDataFrameAnalytics(jobId, analyticsJobConfig); diff --git a/x-pack/plugins/reporting/public/lib/reporting_api_client.ts b/x-pack/plugins/reporting/public/lib/reporting_api_client.ts index cddfcd3ec855a..b6c33860752d6 100644 --- a/x-pack/plugins/reporting/public/lib/reporting_api_client.ts +++ b/x-pack/plugins/reporting/public/lib/reporting_api_client.ts @@ -9,12 +9,7 @@ import rison from 'rison-node'; import { HttpSetup } from 'src/core/public'; import { add } from './job_completion_notifications'; -import { - API_LIST_URL, - API_BASE_URL, - API_BASE_GENERATE, - REPORTING_MANAGEMENT_HOME, -} from '../../constants'; +import { API_LIST_URL, API_BASE_GENERATE, REPORTING_MANAGEMENT_HOME } from '../../constants'; import { JobId, SourceJob } from '../..'; export interface JobQueueEntry { @@ -129,12 +124,17 @@ export class ReportingAPIClient { }); }; + /* + * Return a URL to queue a job, with the job params encoded in the query string of the URL. 
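Looking back at the analytics-form changes above, the new setting flows from form state into the job's analysis section and is checked with the same rule in both the form and the advanced editor. With made-up values:

// Illustrative: the analysis section getJobConfigFromFormState() builds, with the new
// field left at DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES (2).
const analysis = {
  regression: {
    dependent_variable: 'price',
    training_percent: 80,
    num_top_feature_importance_values: 2,
  },
};

// The rule enforced by validateNumTopFeatureImportanceValues(): an integer of 0 or higher.
const value = analysis.regression.num_top_feature_importance_values;
const isValid = typeof value === 'number' && Number.isInteger(value) && value >= 0;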
  public getReportingJobPath = (exportType: string, jobParams: JobParams) => {
     const params = stringify({ jobParams: rison.encode(jobParams) });
-
-    return `${this.http.basePath.prepend(API_BASE_URL)}/${exportType}?${params}`;
+    return `${this.http.basePath.prepend(API_BASE_GENERATE)}/${exportType}?${params}`;
   };
 
+  /*
+   * Sends a request to queue a job, with the job params in the POST body.
+   */
   public createReportingJob = async (exportType: string, jobParams: any) => {
     const jobParamsRison = rison.encode(jobParams);
     const resp = await this.http.post(`${API_BASE_GENERATE}/${exportType}`, {
@@ -154,5 +154,8 @@ export class ReportingAPIClient {
   public getDownloadLink = (jobId: JobId) =>
     this.http.basePath.prepend(`${API_LIST_URL}/download/${jobId}`);
 
-  public getBasePath = () => this.http.basePath.get();
+  /*
+   * Provides the raw server basePath so it can be stripped out of the relative URLs in job params.
+   */
+  public getServerBasePath = () => this.http.basePath.serverBasePath;
 }
diff --git a/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx b/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx
index e9eaa9c2ed2a1..2a955ea398bd4 100644
--- a/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx
+++ b/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx
@@ -58,9 +58,10 @@ export const reportingPDFPNGProvider = ({
   }
 
   const getReportingJobParams = () => {
+    // Relative URL must have the URL prefix (Spaces ID prefix), but not the server basePath
     // Replace hashes with original RISON values.
     const relativeUrl = shareableUrl.replace(
-      window.location.origin + apiClient.getBasePath(),
+      window.location.origin + apiClient.getServerBasePath(),
       ''
     );
 
@@ -80,7 +81,7 @@ export const reportingPDFPNGProvider = ({
   const getPngJobParams = () => {
     // Replace hashes with original RISON values.
     const relativeUrl = shareableUrl.replace(
-      window.location.origin + apiClient.getBasePath(),
+      window.location.origin + apiClient.getServerBasePath(),
       ''
     );
 
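The getBasePath to getServerBasePath switch is the core of the reporting change: on the client, basePath.get() includes the space prefix (for example /s/marketing) on top of the server basePath, while serverBasePath is only the part the reporting plugin should strip, so the space prefix survives into the job's relative URL. A small illustration of the intent, with made-up values rather than code from this patch:

// Illustrative values only; in the plugin these come from window.location and the API client.
const origin = 'http://localhost:5601';
const serverBasePath = '/kbn'; // what apiClient.getServerBasePath() would return
const spaceAwareBasePath = '/kbn/s/marketing'; // what basePath.get() would return inside a space
const shareableUrl = `${origin}${spaceAwareBasePath}/app/dashboards#/view/abc`;

// Stripping only the server basePath keeps the Spaces prefix in the relative URL,
// so the report job still resolves against the correct space:
const relativeUrl = shareableUrl.replace(origin + serverBasePath, '');
// relativeUrl === '/s/marketing/app/dashboards#/view/abc'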
diff --git a/x-pack/test/accessibility/apps/grok_debugger.ts b/x-pack/test/accessibility/apps/grok_debugger.ts
new file mode 100644
index 0000000000000..0b052d39a4db8
--- /dev/null
+++ b/x-pack/test/accessibility/apps/grok_debugger.ts
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { FtrProviderContext } from '../ftr_provider_context';
+
+export default function({ getService, getPageObjects }: FtrProviderContext) {
+  const PageObjects = getPageObjects(['common', 'security']);
+  const a11y = getService('a11y');
+  const grokDebugger = getService('grokDebugger');
+
+  // This suite is skipped because there is an a11y violation: https://github.com/elastic/kibana/issues/62102
+  describe.skip('Dev tools grok debugger', () => {
+    before(async () => {
+      await PageObjects.common.navigateToApp('grokDebugger');
+      await grokDebugger.assertExists();
+    });
+
+    it('Dev tools grok debugger set input', async () => {
+      await grokDebugger.setEventInput('SegerCommaBob');
+      await a11y.testAppSnapshot();
+    });
+
+    it('Dev tools grok debugger set pattern', async () => {
+      await grokDebugger.setPatternInput('%{USERNAME:u}');
+      await a11y.testAppSnapshot();
+    });
+
+    it('Dev tools grok debugger simulate', async () => {
+      await grokDebugger.clickSimulate();
+      await a11y.testAppSnapshot();
+    });
+  });
+}
diff --git a/x-pack/test/accessibility/apps/home.ts b/x-pack/test/accessibility/apps/home.ts
new file mode 100644
index 0000000000000..f40976f09f9c8
--- /dev/null
+++ b/x-pack/test/accessibility/apps/home.ts
@@ -0,0 +1,67 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { FtrProviderContext } from '../ftr_provider_context';
+
+export default function({ getService, getPageObjects }: FtrProviderContext) {
+  const PageObjects = getPageObjects(['common', 'home']);
+  const a11y = getService('a11y');
+
+  describe('Kibana Home', () => {
+    before(async () => {
+      await PageObjects.common.navigateToApp('home');
+    });
+
+    it('Kibana Home view', async () => {
+      await a11y.testAppSnapshot();
+    });
+
+    it('all plugins view page meets a11y requirements', async () => {
+      await PageObjects.home.clickAllKibanaPlugins();
+      await a11y.testAppSnapshot();
+    });
+
+    it('visualize & explore details tab meets a11y requirements', async () => {
+      await PageObjects.home.clickVisualizeExplorePlugins();
+      await a11y.testAppSnapshot();
+    });
+
+    it('administrative detail tab meets a11y requirements', async () => {
+      await PageObjects.home.clickAdminPlugin();
+      await a11y.testAppSnapshot();
+    });
+
+    it('navigating to console app from administration tab meets a11y requirements', async () => {
+      await PageObjects.home.clickOnConsole();
+      await a11y.testAppSnapshot();
+    });
+
+    // issue: https://github.com/elastic/kibana/issues/38980
+    it.skip('navigating back to home page from console meets a11y requirements', async () => {
+      await PageObjects.home.clickOnLogo();
+      await a11y.testAppSnapshot();
+    });
+
+    // The extra click-on-logo step here will be removed once the preceding test is fixed.
+    it('click on Add logs panel to open all log examples page meets a11y requirements', async () => {
+      await PageObjects.home.clickOnLogo();
+      await PageObjects.home.ClickOnLogsData();
+      await a11y.testAppSnapshot();
+    });
+
+    // issue: logo images are missing alt text https://github.com/elastic/kibana/issues/62239
+    it.skip('click on ActiveMQ logs panel to open tutorial meets a11y requirements', async () => {
+      await PageObjects.home.clickOnLogsTutorial();
+      await a11y.testAppSnapshot();
+    });
+
+    // https://github.com/elastic/kibana/issues/62239
+    it.skip('click on cloud tutorial meets a11y requirements', async () => {
+      await PageObjects.home.clickOnCloudTutorial();
+      await a11y.testAppSnapshot();
+    });
+  });
+}
diff --git a/x-pack/test/accessibility/config.ts b/x-pack/test/accessibility/config.ts
index a9ac7c71d3e79..7bf6079cc6487 100644
--- a/x-pack/test/accessibility/config.ts
+++ b/x-pack/test/accessibility/config.ts
@@ -13,7 +13,11 @@ export default async function({ readConfigFile }: FtrConfigProviderContext) {
 
   return {
     ...functionalConfig.getAll(),
-    testFiles: [require.resolve('./apps/login_page')],
+    testFiles: [
+      require.resolve('./apps/login_page'),
+      require.resolve('./apps/home'),
+      require.resolve('./apps/grok_debugger'),
+    ],
     pageObjects,
     services,
diff --git a/x-pack/test/api_integration/apis/fleet/agents/acks.ts b/x-pack/test/api_integration/apis/fleet/agents/acks.ts
index db925813b90c4..a2eba2c23c39d 100644
--- a/x-pack/test/api_integration/apis/fleet/agents/acks.ts
+++ b/x-pack/test/api_integration/apis/fleet/agents/acks.ts
@@ -18,8 +18,7 @@ export default function(providerContext: FtrProviderContext) {
   const supertest = getSupertestWithoutAuth(providerContext);
   let apiKey: { id: string; api_key: string };
 
-  // FLAKY: https://github.com/elastic/kibana/issues/60471
-  describe.skip('fleet_agents_acks', () => {
+  describe('fleet_agents_acks', () => {
     before(async () => {
       await esArchiver.loadIfNeeded('fleet/agents');
 
diff --git a/x-pack/test/functional/apps/maps/discover.js b/x-pack/test/functional/apps/maps/discover.js
index ce33596476755..43a7a93ad62e4 100644
--- a/x-pack/test/functional/apps/maps/discover.js
+++ b/x-pack/test/functional/apps/maps/discover.js
@@ -17,7 +17,6 @@ export default function({ getService, getPageObjects }) {
 
   it('should link geo_shape fields to Maps application', async () => {
     await PageObjects.discover.selectIndexPattern('geo_shapes*');
-    await PageObjects.discover.clickFieldListItem('geometry');
     await PageObjects.discover.clickFieldListItemVisualize('geometry');
     await PageObjects.header.waitUntilLoadingHasFinished();
     await PageObjects.maps.waitForLayersToLoad();
@@ -37,7 +36,6 @@ export default function({ getService, getPageObjects }) {
     await queryBar.submitQuery();
     await PageObjects.header.waitUntilLoadingHasFinished();
 
-    await PageObjects.discover.clickFieldListItem('geo.coordinates');
     await PageObjects.discover.clickFieldListItemVisualize('geo.coordinates');
     await PageObjects.header.waitUntilLoadingHasFinished();
     await PageObjects.maps.waitForLayersToLoad();
diff --git a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz
index c1a3c44cb8d8d..feb2af93b0fd1 100644
Binary files a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz and b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz differ
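Both the re-enabled fleet_agents_acks suite and the endpoint alert tests lean on the FTR es_archiver service to load fixture data such as the archive updated above. As a hedged sketch of the usual load/unload pattern (the suite name, import path, and test body are illustrative, not taken from this patch):

// Sketch of a typical FTR suite using the endpoint alert archive; not part of this patch.
import { FtrProviderContext } from '../../ftr_provider_context';

export default function({ getService }: FtrProviderContext) {
  const esArchiver = getService('esArchiver');

  describe('endpoint alerts api_feature data (example)', () => {
    // Load the archive before the suite runs and clean it up afterwards.
    before(() => esArchiver.load('endpoint/alerts/api_feature'));
    after(() => esArchiver.unload('endpoint/alerts/api_feature'));

    it('has alert documents to assert against', async () => {
      // assertions against the loaded documents would go here
    });
  });
}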
diff --git a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json
index e0a7068e1149a..64dc395ab69a4 100644
--- a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json
+++ b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json
@@ -94,7 +94,7 @@
           }
         }
       },
-      "malware_classifier": {
+      "malware_classification": {
         "properties": {
           "features": {
             "properties": {
@@ -454,7 +454,7 @@
           }
         }
       },
-      "malware_classifier": {
+      "malware_classification": {
         "properties": {
           "features": {
             "properties": {
@@ -851,7 +851,7 @@
           }
         }
       },
-      "malware_classifier": {
+      "malware_classification": {
         "properties": {
           "features": {
             "properties": {
@@ -1496,7 +1496,7 @@
           }
         }
       },
-      "malware_classifier": {
+      "malware_classification": {
         "properties": {
           "features": {
             "properties": {
@@ -1689,7 +1689,7 @@
           }
         }
       },
-      "malware_classifier": {
+      "malware_classification": {
         "properties": {
           "features": {
             "properties": {
diff --git a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts
index 347eb5e14d0a8..029af1ea06e4f 100644
--- a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts
+++ b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts
@@ -38,7 +38,8 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
     return createdAlert;
   }
 
-  describe('alerts', function() {
+  // FLAKY: https://github.com/elastic/kibana/issues/62472
+  describe.skip('alerts', function() {
     before(async () => {
       await pageObjects.common.navigateToApp('triggersActions');
       await testSubjects.click('alertsTab');
diff --git a/yarn.lock b/yarn.lock
index 8176eab436afd..d9edb55a32039 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9651,7 +9651,7 @@ core-js@^2.2.0, core-js@^2.4.0, core-js@^2.5.0, core-js@^2.5.1, core-js@^2.5.3,
   resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.9.tgz#6b4b214620c834152e179323727fc19741b084f2"
   integrity sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A==
 
-core-js@^3.0.1, core-js@^3.0.4, core-js@^3.2.1, core-js@^3.4.1, core-js@^3.6.4:
+core-js@^3.0.1, core-js@^3.0.4, core-js@^3.4.1, core-js@^3.6.4:
   version "3.6.4"
   resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.6.4.tgz#440a83536b458114b9cb2ac1580ba377dc470647"
   integrity sha512-4paDGScNgZP2IXXilaffL9X7968RuvwlkK3xWtZRVqgd8SYNiVKRJvkFd1aqqEuPfN7E68ZHEp9hDj6lHj4Hyw==
@@ -24284,7 +24284,7 @@ react-router-redux@^4.0.8:
   resolved "https://registry.yarnpkg.com/react-router-redux/-/react-router-redux-4.0.8.tgz#227403596b5151e182377dab835b5d45f0f8054e"
   integrity sha1-InQDWWtRUeGCN32rg1tdRfD4BU4=
 
-react-router@5.1.2:
+react-router@5.1.2, react-router@^5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.1.2.tgz#6ea51d789cb36a6be1ba5f7c0d48dd9e817d3418"
   integrity sha512-yjEuMFy1ONK246B+rsa0cUam5OeAQ8pyclRDgpxuSCrAlJ1qN9uZ5IgyKC7gQg0w8OM50NXHEegPh/ks9YuR2A==
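The yarn.lock hunk above records react-router as a direct dependency spec (react-router@^5.1.2) next to the existing react-router-dom entry. Declaring it explicitly is the usual fix when application code imports from 'react-router' itself rather than from 'react-router-dom', so resolution no longer relies on hoisting of a transitive dependency. A minimal illustration of that kind of import (the component is invented, not code from this patch):

// Invented example component; it only shows the direct 'react-router' import
// that an explicit dependency entry is meant to cover.
import React from 'react';
import { useLocation } from 'react-router';

export const CurrentPath: React.FC = () => {
  const location = useLocation();
  return <span>{location.pathname}</span>;
};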