diff --git a/.ci/Jenkinsfile_coverage b/.ci/Jenkinsfile_coverage index 6b8dc31bab34ed..f2a58e7b6a7ac5 100644 --- a/.ci/Jenkinsfile_coverage +++ b/.ci/Jenkinsfile_coverage @@ -44,7 +44,7 @@ kibanaPipeline(timeoutMinutes: 180) { 'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10), ]), ]) - workers.base(name: 'coverage-worker', label: 'tests-l', ramDisk: false, bootstrapped: false) { + workers.base(name: 'coverage-worker', size: 'l', ramDisk: false, bootstrapped: false) { kibanaPipeline.downloadCoverageArtifacts() kibanaPipeline.bash( ''' diff --git a/.ci/Jenkinsfile_visual_baseline b/.ci/Jenkinsfile_visual_baseline index 5c13ccccd9c6f7..815c1345bbb680 100644 --- a/.ci/Jenkinsfile_visual_baseline +++ b/.ci/Jenkinsfile_visual_baseline @@ -7,12 +7,12 @@ kibanaPipeline(timeoutMinutes: 120) { catchError { parallel([ 'oss-visualRegression': { - workers.ci(name: 'oss-visualRegression', label: 'linux && immutable', ramDisk: false) { + workers.ci(name: 'oss-visualRegression', size: 's', ramDisk: false) { kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1) } }, 'xpack-visualRegression': { - workers.ci(name: 'xpack-visualRegression', label: 'linux && immutable', ramDisk: false) { + workers.ci(name: 'xpack-visualRegression', size: 's', ramDisk: false) { kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1) } }, diff --git a/.ci/es-snapshots/Jenkinsfile_build_es b/.ci/es-snapshots/Jenkinsfile_build_es index a00bcb3bbc9465..a3470cd750738d 100644 --- a/.ci/es-snapshots/Jenkinsfile_build_es +++ b/.ci/es-snapshots/Jenkinsfile_build_es @@ -25,7 +25,7 @@ def PROMOTE_WITHOUT_VERIFY = !!params.PROMOTE_WITHOUT_VERIFICATION timeout(time: 120, unit: 'MINUTES') { timestamps { ansiColor('xterm') { - node('linux && immutable') { + node(workers.label('s')) { catchErrors { def VERSION def SNAPSHOT_ID diff --git a/.ci/es-snapshots/Jenkinsfile_verify_es b/.ci/es-snapshots/Jenkinsfile_verify_es index ce472a404c0538..ade79f27e10e96 100644 --- a/.ci/es-snapshots/Jenkinsfile_verify_es +++ b/.ci/es-snapshots/Jenkinsfile_verify_es @@ -61,7 +61,7 @@ kibanaPipeline(timeoutMinutes: 120) { } def promoteSnapshot(snapshotVersion, snapshotId) { - node('linux && immutable') { + node(workers.label('s')) { esSnapshots.promote(snapshotVersion, snapshotId) } } diff --git a/docs/infrastructure/index.asciidoc b/docs/infrastructure/index.asciidoc index 60695c0e3f1cf4..416e95a8941ce8 100644 --- a/docs/infrastructure/index.asciidoc +++ b/docs/infrastructure/index.asciidoc @@ -21,6 +21,8 @@ You can optionally save these views and add them to {kibana-ref}/dashboard.html[ * Seamlessly switch to view the corresponding logs, application traces or uptime information for a component. +* Create alerts based on metric thresholds for one or more components. + To get started, you need to <>. Then you can <>. [role="screenshot"] diff --git a/docs/infrastructure/metrics-explorer.asciidoc b/docs/infrastructure/metrics-explorer.asciidoc index d47581ffe720ac..793f09ea83b4f5 100644 --- a/docs/infrastructure/metrics-explorer.asciidoc +++ b/docs/infrastructure/metrics-explorer.asciidoc @@ -20,6 +20,7 @@ By default that is set to `@timestamp`. * The interval for the X Axis is set to `auto`. The bucket size is determined by the time range. * To use *Open in Visualize* you need access to the Visualize app. +* To use *Create alert* you need to {kibana-ref}/alerting-getting-started.html#alerting-setup-prerequisites[set up alerting]. 
[float] [[metrics-explorer-tutorial]] @@ -67,4 +68,8 @@ Choose a graph, click the *Actions* dropdown and select *Open In Visualize*. This opens the graph in {kibana-ref}/TSVB.html[TSVB]. From here you can save the graph and add it to a dashboard as usual. +9. You can also create an alert based on the metrics in a graph. +Choose a graph, click the *Actions* dropdown and select *Create alert*. +This opens the {kibana-ref}/defining-alerts.html[alert flyout] prefilled with metrics from the chart. + + Who's the Metrics Explorer now? You are! diff --git a/package.json b/package.json index fd6f6ac1402471..e807cd4d951989 100644 --- a/package.json +++ b/package.json @@ -239,6 +239,7 @@ "react-monaco-editor": "~0.27.0", "react-redux": "^7.1.3", "react-resize-detector": "^4.2.0", + "react-router": "^5.1.2", "react-router-dom": "^5.1.2", "react-sizeme": "^2.3.6", "react-use": "^13.27.0", diff --git a/packages/kbn-optimizer/src/worker/webpack.config.ts b/packages/kbn-optimizer/src/worker/webpack.config.ts index 9337daf419bfac..a3a11783cd82a7 100644 --- a/packages/kbn-optimizer/src/worker/webpack.config.ts +++ b/packages/kbn-optimizer/src/worker/webpack.config.ts @@ -27,7 +27,7 @@ import TerserPlugin from 'terser-webpack-plugin'; import webpackMerge from 'webpack-merge'; // @ts-ignore import { CleanWebpackPlugin } from 'clean-webpack-plugin'; -import * as SharedDeps from '@kbn/ui-shared-deps'; +import * as UiSharedDeps from '@kbn/ui-shared-deps'; import { Bundle, WorkerConfig, parseDirPath, DisallowedSyntaxPlugin } from '../common'; @@ -73,7 +73,7 @@ export function getWebpackConfig(bundle: Bundle, worker: WorkerConfig) { }, externals: { - ...SharedDeps.externals, + ...UiSharedDeps.externals, }, plugins: [new CleanWebpackPlugin(), new DisallowedSyntaxPlugin()], diff --git a/packages/kbn-ui-framework/package.json b/packages/kbn-ui-framework/package.json index bcebdf591d6f03..5ea031595d1d46 100644 --- a/packages/kbn-ui-framework/package.json +++ b/packages/kbn-ui-framework/package.json @@ -38,7 +38,7 @@ "brace": "0.11.1", "chalk": "^2.4.2", "chokidar": "3.2.1", - "core-js": "^3.2.1", + "core-js": "^3.6.4", "css-loader": "^3.4.2", "expose-loader": "^0.7.5", "file-loader": "^4.2.0", diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js index 5028c6efdb40eb..f19271de8ad278 100644 --- a/packages/kbn-ui-shared-deps/entry.js +++ b/packages/kbn-ui-shared-deps/entry.js @@ -17,31 +17,40 @@ * under the License. 
*/ -// import global polyfills before everything else require('./polyfills'); // must load before angular export const Jquery = require('jquery'); window.$ = window.jQuery = Jquery; -export const Angular = require('angular'); -export const ElasticCharts = require('@elastic/charts'); -export const ElasticEui = require('@elastic/eui'); -export const ElasticEuiLibServices = require('@elastic/eui/lib/services'); -export const ElasticEuiLightTheme = require('@elastic/eui/dist/eui_theme_light.json'); -export const ElasticEuiDarkTheme = require('@elastic/eui/dist/eui_theme_dark.json'); +// stateful deps export const KbnI18n = require('@kbn/i18n'); export const KbnI18nAngular = require('@kbn/i18n/angular'); export const KbnI18nReact = require('@kbn/i18n/react'); +export const Angular = require('angular'); export const Moment = require('moment'); export const MomentTimezone = require('moment-timezone/moment-timezone'); +export const Monaco = require('./monaco.ts'); +export const MonacoBare = require('monaco-editor/esm/vs/editor/editor.api'); export const React = require('react'); export const ReactDom = require('react-dom'); +export const ReactDomServer = require('react-dom/server'); export const ReactIntl = require('react-intl'); export const ReactRouter = require('react-router'); // eslint-disable-line export const ReactRouterDom = require('react-router-dom'); -export const Monaco = require('./monaco.ts'); -export const MonacoBare = require('monaco-editor/esm/vs/editor/editor.api'); -// load timezone data into moment-timezone Moment.tz.load(require('moment-timezone/data/packed/latest.json')); + +// big deps which are locked to a single version +export const Rxjs = require('rxjs'); +export const RxjsOperators = require('rxjs/operators'); +export const ElasticCharts = require('@elastic/charts'); +export const ElasticEui = require('@elastic/eui'); +export const ElasticEuiLibServices = require('@elastic/eui/lib/services'); +export const ElasticEuiLibServicesFormat = require('@elastic/eui/lib/services/format'); +export const ElasticEuiLightTheme = require('@elastic/eui/dist/eui_theme_light.json'); +export const ElasticEuiDarkTheme = require('@elastic/eui/dist/eui_theme_dark.json'); +export const ElasticEuiChartsTheme = require('@elastic/eui/dist/eui_charts_theme'); + +// massive deps that we should really get rid of or reduce in size substantially +export const ElasticsearchBrowser = require('elasticsearch-browser/elasticsearch.js'); diff --git a/packages/kbn-ui-shared-deps/index.d.ts b/packages/kbn-ui-shared-deps/index.d.ts index 7ee96050a1248a..dec519da696414 100644 --- a/packages/kbn-ui-shared-deps/index.d.ts +++ b/packages/kbn-ui-shared-deps/index.d.ts @@ -25,7 +25,12 @@ export const distDir: string; /** * Filename of the main bundle file in the distributable directory */ -export const distFilename: string; +export const jsFilename: string; + +/** + * Filename of files that must be loaded before the jsFilename + */ +export const jsDepFilenames: string[]; /** * Filename of the unthemed css file in the distributable directory diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js index d1bb93ddecd0a4..666ec7a46ff06e 100644 --- a/packages/kbn-ui-shared-deps/index.js +++ b/packages/kbn-ui-shared-deps/index.js @@ -20,17 +20,14 @@ const Path = require('path'); exports.distDir = Path.resolve(__dirname, 'target'); -exports.distFilename = 'kbn-ui-shared-deps.js'; +exports.jsDepFilenames = ['kbn-ui-shared-deps.@elastic.js']; +exports.jsFilename = 'kbn-ui-shared-deps.js'; 
exports.baseCssDistFilename = 'kbn-ui-shared-deps.css'; exports.lightCssDistFilename = 'kbn-ui-shared-deps.light.css'; exports.darkCssDistFilename = 'kbn-ui-shared-deps.dark.css'; exports.externals = { + // stateful deps angular: '__kbnSharedDeps__.Angular', - '@elastic/charts': '__kbnSharedDeps__.ElasticCharts', - '@elastic/eui': '__kbnSharedDeps__.ElasticEui', - '@elastic/eui/lib/services': '__kbnSharedDeps__.ElasticEuiLibServices', - '@elastic/eui/dist/eui_theme_light.json': '__kbnSharedDeps__.ElasticEuiLightTheme', - '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.ElasticEuiDarkTheme', '@kbn/i18n': '__kbnSharedDeps__.KbnI18n', '@kbn/i18n/angular': '__kbnSharedDeps__.KbnI18nAngular', '@kbn/i18n/react': '__kbnSharedDeps__.KbnI18nReact', @@ -39,10 +36,31 @@ exports.externals = { 'moment-timezone': '__kbnSharedDeps__.MomentTimezone', react: '__kbnSharedDeps__.React', 'react-dom': '__kbnSharedDeps__.ReactDom', + 'react-dom/server': '__kbnSharedDeps__.ReactDomServer', 'react-intl': '__kbnSharedDeps__.ReactIntl', 'react-router': '__kbnSharedDeps__.ReactRouter', 'react-router-dom': '__kbnSharedDeps__.ReactRouterDom', '@kbn/ui-shared-deps/monaco': '__kbnSharedDeps__.Monaco', // this is how plugins/consumers from npm load monaco 'monaco-editor/esm/vs/editor/editor.api': '__kbnSharedDeps__.MonacoBare', + + /** + * big deps which are locked to a single version + */ + rxjs: '__kbnSharedDeps__.Rxjs', + 'rxjs/operators': '__kbnSharedDeps__.RxjsOperators', + '@elastic/charts': '__kbnSharedDeps__.ElasticCharts', + '@elastic/eui': '__kbnSharedDeps__.ElasticEui', + '@elastic/eui/lib/services': '__kbnSharedDeps__.ElasticEuiLibServices', + '@elastic/eui/lib/services/format': '__kbnSharedDeps__.ElasticEuiLibServicesFormat', + '@elastic/eui/dist/eui_charts_theme': '__kbnSharedDeps__.ElasticEuiChartsTheme', + '@elastic/eui/dist/eui_theme_light.json': '__kbnSharedDeps__.ElasticEuiLightTheme', + '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.ElasticEuiDarkTheme', + + /** + * massive deps that we should really get rid of or reduce in size substantially + */ + elasticsearch: '__kbnSharedDeps__.ElasticsearchBrowser', + 'elasticsearch-browser': '__kbnSharedDeps__.ElasticsearchBrowser', + 'elasticsearch-browser/elasticsearch': '__kbnSharedDeps__.ElasticsearchBrowser', }; diff --git a/packages/kbn-ui-shared-deps/package.json b/packages/kbn-ui-shared-deps/package.json index c76e909d2adbcb..e2823f23d04317 100644 --- a/packages/kbn-ui-shared-deps/package.json +++ b/packages/kbn-ui-shared-deps/package.json @@ -1,37 +1,41 @@ { "name": "@kbn/ui-shared-deps", "version": "1.0.0", - "license": "Apache-2.0", "private": true, + "license": "Apache-2.0", "scripts": { "build": "node scripts/build", "kbn:bootstrap": "node scripts/build --dev", "kbn:watch": "node scripts/build --watch" }, - "devDependencies": { + "dependencies": { "@elastic/charts": "^18.1.1", - "abortcontroller-polyfill": "^1.4.0", "@elastic/eui": "21.0.1", - "@kbn/babel-preset": "1.0.0", - "@kbn/dev-utils": "1.0.0", "@kbn/i18n": "1.0.0", - "@yarnpkg/lockfile": "^1.1.0", + "abortcontroller-polyfill": "^1.4.0", "angular": "^1.7.9", - "core-js": "^3.2.1", - "css-loader": "^3.4.2", + "core-js": "^3.6.4", "custom-event-polyfill": "^0.3.0", - "del": "^5.1.0", + "elasticsearch-browser": "^16.7.0", "jquery": "^3.4.1", - "mini-css-extract-plugin": "0.8.0", "moment": "^2.24.0", "moment-timezone": "^0.5.27", + "monaco-editor": "~0.17.0", "react": "^16.12.0", "react-dom": "^16.12.0", "react-intl": "^2.8.0", - "read-pkg": "^5.2.0", + "react-router": 
"^5.1.2", + "react-router-dom": "^5.1.2", "regenerator-runtime": "^0.13.3", + "rxjs": "^6.5.3", "symbol-observable": "^1.2.0", - "webpack": "^4.41.5", "whatwg-fetch": "^3.0.0" + }, + "devDependencies": { + "@kbn/babel-preset": "1.0.0", + "@kbn/dev-utils": "1.0.0", + "css-loader": "^3.4.2", + "del": "^5.1.0", + "webpack": "^4.41.5" } } diff --git a/packages/kbn-ui-shared-deps/webpack.config.js b/packages/kbn-ui-shared-deps/webpack.config.js index dc6e7ae33dbecd..a8752745449055 100644 --- a/packages/kbn-ui-shared-deps/webpack.config.js +++ b/packages/kbn-ui-shared-deps/webpack.config.js @@ -23,19 +23,19 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const { REPO_ROOT } = require('@kbn/dev-utils'); const webpack = require('webpack'); -const SharedDeps = require('./index'); +const UiSharedDeps = require('./index'); const MOMENT_SRC = require.resolve('moment/min/moment-with-locales.js'); exports.getWebpackConfig = ({ dev = false } = {}) => ({ mode: dev ? 'development' : 'production', entry: { - [SharedDeps.distFilename.replace(/\.js$/, '')]: './entry.js', - [SharedDeps.darkCssDistFilename.replace(/\.css$/, '')]: [ + 'kbn-ui-shared-deps': './entry.js', + 'kbn-ui-shared-deps.dark': [ '@elastic/eui/dist/eui_theme_dark.css', '@elastic/charts/dist/theme_only_dark.css', ], - [SharedDeps.lightCssDistFilename.replace(/\.css$/, '')]: [ + 'kbn-ui-shared-deps.light': [ '@elastic/eui/dist/eui_theme_light.css', '@elastic/charts/dist/theme_only_light.css', ], @@ -43,7 +43,7 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({ context: __dirname, devtool: dev ? '#cheap-source-map' : false, output: { - path: SharedDeps.distDir, + path: UiSharedDeps.distDir, filename: '[name].js', sourceMapFilename: '[file].map', publicPath: '__REPLACE_WITH_PUBLIC_PATH__', @@ -81,6 +81,16 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({ optimization: { noEmitOnErrors: true, + splitChunks: { + cacheGroups: { + 'kbn-ui-shared-deps.@elastic': { + name: 'kbn-ui-shared-deps.@elastic', + test: m => m.resource && m.resource.includes('@elastic'), + chunks: 'all', + enforce: true, + }, + }, + }, }, performance: { diff --git a/packages/kbn-ui-shared-deps/yarn.lock b/packages/kbn-ui-shared-deps/yarn.lock new file mode 120000 index 00000000000000..3f82ebc9cdbae3 --- /dev/null +++ b/packages/kbn-ui-shared-deps/yarn.lock @@ -0,0 +1 @@ +../../yarn.lock \ No newline at end of file diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.test.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.test.ts index 3840fd0c2e3bed..b7b36ca960167e 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.test.ts +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.test.ts @@ -76,3 +76,30 @@ describe('Test discover state', () => { expect(state.getPreviousAppState()).toEqual(stateA); }); }); + +describe('Test discover state with legacy migration', () => { + test('migration of legacy query ', async () => { + history = createBrowserHistory(); + history.push( + "/#?_a=(query:(query_string:(analyze_wildcard:!t,query:'type:nice%20name:%22yeah%22')))" + ); + state = getState({ + defaultAppState: { index: 'test' }, + history, + }); + expect(state.appStateContainer.getState()).toMatchInlineSnapshot(` + Object { + "index": "test", + "query": Object { + "language": "lucene", + "query": Object { + "query_string": Object { + "analyze_wildcard": true, + "query": "type:nice name:\\"yeah\\"", + }, + }, + 
}, + } + `); + }); +}); diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.ts b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.ts index d9e1850cd6a249..2a036f0ac60ad3 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.ts +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/discover_state.ts @@ -129,6 +129,11 @@ export function getState({ }); const appStateFromUrl = stateStorage.get(APP_STATE_URL_KEY) as AppState; + + if (appStateFromUrl && appStateFromUrl.query && !appStateFromUrl.query.language) { + appStateFromUrl.query = migrateLegacyQuery(appStateFromUrl.query); + } + let initialAppState = { ...defaultAppState, ...appStateFromUrl, @@ -179,9 +184,6 @@ export function setState(stateContainer: ReduxLikeStateContainer, newS const oldState = stateContainer.getState(); const mergedState = { ...oldState, ...newState }; if (!isEqualState(oldState, mergedState)) { - if (mergedState.query) { - mergedState.query = migrateLegacyQuery(mergedState.query); - } stateContainer.set(mergedState); } } diff --git a/src/legacy/ui/ui_render/bootstrap/template.js.hbs b/src/legacy/ui/ui_render/bootstrap/template.js.hbs index 106dbcd9f8ab2c..ad4aa97d8ea7a0 100644 --- a/src/legacy/ui/ui_render/bootstrap/template.js.hbs +++ b/src/legacy/ui/ui_render/bootstrap/template.js.hbs @@ -76,24 +76,33 @@ if (window.__kbnStrictCsp__ && window.__kbnCspNotEnforced__) { load({ deps: [ + {{#each sharedJsDepFilenames}} + '{{../regularBundlePath}}/kbn-ui-shared-deps/{{this}}', + {{/each}} + ], + urls: [ { deps: [ - '{{dllBundlePath}}/vendors_runtime.bundle.dll.js' + '{{regularBundlePath}}/kbn-ui-shared-deps/{{sharedJsFilename}}', + { + deps: [ + '{{dllBundlePath}}/vendors_runtime.bundle.dll.js' + ], + urls: [ + {{#each dllJsChunks}} + '{{this}}', + {{/each}} + ] + }, + '{{regularBundlePath}}/commons.bundle.js', ], urls: [ - {{#each dllJsChunks}} + '{{regularBundlePath}}/{{appId}}.bundle.js', + {{#each styleSheetPaths}} '{{this}}', {{/each}} ] - }, - '{{regularBundlePath}}/kbn-ui-shared-deps/{{sharedDepsFilename}}', - '{{regularBundlePath}}/commons.bundle.js', - ], - urls: [ - '{{regularBundlePath}}/{{appId}}.bundle.js', - {{#each styleSheetPaths}} - '{{this}}', - {{/each}}, + } ] }); }; diff --git a/src/legacy/ui/ui_render/ui_render_mixin.js b/src/legacy/ui/ui_render/ui_render_mixin.js index 99560b0bf653f3..0912d8683fc485 100644 --- a/src/legacy/ui/ui_render/ui_render_mixin.js +++ b/src/legacy/ui/ui_render/ui_render_mixin.js @@ -135,7 +135,8 @@ export function uiRenderMixin(kbnServer, server, config) { dllBundlePath, dllJsChunks, styleSheetPaths, - sharedDepsFilename: UiSharedDeps.distFilename, + sharedJsFilename: UiSharedDeps.jsFilename, + sharedJsDepFilenames: UiSharedDeps.jsDepFilenames, darkMode, }, }); diff --git a/src/plugins/advanced_settings/public/management_app/index.tsx b/src/plugins/advanced_settings/public/management_app/index.tsx deleted file mode 100644 index 53b8f9983aa270..00000000000000 --- a/src/plugins/advanced_settings/public/management_app/index.tsx +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import React from 'react'; -import ReactDOM from 'react-dom'; -import { HashRouter, Switch, Route } from 'react-router-dom'; -import { i18n } from '@kbn/i18n'; -import { I18nProvider } from '@kbn/i18n/react'; -import { AdvancedSettings } from './advanced_settings'; -import { ManagementSetup } from '../../../management/public'; -import { StartServicesAccessor } from '../../../../core/public'; -import { ComponentRegistry } from '../types'; - -const title = i18n.translate('advancedSettings.advancedSettingsLabel', { - defaultMessage: 'Advanced Settings', -}); -const crumb = [{ text: title }]; - -const readOnlyBadge = { - text: i18n.translate('advancedSettings.badge.readOnly.text', { - defaultMessage: 'Read only', - }), - tooltip: i18n.translate('advancedSettings.badge.readOnly.tooltip', { - defaultMessage: 'Unable to save advanced settings', - }), - iconType: 'glasses', -}; - -export async function registerAdvSettingsMgmntApp({ - management, - getStartServices, - componentRegistry, -}: { - management: ManagementSetup; - getStartServices: StartServicesAccessor; - componentRegistry: ComponentRegistry['start']; -}) { - const kibanaSection = management.sections.getSection('kibana'); - if (!kibanaSection) { - throw new Error('`kibana` management section not found.'); - } - - const advancedSettingsManagementApp = kibanaSection.registerApp({ - id: 'settings', - title, - order: 20, - async mount(params) { - params.setBreadcrumbs(crumb); - const [ - { uiSettings, notifications, docLinks, application, chrome }, - ] = await getStartServices(); - - const canSave = application.capabilities.advancedSettings.save as boolean; - - if (!canSave) { - chrome.setBadge(readOnlyBadge); - } - - ReactDOM.render( - - - - - - - - - , - params.element - ); - return () => { - ReactDOM.unmountComponentAtNode(params.element); - }; - }, - }); - const [{ application }] = await getStartServices(); - if (!application.capabilities.management.kibana.settings) { - advancedSettingsManagementApp.disable(); - } -} diff --git a/src/plugins/advanced_settings/public/management_app/mount_management_section.tsx b/src/plugins/advanced_settings/public/management_app/mount_management_section.tsx new file mode 100644 index 00000000000000..df44ea45e9d01c --- /dev/null +++ b/src/plugins/advanced_settings/public/management_app/mount_management_section.tsx @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React from 'react'; +import ReactDOM from 'react-dom'; +import { HashRouter, Switch, Route } from 'react-router-dom'; + +import { i18n } from '@kbn/i18n'; +import { I18nProvider } from '@kbn/i18n/react'; +import { StartServicesAccessor } from 'src/core/public'; + +import { AdvancedSettings } from './advanced_settings'; +import { ManagementAppMountParams } from '../../../management/public'; +import { ComponentRegistry } from '../types'; + +const title = i18n.translate('advancedSettings.advancedSettingsLabel', { + defaultMessage: 'Advanced Settings', +}); +const crumb = [{ text: title }]; + +const readOnlyBadge = { + text: i18n.translate('advancedSettings.badge.readOnly.text', { + defaultMessage: 'Read only', + }), + tooltip: i18n.translate('advancedSettings.badge.readOnly.tooltip', { + defaultMessage: 'Unable to save advanced settings', + }), + iconType: 'glasses', +}; + +export async function mountManagementSection( + getStartServices: StartServicesAccessor, + params: ManagementAppMountParams, + componentRegistry: ComponentRegistry['start'] +) { + params.setBreadcrumbs(crumb); + const [{ uiSettings, notifications, docLinks, application, chrome }] = await getStartServices(); + + const canSave = application.capabilities.advancedSettings.save as boolean; + + if (!canSave) { + chrome.setBadge(readOnlyBadge); + } + + ReactDOM.render( + + + + + + + + + , + params.element + ); + return () => { + ReactDOM.unmountComponentAtNode(params.element); + }; +} diff --git a/src/plugins/advanced_settings/public/plugin.ts b/src/plugins/advanced_settings/public/plugin.ts index e9472fbdee0e67..04eeff1e1f3ce3 100644 --- a/src/plugins/advanced_settings/public/plugin.ts +++ b/src/plugins/advanced_settings/public/plugin.ts @@ -16,21 +16,37 @@ * specific language governing permissions and limitations * under the License. 
*/ - +import { i18n } from '@kbn/i18n'; import { CoreSetup, CoreStart, Plugin } from 'kibana/public'; +import { ManagementApp } from '../../management/public'; import { ComponentRegistry } from './component_registry'; import { AdvancedSettingsSetup, AdvancedSettingsStart, AdvancedSettingsPluginSetup } from './types'; -import { registerAdvSettingsMgmntApp } from './management_app'; const component = new ComponentRegistry(); +const title = i18n.translate('advancedSettings.advancedSettingsLabel', { + defaultMessage: 'Advanced Settings', +}); + export class AdvancedSettingsPlugin implements Plugin { + private managementApp?: ManagementApp; public setup(core: CoreSetup, { management }: AdvancedSettingsPluginSetup) { - registerAdvSettingsMgmntApp({ - management, - getStartServices: core.getStartServices, - componentRegistry: component.start, + const kibanaSection = management.sections.getSection('kibana'); + if (!kibanaSection) { + throw new Error('`kibana` management section not found.'); + } + + this.managementApp = kibanaSection.registerApp({ + id: 'settings', + title, + order: 20, + async mount(params) { + const { mountManagementSection } = await import( + './management_app/mount_management_section' + ); + return mountManagementSection(core.getStartServices, params, component.start); + }, }); return { @@ -39,6 +55,10 @@ export class AdvancedSettingsPlugin } public start(core: CoreStart) { + if (!core.application.capabilities.management.kibana.settings) { + this.managementApp!.disable(); + } + return { component: component.start, }; diff --git a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts index 0c02b02a25af0d..ef6eaa196b06a1 100644 --- a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts +++ b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.test.ts @@ -46,6 +46,10 @@ describe('parseInterval', () => { validateDuration(parseInterval('5m'), 'm', 5); }); + test('should correctly parse 500m interval', () => { + validateDuration(parseInterval('500m'), 'm', 500); + }); + test('should correctly parse 250ms interval', () => { validateDuration(parseInterval('250ms'), 'ms', 250); }); diff --git a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts index ef1d89e400b729..857c8594720ee6 100644 --- a/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts +++ b/src/plugins/data/common/search/aggs/date_interval_utils/parse_interval.ts @@ -49,6 +49,13 @@ export function parseInterval(interval: string): moment.Duration | null { u => Math.abs(duration.as(u)) >= 1 ) as unitOfTime.Base; + // however, if we do this the other way around it will also fail: + // going from 500m to hours results in a repeating decimal (500/60 = 8.33...), + // so we can only do this if we are changing to smaller units + if (dateMath.units.indexOf(selectedUnit as any) < dateMath.units.indexOf(unit as any)) { + return duration; + } + return moment.duration(duration.as(selectedUnit), selectedUnit); } catch (e) { return null; diff --git a/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts b/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts index 5626fc80bb749a..dc8321aa07004a 100644 --- a/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts +++ 
b/src/plugins/es_ui_shared/static/forms/helpers/field_validators/is_json.ts @@ -17,25 +17,6 @@ * under the License. */ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - import { ValidationFunc } from '../../hook_form_lib'; import { isJSON } from '../../../validators/string'; import { ERROR_CODE } from './types'; diff --git a/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap b/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap index 57cbe0f17498fd..c1dc560b4353f8 100644 --- a/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap +++ b/src/plugins/home/public/application/components/__snapshots__/add_data.test.js.snap @@ -104,6 +104,7 @@ exports[`apmUiEnabled 1`] = ` { footer={ diff --git a/src/plugins/home/public/application/components/feature_directory.js b/src/plugins/home/public/application/components/feature_directory.js index 2e979bf5899755..7d827b1ca9229f 100644 --- a/src/plugins/home/public/application/components/feature_directory.js +++ b/src/plugins/home/public/application/components/feature_directory.js @@ -89,6 +89,7 @@ export class FeatureDirectory extends React.Component { renderTabs = () => { return this.tabs.map((tab, index) => ( this.onSelectedTabChanged(tab.id)} isSelected={tab.id === this.state.selectedTabId} key={index} diff --git a/src/plugins/home/public/application/components/home.js b/src/plugins/home/public/application/components/home.js index 77cde6a574aece..5263dc06e96fc8 100644 --- a/src/plugins/home/public/application/components/home.js +++ b/src/plugins/home/public/application/components/home.js @@ -203,7 +203,7 @@ export class Home extends Component {

- + `http://localhost:5610/bundles/kbn-ui-shared-deps/${chunkFilename}` + ), + `http://localhost:5610/bundles/kbn-ui-shared-deps/${UiSharedDeps.jsFilename}`, + 'http://localhost:5610/built_assets/dlls/vendors_runtime.bundle.dll.js', ...DllCompiler.getRawDllConfig().chunks.map( chunk => `http://localhost:5610/built_assets/dlls/vendors${chunk}.bundle.dll.js` diff --git a/test/accessibility/apps/management.ts b/test/accessibility/apps/management.ts index ac2921ed063f50..9e75250403d6b1 100644 --- a/test/accessibility/apps/management.ts +++ b/test/accessibility/apps/management.ts @@ -35,7 +35,8 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { // await PageObjects.common.navigateToApp('settings'); // }); - describe('Management', () => { + // FLAKY: https://github.com/elastic/kibana/issues/60470 + describe.skip('Management', () => { before(async () => { await esArchiver.load('discover'); await esArchiver.loadIfNeeded('logstash_functional'); diff --git a/test/functional/apps/discover/_doc_navigation.js b/test/functional/apps/discover/_doc_navigation.js index f0a7844b299873..08e0cb0b8d23a7 100644 --- a/test/functional/apps/discover/_doc_navigation.js +++ b/test/functional/apps/discover/_doc_navigation.js @@ -31,7 +31,8 @@ export default function({ getService, getPageObjects }) { const PageObjects = getPageObjects(['common', 'discover', 'timePicker']); const esArchiver = getService('esArchiver'); - describe('doc link in discover', function contextSize() { + // FLAKY: https://github.com/elastic/kibana/issues/62281 + describe.skip('doc link in discover', function contextSize() { this.tags('smoke'); before(async function() { await esArchiver.loadIfNeeded('logstash_functional'); diff --git a/test/functional/apps/discover/_field_visualize.ts b/test/functional/apps/discover/_field_visualize.ts index 24f4ba592324c0..f8f290b259b7eb 100644 --- a/test/functional/apps/discover/_field_visualize.ts +++ b/test/functional/apps/discover/_field_visualize.ts @@ -32,8 +32,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { defaultIndex: 'logstash-*', }; - // FLAKY: https://github.com/elastic/kibana/issues/61714 - describe.skip('discover field visualize button', () => { + describe('discover field visualize button', () => { before(async function() { log.debug('load kibana index with default index pattern'); await esArchiver.load('discover'); @@ -50,7 +49,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { }); it('should visualize a field in area chart', async () => { - await PageObjects.discover.clickFieldListItem('phpmemory'); + await PageObjects.discover.findFieldByName('phpmemory'); log.debug('visualize a phpmemory field'); await PageObjects.discover.clickFieldListItemVisualize('phpmemory'); await PageObjects.header.waitUntilLoadingHasFinished(); @@ -83,7 +82,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { it('should preserve app filters in visualize', async () => { await filterBar.addFilter('bytes', 'is between', '3500', '4000'); - await PageObjects.discover.clickFieldListItem('geo.src'); + await PageObjects.discover.findFieldByName('geo.src'); log.debug('visualize a geo.src field with filter applied'); await PageObjects.discover.clickFieldListItemVisualize('geo.src'); await PageObjects.header.waitUntilLoadingHasFinished(); @@ -119,7 +118,7 @@ export default function({ getService, getPageObjects }: FtrProviderContext) { it('should preserve query in visualize', async () => { await 
queryBar.setQuery('machine.os : ios'); await queryBar.submitQuery(); - await PageObjects.discover.clickFieldListItem('geo.dest'); + await PageObjects.discover.findFieldByName('geo.dest'); log.debug('visualize a geo.dest field with query applied'); await PageObjects.discover.clickFieldListItemVisualize('geo.dest'); await PageObjects.header.waitUntilLoadingHasFinished(); diff --git a/test/functional/page_objects/discover_page.ts b/test/functional/page_objects/discover_page.ts index 10652ce3ec4b2d..2377c32a80b5b6 100644 --- a/test/functional/page_objects/discover_page.ts +++ b/test/functional/page_objects/discover_page.ts @@ -40,6 +40,11 @@ export function DiscoverPageProvider({ getService, getPageObjects }: FtrProvider return await el.getVisibleText(); } + public async findFieldByName(name: string) { + const fieldSearch = await testSubjects.find('fieldFilterSearchInput'); + await fieldSearch.type(name); + } + public async saveSearch(searchName: string) { log.debug('saveSearch'); await this.clickSaveSearchButton(); @@ -239,10 +244,16 @@ export function DiscoverPageProvider({ getService, getPageObjects }: FtrProvider await testSubjects.click(`fieldToggle-${field}`); } - public async clickFieldListItemVisualize(field: string) { - return await retry.try(async () => { - await testSubjects.click(`fieldVisualize-${field}`); - }); + public async clickFieldListItemVisualize(fieldName: string) { + const field = await testSubjects.find(`field-${fieldName}-showDetails`); + const isActive = await field.elementHasClass('dscSidebarItem--active'); + + if (!isActive) { + // expand the field to show the "Visualize" button + await field.click(); + } + + await testSubjects.click(`fieldVisualize-${fieldName}`); } public async expectFieldListItemVisualize(field: string) { diff --git a/test/functional/page_objects/home_page.ts b/test/functional/page_objects/home_page.ts index 6225b4e3aca62f..6fdc306e39192d 100644 --- a/test/functional/page_objects/home_page.ts +++ b/test/functional/page_objects/home_page.ts @@ -79,6 +79,39 @@ export function HomePageProvider({ getService, getPageObjects }: FtrProviderCont await testSubjects.click(`launchSampleDataSet${id}`); } + async clickAllKibanaPlugins() { + await testSubjects.click('allPlugins'); + } + + async clickVisualizeExplorePlugins() { + await testSubjects.click('tab-data'); + } + + async clickAdminPlugin() { + await testSubjects.click('tab-admin'); + } + + async clickOnConsole() { + await testSubjects.click('homeSynopsisLinkconsole'); + } + async clickOnLogo() { + await testSubjects.click('logo'); + } + + async ClickOnLogsData() { + await testSubjects.click('logsData'); + } + + // clicks on Active MQ logs + async clickOnLogsTutorial() { + await testSubjects.click('homeSynopsisLinkactivemq logs'); + } + + // clicks on cloud tutorial link + async clickOnCloudTutorial() { + await testSubjects.click('onCloudTutorial'); + } + async loadSavedObjects() { await retry.try(async () => { await testSubjects.click('loadSavedObjects'); diff --git a/vars/workers.groovy b/vars/workers.groovy index c5638f2624fe5d..1c55c676d94253 100644 --- a/vars/workers.groovy +++ b/vars/workers.groovy @@ -1,23 +1,38 @@ // "Workers" in this file will spin up an instance, do some setup etc depending on the configuration, and then execute some work that you define // e.g. 
workers.base(name: 'my-worker') { sh "echo 'ready to execute some kibana scripts'" } +def label(size) { + switch(size) { + case 's': + return 'linux && immutable' + case 'l': + return 'tests-l' + case 'xl': + return 'tests-xl' + case 'xxl': + return 'tests-xxl' + } + + error "unknown size '${size}'" +} + /* The base worker that all of the others use. Will clone the scm (assumed to be kibana), and run kibana bootstrap processes by default. Parameters: - label - gobld/agent label to use, e.g. 'linux && immutable' + size - size of worker label to use, e.g. 's' or 'xl' ramDisk - Should the workspace be mounted in memory? Default: true bootstrapped - If true, download kibana dependencies, run kbn bootstrap, etc. Default: true name - Name of the worker for display purposes, filenames, etc. scm - Jenkins scm configuration for checking out code. Use `null` to disable checkout. Default: inherited from job */ def base(Map params, Closure closure) { - def config = [label: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params - if (!config.label) { - error "You must specify an agent label, such as 'tests-xl' or 'linux && immutable', when using workers.base()" + def config = [size: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params + if (!config.size) { + error "You must specify an agent size, such as 'xl' or 's', when using workers.base()" } - node(config.label) { + node(label(config.size)) { agentInfo.print() if (config.ramDisk) { @@ -88,7 +103,7 @@ def ci(Map params, Closure closure) { // Worker for running the current intake jobs. Just runs a single script after bootstrap. def intake(jobName, String script) { return { - ci(name: jobName, label: 'linux && immutable', ramDisk: false) { + ci(name: jobName, size: 's', ramDisk: false) { withEnv(["JOB=${jobName}"]) { runbld(script, "Execute ${jobName}") } @@ -99,7 +114,7 @@ def intake(jobName, String script) { // Worker for running functional tests. Runs a setup process (e.g. the kibana build) then executes a map of closures in parallel (e.g. one for each ciGroup) def functional(name, Closure setup, Map processes) { return { - parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, label: 'tests-xl') + parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, size: 'xl') } } @@ -111,12 +126,12 @@ def functional(name, Closure setup, Map processes) { setup: Closure to execute after the agent is bootstrapped, before starting the parallel work processes: Map of closures that will execute in parallel after setup. Each closure is passed a unique number. delayBetweenProcesses: Number of seconds to wait between starting the parallel processes. Useful to spread the load of heavy init processes, e.g. Elasticsearch starting up. Default: 0 - label: gobld/agent label to use, e.g. 'linux && immutable'. Default: 'tests-xl', a 32 CPU machine used for running many functional test suites in parallel + size: size of worker label to use, e.g. 
's' or 'xl' */ def parallelProcesses(Map params) { - def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, label: 'tests-xl'] + params + def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, size: 'xl'] + params - ci(label: config.label, name: config.name) { + ci(size: config.size, name: config.name) { config.setup() def nextProcessNumber = 1 diff --git a/webpackShims/elasticsearch-browser.js b/webpackShims/elasticsearch-browser.js deleted file mode 100644 index a4373dcdfe1d14..00000000000000 --- a/webpackShims/elasticsearch-browser.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -require('angular'); -module.exports = require('elasticsearch-browser/elasticsearch.angular.js'); diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts index 949264fcc9fdb1..b0083eb4f87e27 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/index.ts @@ -57,7 +57,7 @@ export const WorkpadExport = compose( ({ workpad, pageCount, kibana }: Props & WithKibanaProps): ComponentProps => ({ getExportUrl: type => { if (type === 'pdf') { - const pdfUrl = getPdfUrl(workpad, { pageCount }, kibana.services.http.basePath.prepend); + const pdfUrl = getPdfUrl(workpad, { pageCount }, kibana.services.http.basePath); return getAbsoluteUrl(pdfUrl); } @@ -78,7 +78,7 @@ export const WorkpadExport = compose( onExport: type => { switch (type) { case 'pdf': - return createPdf(workpad, { pageCount }, kibana.services.http.basePath.prepend) + return createPdf(workpad, { pageCount }, kibana.services.http.basePath) .then(({ data }: { data: { job: { id: string } } }) => { notify.info(strings.getExportPDFMessage(), { title: strings.getExportPDFTitle(workpad.name), diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts index ceaf82c1c07d62..6c7d7ddd0a7931 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.test.ts @@ -9,29 +9,34 @@ jest.mock('../../../../common/lib/fetch'); import { getPdfUrl, createPdf } from './utils'; import { workpads } from '../../../../__tests__/fixtures/workpads'; import { fetch } from '../../../../common/lib/fetch'; +import { IBasePath } from 'kibana/public'; -const addBasePath = jest.fn().mockImplementation(s => 
`basepath/${s}`); +const basePath = ({ + prepend: jest.fn().mockImplementation(s => `basepath/s/spacey/${s}`), + get: () => 'basepath/s/spacey', + serverBasePath: `basepath`, +} as unknown) as IBasePath; const workpad = workpads[0]; test('getPdfUrl returns the correct url', () => { - const url = getPdfUrl(workpad, { pageCount: 2 }, addBasePath); + const url = getPdfUrl(workpad, { pageCount: 2 }, basePath); expect(url).toMatchInlineSnapshot( - `"basepath//api/reporting/generate/printablePdf?jobParams=(browserTimezone:America%2FPhoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas%20workpad',relativeUrls:!(%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F1,%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F2),title:'base%20workpad')"` + `"basepath/s/spacey//api/reporting/generate/printablePdf?jobParams=(browserTimezone:America%2FPhoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas%20workpad',relativeUrls:!(%2Fs%2Fspacey%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F1,%2Fs%2Fspacey%2Fapp%2Fcanvas%23%2Fexport%2Fworkpad%2Fpdf%2Fbase-workpad%2Fpage%2F2),title:'base%20workpad')"` ); }); test('createPdf posts to create the pdf', () => { - createPdf(workpad, { pageCount: 2 }, addBasePath); + createPdf(workpad, { pageCount: 2 }, basePath); expect(fetch.post).toBeCalled(); const args = (fetch.post as jest.MockedFunction).mock.calls[0]; - expect(args[0]).toMatchInlineSnapshot(`"basepath//api/reporting/generate/printablePdf"`); + expect(args[0]).toMatchInlineSnapshot(`"basepath/s/spacey//api/reporting/generate/printablePdf"`); expect(args[1]).toMatchInlineSnapshot(` Object { - "jobParams": "(browserTimezone:America/Phoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas workpad',relativeUrls:!(/app/canvas#/export/workpad/pdf/base-workpad/page/1,/app/canvas#/export/workpad/pdf/base-workpad/page/2),title:'base workpad')", + "jobParams": "(browserTimezone:America/Phoenix,layout:(dimensions:(height:0,width:0),id:preserve_layout),objectType:'canvas workpad',relativeUrls:!(/s/spacey/app/canvas#/export/workpad/pdf/base-workpad/page/1,/s/spacey/app/canvas#/export/workpad/pdf/base-workpad/page/2),title:'base workpad')", } `); }); diff --git a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts index 5adbf4ce66c130..dc99c0687f388e 100644 --- a/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts +++ b/x-pack/legacy/plugins/canvas/public/components/workpad_header/workpad_export/utils.ts @@ -6,6 +6,7 @@ import rison from 'rison-node'; // @ts-ignore Untyped local. 
+import { IBasePath } from 'kibana/public'; import { fetch } from '../../../../common/lib/fetch'; import { CanvasWorkpad } from '../../../../types'; import { url } from '../../../../../../../../src/plugins/kibana_utils/public'; @@ -17,9 +18,7 @@ interface PageCount { pageCount: number; } -type AddBasePath = (url: string) => string; - -type Arguments = [CanvasWorkpad, PageCount, AddBasePath]; +type Arguments = [CanvasWorkpad, PageCount, IBasePath]; interface PdfUrlData { createPdfUri: string; @@ -29,10 +28,11 @@ interface PdfUrlData { function getPdfUrlParts( { id, name: title, width, height }: CanvasWorkpad, { pageCount }: PageCount, - addBasePath: (path: string) => string + basePath: IBasePath ): PdfUrlData { - const reportingEntry = addBasePath('/api/reporting/generate'); - const canvasEntry = '/app/canvas#'; + const reportingEntry = basePath.prepend('/api/reporting/generate'); + const urlPrefix = basePath.get().replace(basePath.serverBasePath, ''); // for Spaces prefix, which is included in basePath.get() + const canvasEntry = `${urlPrefix}/app/canvas#`; // The viewport in Reporting by specifying the dimensions. In order for things to work, // we need a viewport that will include all of the pages in the workpad. The viewport diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js index bf57306df5697d..eadaf42ca694da 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_map_select.js @@ -99,6 +99,7 @@ export class ColorMapSelect extends Component { ); } else @@ -108,6 +109,7 @@ export class ColorMapSelect extends Component { field={this.props.styleProperty.getField()} getValueSuggestions={this.props.styleProperty.getValueSuggestions} onChange={this._onCustomColorMapChange} + swatches={this.props.swatches} /> ); diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js index 059543d705fc73..20fd97a229352c 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops.js @@ -8,61 +8,8 @@ import _ from 'lodash'; import React from 'react'; import { removeRow, isColorInvalid } from './color_stops_utils'; import { i18n } from '@kbn/i18n'; -import { EuiButtonIcon, EuiColorPicker, EuiFlexGroup, EuiFlexItem, EuiFormRow } from '@elastic/eui'; - -function getColorStopRow({ index, errors, stopInput, onColorChange, color, deleteButton, onAdd }) { - const colorPickerButtons = ( -
- {deleteButton} - -
- ); - return ( - - - - {stopInput} - - - - - - - ); -} - -export function getDeleteButton(onRemove) { - return ( - - ); -} +import { EuiButtonIcon, EuiFlexGroup, EuiFlexItem, EuiFormRow } from '@elastic/eui'; +import { MbValidatedColorPicker } from './mb_validated_color_picker'; export const ColorStops = ({ onChange, @@ -72,6 +19,7 @@ export const ColorStops = ({ renderStopInput, addNewRow, canDeleteStop, + swatches, }) => { function getStopInput(stop, index) { const onStopChange = newStopValue => { @@ -134,10 +82,56 @@ export const ColorStops = ({ isInvalid: isStopsInvalid(newColorStops), }); }; - deleteButton = getDeleteButton(onRemove); + deleteButton = ( + + ); } - return getColorStopRow({ index, errors, stopInput, onColorChange, color, deleteButton, onAdd }); + const colorPickerButtons = ( +
+ {deleteButton} + +
+ ); + return ( + + + + {stopInput} + + + + + + + ); }); return
{rows}
; diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js index edf230b0a945c0..0656173e5c4113 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_categorical.js @@ -27,6 +27,7 @@ export const ColorStopsCategorical = ({ field, onChange, getValueSuggestions, + swatches, }) => { const getStopError = (stop, index) => { let count = 0; @@ -81,6 +82,7 @@ export const ColorStopsCategorical = ({ renderStopInput={renderStopInput} canDeleteStop={canDeleteStop} addNewRow={addCategoricalRow} + swatches={swatches} /> ); }; diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js index 0f6a0583d3dbcc..4e2d07b9dfea02 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/color_stops_ordinal.js @@ -20,6 +20,7 @@ import { i18n } from '@kbn/i18n'; export const ColorStopsOrdinal = ({ colorStops = [{ stop: 0, color: DEFAULT_CUSTOM_COLOR }], onChange, + swatches, }) => { const getStopError = (stop, index) => { let error; @@ -69,6 +70,7 @@ export const ColorStopsOrdinal = ({ renderStopInput={renderStopInput} canDeleteStop={canDeleteStop} addNewRow={addOrdinalRow} + swatches={swatches} /> ); }; diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js index 5e8f720fcc5e35..460e7379920c46 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/dynamic_color_form.js @@ -18,6 +18,7 @@ export function DynamicColorForm({ onDynamicStyleChange, staticDynamicSelect, styleProperty, + swatches, }) { const styleOptions = styleProperty.getOptions(); @@ -101,6 +102,7 @@ export function DynamicColorForm({ useCustomColorMap={_.get(styleOptions, 'useCustomColorRamp', false)} styleProperty={styleProperty} showColorMapTypeToggle={showColorMapTypeToggle} + swatches={swatches} /> ); } else if (styleProperty.isCategorical()) { @@ -118,6 +120,7 @@ export function DynamicColorForm({ useCustomColorMap={_.get(styleOptions, 'useCustomColorPalette', false)} styleProperty={styleProperty} showColorMapTypeToggle={showColorMapTypeToggle} + swatches={swatches} /> ); } diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/mb_validated_color_picker.tsx b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/mb_validated_color_picker.tsx new file mode 100644 index 00000000000000..b4fad6690b9ac8 --- /dev/null +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/mb_validated_color_picker.tsx @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React, { Component } from 'react'; +import { isValidHex, EuiColorPicker, EuiFormControlLayoutProps } from '@elastic/eui'; + +export const RGBA_0000 = 'rgba(0,0,0,0)'; + +interface Props { + onChange: (color: string) => void; + color: string; + swatches?: string[]; + append?: EuiFormControlLayoutProps['append']; +} + +interface State { + colorInputValue: string; +} + +// EuiColorPicker treats '' or invalid colors as transparent. +// Mapbox logs errors for '' or invalid colors. +// MbValidatedColorPicker is a wrapper around EuiColorPicker that reconciles the behavior difference +// between the two by returning a Mapbox safe RGBA_0000 for '' or invalid colors +// while keeping invalid state local so EuiColorPicker's input properly handles text input. +export class MbValidatedColorPicker extends Component { + state = { + colorInputValue: this.props.color === RGBA_0000 ? '' : this.props.color, + }; + + _onColorChange = (color: string) => { + // reflect all user input, whether valid or not + this.setState({ colorInputValue: color }); + // Only surface mapbox valid input to caller + this.props.onChange(isValidHex(color) ? color : RGBA_0000); + }; + + render() { + return ( + + ); + } +} diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js index ab1634a53a966a..a295556ee3126d 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/components/color/static_color_form.js @@ -5,7 +5,8 @@ */ import React from 'react'; -import { EuiColorPicker, EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; +import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; +import { MbValidatedColorPicker } from './mb_validated_color_picker'; export function StaticColorForm({ onStaticStyleChange, @@ -23,11 +24,10 @@ export function StaticColorForm({ {staticDynamicSelect} - diff --git a/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js b/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js index 146bc40aa8531b..e671f00b783819 100644 --- a/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js +++ b/x-pack/legacy/plugins/maps/public/layers/styles/vector/properties/dynamic_color_property.js @@ -18,11 +18,10 @@ import { EuiTextColor, } from '@elastic/eui'; import { Category } from '../components/legend/category'; -import { COLOR_MAP_TYPE } from '../../../../../common/constants'; +import { COLOR_MAP_TYPE, RGBA_0000 } from '../../../../../common/constants'; import { isCategoricalStopsInvalid } from '../components/color/color_stops_utils'; const EMPTY_STOPS = { stops: [], defaultColor: null }; -const RGBA_0000 = 'rgba(0,0,0,0)'; export class DynamicColorProperty extends DynamicStyleProperty { syncCircleColorWithMb(mbLayerId, mbMap, alpha) { diff --git a/x-pack/legacy/plugins/reporting/common/constants.ts b/x-pack/legacy/plugins/reporting/common/constants.ts index 1746345879192e..8f7a06ba9f8e92 100644 --- a/x-pack/legacy/plugins/reporting/common/constants.ts +++ b/x-pack/legacy/plugins/reporting/common/constants.ts @@ -27,6 +27,9 @@ export const WHITELISTED_JOB_CONTENT_TYPES = [ 'image/png', ]; +// See: +// https://github.com/chromium/chromium/blob/3611052c055897e5ebbc5b73ea295092e0c20141/services/network/public/cpp/header_util_unittest.cc#L50 +// 
For a list of headers that chromium doesn't like export const KBN_SCREENSHOT_HEADER_BLACKLIST = [ 'accept-encoding', 'connection', @@ -38,8 +41,14 @@ export const KBN_SCREENSHOT_HEADER_BLACKLIST = [ // only for a single transport-level connection, and shouldn't // be stored by caches or forwarded by proxies. 'transfer-encoding', + 'trailer', + 'te', + 'upgrade', + 'keep-alive', ]; +export const KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN = ['proxy-']; + export const UI_SETTINGS_CUSTOM_PDF_LOGO = 'xpackReporting:customPdfLogo'; /** diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts index 468caf93ec5dd5..9085fb3cbc876d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts @@ -5,33 +5,27 @@ */ import { cryptoFactory } from '../../../server/lib/crypto'; -import { createMockServer } from '../../../test_helpers'; import { Logger } from '../../../types'; import { decryptJobHeaders } from './decrypt_job_headers'; -let mockServer: any; -beforeEach(() => { - mockServer = createMockServer(''); -}); - -const encryptHeaders = async (headers: Record) => { - const crypto = cryptoFactory(mockServer); +const encryptHeaders = async (encryptionKey: string, headers: Record) => { + const crypto = cryptoFactory(encryptionKey); return await crypto.encrypt(headers); }; describe('headers', () => { test(`fails if it can't decrypt headers`, async () => { - await expect( + const getDecryptedHeaders = () => decryptJobHeaders({ + encryptionKey: 'abcsecretsauce', job: { headers: 'Q53+9A+zf+Xe+ceR/uB/aR/Sw/8e+M+qR+WiG+8z+EY+mo+HiU/zQL+Xn', }, logger: ({ error: jest.fn(), } as unknown) as Logger, - server: mockServer, - }) - ).rejects.toMatchInlineSnapshot( + }); + await expect(getDecryptedHeaders()).rejects.toMatchInlineSnapshot( `[Error: Failed to decrypt report job data. Please ensure that xpack.reporting.encryptionKey is set and re-generate this report. 
Error: Invalid IV length]` ); }); @@ -42,15 +36,15 @@ describe('headers', () => { baz: 'quix', }; - const encryptedHeaders = await encryptHeaders(headers); + const encryptedHeaders = await encryptHeaders('abcsecretsauce', headers); const decryptedHeaders = await decryptJobHeaders({ + encryptionKey: 'abcsecretsauce', job: { title: 'cool-job-bro', type: 'csv', headers: encryptedHeaders, }, logger: {} as Logger, - server: mockServer, }); expect(decryptedHeaders).toEqual(headers); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts index 436b2c2dab1ad5..6f415d7ee5ea93 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.ts @@ -6,7 +6,7 @@ import { i18n } from '@kbn/i18n'; import { cryptoFactory } from '../../../server/lib/crypto'; -import { CryptoFactory, ServerFacade, Logger } from '../../../types'; +import { CryptoFactory, Logger } from '../../../types'; interface HasEncryptedHeaders { headers?: string; @@ -17,15 +17,15 @@ export const decryptJobHeaders = async < JobParamsType, JobDocPayloadType extends HasEncryptedHeaders >({ - server, + encryptionKey, job, logger, }: { - server: ServerFacade; + encryptionKey?: string; job: JobDocPayloadType; logger: Logger; }): Promise> => { - const crypto: CryptoFactory = cryptoFactory(server); + const crypto: CryptoFactory = cryptoFactory(encryptionKey); try { const decryptedHeaders: Record = await crypto.decrypt(job.headers); return decryptedHeaders; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts index eedb742ad75976..5f5fc94eee8308 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts @@ -4,27 +4,32 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { createMockReportingCore, createMockServer } from '../../../test_helpers'; -import { ReportingCore } from '../../../server'; +import sinon from 'sinon'; +import { createMockReportingCore } from '../../../test_helpers'; +import { ReportingConfig, ReportingCore } from '../../../server/types'; import { JobDocPayload } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getConditionalHeaders, getCustomLogo } from './index'; +let mockConfig: ReportingConfig; let mockReportingPlugin: ReportingCore; -let mockServer: any; + +const getMockConfig = (mockConfigGet: sinon.SinonStub) => ({ + get: mockConfigGet, + kbnConfig: { get: mockConfigGet }, +}); + beforeEach(async () => { - mockReportingPlugin = await createMockReportingCore(); - mockServer = createMockServer(''); + const mockConfigGet = sinon + .stub() + .withArgs('kibanaServer', 'hostname') + .returns('custom-hostname'); + mockConfig = getMockConfig(mockConfigGet); + mockReportingPlugin = await createMockReportingCore(mockConfig); }); describe('conditions', () => { test(`uses hostname from reporting config if set`, async () => { - const settings: any = { - 'xpack.reporting.kibanaServer.hostname': 'custom-hostname', - }; - - mockServer = createMockServer({ settings }); - const permittedHeaders = { foo: 'bar', baz: 'quix', @@ -33,121 +38,20 @@ describe('conditions', () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); expect(conditionalHeaders.conditions.hostname).toEqual( - mockServer.config().get('xpack.reporting.kibanaServer.hostname') + mockConfig.get('kibanaServer', 'hostname') ); - }); - - test(`uses hostname from server.config if reporting config not set`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.hostname).toEqual(mockServer.config().get('server.host')); - }); - - test(`uses port from reporting config if set`, async () => { - const settings = { - 'xpack.reporting.kibanaServer.port': 443, - }; - - mockServer = createMockServer({ settings }); - - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.port).toEqual( - mockServer.config().get('xpack.reporting.kibanaServer.port') + expect(conditionalHeaders.conditions.port).toEqual(mockConfig.get('kibanaServer', 'port')); + expect(conditionalHeaders.conditions.protocol).toEqual( + mockConfig.get('kibanaServer', 'protocol') ); - }); - - test(`uses port from server if reporting config not set`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.port).toEqual(mockServer.config().get('server.port')); - }); - - test(`uses basePath from server config`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - 
expect(conditionalHeaders.conditions.basePath).toEqual( - mockServer.config().get('server.basePath') + mockConfig.kbnConfig.get('server', 'basePath') ); }); - - test(`uses protocol from reporting config if set`, async () => { - const settings = { - 'xpack.reporting.kibanaServer.protocol': 'https', - }; - - mockServer = createMockServer({ settings }); - - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.protocol).toEqual( - mockServer.config().get('xpack.reporting.kibanaServer.protocol') - ); - }); - - test(`uses protocol from server.info`, async () => { - const permittedHeaders = { - foo: 'bar', - baz: 'quix', - }; - - const conditionalHeaders = await getConditionalHeaders({ - job: {} as JobDocPayload, - filteredHeaders: permittedHeaders, - server: mockServer, - }); - - expect(conditionalHeaders.conditions.protocol).toEqual(mockServer.info.protocol); - }); }); test('uses basePath from job when creating saved object service', async () => { @@ -161,14 +65,14 @@ test('uses basePath from job when creating saved object service', async () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); const jobBasePath = '/sbp/s/marketing'; await getCustomLogo({ reporting: mockReportingPlugin, job: { basePath: jobBasePath } as JobDocPayloadPDF, conditionalHeaders, - server: mockServer, + config: mockConfig, }); const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; @@ -179,6 +83,11 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav const mockGetSavedObjectsClient = jest.fn(); mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; + const mockConfigGet = sinon.stub(); + mockConfigGet.withArgs('kibanaServer', 'hostname').returns('localhost'); + mockConfigGet.withArgs('server', 'basePath').returns('/sbp'); + mockConfig = getMockConfig(mockConfigGet); + const permittedHeaders = { foo: 'bar', baz: 'quix', @@ -186,14 +95,14 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); await getCustomLogo({ reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, - server: mockServer, + config: mockConfig, }); const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; @@ -225,19 +134,26 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav describe('config formatting', () => { test(`lowercases server.host`, async () => { - mockServer = createMockServer({ settings: { 'server.host': 'COOL-HOSTNAME' } }); + const mockConfigGet = sinon + .stub() + .withArgs('server', 'host') + .returns('COOL-HOSTNAME'); + mockConfig = getMockConfig(mockConfigGet); + const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: {}, - server: mockServer, + config: mockConfig, }); expect(conditionalHeaders.conditions.hostname).toEqual('cool-hostname'); }); - test(`lowercases xpack.reporting.kibanaServer.hostname`, async () => { - mockServer = createMockServer({ - settings: { 'xpack.reporting.kibanaServer.hostname': 'GREAT-HOSTNAME' }, - }); + 
test(`lowercases kibanaServer.hostname`, async () => { + const mockConfigGet = sinon + .stub() + .withArgs('kibanaServer', 'hostname') + .returns('GREAT-HOSTNAME'); + mockConfig = getMockConfig(mockConfigGet); const conditionalHeaders = await getConditionalHeaders({ job: { title: 'cool-job-bro', @@ -249,7 +165,7 @@ describe('config formatting', () => { }, }, filteredHeaders: {}, - server: mockServer, + config: mockConfig, }); expect(conditionalHeaders.conditions.hostname).toEqual('great-hostname'); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts index 975060a8052f07..bd7999d697ca9d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.ts @@ -3,29 +3,31 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { ConditionalHeaders, ServerFacade } from '../../../types'; + +import { ReportingConfig } from '../../../server/types'; +import { ConditionalHeaders } from '../../../types'; export const getConditionalHeaders = ({ - server, + config, job, filteredHeaders, }: { - server: ServerFacade; + config: ReportingConfig; job: JobDocPayloadType; filteredHeaders: Record; }) => { - const config = server.config(); + const { kbnConfig } = config; const [hostname, port, basePath, protocol] = [ - config.get('xpack.reporting.kibanaServer.hostname') || config.get('server.host'), - config.get('xpack.reporting.kibanaServer.port') || config.get('server.port'), - config.get('server.basePath'), - config.get('xpack.reporting.kibanaServer.protocol') || server.info.protocol, + config.get('kibanaServer', 'hostname'), + config.get('kibanaServer', 'port'), + kbnConfig.get('server', 'basePath'), + config.get('kibanaServer', 'protocol'), ] as [string, number, string, string]; const conditionalHeaders: ConditionalHeaders = { headers: filteredHeaders, conditions: { - hostname: hostname.toLowerCase(), + hostname: hostname ? 
hostname.toLowerCase() : hostname, port, basePath, protocol, diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts index fa53f474dfba7b..2cbde69c81316f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts @@ -5,16 +5,18 @@ */ import { ReportingCore } from '../../../server'; -import { createMockReportingCore, createMockServer } from '../../../test_helpers'; -import { ServerFacade } from '../../../types'; +import { createMockReportingCore } from '../../../test_helpers'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getConditionalHeaders, getCustomLogo } from './index'; +const mockConfigGet = jest.fn().mockImplementation((key: string) => { + return 'localhost'; +}); +const mockConfig = { get: mockConfigGet, kbnConfig: { get: mockConfigGet } }; + let mockReportingPlugin: ReportingCore; -let mockServer: ServerFacade; beforeEach(async () => { - mockReportingPlugin = await createMockReportingCore(); - mockServer = createMockServer(''); + mockReportingPlugin = await createMockReportingCore(mockConfig); }); test(`gets logo from uiSettings`, async () => { @@ -37,14 +39,14 @@ test(`gets logo from uiSettings`, async () => { const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayloadPDF, filteredHeaders: permittedHeaders, - server: mockServer, + config: mockConfig, }); const { logo } = await getCustomLogo({ reporting: mockReportingPlugin, + config: mockConfig, job: {} as JobDocPayloadPDF, conditionalHeaders, - server: mockServer, }); expect(mockGet).toBeCalledWith('xpackReporting:customPdfLogo'); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts index 7af5edab41ab77..a13f992e7867cd 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts @@ -5,23 +5,22 @@ */ import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../common/constants'; -import { ReportingCore } from '../../../server'; -import { ConditionalHeaders, ServerFacade } from '../../../types'; +import { ReportingConfig, ReportingCore } from '../../../server/types'; +import { ConditionalHeaders } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only export const getCustomLogo = async ({ reporting, - server, + config, job, conditionalHeaders, }: { reporting: ReportingCore; - server: ServerFacade; + config: ReportingConfig; job: JobDocPayloadPDF; conditionalHeaders: ConditionalHeaders; }) => { - const serverBasePath: string = server.config().get('server.basePath'); - + const serverBasePath: string = config.kbnConfig.get('server', 'basePath'); const fakeRequest: any = { headers: conditionalHeaders.headers, // This is used by the spaces SavedObjectClientWrapper to determine the existing space. 
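For reference, a minimal sketch (not part of this change) of the ReportingConfig shape that the reporting tests in this patch stub out: get() resolves reporting-scoped keys such as ('kibanaServer', 'hostname'), while kbnConfig.get() resolves core Kibana keys such as ('server', 'basePath'). The makeStubConfig helper and its example values are illustrative assumptions, not Kibana APIs.

type ConfigGet = (...keys: string[]) => unknown;

interface StubReportingConfig {
  get: ConfigGet;
  kbnConfig: { get: ConfigGet };
}

// Joins the key path with '.' and looks it up in a plain record, mirroring the
// jest.fn implementation used by get_full_urls.test.ts below. This is a sketch
// only; the real ReportingConfig is provided by the reporting plugin's setup.
const makeStubConfig = (values: Record<string, unknown>): StubReportingConfig => {
  const get: ConfigGet = (...keys: string[]) => values[keys.join('.')];
  return { get, kbnConfig: { get } };
};

// Example values matching the defaults these tests assume.
const stubConfig = makeStubConfig({
  'kibanaServer.hostname': 'localhost',
  'kibanaServer.port': 5601,
  'kibanaServer.protocol': 'http',
  'server.basePath': '/sbp',
});

const hostname = stubConfig.get('kibanaServer', 'hostname'); // 'localhost'
const basePath = stubConfig.kbnConfig.get('server', 'basePath'); // '/sbp'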
diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts index 27e772195f7260..5f55617724ff68 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts @@ -4,29 +4,41 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createMockServer } from '../../../test_helpers'; -import { ServerFacade } from '../../../types'; +import { ReportingConfig } from '../../../server'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; import { getFullUrls } from './get_full_urls'; interface FullUrlsOpts { job: JobDocPayloadPNG & JobDocPayloadPDF; - server: ServerFacade; - conditionalHeaders: any; + config: ReportingConfig; } -let mockServer: any; +let mockConfig: ReportingConfig; +const getMockConfig = (mockConfigGet: jest.Mock) => { + return { + get: mockConfigGet, + kbnConfig: { get: mockConfigGet }, + }; +}; + beforeEach(() => { - mockServer = createMockServer(''); + const reportingConfig: Record = { + 'kibanaServer.hostname': 'localhost', + 'kibanaServer.port': 5601, + 'kibanaServer.protocol': 'http', + 'server.basePath': '/sbp', + }; + const mockConfigGet = jest.fn().mockImplementation((...keys: string[]) => { + return reportingConfig[keys.join('.') as string]; + }); + mockConfig = getMockConfig(mockConfigGet); }); +const getMockJob = (base: object) => base as JobDocPayloadPNG & JobDocPayloadPDF; + test(`fails if no URL is passed`, async () => { - const fn = () => - getFullUrls({ - job: {}, - server: mockServer, - } as FullUrlsOpts); + const fn = () => getFullUrls({ job: getMockJob({}), config: mockConfig } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"No valid URL fields found in Job Params! 
Expected \`job.relativeUrl: string\` or \`job.relativeUrls: string[]\`"` ); @@ -37,8 +49,8 @@ test(`fails if URLs are file-protocols for PNGs`, async () => { const relativeUrl = 'file://etc/passwd/#/something'; const fn = () => getFullUrls({ - job: { relativeUrl, forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl, forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"` @@ -51,8 +63,8 @@ test(`fails if URLs are absolute for PNGs`, async () => { 'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something'; const fn = () => getFullUrls({ - job: { relativeUrl, forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl, forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"` @@ -64,11 +76,11 @@ test(`fails if URLs are file-protocols for PDF`, async () => { const relativeUrl = 'file://etc/passwd/#/something'; const fn = () => getFullUrls({ - job: { + job: getMockJob({ relativeUrls: [relativeUrl], forceNow, - }, - server: mockServer, + }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"` @@ -81,11 +93,11 @@ test(`fails if URLs are absolute for PDF`, async () => { 'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something'; const fn = () => getFullUrls({ - job: { + job: getMockJob({ relativeUrls: [relativeUrl], forceNow, - }, - server: mockServer, + }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"` @@ -102,8 +114,8 @@ test(`fails if any URLs are absolute or file's for PDF`, async () => { const fn = () => getFullUrls({ - job: { relativeUrls, forceNow }, - server: mockServer, + job: getMockJob({ relativeUrls, forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something file://etc/passwd/#/something"` @@ -113,8 +125,8 @@ test(`fails if any URLs are absolute or file's for PDF`, async () => { test(`fails if URL does not route to a visualization`, async () => { const fn = () => getFullUrls({ - job: { relativeUrl: '/app/phoney' }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/phoney' }), + config: mockConfig, } as FullUrlsOpts); expect(fn).toThrowErrorMatchingInlineSnapshot( `"No valid hash in the URL! 
A hash is expected for the application to route to the intended visualization."` @@ -124,8 +136,8 @@ test(`fails if URL does not route to a visualization`, async () => { test(`adds forceNow to hash's query, if it exists`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: { relativeUrl: '/app/kibana#/something', forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/kibana#/something', forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(urls[0]).toEqual( @@ -137,8 +149,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: { relativeUrl: '/app/kibana#/something?_g=something', forceNow }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/kibana#/something?_g=something', forceNow }), + config: mockConfig, } as FullUrlsOpts); expect(urls[0]).toEqual( @@ -148,8 +160,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => { test(`doesn't append forceNow query to url, if it doesn't exists`, async () => { const urls = await getFullUrls({ - job: { relativeUrl: '/app/kibana#/something' }, - server: mockServer, + job: getMockJob({ relativeUrl: '/app/kibana#/something' }), + config: mockConfig, } as FullUrlsOpts); expect(urls[0]).toEqual('http://localhost:5601/sbp/app/kibana#/something'); @@ -158,7 +170,7 @@ test(`doesn't append forceNow query to url, if it doesn't exists`, async () => { test(`adds forceNow to each of multiple urls`, async () => { const forceNow = '2000-01-01T00:00:00.000Z'; const urls = await getFullUrls({ - job: { + job: getMockJob({ relativeUrls: [ '/app/kibana#/something_aaa', '/app/kibana#/something_bbb', @@ -166,8 +178,8 @@ test(`adds forceNow to each of multiple urls`, async () => { '/app/kibana#/something_ddd', ], forceNow, - }, - server: mockServer, + }), + config: mockConfig, } as FullUrlsOpts); expect(urls).toEqual([ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts index ca64d8632dbfeb..c4b6f31019fdf4 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.ts @@ -12,7 +12,7 @@ import { } from 'url'; import { getAbsoluteUrlFactory } from '../../../common/get_absolute_url'; import { validateUrls } from '../../../common/validate_urls'; -import { ServerFacade } from '../../../types'; +import { ReportingConfig } from '../../../server/types'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; @@ -24,19 +24,23 @@ function isPdfJob(job: JobDocPayloadPNG | JobDocPayloadPDF): job is JobDocPayloa } export function getFullUrls({ - server, + config, job, }: { - server: ServerFacade; + config: ReportingConfig; job: JobDocPayloadPDF | JobDocPayloadPNG; }) { - const config = server.config(); - + const [basePath, protocol, hostname, port] = [ + config.kbnConfig.get('server', 'basePath'), + config.get('kibanaServer', 'protocol'), + config.get('kibanaServer', 'hostname'), + config.get('kibanaServer', 'port'), + ] as string[]; const getAbsoluteUrl = getAbsoluteUrlFactory({ - defaultBasePath: config.get('server.basePath'), - protocol: config.get('xpack.reporting.kibanaServer.protocol') || server.info.protocol, - hostname: config.get('xpack.reporting.kibanaServer.hostname') || 
config.get('server.host'), - port: config.get('xpack.reporting.kibanaServer.port') || config.get('server.port'), + defaultBasePath: basePath, + protocol, + hostname, + port, }); // PDF and PNG job params put in the url differently diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts index f446369fec78ce..abf5784dacff9f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.test.ts @@ -19,6 +19,9 @@ test(`omits blacklisted headers`, async () => { 'content-type': '', host: '', 'transfer-encoding': '', + 'proxy-connection': 'bananas', + 'proxy-authorization': 'some-base64-encoded-thing', + trailer: 's are for trucks', }; const filteredHeaders = await omitBlacklistedHeaders({ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts index cbebd6bc21b0e6..2fbfd868674f60 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/omit_blacklisted_headers.ts @@ -4,7 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ import { omit } from 'lodash'; -import { KBN_SCREENSHOT_HEADER_BLACKLIST } from '../../../common/constants'; +import { + KBN_SCREENSHOT_HEADER_BLACKLIST, + KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN, +} from '../../../common/constants'; export const omitBlacklistedHeaders = ({ job, @@ -15,7 +18,12 @@ export const omitBlacklistedHeaders = ({ }) => { const filteredHeaders: Record = omit( decryptedHeaders, - KBN_SCREENSHOT_HEADER_BLACKLIST + (_value, header: string) => + header && + (KBN_SCREENSHOT_HEADER_BLACKLIST.includes(header) || + KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN.some(pattern => + header?.startsWith(pattern) + )) ); return filteredHeaders; }; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts index 0cb83352d4606a..07fceb603e451e 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/layouts/create_layout.ts @@ -3,17 +3,18 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { ServerFacade } from '../../../types'; + +import { CaptureConfig } from '../../../server/types'; import { LayoutTypes } from '../constants'; import { Layout, LayoutParams } from './layout'; import { PreserveLayout } from './preserve_layout'; import { PrintLayout } from './print_layout'; -export function createLayout(server: ServerFacade, layoutParams?: LayoutParams): Layout { +export function createLayout(captureConfig: CaptureConfig, layoutParams?: LayoutParams): Layout { if (layoutParams && layoutParams.id === LayoutTypes.PRESERVE_LAYOUT) { return new PreserveLayout(layoutParams.dimensions); } // this is the default because some jobs won't have anything specified - return new PrintLayout(server); + return new PrintLayout(captureConfig); } diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts b/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts index 6007c2960057a2..f6974379253fb9 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/layouts/print_layout.ts @@ -3,14 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + import path from 'path'; import { EvaluateFn, SerializableOrJSHandle } from 'puppeteer'; -import { LevelLogger } from '../../../server/lib'; import { HeadlessChromiumDriver } from '../../../server/browsers'; -import { ServerFacade } from '../../../types'; +import { LevelLogger } from '../../../server/lib'; +import { CaptureConfig } from '../../../server/types'; import { LayoutTypes } from '../constants'; import { getDefaultLayoutSelectors, Layout, LayoutSelectorDictionary, Size } from './layout'; -import { CaptureConfig } from './types'; export class PrintLayout extends Layout { public readonly selectors: LayoutSelectorDictionary = { @@ -20,9 +20,9 @@ export class PrintLayout extends Layout { public readonly groupCount = 2; private captureConfig: CaptureConfig; - constructor(server: ServerFacade) { + constructor(captureConfig: CaptureConfig) { super(LayoutTypes.PRINT); - this.captureConfig = server.config().get('xpack.reporting.capture'); + this.captureConfig = captureConfig; } public getCssOverridesPath() { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts index 16eb433e8a75e3..57d025890d3e22 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/get_number_of_items.ts @@ -7,17 +7,16 @@ import { i18n } from '@kbn/i18n'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; -import { ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_GETNUMBEROFITEMS, CONTEXT_READMETADATA } from './constants'; export const getNumberOfItems = async ( - server: ServerFacade, + captureConfig: CaptureConfig, browser: HeadlessBrowser, layout: LayoutInstance, logger: LevelLogger ): Promise => { - const config = server.config(); const { renderComplete: renderCompleteSelector, itemsCountAttribute } = layout.selectors; let itemsCount: number; @@ -33,7 
+32,7 @@ export const getNumberOfItems = async ( // we have to use this hint to wait for all of them await browser.waitForSelector( `${renderCompleteSelector},[${itemsCountAttribute}]`, - { timeout: config.get('xpack.reporting.capture.timeouts.waitForElements') }, + { timeout: captureConfig.timeouts.waitForElements }, { context: CONTEXT_READMETADATA }, logger ); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts index 13d07bcdd6baf7..75ac3dca4ffa06 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.test.ts @@ -19,12 +19,9 @@ import * as Rx from 'rxjs'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { loggingServiceMock } from '../../../../../../../../src/core/server/mocks'; import { LevelLogger } from '../../../../server/lib'; -import { - createMockBrowserDriverFactory, - createMockLayoutInstance, - createMockServer, -} from '../../../../test_helpers'; +import { createMockBrowserDriverFactory, createMockLayoutInstance } from '../../../../test_helpers'; import { ConditionalHeaders, HeadlessChromiumDriver } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; import { screenshotsObservableFactory } from './observable'; import { ElementsPositionAndAttribute } from './types'; @@ -34,8 +31,8 @@ import { ElementsPositionAndAttribute } from './types'; const mockLogger = jest.fn(loggingServiceMock.create); const logger = new LevelLogger(mockLogger()); -const __LEGACY = createMockServer({ settings: { 'xpack.reporting.capture': { loadDelay: 13 } } }); -const mockLayout = createMockLayoutInstance(__LEGACY); +const mockConfig = { timeouts: { openUrl: 13 } } as CaptureConfig; +const mockLayout = createMockLayoutInstance(mockConfig); /* * Tests @@ -48,7 +45,7 @@ describe('Screenshot Observable Pipeline', () => { }); it('pipelines a single url into screenshot and timeRange', async () => { - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const result = await getScreenshots$({ logger, urls: ['/welcome/home/start/index.htm'], @@ -86,7 +83,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const result = await getScreenshots$({ logger, urls: ['/welcome/home/start/index2.htm', '/welcome/home/start/index.php3?page=./home.php'], @@ -136,7 +133,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const getScreenshot = async () => { return await getScreenshots$({ logger, @@ -197,7 +194,7 @@ describe('Screenshot Observable Pipeline', () => { }); // test - const getScreenshots$ = screenshotsObservableFactory(__LEGACY, mockBrowserDriverFactory); + const getScreenshots$ = screenshotsObservableFactory(mockConfig, mockBrowserDriverFactory); const getScreenshot = async () => { return await getScreenshots$({ logger, diff --git 
a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts index 44c04c763f840a..53a11c18abd797 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/observable.ts @@ -6,24 +6,22 @@ import * as Rx from 'rxjs'; import { catchError, concatMap, first, mergeMap, take, takeUntil, toArray } from 'rxjs/operators'; -import { CaptureConfig, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; +import { HeadlessChromiumDriverFactory } from '../../../../types'; import { getElementPositionAndAttributes } from './get_element_position_data'; import { getNumberOfItems } from './get_number_of_items'; import { getScreenshots } from './get_screenshots'; import { getTimeRange } from './get_time_range'; +import { injectCustomCss } from './inject_css'; import { openUrl } from './open_url'; import { ScreenSetupData, ScreenshotObservableOpts, ScreenshotResults } from './types'; import { waitForRenderComplete } from './wait_for_render'; import { waitForVisualizations } from './wait_for_visualizations'; -import { injectCustomCss } from './inject_css'; export function screenshotsObservableFactory( - server: ServerFacade, + captureConfig: CaptureConfig, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const config = server.config(); - const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); - return function screenshotsObservable({ logger, urls, @@ -41,13 +39,13 @@ export function screenshotsObservableFactory( mergeMap(({ driver, exit$ }) => { const setup$: Rx.Observable = Rx.of(1).pipe( takeUntil(exit$), - mergeMap(() => openUrl(server, driver, url, conditionalHeaders, logger)), - mergeMap(() => getNumberOfItems(server, driver, layout, logger)), + mergeMap(() => openUrl(captureConfig, driver, url, conditionalHeaders, logger)), + mergeMap(() => getNumberOfItems(captureConfig, driver, layout, logger)), mergeMap(async itemsCount => { const viewport = layout.getViewport(itemsCount); await Promise.all([ driver.setViewport(viewport, logger), - waitForVisualizations(server, driver, itemsCount, layout, logger), + waitForVisualizations(captureConfig, driver, itemsCount, layout, logger), ]); }), mergeMap(async () => { @@ -60,7 +58,7 @@ export function screenshotsObservableFactory( await layout.positionElements(driver, logger); } - await waitForRenderComplete(driver, layout, captureConfig, logger); + await waitForRenderComplete(captureConfig, driver, layout, logger); }), mergeMap(async () => { return await Promise.all([ diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts index fbae1f91a7a6a7..a484dfb243563d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/open_url.ts @@ -5,27 +5,26 @@ */ import { i18n } from '@kbn/i18n'; -import { ConditionalHeaders, ServerFacade } from '../../../../types'; -import { LevelLogger } from '../../../../server/lib'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; +import { LevelLogger } from '../../../../server/lib'; +import { CaptureConfig } from '../../../../server/types'; +import { 
ConditionalHeaders } from '../../../../types'; import { PAGELOAD_SELECTOR } from '../../constants'; export const openUrl = async ( - server: ServerFacade, + captureConfig: CaptureConfig, browser: HeadlessBrowser, url: string, conditionalHeaders: ConditionalHeaders, logger: LevelLogger ): Promise => { - const config = server.config(); - try { await browser.open( url, { conditionalHeaders, waitForSelector: PAGELOAD_SELECTOR, - timeout: config.get('xpack.reporting.capture.timeouts.openUrl'), + timeout: captureConfig.timeouts.openUrl, }, logger ); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts index ab81a952f345ce..76613c2d631d64 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/types.ts @@ -4,8 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ElementPosition, ConditionalHeaders } from '../../../../types'; import { LevelLogger } from '../../../../server/lib'; +import { ConditionalHeaders, ElementPosition } from '../../../../types'; import { LayoutInstance } from '../../layouts/layout'; export interface ScreenshotObservableOpts { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts index 2f6dc2829dfd8d..069896c8d9e90c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts @@ -5,16 +5,16 @@ */ import { i18n } from '@kbn/i18n'; -import { CaptureConfig } from '../../../../types'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; +import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_WAITFORRENDER } from './constants'; export const waitForRenderComplete = async ( + captureConfig: CaptureConfig, browser: HeadlessBrowser, layout: LayoutInstance, - captureConfig: CaptureConfig, logger: LevelLogger ) => { logger.debug( diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts index 93ad40026dff81..7960e1552e5590 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_visualizations.ts @@ -5,9 +5,9 @@ */ import { i18n } from '@kbn/i18n'; -import { ServerFacade } from '../../../../types'; import { HeadlessChromiumDriver as HeadlessBrowser } from '../../../../server/browsers'; import { LevelLogger } from '../../../../server/lib'; +import { CaptureConfig } from '../../../../server/types'; import { LayoutInstance } from '../../layouts/layout'; import { CONTEXT_WAITFORELEMENTSTOBEINDOM } from './constants'; @@ -23,13 +23,12 @@ const getCompletedItemsCount = ({ renderCompleteSelector }: SelectorArgs) => { * 3. 
Wait for the render complete event to be fired once for each item */ export const waitForVisualizations = async ( - server: ServerFacade, + captureConfig: CaptureConfig, browser: HeadlessBrowser, itemsCount: number, layout: LayoutInstance, logger: LevelLogger ): Promise => { - const config = server.config(); const { renderComplete: renderCompleteSelector } = layout.selectors; logger.debug( @@ -45,7 +44,7 @@ export const waitForVisualizations = async ( fn: getCompletedItemsCount, args: [{ renderCompleteSelector }], toEqual: itemsCount, - timeout: config.get('xpack.reporting.capture.timeouts.renderComplete'), + timeout: captureConfig.timeouts.renderComplete, }, { context: CONTEXT_WAITFORELEMENTSTOBEINDOM }, logger diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts index 7ea67277015ab6..0e704a041452ab 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts @@ -11,14 +11,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, - ServerFacade, } from '../../../types'; import { JobParamsDiscoverCsv } from '../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); return async function createJob( jobParams: JobParamsDiscoverCsv, diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index f12916b734dbf6..93dbe598b367c9 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -11,8 +11,8 @@ import { CancellationToken } from '../../../common/cancellation_token'; import { fieldFormats } from '../../../../../../../src/plugins/data/server'; import { createMockReportingCore } from '../../../test_helpers'; import { LevelLogger } from '../../../server/lib/level_logger'; -import { executeJobFactory } from './execute_job'; import { setFieldFormats } from '../../../server/services'; +import { executeJobFactory } from './execute_job'; const delay = ms => new Promise(resolve => setTimeout(() => resolve(), ms)); @@ -36,11 +36,12 @@ describe('CSV Execute Job', function() { let defaultElasticsearchResponse; let encryptedHeaders; - let cancellationToken; - let mockReportingPlugin; - let mockServer; let clusterStub; + let configGetStub; + let mockReportingConfig; + let mockReportingPlugin; let callAsCurrentUserStub; + let cancellationToken; const mockElasticsearch = { dataClient: { @@ -57,8 +58,16 @@ describe('CSV Execute Job', function() { }); beforeEach(async function() { - mockReportingPlugin = await createMockReportingCore(); - mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; + configGetStub = sinon.stub(); + configGetStub.withArgs('encryptionKey').returns(encryptionKey); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(1024 * 1000); // 1mB + configGetStub.withArgs('csv', 'scroll').returns({}); + mockReportingConfig = { get: configGetStub, kbnConfig: { get: configGetStub } }; + + mockReportingPlugin = await createMockReportingCore(mockReportingConfig); + 
mockReportingPlugin.getUiSettingsServiceFactory = () => Promise.resolve(mockUiSettingsClient); + mockReportingPlugin.getElasticsearchService = () => Promise.resolve(mockElasticsearch); + cancellationToken = new CancellationToken(); defaultElasticsearchResponse = { @@ -75,7 +84,6 @@ describe('CSV Execute Job', function() { .stub(clusterStub, 'callAsCurrentUser') .resolves(defaultElasticsearchResponse); - const configGetStub = sinon.stub(); mockUiSettingsClient.get.withArgs('csv:separator').returns(','); mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); @@ -93,36 +101,11 @@ describe('CSV Execute Job', function() { return fieldFormatsRegistry; }, }); - - mockServer = { - config: function() { - return { - get: configGetStub, - }; - }, - }; - mockServer - .config() - .get.withArgs('xpack.reporting.encryptionKey') - .returns(encryptionKey); - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(1024 * 1000); // 1mB - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({}); }); describe('basic Elasticsearch call behavior', function() { it('should decrypt encrypted headers and pass to callAsCurrentUser', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -138,12 +121,7 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const job = { headers: encryptedHeaders, fields: [], @@ -170,12 +148,7 @@ describe('CSV Execute Job', function() { _scroll_id: scrollId, }); callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -189,12 +162,7 @@ describe('CSV Execute Job', function() { }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -224,12 +192,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -264,12 +227,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); await executeJob( 'job456', { headers: 
encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -297,12 +255,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -321,10 +274,7 @@ describe('CSV Execute Job', function() { describe('Cells with formula values', () => { it('returns `csv_contains_formulas` when cells contain formulas', async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(true); + configGetStub.withArgs('csv', 'checkForFormulas').returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], @@ -332,12 +282,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -354,10 +299,7 @@ describe('CSV Execute Job', function() { }); it('returns warnings when headings contain formulas', async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(true); + configGetStub.withArgs('csv', 'checkForFormulas').returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }], @@ -365,12 +307,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -387,10 +324,7 @@ describe('CSV Execute Job', function() { }); it('returns no warnings when cells have no formulas', async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(true); + configGetStub.withArgs('csv', 'checkForFormulas').returns(true); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -398,12 +332,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -420,10 +349,7 @@ describe('CSV Execute Job', function() { }); it('returns no warnings when configured not to', async () => { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.checkForFormulas') - .returns(false); + configGetStub.withArgs('csv', 'checkForFormulas').returns(false); callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], @@ -431,12 +357,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { 
headers: encryptedHeaders, fields: ['one', 'two'], @@ -456,12 +377,7 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { callAsCurrentUserStub.rejects(new Error()); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -480,12 +396,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); callAsCurrentUserStub.onSecondCall().rejects(new Error()); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -506,12 +417,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -532,12 +438,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -565,12 +466,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -598,12 +494,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -639,12 +530,7 @@ describe('CSV Execute Job', function() { }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -659,12 +545,7 @@ describe('CSV Execute Job', function() { }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -678,12 +559,7 @@ describe('CSV Execute Job', function() { }); it('should call clearScroll if it got a scrollId', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = 
await executeJobFactory(mockReportingPlugin, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -701,12 +577,7 @@ describe('CSV Execute Job', function() { describe('csv content', function() { it('should write column headers to output, even if there are no results', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -718,12 +589,7 @@ describe('CSV Execute Job', function() { it('should use custom uiSettings csv:separator for header', async function() { mockUiSettingsClient.get.withArgs('csv:separator').returns(';'); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -735,12 +601,7 @@ describe('CSV Execute Job', function() { it('should escape column headers if uiSettings csv:quoteValues is true', async function() { mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -752,12 +613,7 @@ describe('CSV Execute Job', function() { it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -768,12 +624,7 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], @@ -793,12 +644,7 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -819,12 +665,7 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -852,12 
+693,7 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async function() { - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -897,17 +733,9 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(1); - - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(1); + + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -935,17 +763,9 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(9); - - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); + + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -973,10 +793,7 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(9); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(9); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -985,12 +802,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1020,10 +832,7 @@ describe('CSV Execute Job', function() { beforeEach(async function() { mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.maxSizeBytes') - .returns(18); + configGetStub.withArgs('csv', 'maxSizeBytes').returns(18); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -1032,12 +841,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1065,10 +869,7 @@ describe('CSV Execute Job', function() { describe('scroll settings', function() { it('passes scroll duration to initial search call', async function() { const scrollDuration = 'test'; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({ duration: scrollDuration }); + configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration }); callAsCurrentUserStub.onFirstCall().returns({ hits: { @@ -1077,12 +878,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - 
mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1099,10 +895,7 @@ describe('CSV Execute Job', function() { it('passes scroll size to initial search call', async function() { const scrollSize = 100; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({ size: scrollSize }); + configGetStub.withArgs('csv', 'scroll').returns({ size: scrollSize }); callAsCurrentUserStub.onFirstCall().resolves({ hits: { @@ -1111,12 +904,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1133,10 +921,7 @@ describe('CSV Execute Job', function() { it('passes scroll duration to subsequent scroll call', async function() { const scrollDuration = 'test'; - mockServer - .config() - .get.withArgs('xpack.reporting.csv.scroll') - .returns({ duration: scrollDuration }); + configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration }); callAsCurrentUserStub.onFirstCall().resolves({ hits: { @@ -1145,12 +930,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = await executeJobFactory( - mockReportingPlugin, - mockServer, - mockElasticsearch, - mockLogger - ); + const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index 15799858910532..d78d8a8a8010d8 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -6,38 +6,30 @@ import { i18n } from '@kbn/i18n'; import Hapi from 'hapi'; -import { - ElasticsearchServiceSetup, - IUiSettingsClient, - KibanaRequest, -} from '../../../../../../../src/core/server'; +import { IUiSettingsClient, KibanaRequest } from '../../../../../../../src/core/server'; import { CSV_JOB_TYPE } from '../../../common/constants'; -import { ReportingCore } from '../../../server'; +import { ReportingCore } from '../../../server/core'; import { cryptoFactory } from '../../../server/lib'; import { getFieldFormats } from '../../../server/services'; -import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger } from '../../../types'; import { JobDocPayloadDiscoverCsv } from '../types'; import { fieldFormatMapFactory } from './lib/field_format_map'; import { createGenerateCsv } from './lib/generate_csv'; export const executeJobFactory: ExecuteJobFactory> = async function executeJobFactoryFn( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -) { - const crypto = cryptoFactory(server); - const config = server.config(); +>> = async function executeJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); const logger = parentLogger.clone([CSV_JOB_TYPE, 
'execute-job']); - const serverBasePath = config.get('server.basePath'); + const serverBasePath = config.kbnConfig.get('server', 'basePath'); return async function executeJob( jobId: string, job: JobDocPayloadDiscoverCsv, cancellationToken: any ) { + const elasticsearch = await reporting.getElasticsearchService(); const jobLogger = logger.clone([jobId]); const { @@ -131,9 +123,9 @@ export const executeJobFactory: ExecuteJobFactory) { const response = await request; diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts index 842330fa7c93f3..529c195486bc6d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/types.d.ts @@ -5,7 +5,8 @@ */ import { CancellationToken } from '../../common/cancellation_token'; -import { JobDocPayload, JobParamPostPayload, ConditionalHeaders, RequestFacade } from '../../types'; +import { ScrollConfig } from '../../server/types'; +import { JobDocPayload, JobParamPostPayload } from '../../types'; interface DocValueField { field: string; @@ -106,7 +107,7 @@ export interface GenerateCsvParams { quoteValues: boolean; timezone: string | null; maxSizeBytes: number; - scroll: { duration: string; size: number }; + scroll: ScrollConfig; checkForFormulas?: boolean; }; } diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index 17072d311b35f3..8e0376a190267a 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -5,18 +5,11 @@ */ import { notFound, notImplemented } from 'boom'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; import { cryptoFactory } from '../../../../server/lib'; -import { - CreateJobFactory, - ImmediateCreateJobFn, - Logger, - RequestFacade, - ServerFacade, -} from '../../../../types'; +import { CreateJobFactory, ImmediateCreateJobFn, Logger, RequestFacade } from '../../../../types'; import { JobDocPayloadPanelCsv, JobParamsPanelCsv, @@ -37,13 +30,9 @@ interface VisData { export const createJobFactory: CreateJobFactory> = function createJobFactoryFn( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index 6bb3e73fcfe84a..afa917f17651c3 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -5,7 +5,6 @@ */ import { i18n } from '@kbn/i18n'; -import { ElasticsearchServiceSetup } from 
'kibana/server'; import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; @@ -15,7 +14,6 @@ import { JobDocOutput, Logger, RequestFacade, - ServerFacade, } from '../../../types'; import { CsvResultFromSearch } from '../../csv/types'; import { FakeRequest, JobDocPayloadPanelCsv, JobParamsPanelCsv, SearchPanel } from '../types'; @@ -23,15 +21,11 @@ import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = async function executeJobFactoryFn( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -) { - const crypto = cryptoFactory(server); +>> = async function executeJobFactoryFn(reporting: ReportingCore, parentLogger: Logger) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); - const generateCsv = createGenerateCsv(reporting, server, elasticsearch, parentLogger); + const generateCsv = createGenerateCsv(reporting, parentLogger); return async function executeJob( jobId: string | null, @@ -57,11 +51,11 @@ export const executeJobFactory: ExecuteJobFactory; const serializedEncryptedHeaders = job.headers; try { decryptedHeaders = await crypto.decrypt(serializedEncryptedHeaders); @@ -79,10 +73,7 @@ export const executeJobFactory: ExecuteJobFactory { export async function generateCsvSearch( req: RequestFacade, reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: Logger, searchPanel: SearchPanel, jobParams: JobParamsDiscoverCsv @@ -159,11 +153,12 @@ export async function generateCsvSearch( }, }; + const config = reporting.getConfig(); + const elasticsearch = await reporting.getElasticsearchService(); const { callAsCurrentUser } = elasticsearch.dataClient.asScoped( KibanaRequest.from(req.getRawRequest()) ); const callCluster = (...params: [string, object]) => callAsCurrentUser(...params); - const config = server.config(); const uiSettings = await getUiSettings(uiConfig); const generateCsvParams: GenerateCsvParams = { @@ -176,8 +171,8 @@ export async function generateCsvSearch( cancellationToken: new CancellationToken(), settings: { ...uiSettings, - maxSizeBytes: config.get('xpack.reporting.csv.maxSizeBytes'), - scroll: config.get('xpack.reporting.csv.scroll'), + maxSizeBytes: config.get('csv', 'maxSizeBytes'), + scroll: config.get('csv', 'scroll'), timezone, }, }; diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts index 6a7d5f336e238d..ab14d2dd8a660c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/types.d.ts @@ -4,11 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { JobParamPostPayload, JobDocPayload, ServerFacade } from '../../types'; +import { JobDocPayload, JobParamPostPayload } from '../../types'; export interface FakeRequest { - headers: any; - server: ServerFacade; + headers: Record; } export interface JobParamsPostPayloadPanelCsv extends JobParamPostPayload { diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts index a6911e1f147040..1f834bde88a2de 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts @@ -12,14 +12,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, - ServerFacade, } from '../../../../types'; import { JobParamsPNG } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore) { + const config = reporting.getConfig(); + const crypto = cryptoFactory(config.get('encryptionKey')); return async function createJob( { objectType, title, relativeUrl, browserTimezone, layout }: JobParamsPNG, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index e2e6ba1b890963..cb63e7dad2fdf4 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -5,7 +5,6 @@ */ import * as Rx from 'rxjs'; -import { memoize } from 'lodash'; import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; @@ -14,63 +13,65 @@ import { LevelLogger } from '../../../../server/lib'; jest.mock('../lib/generate_png', () => ({ generatePngObservableFactory: jest.fn() })); +let mockReporting; + const cancellationToken = { on: jest.fn(), }; -let config; -let mockServer; -let mockReporting; +const mockLoggerFactory = { + get: jest.fn().mockImplementation(() => ({ + error: jest.fn(), + debug: jest.fn(), + warn: jest.fn(), + })), +}; +const getMockLogger = () => new LevelLogger(mockLoggerFactory); -beforeEach(async () => { - mockReporting = await createMockReportingCore(); +const mockEncryptionKey = 'abcabcsecuresecret'; +const encryptHeaders = async headers => { + const crypto = cryptoFactory(mockEncryptionKey); + return await crypto.encrypt(headers); +}; - config = { - 'xpack.reporting.encryptionKey': 'testencryptionkey', +beforeEach(async () => { + const kbnConfig = { 'server.basePath': '/sbp', - 'server.host': 'localhost', - 'server.port': 5601, }; - mockServer = { - config: memoize(() => ({ get: jest.fn() })), - info: { - protocol: 'http', + const reportingConfig = { + encryptionKey: mockEncryptionKey, + 'kibanaServer.hostname': 'localhost', + 'kibanaServer.port': 5601, + 'kibanaServer.protocol': 'http', + }; + const mockReportingConfig = { + get: (...keys) => reportingConfig[keys.join('.')], + kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, + }; + + mockReporting = await createMockReportingCore(mockReportingConfig); + + const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), }, }; - 
mockServer.config().get.mockImplementation(key => { - return config[key]; - }); + const mockGetElasticsearch = jest.fn(); + mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); + mockReporting.getElasticsearchService = mockGetElasticsearch; generatePngObservableFactory.mockReturnValue(jest.fn()); }); afterEach(() => generatePngObservableFactory.mockReset()); -const mockElasticsearch = { - dataClient: { - asScoped: () => ({ callAsCurrentUser: jest.fn() }), - }, -}; - -const getMockLogger = () => new LevelLogger(); - -const encryptHeaders = async headers => { - const crypto = cryptoFactory(mockServer); - return await crypto.encrypt(headers); -}; - test(`passes browserTimezone to generatePng`, async () => { const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger() - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -88,15 +89,7 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger(), - { - browserDriverFactory: {}, - } - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -116,15 +109,7 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) })); - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger(), - { - browserDriverFactory: {}, - } - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index 8670f0027af89e..113da92d1862f0 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -4,18 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PNG_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; -import { - ESQueueWorkerExecuteFn, - ExecuteJobFactory, - JobDocOutput, - Logger, - ServerFacade, -} from '../../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, JobDocOutput, Logger } from '../../../../types'; import { decryptJobHeaders, getConditionalHeaders, @@ -29,22 +22,23 @@ type QueuedPngExecutorFactory = ExecuteJobFactory = Rx.of(1).pipe( - mergeMap(() => decryptJobHeaders({ server, job, logger })), + mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), - map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), + map(filteredHeaders => getConditionalHeaders({ config, job, filteredHeaders })), mergeMap(conditionalHeaders => { - const urls = getFullUrls({ server, job }); + const urls = getFullUrls({ config, job }); const hashUrl = urls[0]; return generatePngObservable( jobLogger, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts index 88e91982adc632..a15541d99f6fb5 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/lib/generate_png.ts @@ -7,17 +7,18 @@ import * as Rx from 'rxjs'; import { map } from 'rxjs/operators'; import { LevelLogger } from '../../../../server/lib'; -import { ConditionalHeaders, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; +import { ConditionalHeaders, HeadlessChromiumDriverFactory } from '../../../../types'; import { LayoutParams } from '../../../common/layouts/layout'; import { PreserveLayout } from '../../../common/layouts/preserve_layout'; import { screenshotsObservableFactory } from '../../../common/lib/screenshots'; import { ScreenshotResults } from '../../../common/lib/screenshots/types'; export function generatePngObservableFactory( - server: ServerFacade, + captureConfig: CaptureConfig, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory); + const screenshotsObservable = screenshotsObservableFactory(captureConfig, browserDriverFactory); return function generatePngObservable( logger: LevelLogger, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts index 656c99991e1f61..25d2d64b1029d7 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts @@ -12,14 +12,14 @@ import { CreateJobFactory, ESQueueCreateJobFn, RequestFacade, - ServerFacade, } from '../../../../types'; import { JobParamsPDF } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { - const crypto = cryptoFactory(server); +>> = function createJobFactoryFn(reporting: ReportingCore) { + const config = reporting.getConfig(); + const crypto = 
cryptoFactory(config.get('encryptionKey')); return async function createJobFn( { title, relativeUrls, browserTimezone, layout, objectType }: JobParamsPDF, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index 484842ba18f2ad..c6f07f8ad2d344 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -5,7 +5,6 @@ */ import * as Rx from 'rxjs'; -import { memoize } from 'lodash'; import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; @@ -14,57 +13,60 @@ import { LevelLogger } from '../../../../server/lib'; jest.mock('../lib/generate_pdf', () => ({ generatePdfObservableFactory: jest.fn() })); +let mockReporting; + const cancellationToken = { on: jest.fn(), }; -let config; -let mockServer; -let mockReporting; +const mockLoggerFactory = { + get: jest.fn().mockImplementation(() => ({ + error: jest.fn(), + debug: jest.fn(), + warn: jest.fn(), + })), +}; +const getMockLogger = () => new LevelLogger(mockLoggerFactory); -beforeEach(async () => { - mockReporting = await createMockReportingCore(); +const mockEncryptionKey = 'testencryptionkey'; +const encryptHeaders = async headers => { + const crypto = cryptoFactory(mockEncryptionKey); + return await crypto.encrypt(headers); +}; - config = { - 'xpack.reporting.encryptionKey': 'testencryptionkey', +beforeEach(async () => { + const kbnConfig = { 'server.basePath': '/sbp', - 'server.host': 'localhost', - 'server.port': 5601, }; - mockServer = { - config: memoize(() => ({ get: jest.fn() })), - info: { - protocol: 'http', + const reportingConfig = { + encryptionKey: mockEncryptionKey, + 'kibanaServer.hostname': 'localhost', + 'kibanaServer.port': 5601, + 'kibanaServer.protocol': 'http', + }; + const mockReportingConfig = { + get: (...keys) => reportingConfig[keys.join('.')], + kbnConfig: { get: (...keys) => kbnConfig[keys.join('.')] }, + }; + + mockReporting = await createMockReportingCore(mockReportingConfig); + + const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), }, }; - mockServer.config().get.mockImplementation(key => { - return config[key]; - }); + const mockGetElasticsearch = jest.fn(); + mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch)); + mockReporting.getElasticsearchService = mockGetElasticsearch; generatePdfObservableFactory.mockReturnValue(jest.fn()); }); afterEach(() => generatePdfObservableFactory.mockReset()); -const getMockLogger = () => new LevelLogger(); -const mockElasticsearch = { - dataClient: { - asScoped: () => ({ callAsCurrentUser: jest.fn() }), - }, -}; - -const encryptHeaders = async headers => { - const crypto = cryptoFactory(mockServer); - return await crypto.encrypt(headers); -}; - test(`returns content_type of application/pdf`, async () => { - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger() - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = generatePdfObservableFactory(); @@ -84,12 +86,7 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => 
{ const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) })); - const executeJob = await executeJobFactory( - mockReporting, - mockServer, - mockElasticsearch, - getMockLogger() - ); + const executeJob = await executeJobFactory(mockReporting, getMockLogger()); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts index 535c2dcd439a7a..dbdccb6160a6e6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts @@ -4,18 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ElasticsearchServiceSetup } from 'kibana/server'; import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PDF_JOB_TYPE } from '../../../../common/constants'; import { ReportingCore } from '../../../../server'; -import { - ESQueueWorkerExecuteFn, - ExecuteJobFactory, - JobDocOutput, - Logger, - ServerFacade, -} from '../../../../types'; +import { ESQueueWorkerExecuteFn, ExecuteJobFactory, JobDocOutput, Logger } from '../../../../types'; import { decryptJobHeaders, getConditionalHeaders, @@ -30,23 +23,26 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory = Rx.of(1).pipe( - mergeMap(() => decryptJobHeaders({ server, job, logger })), + mergeMap(() => decryptJobHeaders({ encryptionKey, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), - map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), - mergeMap(conditionalHeaders => getCustomLogo({ reporting, server, job, conditionalHeaders })), + map(filteredHeaders => getConditionalHeaders({ config, job, filteredHeaders })), + mergeMap(conditionalHeaders => getCustomLogo({ reporting, config, job, conditionalHeaders })), mergeMap(({ logo, conditionalHeaders }) => { - const urls = getFullUrls({ server, job }); + const urls = getFullUrls({ config, job }); const { browserTimezone, layout, title } = job; return generatePdfObservable( diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts index d78effaa1fc2f9..a62b7ec7013a59 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/generate_pdf.ts @@ -8,7 +8,8 @@ import { groupBy } from 'lodash'; import * as Rx from 'rxjs'; import { mergeMap } from 'rxjs/operators'; import { LevelLogger } from '../../../../server/lib'; -import { ConditionalHeaders, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; +import { ConditionalHeaders, HeadlessChromiumDriverFactory } from '../../../../types'; import { createLayout } from '../../../common/layouts'; import { LayoutInstance, LayoutParams } from '../../../common/layouts/layout'; import { screenshotsObservableFactory } from '../../../common/lib/screenshots'; @@ -27,10 +28,10 @@ const getTimeRange = (urlScreenshots: ScreenshotResults[]) => { }; export function 
generatePdfObservableFactory( - server: ServerFacade, + captureConfig: CaptureConfig, browserDriverFactory: HeadlessChromiumDriverFactory ) { - const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory); + const screenshotsObservable = screenshotsObservableFactory(captureConfig, browserDriverFactory); return function generatePdfObservable( logger: LevelLogger, @@ -41,7 +42,7 @@ export function generatePdfObservableFactory( layoutParams: LayoutParams, logo?: string ): Rx.Observable<{ buffer: Buffer; warnings: string[] }> { - const layout = createLayout(server, layoutParams) as LayoutInstance; + const layout = createLayout(captureConfig, layoutParams) as LayoutInstance; const screenshots$ = screenshotsObservable({ logger, urls, diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts index 0a9dcfe986ca63..e8dd3c5207d926 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/types.d.ts @@ -4,8 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ +import { JobDocPayload } from '../../types'; import { LayoutInstance, LayoutParams } from '../common/layouts/layout'; -import { JobDocPayload, ServerFacade, RequestFacade } from '../../types'; // Job params: structure of incoming user request data, after being parsed from RISON export interface JobParamsPDF { diff --git a/x-pack/legacy/plugins/reporting/index.ts b/x-pack/legacy/plugins/reporting/index.ts index 89e98302cddc91..a5d27d0545da10 100644 --- a/x-pack/legacy/plugins/reporting/index.ts +++ b/x-pack/legacy/plugins/reporting/index.ts @@ -12,9 +12,7 @@ import { config as reportingConfig } from './config'; import { legacyInit } from './server/legacy'; import { ReportingPluginSpecOptions } from './types'; -const kbToBase64Length = (kb: number) => { - return Math.floor((kb * 1024 * 8) / 6); -}; +const kbToBase64Length = (kb: number) => Math.floor((kb * 1024 * 8) / 6); export const reporting = (kibana: any) => { return new kibana.Plugin({ diff --git a/x-pack/legacy/plugins/reporting/log_configuration.ts b/x-pack/legacy/plugins/reporting/log_configuration.ts index b07475df6304ff..7aaed2038bd523 100644 --- a/x-pack/legacy/plugins/reporting/log_configuration.ts +++ b/x-pack/legacy/plugins/reporting/log_configuration.ts @@ -6,22 +6,23 @@ import getosSync, { LinuxOs } from 'getos'; import { promisify } from 'util'; -import { ServerFacade, Logger } from './types'; +import { BROWSER_TYPE } from './common/constants'; +import { CaptureConfig } from './server/types'; +import { Logger } from './types'; const getos = promisify(getosSync); -export async function logConfiguration(server: ServerFacade, logger: Logger) { - const config = server.config(); +export async function logConfiguration(captureConfig: CaptureConfig, logger: Logger) { + const { + browser: { + type: browserType, + chromium: { disableSandbox }, + }, + } = captureConfig; - const browserType = config.get('xpack.reporting.capture.browser.type'); logger.debug(`Browser type: ${browserType}`); - - if (browserType === 'chromium') { - logger.debug( - `Chromium sandbox disabled: ${config.get( - 'xpack.reporting.capture.browser.chromium.disableSandbox' - )}` - ); + if (browserType === BROWSER_TYPE) { + logger.debug(`Chromium sandbox disabled: ${disableSandbox}`); } const os = await getos(); diff --git 
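For readers following the CaptureConfig refactor, here is a minimal standalone sketch of the destructuring that logConfiguration performs above. The object literal is a placeholder shaped like the fields the function reads, not the plugin's real defaults.

// Hypothetical CaptureConfig-like literal; only the fields logConfiguration reads are filled in.
const captureConfig = {
  browser: {
    type: 'chromium' as const,
    chromium: { disableSandbox: false },
  },
};

const {
  browser: {
    type: browserType,
    chromium: { disableSandbox },
  },
} = captureConfig;

// Mirrors the debug output shown in log_configuration.ts above.
console.log(`Browser type: ${browserType}`);
console.log(`Chromium sandbox disabled: ${disableSandbox}`);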
a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts index dc79a6b9db2c11..a2f7a1f3ad0dae 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/args.ts @@ -4,11 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import { BrowserConfig } from '../../../../types'; +import { CaptureConfig } from '../../../../server/types'; + +type ViewportConfig = CaptureConfig['viewport']; +type BrowserConfig = CaptureConfig['browser']['chromium']; interface LaunchArgs { userDataDir: BrowserConfig['userDataDir']; - viewport: BrowserConfig['viewport']; + viewport: ViewportConfig; disableSandbox: BrowserConfig['disableSandbox']; proxy: BrowserConfig['proxy']; } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts index f90f2c7aee395b..cb228150efbcd0 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/driver_factory/index.ts @@ -19,7 +19,8 @@ import { import * as Rx from 'rxjs'; import { InnerSubscriber } from 'rxjs/internal/InnerSubscriber'; import { ignoreElements, map, mergeMap, tap } from 'rxjs/operators'; -import { BrowserConfig, CaptureConfig } from '../../../../types'; +import { BROWSER_TYPE } from '../../../../common/constants'; +import { CaptureConfig } from '../../../../server/types'; import { LevelLogger as Logger } from '../../../lib/level_logger'; import { safeChildProcess } from '../../safe_child_process'; import { HeadlessChromiumDriver } from '../driver'; @@ -28,7 +29,8 @@ import { puppeteerLaunch } from '../puppeteer'; import { args } from './args'; type binaryPath = string; -type ViewportConfig = BrowserConfig['viewport']; +type BrowserConfig = CaptureConfig['browser']['chromium']; +type ViewportConfig = CaptureConfig['viewport']; export class HeadlessChromiumDriverFactory { private binaryPath: binaryPath; @@ -37,15 +39,10 @@ export class HeadlessChromiumDriverFactory { private userDataDir: string; private getChromiumArgs: (viewport: ViewportConfig) => string[]; - constructor( - binaryPath: binaryPath, - logger: Logger, - browserConfig: BrowserConfig, - captureConfig: CaptureConfig - ) { + constructor(binaryPath: binaryPath, logger: Logger, captureConfig: CaptureConfig) { this.binaryPath = binaryPath; - this.browserConfig = browserConfig; this.captureConfig = captureConfig; + this.browserConfig = captureConfig.browser.chromium; this.userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-')); this.getChromiumArgs = (viewport: ViewportConfig) => @@ -57,7 +54,7 @@ export class HeadlessChromiumDriverFactory { }); } - type = 'chromium'; + type = BROWSER_TYPE; test(logger: Logger) { const chromiumArgs = args({ @@ -153,7 +150,7 @@ export class HeadlessChromiumDriverFactory { // HeadlessChromiumDriver: object to "drive" a browser page const driver = new HeadlessChromiumDriver(page, { - inspect: this.browserConfig.inspect, + inspect: !!this.browserConfig.inspect, networkPolicy: this.captureConfig.networkPolicy, }); diff --git a/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts b/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts index d32338ae3e311e..5f89662c94da2e 100644 --- 
a/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/chromium/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { BrowserConfig, CaptureConfig } from '../../../types'; +import { CaptureConfig } from '../../../server/types'; import { LevelLogger } from '../../lib'; import { HeadlessChromiumDriverFactory } from './driver_factory'; @@ -13,8 +13,7 @@ export { paths } from './paths'; export async function createDriverFactory( binaryPath: string, logger: LevelLogger, - browserConfig: BrowserConfig, captureConfig: CaptureConfig ): Promise { - return new HeadlessChromiumDriverFactory(binaryPath, logger, browserConfig, captureConfig); + return new HeadlessChromiumDriverFactory(binaryPath, logger, captureConfig); } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts index 49c6222c9f276f..af3b86919dc508 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts @@ -4,24 +4,22 @@ * you may not use this file except in compliance with the Elastic License. */ +import { Logger } from '../../types'; +import { ReportingConfig } from '../types'; +import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; import { ensureBrowserDownloaded } from './download'; -import { installBrowser } from './install'; -import { ServerFacade, CaptureConfig, Logger } from '../../types'; -import { BROWSER_TYPE } from '../../common/constants'; import { chromium } from './index'; -import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; +import { installBrowser } from './install'; export async function createBrowserDriverFactory( - server: ServerFacade, + config: ReportingConfig, logger: Logger ): Promise { - const config = server.config(); - - const dataDir: string = config.get('path.data'); - const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); - const browserType = captureConfig.browser.type; + const captureConfig = config.get('capture'); + const browserConfig = captureConfig.browser.chromium; const browserAutoDownload = captureConfig.browser.autoDownload; - const browserConfig = captureConfig.browser[BROWSER_TYPE]; + const browserType = captureConfig.browser.type; + const dataDir = config.kbnConfig.get('path', 'data'); if (browserConfig.disableSandbox) { logger.warning(`Enabling the Chromium sandbox provides an additional layer of protection.`); @@ -32,7 +30,7 @@ export async function createBrowserDriverFactory( try { const { binaryPath } = await installBrowser(logger, chromium, dataDir); - return chromium.createDriverFactory(binaryPath, logger, browserConfig, captureConfig); + return chromium.createDriverFactory(binaryPath, logger, captureConfig); } catch (error) { if (error.cause && ['EACCES', 'EEXIST'].includes(error.cause.code)) { logger.error( diff --git a/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts b/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts index 73186966e3d2f3..3697c4b86ce3ce 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/download/ensure_downloaded.ts @@ -4,16 +4,15 @@ * you may not use this file except in compliance with the 
Elastic License. */ -import { resolve as resolvePath } from 'path'; import { existsSync } from 'fs'; - +import { resolve as resolvePath } from 'path'; +import { BROWSER_TYPE } from '../../../common/constants'; import { chromium } from '../index'; -import { BrowserDownload, BrowserType } from '../types'; - +import { BrowserDownload } from '../types'; import { md5 } from './checksum'; -import { asyncMap } from './util'; -import { download } from './download'; import { clean } from './clean'; +import { download } from './download'; +import { asyncMap } from './util'; /** * Check for the downloaded archive of each requested browser type and @@ -21,7 +20,7 @@ import { clean } from './clean'; * @param {String} browserType * @return {Promise} */ -export async function ensureBrowserDownloaded(browserType: BrowserType) { +export async function ensureBrowserDownloaded(browserType = BROWSER_TYPE) { await ensureDownloaded([chromium]); } diff --git a/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts b/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts index b36345c08bfee9..9714c5965a5db2 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/network_policy.ts @@ -6,12 +6,7 @@ import * as _ from 'lodash'; import { parse } from 'url'; - -interface FirewallRule { - allow: boolean; - host?: string; - protocol?: string; -} +import { NetworkPolicyRule } from '../../types'; const isHostMatch = (actualHost: string, ruleHost: string) => { const hostParts = actualHost.split('.').reverse(); @@ -20,7 +15,7 @@ const isHostMatch = (actualHost: string, ruleHost: string) => { return _.every(ruleParts, (part, idx) => part === hostParts[idx]); }; -export const allowRequest = (url: string, rules: FirewallRule[]) => { +export const allowRequest = (url: string, rules: NetworkPolicyRule[]) => { const parsed = parse(url); if (!rules.length) { diff --git a/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts b/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts index 0c480fc82752bc..f096073ec2f5f1 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/types.d.ts @@ -4,8 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ -export type BrowserType = 'chromium'; - export interface BrowserDownload { paths: { archivesPath: string; diff --git a/x-pack/legacy/plugins/reporting/server/config/index.ts b/x-pack/legacy/plugins/reporting/server/config/index.ts new file mode 100644 index 00000000000000..623d3c2015f3b2 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/config/index.ts @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { Legacy } from 'kibana'; +import { CoreSetup } from 'src/core/server'; +import { i18n } from '@kbn/i18n'; +import crypto from 'crypto'; +import { get } from 'lodash'; +import { NetworkPolicy } from '../../types'; + +// make config.get() aware of the value type it returns +interface Config { + get(key1: Key1): BaseType[Key1]; + get( + key1: Key1, + key2: Key2 + ): BaseType[Key1][Key2]; + get< + Key1 extends keyof BaseType, + Key2 extends keyof BaseType[Key1], + Key3 extends keyof BaseType[Key1][Key2] + >( + key1: Key1, + key2: Key2, + key3: Key3 + ): BaseType[Key1][Key2][Key3]; + get< + Key1 extends keyof BaseType, + Key2 extends keyof BaseType[Key1], + Key3 extends keyof BaseType[Key1][Key2], + Key4 extends keyof BaseType[Key1][Key2][Key3] + >( + key1: Key1, + key2: Key2, + key3: Key3, + key4: Key4 + ): BaseType[Key1][Key2][Key3][Key4]; +} + +interface KbnServerConfigType { + path: { data: string }; + server: { + basePath: string; + host: string; + name: string; + port: number; + protocol: string; + uuid: string; + }; +} + +export interface ReportingConfig extends Config { + kbnConfig: Config; +} + +type BrowserType = 'chromium'; + +interface BrowserConfig { + inspect: boolean; + userDataDir: string; + viewport: { width: number; height: number }; + disableSandbox: boolean; + proxy: { + enabled: boolean; + server?: string; + bypass?: string[]; + }; +} + +interface CaptureConfig { + browser: { + type: BrowserType; + autoDownload: boolean; + chromium: BrowserConfig; + }; + maxAttempts: number; + networkPolicy: NetworkPolicy; + loadDelay: number; + timeouts: { + openUrl: number; + waitForElements: number; + renderComplete: number; + }; + viewport: any; + zoom: any; +} + +interface QueueConfig { + indexInterval: string; + pollEnabled: boolean; + pollInterval: number; + pollIntervalErrorMultiplier: number; + timeout: number; +} + +interface ScrollConfig { + duration: string; + size: number; +} + +export interface ReportingConfigType { + capture: CaptureConfig; + csv: { + scroll: ScrollConfig; + enablePanelActionDownload: boolean; + checkForFormulas: boolean; + maxSizeBytes: number; + }; + encryptionKey: string; + kibanaServer: any; + index: string; + queue: QueueConfig; + roles: any; +} + +const addConfigDefaults = ( + server: Legacy.Server, + core: CoreSetup, + baseConfig: ReportingConfigType +) => { + // encryption key + let encryptionKey = baseConfig.encryptionKey; + if (encryptionKey === undefined) { + server.log( + ['reporting', 'config', 'warning'], + i18n.translate('xpack.reporting.selfCheckEncryptionKey.warning', { + defaultMessage: + `Generating a random key for {setting}. To prevent pending reports ` + + `from failing on restart, please set {setting} in kibana.yml`, + values: { + setting: 'xpack.reporting.encryptionKey', + }, + }) + ); + encryptionKey = crypto.randomBytes(16).toString('hex'); + } + + const { kibanaServer: reportingServer } = baseConfig; + const serverInfo = core.http.getServerInfo(); + + // kibanaServer.hostname, default to server.host, don't allow "0" + let kibanaServerHostname = reportingServer.hostname ? reportingServer.hostname : serverInfo.host; + if (kibanaServerHostname === '0') { + server.log( + ['reporting', 'config', 'warning'], + i18n.translate('xpack.reporting.selfCheckHostname.warning', { + defaultMessage: + `Found 'server.host: "0"' in settings. This is incompatible with Reporting. ` + + `To enable Reporting to work, '{setting}: 0.0.0.0' is being automatically added to the configuration. 
` + + `You can change to 'server.host: 0.0.0.0' or add '{setting}: 0.0.0.0' in kibana.yml to prevent this message.`, + values: { + setting: 'xpack.reporting.kibanaServer.hostname', + }, + }) + ); + kibanaServerHostname = '0.0.0.0'; + } + + // kibanaServer.port, default to server.port + const kibanaServerPort = reportingServer.port + ? reportingServer.port + : serverInfo.port; // prettier-ignore + + // kibanaServer.protocol, default to server.protocol + const kibanaServerProtocol = reportingServer.protocol + ? reportingServer.protocol + : serverInfo.protocol; + + return { + ...baseConfig, + encryptionKey, + kibanaServer: { + hostname: kibanaServerHostname, + port: kibanaServerPort, + protocol: kibanaServerProtocol, + }, + }; +}; + +export const buildConfig = ( + core: CoreSetup, + server: Legacy.Server, + reportingConfig: ReportingConfigType +): ReportingConfig => { + const config = server.config(); + const { http } = core; + const serverInfo = http.getServerInfo(); + + const kbnConfig = { + path: { + data: config.get('path.data'), + }, + server: { + basePath: core.http.basePath.serverBasePath, + host: serverInfo.host, + name: serverInfo.name, + port: serverInfo.port, + uuid: core.uuid.getInstanceUuid(), + protocol: serverInfo.protocol, + }, + }; + + // spreading arguments as an array allows the return type to be known by the compiler + reportingConfig = addConfigDefaults(server, core, reportingConfig); + return { + get: (...keys: string[]) => get(reportingConfig, keys.join('.'), null), + kbnConfig: { + get: (...keys: string[]) => get(kbnConfig, keys.join('.'), null), + }, + }; +}; diff --git a/x-pack/legacy/plugins/reporting/server/core.ts b/x-pack/legacy/plugins/reporting/server/core.ts index 4506d41e4f5c3e..9be61d091b00e8 100644 --- a/x-pack/legacy/plugins/reporting/server/core.ts +++ b/x-pack/legacy/plugins/reporting/server/core.ts @@ -7,6 +7,7 @@ import * as Rx from 'rxjs'; import { first, mapTo } from 'rxjs/operators'; import { + ElasticsearchServiceSetup, IUiSettingsClient, KibanaRequest, SavedObjectsClient, @@ -19,20 +20,24 @@ import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; import { PLUGIN_ID } from '../common/constants'; import { EnqueueJobFn, ESQueueInstance, ReportingPluginSpecOptions, ServerFacade } from '../types'; import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory'; +import { ReportingConfig, ReportingConfigType } from './config'; import { checkLicenseFactory, getExportTypesRegistry, LevelLogger } from './lib'; import { registerRoutes } from './routes'; import { ReportingSetupDeps } from './types'; interface ReportingInternalSetup { browserDriverFactory: HeadlessChromiumDriverFactory; + elasticsearch: ElasticsearchServiceSetup; } interface ReportingInternalStart { + enqueueJob: EnqueueJobFn; + esqueue: ESQueueInstance; savedObjects: SavedObjectsServiceStart; uiSettings: UiSettingsServiceStart; - esqueue: ESQueueInstance; - enqueueJob: EnqueueJobFn; } +export { ReportingConfig, ReportingConfigType }; + export class ReportingCore { private pluginSetupDeps?: ReportingInternalSetup; private pluginStartDeps?: ReportingInternalStart; @@ -40,7 +45,7 @@ export class ReportingCore { private readonly pluginStart$ = new Rx.ReplaySubject(); private exportTypesRegistry = getExportTypesRegistry(); - constructor(private logger: LevelLogger) {} + constructor(private logger: LevelLogger, private config: ReportingConfig) {} legacySetup( xpackMainPlugin: XPackMainPlugin, @@ -48,14 +53,18 @@ export class ReportingCore { __LEGACY: ServerFacade, 
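A minimal standalone sketch of the multi-key accessor pattern that buildConfig returns above: the variadic keys are joined into a dotted path and looked up with lodash get(), defaulting to null. The literal values below are hypothetical, for illustration only; real values come from kibana.yml.

import { get } from 'lodash';

// Imitates the { get, kbnConfig: { get } } shape produced by buildConfig.
const makeAccessor = (tree: Record<string, any>) => ({
  get: (...keys: string[]) => get(tree, keys.join('.'), null),
});

const config = {
  ...makeAccessor({ csv: { maxSizeBytes: 10485760, scroll: { duration: '30s', size: 500 } } }),
  kbnConfig: makeAccessor({ server: { basePath: '' }, path: { data: '/tmp' } }),
};

config.get('csv', 'maxSizeBytes'); // 10485760
config.get('csv', 'scroll', 'duration'); // '30s'
config.kbnConfig.get('server', 'basePath'); // ''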
plugins: ReportingSetupDeps ) { + // legacy plugin status mirrorPluginStatus(xpackMainPlugin, reporting); + + // legacy license check const checkLicense = checkLicenseFactory(this.exportTypesRegistry); (xpackMainPlugin as any).status.once('green', () => { // Register a function that is called whenever the xpack info changes, // to re-compute the license check results for this plugin xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense); }); - // Reporting routes + + // legacy routes registerRoutes(this, __LEGACY, plugins, this.logger); } @@ -90,23 +99,31 @@ export class ReportingCore { return (await this.getPluginSetupDeps()).browserDriverFactory; } + public getConfig(): ReportingConfig { + return this.config; + } + /* - * Kibana core module dependencies + * Outside dependencies */ - private async getPluginSetupDeps() { + private async getPluginSetupDeps(): Promise { if (this.pluginSetupDeps) { return this.pluginSetupDeps; } return await this.pluginSetup$.pipe(first()).toPromise(); } - private async getPluginStartDeps() { + private async getPluginStartDeps(): Promise { if (this.pluginStartDeps) { return this.pluginStartDeps; } return await this.pluginStart$.pipe(first()).toPromise(); } + public async getElasticsearchService(): Promise { + return (await this.getPluginSetupDeps()).elasticsearch; + } + public async getSavedObjectsClient(fakeRequest: KibanaRequest): Promise { const { savedObjects } = await this.getPluginStartDeps(); return savedObjects.getScopedClient(fakeRequest) as SavedObjectsClient; diff --git a/x-pack/legacy/plugins/reporting/server/index.ts b/x-pack/legacy/plugins/reporting/server/index.ts index 24e2a954415d9c..c564963e363cc2 100644 --- a/x-pack/legacy/plugins/reporting/server/index.ts +++ b/x-pack/legacy/plugins/reporting/server/index.ts @@ -6,10 +6,11 @@ import { PluginInitializerContext } from 'src/core/server'; import { ReportingPlugin as Plugin } from './plugin'; +import { ReportingConfig, ReportingCore } from './core'; -export const plugin = (context: PluginInitializerContext) => { - return new Plugin(context); +export const plugin = (context: PluginInitializerContext, config: ReportingConfig) => { + return new Plugin(context, config); }; -export { ReportingCore } from './core'; export { ReportingPlugin } from './plugin'; +export { ReportingConfig, ReportingCore }; diff --git a/x-pack/legacy/plugins/reporting/server/legacy.ts b/x-pack/legacy/plugins/reporting/server/legacy.ts index 336ff5f4d2ee7c..679b42aca6de5a 100644 --- a/x-pack/legacy/plugins/reporting/server/legacy.ts +++ b/x-pack/legacy/plugins/reporting/server/legacy.ts @@ -3,10 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ + import { Legacy } from 'kibana'; import { PluginInitializerContext } from 'src/core/server'; import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { ReportingPluginSpecOptions } from '../types'; +import { buildConfig } from './config'; import { plugin } from './index'; import { LegacySetup, ReportingStartDeps } from './types'; @@ -14,24 +16,31 @@ const buildLegacyDependencies = ( server: Legacy.Server, reportingPlugin: ReportingPluginSpecOptions ): LegacySetup => ({ - config: server.config, - info: server.info, route: server.route.bind(server), + config: server.config, plugins: { - elasticsearch: server.plugins.elasticsearch, xpack_main: server.plugins.xpack_main, reporting: reportingPlugin, }, }); +/* + * Starts the New Platform instance of Reporting using legacy dependencies + */ export const legacyInit = async ( server: Legacy.Server, - reportingPlugin: ReportingPluginSpecOptions + reportingLegacyPlugin: ReportingPluginSpecOptions ) => { - const coreSetup = server.newPlatform.setup.core; - const pluginInstance = plugin(server.newPlatform.coreContext as PluginInitializerContext); + const { core: coreSetup } = server.newPlatform.setup; + const legacyConfig = server.config(); + const reportingConfig = buildConfig(coreSetup, server, legacyConfig.get('xpack.reporting')); - const __LEGACY = buildLegacyDependencies(server, reportingPlugin); + const __LEGACY = buildLegacyDependencies(server, reportingLegacyPlugin); + + const pluginInstance = plugin( + server.newPlatform.coreContext as PluginInitializerContext, + reportingConfig + ); await pluginInstance.setup(coreSetup, { elasticsearch: coreSetup.elasticsearch, security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, @@ -42,7 +51,6 @@ export const legacyInit = async ( // Schedule to call the "start" hook only after start dependencies are ready coreSetup.getStartServices().then(([core, plugins]) => pluginInstance.start(core, { - elasticsearch: coreSetup.elasticsearch, data: (plugins as ReportingStartDeps).data, __LEGACY, }) diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index d593e4625cdf48..8230ee889ae057 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -4,22 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; -import { ESQueueInstance, ServerFacade, QueueConfig, Logger } from '../../types'; +import { ESQueueInstance, Logger } from '../../types'; import { ReportingCore } from '../core'; +import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed +import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; -import { createWorkerFactory } from './create_worker'; -import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed export async function createQueueFactory( reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: Logger ): Promise { - const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue'); - const index = server.config().get('xpack.reporting.index'); + const config = reporting.getConfig(); + const queueConfig = config.get('queue'); + const index = config.get('index'); + const elasticsearch = await reporting.getElasticsearchService(); const queueOptions = { interval: queueConfig.indexInterval, @@ -33,7 +32,7 @@ export async function createQueueFactory( if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(reporting, server, elasticsearch, logger); + const createWorker = createWorkerFactory(reporting, logger); await createWorker(queue); } else { logger.info( diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index d4d913243e18d8..ad8db3201844e0 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -4,11 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; import * as sinon from 'sinon'; -import { ReportingCore } from '../../server'; +import { ReportingConfig, ReportingCore } from '../../server/types'; import { createMockReportingCore } from '../../test_helpers'; -import { ServerFacade } from '../../types'; import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; @@ -17,21 +15,15 @@ import { ClientMock } from './esqueue/__tests__/fixtures/legacy_elasticsearch'; import { ExportTypesRegistry } from './export_types_registry'; const configGetStub = sinon.stub(); -configGetStub.withArgs('xpack.reporting.queue').returns({ +configGetStub.withArgs('queue').returns({ pollInterval: 3300, pollIntervalErrorMultiplier: 10, }); -configGetStub.withArgs('server.name').returns('test-server-123'); -configGetStub.withArgs('server.uuid').returns('g9ymiujthvy6v8yrh7567g6fwzgzftzfr'); +configGetStub.withArgs('server', 'name').returns('test-server-123'); +configGetStub.withArgs('server', 'uuid').returns('g9ymiujthvy6v8yrh7567g6fwzgzftzfr'); const executeJobFactoryStub = sinon.stub(); - -const getMockServer = (): ServerFacade => { - return ({ - config: () => ({ get: configGetStub }), - } as unknown) as ServerFacade; -}; -const getMockLogger = jest.fn(); +const getMockLogger = sinon.stub(); const getMockExportTypesRegistry = ( exportTypes: any[] = [{ executeJobFactory: executeJobFactoryStub }] @@ -41,25 +33,22 @@ const getMockExportTypesRegistry = ( } as ExportTypesRegistry); describe('Create Worker', () => { + let mockReporting: ReportingCore; + let mockConfig: ReportingConfig; let queue: Esqueue; let client: ClientMock; - let mockReporting: ReportingCore; beforeEach(async () => { - mockReporting = await createMockReportingCore(); + mockConfig = { get: configGetStub, kbnConfig: { get: configGetStub } }; + mockReporting = await createMockReportingCore(mockConfig); + mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); client = new ClientMock(); queue = new Esqueue('reporting-queue', { client }); executeJobFactoryStub.reset(); }); test('Creates a single Esqueue worker for Reporting', async () => { - mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); - const createWorker = createWorkerFactory( - mockReporting, - getMockServer(), - {} as ElasticsearchServiceSetup, - getMockLogger() - ); + const createWorker = createWorkerFactory(mockReporting, getMockLogger()); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); await createWorker(queue); @@ -91,12 +80,7 @@ Object { { executeJobFactory: executeJobFactoryStub }, ]); mockReporting.getExportTypesRegistry = () => exportTypesRegistry; - const createWorker = createWorkerFactory( - mockReporting, - getMockServer(), - {} as ElasticsearchServiceSetup, - getMockLogger() - ); + const createWorker = createWorkerFactory(mockReporting, getMockLogger()); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); await createWorker(queue); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index 35677123676081..16b8fbdb30fdd9 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ElasticsearchServiceSetup } from 'kibana/server'; import { CancellationToken } from '../../common/cancellation_token'; import { PLUGIN_ID } from '../../common/constants'; +import { ReportingCore } from '../../server/types'; import { ESQueueInstance, ESQueueWorkerExecuteFn, @@ -15,25 +15,18 @@ import { JobDocPayload, JobSource, Logger, - QueueConfig, RequestFacade, - ServerFacade, } from '../../types'; -import { ReportingCore } from '../core'; // @ts-ignore untyped dependency import { events as esqueueEvents } from './esqueue'; -export function createWorkerFactory( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - logger: Logger -) { +export function createWorkerFactory(reporting: ReportingCore, logger: Logger) { type JobDocPayloadType = JobDocPayload; - const config = server.config(); - const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); - const kibanaName: string = config.get('server.name'); - const kibanaId: string = config.get('server.uuid'); + + const config = reporting.getConfig(); + const queueConfig = config.get('queue'); + const kibanaName = config.kbnConfig.get('server', 'name'); + const kibanaId = config.kbnConfig.get('server', 'uuid'); // Once more document types are added, this will need to be passed in return async function createWorker(queue: ESQueueInstance) { @@ -44,15 +37,14 @@ export function createWorkerFactory( > = new Map(); for (const exportType of reporting.getExportTypesRegistry().getAll() as Array< - ExportTypeDefinition + ExportTypeDefinition< + JobParamsType, + unknown, + unknown, + ImmediateExecuteFn | ESQueueWorkerExecuteFn + > >) { - // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = await exportType.executeJobFactory( - reporting, - server, - elasticsearch, - logger - ); + const jobExecutor = await exportType.executeJobFactory(reporting, logger); // FIXME: does not "need" to be async jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/crypto.ts b/x-pack/legacy/plugins/reporting/server/lib/crypto.ts index dbc01fc947f8b6..97876529ecfa71 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/crypto.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/crypto.ts @@ -5,12 +5,7 @@ */ import nodeCrypto from '@elastic/node-crypto'; -import { oncePerServer } from './once_per_server'; -import { ServerFacade } from '../../types'; -function cryptoFn(server: ServerFacade) { - const encryptionKey = server.config().get('xpack.reporting.encryptionKey'); +export function cryptoFactory(encryptionKey: string | undefined) { return nodeCrypto({ encryptionKey }); } - -export const cryptoFactory = oncePerServer(cryptoFn); diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index c215bdc3989045..5a062a693b4681 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -5,22 +5,18 @@ */ import { get } from 'lodash'; -import { ElasticsearchServiceSetup } from 'kibana/server'; -// @ts-ignore -import { events as esqueueEvents } from './esqueue'; import { + ConditionalHeaders, EnqueueJobFn, ESQueueCreateJobFn, ImmediateCreateJobFn, Job, - ServerFacade, - RequestFacade, Logger, - CaptureConfig, - QueueConfig, - ConditionalHeaders, + RequestFacade, } from '../../types'; import { ReportingCore } from '../core'; +// @ts-ignore +import { 
events as esqueueEvents } from './esqueue'; interface ConfirmedJob { id: string; @@ -29,18 +25,13 @@ interface ConfirmedJob { _primary_term: number; } -export function enqueueJobFactory( - reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger -): EnqueueJobFn { +export function enqueueJobFactory(reporting: ReportingCore, parentLogger: Logger): EnqueueJobFn { const logger = parentLogger.clone(['queue-job']); - const config = server.config(); - const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); + const config = reporting.getConfig(); + const captureConfig = config.get('capture'); + const queueConfig = config.get('queue'); const browserType = captureConfig.browser.type; const maxAttempts = captureConfig.maxAttempts; - const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); return async function enqueueJob( exportTypeId: string, @@ -58,13 +49,7 @@ export function enqueueJobFactory( throw new Error(`Export type ${exportTypeId} does not exist in the registry!`); } - // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = exportType.createJobFactory( - reporting, - server, - elasticsearch, - logger - ) as CreateJobFn; + const createJob = exportType.createJobFactory(reporting, logger) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index 49d5c568c39818..5e73fe77ecb79d 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -6,10 +6,10 @@ import { Legacy } from 'kibana'; import { KibanaRequest } from '../../../../../../src/core/server'; -import { ServerFacade } from '../../types'; +import { Logger } from '../../types'; import { ReportingSetupDeps } from '../types'; -export function getUserFactory(server: ServerFacade, security: ReportingSetupDeps['security']) { +export function getUserFactory(security: ReportingSetupDeps['security'], logger: Logger) { /* * Legacy.Request because this is called from routing middleware */ diff --git a/x-pack/legacy/plugins/reporting/server/lib/index.ts b/x-pack/legacy/plugins/reporting/server/lib/index.ts index 0a2db749cb954a..f5ccbe493a91f0 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/index.ts @@ -4,11 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -export { getExportTypesRegistry } from './export_types_registry'; export { checkLicenseFactory } from './check_license'; -export { LevelLogger } from './level_logger'; -export { cryptoFactory } from './crypto'; -export { oncePerServer } from './once_per_server'; -export { runValidations } from './validate'; export { createQueueFactory } from './create_queue'; +export { cryptoFactory } from './crypto'; export { enqueueJobFactory } from './enqueue_job'; +export { getExportTypesRegistry } from './export_types_registry'; +export { LevelLogger } from './level_logger'; +export { runValidations } from './validate'; diff --git a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts index c01e6377b039e5..0affc111c13685 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts @@ -9,7 +9,8 @@ import Boom from 'boom'; import { errors as elasticsearchErrors } from 'elasticsearch'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; -import { JobSource, ServerFacade } from '../../types'; +import { JobSource } from '../../types'; +import { ReportingConfig } from '../types'; const esErrors = elasticsearchErrors as Record; const defaultSize = 10; @@ -39,8 +40,11 @@ interface CountAggResult { count: number; } -export function jobsQueryFactory(server: ServerFacade, elasticsearch: ElasticsearchServiceSetup) { - const index = server.config().get('xpack.reporting.index'); +export function jobsQueryFactory( + config: ReportingConfig, + elasticsearch: ElasticsearchServiceSetup +) { + const index = config.get('index'); const { callAsInternalUser } = elasticsearch.adminClient; function getUsername(user: any) { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js deleted file mode 100644 index 10980f702d8493..00000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_encryption_key.js +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { validateEncryptionKey } from '../validate_encryption_key'; - -describe('Reporting: Validate config', () => { - const logger = { - warning: sinon.spy(), - }; - - beforeEach(() => { - logger.warning.resetHistory(); - }); - - [undefined, null].forEach(value => { - it(`should log a warning and set xpack.reporting.encryptionKey if encryptionKey is ${value}`, () => { - const config = { - get: sinon.stub().returns(value), - set: sinon.stub(), - }; - - expect(() => validateEncryptionKey({ config: () => config }, logger)).not.to.throwError(); - - sinon.assert.calledWith(config.set, 'xpack.reporting.encryptionKey'); - sinon.assert.calledWithMatch(logger.warning, /Generating a random key/); - sinon.assert.calledWithMatch(logger.warning, /please set xpack.reporting.encryptionKey/); - }); - }); -}); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts deleted file mode 100644 index 04f998fd3e5a5d..00000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_server_host.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { ServerFacade } from '../../../../types'; -import { validateServerHost } from '../validate_server_host'; - -const configKey = 'xpack.reporting.kibanaServer.hostname'; - -describe('Reporting: Validate server host setting', () => { - it(`should log a warning and set ${configKey} if server.host is "0"`, () => { - const getStub = sinon.stub(); - getStub.withArgs('server.host').returns('0'); - getStub.withArgs(configKey).returns(undefined); - const config = { - get: getStub, - set: sinon.stub(), - }; - - expect(() => - validateServerHost(({ config: () => config } as unknown) as ServerFacade) - ).to.throwError(); - - sinon.assert.calledWith(config.set, configKey); - }); -}); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts index 0fdbd858b8e3c7..85d9f727d7fa7b 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts @@ -6,25 +6,22 @@ import { i18n } from '@kbn/i18n'; import { ElasticsearchServiceSetup } from 'kibana/server'; -import { Logger, ServerFacade } from '../../../types'; +import { Logger } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; +import { ReportingConfig } from '../../types'; import { validateBrowser } from './validate_browser'; -import { validateEncryptionKey } from './validate_encryption_key'; import { validateMaxContentLength } from './validate_max_content_length'; -import { validateServerHost } from './validate_server_host'; export async function runValidations( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup, browserFactory: HeadlessChromiumDriverFactory, logger: Logger ) { try { await Promise.all([ - validateBrowser(server, browserFactory, logger), - validateEncryptionKey(server, logger), - validateMaxContentLength(server, elasticsearch, logger), - 
validateServerHost(server), + validateBrowser(browserFactory, logger), + validateMaxContentLength(config, elasticsearch, logger), ]); logger.debug( i18n.translate('xpack.reporting.selfCheck.ok', { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts index 89c49123e85bf2..d6512d5eb718b8 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_browser.ts @@ -3,9 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + import { Browser } from 'puppeteer'; import { BROWSER_TYPE } from '../../../common/constants'; -import { ServerFacade, Logger } from '../../../types'; +import { Logger } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; /* @@ -13,7 +14,6 @@ import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_fa * to the locally running Kibana instance. */ export const validateBrowser = async ( - server: ServerFacade, browserFactory: HeadlessChromiumDriverFactory, logger: Logger ) => { diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts deleted file mode 100644 index e0af94cbdc29cf..00000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_encryption_key.ts +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { i18n } from '@kbn/i18n'; -import crypto from 'crypto'; -import { ServerFacade, Logger } from '../../../types'; - -export function validateEncryptionKey(serverFacade: ServerFacade, logger: Logger) { - const config = serverFacade.config(); - - const encryptionKey = config.get('xpack.reporting.encryptionKey'); - if (encryptionKey == null) { - // TODO this should simply throw an error and let the handler conver it to a warning mesasge. See validateServerHost. - logger.warning( - i18n.translate('xpack.reporting.selfCheckEncryptionKey.warning', { - defaultMessage: - `Generating a random key for {setting}. 
To prevent pending reports ` + - `from failing on restart, please set {setting} in kibana.yml`, - values: { - setting: 'xpack.reporting.encryptionKey', - }, - }) - ); - - // @ts-ignore: No set() method on KibanaConfig, just get() and has() - config.set('xpack.reporting.encryptionKey', crypto.randomBytes(16).toString('hex')); // update config in memory to contain a usable encryption key - } -} diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js index 942dcaf842696c..2551fd48b91f34 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js @@ -32,11 +32,7 @@ describe('Reporting: Validate Max Content Length', () => { }); it('should log warning messages when reporting has a higher max-size than elasticsearch', async () => { - const server = { - config: () => ({ - get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES), - }), - }; + const config = { get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES) }; const elasticsearch = { dataClient: { callAsInternalUser: () => ({ @@ -49,7 +45,7 @@ describe('Reporting: Validate Max Content Length', () => { }, }; - await validateMaxContentLength(server, elasticsearch, logger); + await validateMaxContentLength(config, elasticsearch, logger); sinon.assert.calledWithMatch( logger.warning, @@ -70,14 +66,10 @@ describe('Reporting: Validate Max Content Length', () => { }); it('should do nothing when reporting has the same max-size as elasticsearch', async () => { - const server = { - config: () => ({ - get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES), - }), - }; + const config = { get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES) }; expect( - async () => await validateMaxContentLength(server, elasticsearch, logger.warning) + async () => await validateMaxContentLength(config, elasticsearch, logger.warning) ).not.toThrow(); sinon.assert.notCalled(logger.warning); }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts index ce4a5b93e74310..a20905ba093d4e 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts @@ -7,17 +7,17 @@ import numeral from '@elastic/numeral'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { defaults, get } from 'lodash'; -import { Logger, ServerFacade } from '../../../types'; +import { Logger } from '../../../types'; +import { ReportingConfig } from '../../types'; -const KIBANA_MAX_SIZE_BYTES_PATH = 'xpack.reporting.csv.maxSizeBytes'; +const KIBANA_MAX_SIZE_BYTES_PATH = 'csv.maxSizeBytes'; const ES_MAX_SIZE_BYTES_PATH = 'http.max_content_length'; export async function validateMaxContentLength( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup, logger: Logger ) { - const config = server.config(); const { callAsInternalUser } = elasticsearch.dataClient; const elasticClusterSettingsResponse = await callAsInternalUser('cluster.getSettings', { @@ -28,13 +28,13 @@ export async function validateMaxContentLength( const elasticSearchMaxContent = get(elasticClusterSettings, 'http.max_content_length', '100mb'); const elasticSearchMaxContentBytes = 
numeral().unformat(elasticSearchMaxContent.toUpperCase()); - const kibanaMaxContentBytes: number = config.get(KIBANA_MAX_SIZE_BYTES_PATH); + const kibanaMaxContentBytes = config.get('csv', 'maxSizeBytes'); if (kibanaMaxContentBytes > elasticSearchMaxContentBytes) { // TODO this should simply throw an error and let the handler conver it to a warning mesasge. See validateServerHost. logger.warning( - `${KIBANA_MAX_SIZE_BYTES_PATH} (${kibanaMaxContentBytes}) is higher than ElasticSearch's ${ES_MAX_SIZE_BYTES_PATH} (${elasticSearchMaxContentBytes}). ` + - `Please set ${ES_MAX_SIZE_BYTES_PATH} in ElasticSearch to match, or lower your ${KIBANA_MAX_SIZE_BYTES_PATH} in Kibana to avoid this warning.` + `xpack.reporting.${KIBANA_MAX_SIZE_BYTES_PATH} (${kibanaMaxContentBytes}) is higher than ElasticSearch's ${ES_MAX_SIZE_BYTES_PATH} (${elasticSearchMaxContentBytes}). ` + + `Please set ${ES_MAX_SIZE_BYTES_PATH} in ElasticSearch to match, or lower your xpack.reporting.${KIBANA_MAX_SIZE_BYTES_PATH} in Kibana to avoid this warning.` ); } } diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts deleted file mode 100644 index f4f4d61246b6ae..00000000000000 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_server_host.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { ServerFacade } from '../../../types'; - -const configKey = 'xpack.reporting.kibanaServer.hostname'; - -export function validateServerHost(serverFacade: ServerFacade) { - const config = serverFacade.config(); - - const serverHost = config.get('server.host'); - const reportingKibanaHostName = config.get(configKey); - - if (!reportingKibanaHostName && serverHost === '0') { - // @ts-ignore: No set() method on KibanaConfig, just get() and has() - config.set(configKey, '0.0.0.0'); // update config in memory to allow Reporting to work - - throw new Error( - `Found 'server.host: "0"' in settings. This is incompatible with Reporting. ` + - `To enable Reporting to work, '${configKey}: 0.0.0.0' is being automatically to the configuration. 
` + - `You can change to 'server.host: 0.0.0.0' or add '${configKey}: 0.0.0.0' in kibana.yml to prevent this message.` - ); - } -} diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index 4f24cc16b2277c..c9ed2e81c6792a 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -7,7 +7,7 @@ import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from 'src/core/server'; import { logConfiguration } from '../log_configuration'; import { createBrowserDriverFactory } from './browsers'; -import { ReportingCore } from './core'; +import { ReportingCore, ReportingConfig } from './core'; import { createQueueFactory, enqueueJobFactory, LevelLogger, runValidations } from './lib'; import { setFieldFormats } from './services'; import { ReportingSetup, ReportingSetupDeps, ReportingStart, ReportingStartDeps } from './types'; @@ -17,38 +17,40 @@ import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; export class ReportingPlugin implements Plugin { + private config: ReportingConfig; private logger: LevelLogger; private reportingCore: ReportingCore; - constructor(context: PluginInitializerContext) { + constructor(context: PluginInitializerContext, config: ReportingConfig) { + this.config = config; this.logger = new LevelLogger(context.logger.get('reporting')); - this.reportingCore = new ReportingCore(this.logger); + this.reportingCore = new ReportingCore(this.logger, this.config); } public async setup(core: CoreSetup, plugins: ReportingSetupDeps) { - const { elasticsearch, usageCollection, __LEGACY } = plugins; + const { config } = this; + const { elasticsearch, __LEGACY } = plugins; - const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, this.logger); // required for validations :( - runValidations(__LEGACY, elasticsearch, browserDriverFactory, this.logger); // this must run early, as it sets up config defaults + const browserDriverFactory = await createBrowserDriverFactory(config, this.logger); // required for validations :( + runValidations(config, elasticsearch, browserDriverFactory, this.logger); const { xpack_main: xpackMainLegacy, reporting: reportingLegacy } = __LEGACY.plugins; this.reportingCore.legacySetup(xpackMainLegacy, reportingLegacy, __LEGACY, plugins); // Register a function with server to manage the collection of usage stats - registerReportingUsageCollector(this.reportingCore, __LEGACY, usageCollection); + registerReportingUsageCollector(this.reportingCore, plugins); // regsister setup internals - this.reportingCore.pluginSetup({ browserDriverFactory }); + this.reportingCore.pluginSetup({ browserDriverFactory, elasticsearch }); return {}; } public async start(core: CoreStart, plugins: ReportingStartDeps) { const { reportingCore, logger } = this; - const { elasticsearch, __LEGACY } = plugins; - const esqueue = await createQueueFactory(reportingCore, __LEGACY, elasticsearch, logger); - const enqueueJob = enqueueJobFactory(reportingCore, __LEGACY, elasticsearch, logger); + const esqueue = await createQueueFactory(reportingCore, logger); + const enqueueJob = enqueueJobFactory(reportingCore, logger); this.reportingCore.pluginStart({ savedObjects: core.savedObjects, @@ -58,7 +60,8 @@ export class ReportingPlugin }); setFieldFormats(plugins.data.fieldFormats); - logConfiguration(__LEGACY, this.logger); + + logConfiguration(this.config.get('capture'), this.logger); return {}; } diff --git 
a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts index 56622617586f7d..6b4f5dbd9203a3 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts @@ -10,7 +10,7 @@ import { Legacy } from 'kibana'; import rison from 'rison-node'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { GetRouteConfigFactoryFn, @@ -22,15 +22,17 @@ import { HandlerErrorFunction, HandlerFunction } from './types'; const BASE_GENERATE = `${API_BASE_URL}/generate`; export function registerGenerateFromJobParams( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, handler: HandlerFunction, handleError: HandlerErrorFunction, logger: Logger ) { + const config = reporting.getConfig(); const getRouteConfig = () => { const getOriginalRouteConfig: GetRouteConfigFactoryFn = getRouteConfigFactoryReportingPre( - server, + config, plugins, logger ); diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts index 415b6b7d643669..830953d5322431 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts @@ -9,7 +9,7 @@ import { get } from 'lodash'; import { API_BASE_GENERATE_V1, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../common/constants'; import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types'; @@ -24,13 +24,15 @@ import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types * - local (transient) changes the user made to the saved object */ export function registerGenerateCsvFromSavedObject( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, handleRoute: HandlerFunction, handleRouteError: HandlerErrorFunction, logger: Logger ) { - const routeOptions = getRouteOptionsCsv(server, plugins, logger); + const config = reporting.getConfig(); + const routeOptions = getRouteOptionsCsv(config, plugins, logger); server.route({ path: `${API_BASE_GENERATE_V1}/csv/saved-object/{savedObjectType}:{savedObjectId}`, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index 5d17fa2e82b8c6..519e49f56c3778 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -16,7 +16,7 @@ import { ResponseFacade, ServerFacade, } from '../../types'; -import { ReportingSetupDeps, ReportingCore } 
from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; @@ -35,8 +35,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( plugins: ReportingSetupDeps, parentLogger: Logger ) { - const routeOptions = getRouteOptionsCsv(server, plugins, parentLogger); - const { elasticsearch } = plugins; + const config = reporting.getConfig(); + const routeOptions = getRouteOptionsCsv(config, plugins, parentLogger); /* * CSV export with the `immediate` option does not queue a job with Reporting's ESQueue to run the job async. Instead, this does: @@ -51,15 +51,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( const request = makeRequestFacade(legacyRequest); const logger = parentLogger.clone(['savedobject-csv']); const jobParams = getJobParamsFromRequest(request, { isImmediate: true }); - - /* TODO these functions should be made available in the export types registry: - * - * const { createJobFn, executeJobFn } = exportTypesRegistry.getById(CSV_FROM_SAVEDOBJECT_JOB_TYPE) - * - * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here - */ - const createJobFn = createJobFactory(reporting, server, elasticsearch, logger); - const executeJobFn = await executeJobFactory(reporting, server, elasticsearch, logger); + const createJobFn = createJobFactory(reporting, logger); + const executeJobFn = await executeJobFactory(reporting, logger); // FIXME: does not "need" to be async const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( jobParams, request.headers, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts index 54d9671692c5de..8e54feac3c8a6e 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.test.ts @@ -7,7 +7,7 @@ import Hapi from 'hapi'; import { createMockReportingCore } from '../../test_helpers'; import { Logger, ServerFacade } from '../../types'; -import { ReportingCore, ReportingSetupDeps } from '../../server/types'; +import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../types'; jest.mock('./lib/authorized_user_pre_routing', () => ({ authorizedUserPreRoutingFactory: () => () => ({}), @@ -22,6 +22,8 @@ import { registerJobGenerationRoutes } from './generation'; let mockServer: Hapi.Server; let mockReportingPlugin: ReportingCore; +let mockReportingConfig: ReportingConfig; + const mockLogger = ({ error: jest.fn(), debug: jest.fn(), @@ -33,8 +35,9 @@ beforeEach(async () => { port: 8080, routes: { log: { collect: true } }, }); - mockServer.config = () => ({ get: jest.fn(), has: jest.fn() }); - mockReportingPlugin = await createMockReportingCore(); + + mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; + mockReportingPlugin = await createMockReportingCore(mockReportingConfig); mockReportingPlugin.getEnqueueJob = async () => jest.fn().mockImplementation(() => ({ toJSON: () => '{ "job": "data" }' })); }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 096ba84b63d1ac..1c6129313db4b4 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -9,7 +9,7 @@ import { errors as 
elasticsearchErrors } from 'elasticsearch'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps, ReportingCore } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { registerGenerateFromJobParams } from './generate_from_jobparams'; import { registerGenerateCsvFromSavedObject } from './generate_from_savedobject'; import { registerGenerateCsvFromSavedObjectImmediate } from './generate_from_savedobject_immediate'; @@ -23,8 +23,9 @@ export function registerJobGenerationRoutes( plugins: ReportingSetupDeps, logger: Logger ) { - const config = server.config(); - const DOWNLOAD_BASE_URL = config.get('server.basePath') + `${API_BASE_URL}/jobs/download`; + const config = reporting.getConfig(); + const downloadBaseUrl = + config.kbnConfig.get('server', 'basePath') + `${API_BASE_URL}/jobs/download`; /* * Generates enqueued job details to use in responses @@ -47,7 +48,7 @@ export function registerJobGenerationRoutes( return h .response({ - path: `${DOWNLOAD_BASE_URL}/${jobJson.id}`, + path: `${downloadBaseUrl}/${jobJson.id}`, job: jobJson, }) .type('application/json'); @@ -66,11 +67,11 @@ export function registerJobGenerationRoutes( return err; } - registerGenerateFromJobParams(server, plugins, handler, handleError, logger); + registerGenerateFromJobParams(reporting, server, plugins, handler, handleError, logger); // Register beta panel-action download-related API's - if (config.get('xpack.reporting.csv.enablePanelActionDownload')) { - registerGenerateCsvFromSavedObject(server, plugins, handler, handleError, logger); + if (config.get('csv', 'enablePanelActionDownload')) { + registerGenerateCsvFromSavedObject(reporting, server, plugins, handler, handleError, logger); registerGenerateCsvFromSavedObjectImmediate(reporting, server, plugins, logger); } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js index 071b401d2321bc..9f0de844df3699 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js @@ -5,7 +5,6 @@ */ import Hapi from 'hapi'; -import { memoize } from 'lodash'; import { createMockReportingCore } from '../../test_helpers'; import { ExportTypesRegistry } from '../lib/export_types_registry'; @@ -23,6 +22,7 @@ import { registerJobInfoRoutes } from './jobs'; let mockServer; let exportTypesRegistry; let mockReportingPlugin; +let mockReportingConfig; const mockLogger = { error: jest.fn(), debug: jest.fn(), @@ -30,7 +30,6 @@ const mockLogger = { beforeEach(async () => { mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } }); - mockServer.config = memoize(() => ({ get: jest.fn() })); exportTypesRegistry = new ExportTypesRegistry(); exportTypesRegistry.register({ id: 'unencoded', @@ -43,7 +42,9 @@ beforeEach(async () => { jobContentEncoding: 'base64', jobContentExtension: 'pdf', }); - mockReportingPlugin = await createMockReportingCore(); + + mockReportingConfig = { get: jest.fn(), kbnConfig: { get: jest.fn() } }; + mockReportingPlugin = await createMockReportingCore(mockReportingConfig); mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry; }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index b9aa75e0ddd000..f6f98b2377db6b 100644 --- 
a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -17,7 +17,7 @@ import { ServerFacade, } from '../../types'; import { jobsQueryFactory } from '../lib/jobs_query'; -import { ReportingSetupDeps, ReportingCore } from '../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { deleteJobResponseHandlerFactory, downloadJobResponseHandlerFactory, @@ -41,9 +41,10 @@ export function registerJobInfoRoutes( plugins: ReportingSetupDeps, logger: Logger ) { + const config = reporting.getConfig(); const { elasticsearch } = plugins; - const jobsQuery = jobsQueryFactory(server, elasticsearch); - const getRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); + const jobsQuery = jobsQueryFactory(config, elasticsearch); + const getRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); // list jobs in the queue, paginated server.route({ @@ -141,8 +142,8 @@ export function registerJobInfoRoutes( // trigger a download of the output from a job const exportTypesRegistry = reporting.getExportTypesRegistry(); - const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(server, plugins, logger); - const downloadResponseHandler = downloadJobResponseHandlerFactory(server, elasticsearch, exportTypesRegistry); // prettier-ignore + const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(config, plugins, logger); + const downloadResponseHandler = downloadJobResponseHandlerFactory(config, elasticsearch, exportTypesRegistry); // prettier-ignore server.route({ path: `${MAIN_ENTRY}/download/{docId}`, method: 'GET', @@ -181,8 +182,8 @@ export function registerJobInfoRoutes( }); // allow a report to be deleted - const getRouteConfigDelete = getRouteConfigFactoryDeletePre(server, plugins, logger); - const deleteResponseHandler = deleteJobResponseHandlerFactory(server, elasticsearch); + const getRouteConfigDelete = getRouteConfigFactoryDeletePre(config, plugins, logger); + const deleteResponseHandler = deleteJobResponseHandlerFactory(config, elasticsearch); server.route({ path: `${MAIN_ENTRY}/delete/{docId}`, method: 'DELETE', diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js index 3460d22592e3db..b5d6ae59ce5dda 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js @@ -7,56 +7,48 @@ import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; describe('authorized_user_pre_routing', function() { - // the getClientShield is using `once` which forces us to use a constant mock - // which makes testing anything that is dependent on `oncePerServer` confusing. 
- // so createMockServer reuses the same 'instance' of the server and overwrites - // the properties to contain different values - const createMockServer = (function() { - const getUserStub = jest.fn(); - let mockConfig; - - const mockServer = { - expose() {}, - config() { - return { - get(key) { - return mockConfig[key]; - }, - }; - }, - log: function() {}, - plugins: { - xpack_main: {}, - security: { getUser: getUserStub }, - }, + const createMockConfig = (mockConfig = {}) => { + return { + get: (...keys) => mockConfig[keys.join('.')], + kbnConfig: { get: (...keys) => mockConfig[keys.join('.')] }, }; + }; + const createMockPlugins = (function() { + const getUserStub = jest.fn(); return function({ securityEnabled = true, xpackInfoUndefined = false, xpackInfoAvailable = true, + getCurrentUser = undefined, user = undefined, - config = {}, }) { - mockConfig = config; - - mockServer.plugins.xpack_main = { - info: !xpackInfoUndefined && { - isAvailable: () => xpackInfoAvailable, - feature(featureName) { - if (featureName === 'security') { - return { - isEnabled: () => securityEnabled, - isAvailable: () => xpackInfoAvailable, - }; + getUserStub.mockReset(); + getUserStub.mockResolvedValue(user); + return { + security: securityEnabled + ? { + authc: { getCurrentUser }, } + : null, + __LEGACY: { + plugins: { + xpack_main: { + info: !xpackInfoUndefined && { + isAvailable: () => xpackInfoAvailable, + feature(featureName) { + if (featureName === 'security') { + return { + isEnabled: () => securityEnabled, + isAvailable: () => xpackInfoAvailable, + }; + } + }, + }, + }, }, }, }; - - getUserStub.mockReset(); - getUserStub.mockResolvedValue(user); - return mockServer; }; })(); @@ -75,10 +67,6 @@ describe('authorized_user_pre_routing', function() { raw: { req: mockRequestRaw }, }); - const getMockPlugins = pluginSet => { - return pluginSet || { security: null }; - }; - const getMockLogger = () => ({ warn: jest.fn(), error: msg => { @@ -87,11 +75,9 @@ describe('authorized_user_pre_routing', function() { }); it('should return with boom notFound when xpackInfo is undefined', async function() { - const mockServer = createMockServer({ xpackInfoUndefined: true }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, - getMockPlugins(), + createMockConfig(), + createMockPlugins({ xpackInfoUndefined: true }), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -100,11 +86,9 @@ describe('authorized_user_pre_routing', function() { }); it(`should return with boom notFound when xpackInfo isn't available`, async function() { - const mockServer = createMockServer({ xpackInfoAvailable: false }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, - getMockPlugins(), + createMockConfig(), + createMockPlugins({ xpackInfoAvailable: false }), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -113,11 +97,9 @@ describe('authorized_user_pre_routing', function() { }); it('should return with null user when security is disabled in Elasticsearch', async function() { - const mockServer = createMockServer({ securityEnabled: false }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, - getMockPlugins(), + createMockConfig(), + createMockPlugins({ securityEnabled: false }), getMockLogger() ); const response = await authorizedUserPreRouting(getMockRequest()); @@ -125,16 +107,14 @@ describe('authorized_user_pre_routing', function() { }); it('should return with boom 
unauthenticated when security is enabled but no authenticated user', async function() { - const mockServer = createMockServer({ + const mockPlugins = createMockPlugins({ user: null, config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, }); - const mockPlugins = getMockPlugins({ - security: { authc: { getCurrentUser: () => null } }, - }); + mockPlugins.security = { authc: { getCurrentUser: () => null } }; const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + createMockConfig(), mockPlugins, getMockLogger() ); @@ -144,16 +124,14 @@ describe('authorized_user_pre_routing', function() { }); it(`should return with boom forbidden when security is enabled but user doesn't have allowed role`, async function() { - const mockServer = createMockServer({ + const mockConfig = createMockConfig({ 'roles.allow': ['.reporting_user'] }); + const mockPlugins = createMockPlugins({ user: { roles: [] }, - config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, - }); - const mockPlugins = getMockPlugins({ - security: { authc: { getCurrentUser: () => ({ roles: ['something_else'] }) } }, + getCurrentUser: () => ({ roles: ['something_else'] }), }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + mockConfig, mockPlugins, getMockLogger() ); @@ -164,18 +142,14 @@ describe('authorized_user_pre_routing', function() { it('should return with user when security is enabled and user has explicitly allowed role', async function() { const user = { roles: ['.reporting_user', 'something_else'] }; - const mockServer = createMockServer({ + const mockConfig = createMockConfig({ 'roles.allow': ['.reporting_user'] }); + const mockPlugins = createMockPlugins({ user, - config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, - }); - const mockPlugins = getMockPlugins({ - security: { - authc: { getCurrentUser: () => ({ roles: ['.reporting_user', 'something_else'] }) }, - }, + getCurrentUser: () => ({ roles: ['.reporting_user', 'something_else'] }), }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + mockConfig, mockPlugins, getMockLogger() ); @@ -185,16 +159,13 @@ describe('authorized_user_pre_routing', function() { it('should return with user when security is enabled and user has superuser role', async function() { const user = { roles: ['superuser', 'something_else'] }; - const mockServer = createMockServer({ - user, - config: { 'xpack.reporting.roles.allow': [] }, - }); - const mockPlugins = getMockPlugins({ - security: { authc: { getCurrentUser: () => ({ roles: ['superuser', 'something_else'] }) } }, + const mockConfig = createMockConfig({ 'roles.allow': [] }); + const mockPlugins = createMockPlugins({ + getCurrentUser: () => ({ roles: ['superuser', 'something_else'] }), }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory( - mockServer, + mockConfig, mockPlugins, getMockLogger() ); diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts index c5f8c78016f618..1ca28ca62a7f28 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts @@ -7,7 +7,8 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; import { AuthenticatedUser } from '../../../../../../plugins/security/server'; -import { Logger, ServerFacade } from '../../../types'; +import { 
ReportingConfig } from '../../../server'; +import { Logger } from '../../../types'; import { getUserFactory } from '../../lib/get_user'; import { ReportingSetupDeps } from '../../types'; @@ -18,16 +19,14 @@ export type PreRoutingFunction = ( ) => Promise | AuthenticatedUser | null>; export const authorizedUserPreRoutingFactory = function authorizedUserPreRoutingFn( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ) { - const getUser = getUserFactory(server, plugins.security); - const config = server.config(); + const getUser = getUserFactory(plugins.security, logger); + const { info: xpackInfo } = plugins.__LEGACY.plugins.xpack_main; return async function authorizedUserPreRouting(request: Legacy.Request) { - const xpackInfo = server.plugins.xpack_main.info; - if (!xpackInfo || !xpackInfo.isAvailable()) { logger.warn('Unable to authorize user before xpack info is available.', [ 'authorizedUserPreRouting', @@ -46,10 +45,7 @@ export const authorizedUserPreRoutingFactory = function authorizedUserPreRouting return Boom.unauthorized(`Sorry, you aren't authenticated`); } - const authorizedRoles = [ - superuserRole, - ...(config.get('xpack.reporting.roles.allow') as string[]), - ]; + const authorizedRoles = [superuserRole, ...(config.get('roles', 'allow') as string[])]; if (!user.roles.find(role => authorizedRoles.includes(role))) { return Boom.forbidden(`Sorry, you don't have access to Reporting`); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts index fb3944ea33552f..aef37754681ec9 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts @@ -8,13 +8,7 @@ import contentDisposition from 'content-disposition'; import * as _ from 'lodash'; import { CSV_JOB_TYPE } from '../../../common/constants'; -import { - ExportTypeDefinition, - ExportTypesRegistry, - JobDocOutput, - JobSource, - ServerFacade, -} from '../../../types'; +import { ExportTypeDefinition, ExportTypesRegistry, JobDocOutput, JobSource } from '../../../types'; interface ICustomHeaders { [x: string]: any; @@ -22,9 +16,15 @@ interface ICustomHeaders { type ExportTypeType = ExportTypeDefinition; +interface ErrorFromPayload { + message: string; + reason: string | null; +} + +// A camelCase version of JobDocOutput interface Payload { statusCode: number; - content: any; + content: string | Buffer | ErrorFromPayload; contentType: string; headers: Record; } @@ -48,20 +48,17 @@ const getReportingHeaders = (output: JobDocOutput, exportType: ExportTypeType) = return metaDataHeaders; }; -export function getDocumentPayloadFactory( - server: ServerFacade, - exportTypesRegistry: ExportTypesRegistry -) { - function encodeContent(content: string | null, exportType: ExportTypeType) { +export function getDocumentPayloadFactory(exportTypesRegistry: ExportTypesRegistry) { + function encodeContent(content: string | null, exportType: ExportTypeType): Buffer | string { switch (exportType.jobContentEncoding) { case 'base64': - return content ? Buffer.from(content, 'base64') : content; // Buffer.from rejects null + return content ? Buffer.from(content, 'base64') : ''; // convert null to empty string default: - return content; + return content ? 
content : ''; // convert null to empty string } } - function getCompleted(output: JobDocOutput, jobType: string, title: string) { + function getCompleted(output: JobDocOutput, jobType: string, title: string): Payload { const exportType = exportTypesRegistry.get((item: ExportTypeType) => item.jobType === jobType); const filename = getTitle(exportType, title); const headers = getReportingHeaders(output, exportType); @@ -77,7 +74,7 @@ export function getDocumentPayloadFactory( }; } - function getFailure(output: JobDocOutput) { + function getFailure(output: JobDocOutput): Payload { return { statusCode: 500, content: { diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts index 30627d5b232301..e7e7c866db96a0 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts @@ -5,11 +5,12 @@ */ import Boom from 'boom'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { ResponseToolkit } from 'hapi'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { WHITELISTED_JOB_CONTENT_TYPES } from '../../../common/constants'; -import { ExportTypesRegistry, ServerFacade } from '../../../types'; +import { ExportTypesRegistry } from '../../../types'; import { jobsQueryFactory } from '../../lib/jobs_query'; +import { ReportingConfig } from '../../types'; import { getDocumentPayloadFactory } from './get_document_payload'; interface JobResponseHandlerParams { @@ -21,12 +22,12 @@ interface JobResponseHandlerOpts { } export function downloadJobResponseHandlerFactory( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup, exportTypesRegistry: ExportTypesRegistry ) { - const jobsQuery = jobsQueryFactory(server, elasticsearch); - const getDocumentPayload = getDocumentPayloadFactory(server, exportTypesRegistry); + const jobsQuery = jobsQueryFactory(config, elasticsearch); + const getDocumentPayload = getDocumentPayloadFactory(exportTypesRegistry); return function jobResponseHandler( validJobTypes: string[], @@ -70,10 +71,10 @@ export function downloadJobResponseHandlerFactory( } export function deleteJobResponseHandlerFactory( - server: ServerFacade, + config: ReportingConfig, elasticsearch: ElasticsearchServiceSetup ) { - const jobsQuery = jobsQueryFactory(server, elasticsearch); + const jobsQuery = jobsQueryFactory(config, elasticsearch); return async function deleteJobResponseHander( validJobTypes: string[], diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index 9e618ff1fe40a2..8a79566aafae29 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -6,17 +6,17 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; -import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../types'; +import { Logger } from '../../../types'; +import { ReportingConfig, ReportingSetupDeps } from '../../types'; export type GetReportingFeatureIdFn = (request: Legacy.Request) => string; export const reportingFeaturePreRoutingFactory = function reportingFeaturePreRoutingFn( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: 
Logger ) { - const xpackMainPlugin = server.plugins.xpack_main; + const xpackMainPlugin = plugins.__LEGACY.plugins.xpack_main; const pluginId = 'reporting'; // License checking and enable/disable logic diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index 3d275d34e2f7d6..06f7efaa9dcbbf 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -6,8 +6,8 @@ import Joi from 'joi'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; -import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../types'; +import { Logger } from '../../../types'; +import { ReportingConfig, ReportingSetupDeps } from '../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; import { GetReportingFeatureIdFn, @@ -29,12 +29,12 @@ export type GetRouteConfigFactoryFn = ( ) => RouteConfigFactory; export function getRouteConfigFactoryReportingPre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, plugins, logger); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, plugins, logger); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(config, plugins, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(config, plugins, logger); return (getFeatureId?: GetReportingFeatureIdFn): RouteConfigFactory => { const preRouting: any[] = [{ method: authorizedUserPreRouting, assign: 'user' }]; @@ -50,11 +50,11 @@ export function getRouteConfigFactoryReportingPre( } export function getRouteOptionsCsv( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ) { - const getRouteConfig = getRouteConfigFactoryReportingPre(server, plugins, logger); + const getRouteConfig = getRouteConfigFactoryReportingPre(config, plugins, logger); return { ...getRouteConfig(() => CSV_FROM_SAVEDOBJECT_JOB_TYPE), validate: { @@ -75,12 +75,12 @@ export function getRouteOptionsCsv( } export function getRouteConfigFactoryManagementPre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, plugins, logger); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, plugins, logger); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(config, plugins, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(config, plugins, logger); const managementPreRouting = reportingFeaturePreRouting(() => 'management'); return (): RouteConfigFactory => { @@ -99,11 +99,11 @@ export function getRouteConfigFactoryManagementPre( // Additionally, the range-request doesn't alleviate any performance issues on the server as the entire // download is loaded into memory. 
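// A sketch of the ReportingConfig contract these route-config factories now receive in place
// of ServerFacade. The shape is inferred from the call sites in this patch (get() resolves
// keys under xpack.reporting.*, kbnConfig.get() resolves top-level Kibana settings such as
// server.basePath), so treat it as an approximation rather than the canonical interface.
type ConfigGet = (...keys: string[]) => unknown;

interface ReportingConfigSketch {
  get: ConfigGet; // e.g. get('roles', 'allow'), get('csv', 'enablePanelActionDownload')
  kbnConfig: { get: ConfigGet }; // e.g. get('server', 'basePath')
}

// Example stub in the same style the updated unit tests use; the settings map is illustrative.
const settings: Record<string, unknown> = {
  'roles.allow': ['.reporting_user'],
  'server.basePath': '',
};

const configSketch: ReportingConfigSketch = {
  get: (...keys) => settings[keys.join('.')],
  kbnConfig: { get: (...keys) => settings[keys.join('.')] },
};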
export function getRouteConfigFactoryDownloadPre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG, 'download'], @@ -114,11 +114,11 @@ export function getRouteConfigFactoryDownloadPre( } export function getRouteConfigFactoryDeletePre( - server: ServerFacade, + config: ReportingConfig, plugins: ReportingSetupDeps, logger: Logger ): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(config, plugins, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG, 'delete'], diff --git a/x-pack/legacy/plugins/reporting/server/types.d.ts b/x-pack/legacy/plugins/reporting/server/types.d.ts index 59b7bc2020ad93..bec00688432cc4 100644 --- a/x-pack/legacy/plugins/reporting/server/types.d.ts +++ b/x-pack/legacy/plugins/reporting/server/types.d.ts @@ -11,16 +11,16 @@ import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/ import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; import { ReportingPluginSpecOptions } from '../types'; +import { ReportingConfig, ReportingConfigType } from './core'; export interface ReportingSetupDeps { elasticsearch: ElasticsearchServiceSetup; security: SecurityPluginSetup; - usageCollection: UsageCollectionSetup; + usageCollection?: UsageCollectionSetup; __LEGACY: LegacySetup; } export interface ReportingStartDeps { - elasticsearch: ElasticsearchServiceSetup; data: DataPluginStart; __LEGACY: LegacySetup; } @@ -31,9 +31,7 @@ export type ReportingStart = object; export interface LegacySetup { config: Legacy.Server['config']; - info: Legacy.Server['info']; plugins: { - elasticsearch: Legacy.Server['plugins']['elasticsearch']; xpack_main: XPackMainPlugin & { status?: any; }; @@ -42,4 +40,7 @@ export interface LegacySetup { route: Legacy.Server['route']; } -export { ReportingCore } from './core'; +export { ReportingConfig, ReportingConfigType, ReportingCore } from './core'; + +export type CaptureConfig = ReportingConfigType['capture']; +export type ScrollConfig = ReportingConfigType['csv']['scroll']; diff --git a/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts b/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts index bd2d0cb835a790..e9523d9e702029 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/get_reporting_usage.ts @@ -5,7 +5,11 @@ */ import { get } from 'lodash'; -import { ServerFacade, ExportTypesRegistry, ESCallCluster } from '../../types'; +import { XPackMainPlugin } from '../../../xpack_main/server/xpack_main'; +import { ESCallCluster, ExportTypesRegistry } from '../../types'; +import { ReportingConfig } from '../types'; +import { decorateRangeStats } from './decorate_range_stats'; +import { getExportTypesHandler } from './get_export_type_handler'; import { AggregationBuckets, AggregationResults, @@ -15,8 +19,8 @@ import { RangeAggregationResults, RangeStats, } from './types'; -import { decorateRangeStats } from 
'./decorate_range_stats'; -import { getExportTypesHandler } from './get_export_type_handler'; + +type XPackInfo = XPackMainPlugin['info']; const JOB_TYPES_KEY = 'jobTypes'; const JOB_TYPES_FIELD = 'jobtype'; @@ -79,10 +83,7 @@ type RangeStatSets = Partial< last7Days: RangeStats; } >; -async function handleResponse( - server: ServerFacade, - response: AggregationResults -): Promise { +async function handleResponse(response: AggregationResults): Promise { const buckets = get(response, 'aggregations.ranges.buckets'); if (!buckets) { return {}; @@ -101,12 +102,12 @@ async function handleResponse( } export async function getReportingUsage( - server: ServerFacade, + config: ReportingConfig, + xpackMainInfo: XPackInfo, callCluster: ESCallCluster, exportTypesRegistry: ExportTypesRegistry ) { - const config = server.config(); - const reportingIndex = config.get('xpack.reporting.index'); + const reportingIndex = config.get('index'); const params = { index: `${reportingIndex}-*`, @@ -140,15 +141,16 @@ export async function getReportingUsage( }; return callCluster('search', params) - .then((response: AggregationResults) => handleResponse(server, response)) + .then((response: AggregationResults) => handleResponse(response)) .then((usage: RangeStatSets) => { // Allow this to explicitly throw an exception if/when this config is deprecated, // because we shouldn't collect browserType in that case! - const browserType = config.get('xpack.reporting.capture.browser.type'); + const browserType = config.get('capture', 'browser', 'type'); - const xpackInfo = server.plugins.xpack_main.info; const exportTypesHandler = getExportTypesHandler(exportTypesRegistry); - const availability = exportTypesHandler.getAvailability(xpackInfo) as FeatureAvailabilityMap; + const availability = exportTypesHandler.getAvailability( + xpackMainInfo + ) as FeatureAvailabilityMap; const { lastDay, last7Days, ...all } = usage; diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js index a6d753f9b107a2..929109e66914d9 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js @@ -24,62 +24,60 @@ function getMockUsageCollection() { makeUsageCollector: options => { return new MockUsageCollector(this, options); }, + registerCollector: sinon.stub(), }; } -function getServerMock(customization) { - const getLicenseCheckResults = sinon.stub().returns({}); - const defaultServerMock = { - plugins: { - security: { - isAuthenticated: sinon.stub().returns(true), - }, - xpack_main: { - info: { - isAvailable: sinon.stub().returns(true), - feature: () => ({ - getLicenseCheckResults, - }), - license: { - isOneOf: sinon.stub().returns(false), - getType: sinon.stub().returns('platinum'), - }, - toJSON: () => ({ b: 1 }), - }, +function getPluginsMock( + { license, usageCollection = getMockUsageCollection() } = { license: 'platinum' } +) { + const mockXpackMain = { + info: { + isAvailable: sinon.stub().returns(true), + feature: () => ({ + getLicenseCheckResults: sinon.stub(), + }), + license: { + isOneOf: sinon.stub().returns(false), + getType: sinon.stub().returns(license), }, + toJSON: () => ({ b: 1 }), }, - log: () => {}, - config: () => ({ - get: key => { - if (key === 'xpack.reporting.enabled') { - return true; - } else if (key === 'xpack.reporting.index') { - return '.reporting-index'; - } + }; + return { + 
usageCollection, + __LEGACY: { + plugins: { + xpack_main: mockXpackMain, }, - }), + }, }; - return Object.assign(defaultServerMock, customization); } +const getMockReportingConfig = () => ({ + get: () => {}, + kbnConfig: { get: () => '' }, +}); const getResponseMock = (customization = {}) => customization; describe('license checks', () => { + let mockConfig; + beforeAll(async () => { + mockConfig = getMockReportingConfig(); + }); + describe('with a basic license', () => { let usageStats; beforeAll(async () => { - const serverWithBasicLicenseMock = getServerMock(); - serverWithBasicLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('basic'); + const plugins = getPluginsMock({ license: 'basic' }); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithBasicLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -98,18 +96,15 @@ describe('license checks', () => { describe('with no license', () => { let usageStats; beforeAll(async () => { - const serverWithNoLicenseMock = getServerMock(); - serverWithNoLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('none'); + const plugins = getPluginsMock({ license: 'none' }); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithNoLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -128,18 +123,15 @@ describe('license checks', () => { describe('with platinum license', () => { let usageStats; beforeAll(async () => { - const serverWithPlatinumLicenseMock = getServerMock(); - serverWithPlatinumLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('platinum'); + const plugins = getPluginsMock({ license: 'platinum' }); const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithPlatinumLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -158,18 +150,15 @@ describe('license checks', () => { describe('with no usage data', () => { let usageStats; beforeAll(async () => { - const serverWithBasicLicenseMock = getServerMock(); - serverWithBasicLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('basic'); + const plugins = getPluginsMock({ 
license: 'basic' }); const callClusterMock = jest.fn(() => Promise.resolve({})); - const usageCollection = getMockUsageCollection(); - const { fetch: getReportingUsage } = getReportingUsageCollector( - serverWithBasicLicenseMock, - usageCollection, + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, exportTypesRegistry ); - usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); + usageStats = await fetch(callClusterMock, exportTypesRegistry); }); test('sets enables to true', async () => { @@ -183,21 +172,15 @@ describe('license checks', () => { }); describe('data modeling', () => { - let getReportingUsage; - beforeAll(async () => { - const usageCollection = getMockUsageCollection(); - const serverWithPlatinumLicenseMock = getServerMock(); - serverWithPlatinumLicenseMock.plugins.xpack_main.info.license.getType = sinon - .stub() - .returns('platinum'); - ({ fetch: getReportingUsage } = getReportingUsageCollector( - serverWithPlatinumLicenseMock, - usageCollection, - exportTypesRegistry - )); - }); - test('with normal looking usage data', async () => { + const mockConfig = getMockReportingConfig(); + const plugins = getPluginsMock(); + const { fetch } = getReportingUsageCollector( + mockConfig, + plugins.usageCollection, + plugins.__LEGACY.plugins.xpack_main.info, + exportTypesRegistry + ); const callClusterMock = jest.fn(() => Promise.resolve( getResponseMock({ @@ -320,7 +303,7 @@ describe('data modeling', () => { ) ); - const usageStats = await getReportingUsage(callClusterMock); + const usageStats = await fetch(callClusterMock); expect(usageStats).toMatchInlineSnapshot(` Object { "PNG": Object { @@ -415,20 +398,16 @@ describe('data modeling', () => { }); describe('Ready for collection observable', () => { - let mockReporting; - - beforeEach(async () => { - mockReporting = await createMockReportingCore(); - }); - test('converts observable to promise', async () => { - const serverWithBasicLicenseMock = getServerMock(); + const mockConfig = getMockReportingConfig(); + const mockReporting = await createMockReportingCore(mockConfig); + + const usageCollection = getMockUsageCollection(); const makeCollectorSpy = sinon.spy(); - const usageCollection = { - makeUsageCollector: makeCollectorSpy, - registerCollector: sinon.stub(), - }; - registerReportingUsageCollector(mockReporting, serverWithBasicLicenseMock, usageCollection); + usageCollection.makeUsageCollector = makeCollectorSpy; + + const plugins = getPluginsMock({ usageCollection }); + registerReportingUsageCollector(mockReporting, plugins); const [args] = makeCollectorSpy.firstCall.args; expect(args).toMatchInlineSnapshot(` diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts index 14202530fb6c7b..8f9d65c200dade 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts @@ -5,29 +5,32 @@ */ import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { XPackMainPlugin } from '../../../xpack_main/server/xpack_main'; import { KIBANA_REPORTING_TYPE } from '../../common/constants'; -import { ReportingCore } from '../../server'; -import { ESCallCluster, ExportTypesRegistry, ServerFacade } from '../../types'; +import { ReportingConfig, ReportingCore, ReportingSetupDeps } from '../../server/types'; +import { 
ESCallCluster, ExportTypesRegistry } from '../../types'; import { getReportingUsage } from './get_reporting_usage'; import { RangeStats } from './types'; +type XPackInfo = XPackMainPlugin['info']; + // places the reporting data as kibana stats const METATYPE = 'kibana_stats'; /* - * @param {Object} server * @return {Object} kibana usage stats type collection object */ export function getReportingUsageCollector( - server: ServerFacade, + config: ReportingConfig, usageCollection: UsageCollectionSetup, + xpackMainInfo: XPackInfo, exportTypesRegistry: ExportTypesRegistry, isReady: () => Promise ) { return usageCollection.makeUsageCollector({ type: KIBANA_REPORTING_TYPE, fetch: (callCluster: ESCallCluster) => - getReportingUsage(server, callCluster, exportTypesRegistry), + getReportingUsage(config, xpackMainInfo, callCluster, exportTypesRegistry), isReady, /* @@ -52,17 +55,23 @@ export function getReportingUsageCollector( export function registerReportingUsageCollector( reporting: ReportingCore, - server: ServerFacade, - usageCollection: UsageCollectionSetup + plugins: ReportingSetupDeps ) { + if (!plugins.usageCollection) { + return; + } + const xpackMainInfo = plugins.__LEGACY.plugins.xpack_main.info; + const exportTypesRegistry = reporting.getExportTypesRegistry(); const collectionIsReady = reporting.pluginHasStarted.bind(reporting); + const config = reporting.getConfig(); const collector = getReportingUsageCollector( - server, - usageCollection, + config, + plugins.usageCollection, + xpackMainInfo, exportTypesRegistry, collectionIsReady ); - usageCollection.registerCollector(collector); + plugins.usageCollection.registerCollector(collector); } diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts index 883276d43e27e0..930aa7601b8cbc 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_browserdriverfactory.ts @@ -10,7 +10,8 @@ import * as contexts from '../export_types/common/lib/screenshots/constants'; import { ElementsPositionAndAttribute } from '../export_types/common/lib/screenshots/types'; import { HeadlessChromiumDriver, HeadlessChromiumDriverFactory } from '../server/browsers'; import { createDriverFactory } from '../server/browsers/chromium'; -import { BrowserConfig, CaptureConfig, Logger } from '../types'; +import { CaptureConfig } from '../server/types'; +import { Logger } from '../types'; interface CreateMockBrowserDriverFactoryOpts { evaluate: jest.Mock, any[]>; @@ -93,24 +94,34 @@ export const createMockBrowserDriverFactory = async ( logger: Logger, opts: Partial ): Promise => { - const browserConfig = { - inspect: true, - userDataDir: '/usr/data/dir', - viewport: { width: 12, height: 12 }, - disableSandbox: false, - proxy: { enabled: false }, - } as BrowserConfig; + const captureConfig = { + timeouts: { openUrl: 30000, waitForElements: 30000, renderComplete: 30000 }, + browser: { + type: 'chromium', + chromium: { + inspect: false, + disableSandbox: false, + userDataDir: '/usr/data/dir', + viewport: { width: 12, height: 12 }, + proxy: { enabled: false, server: undefined, bypass: undefined }, + }, + autoDownload: false, + inspect: true, + userDataDir: '/usr/data/dir', + viewport: { width: 12, height: 12 }, + disableSandbox: false, + proxy: { enabled: false, server: undefined, bypass: undefined }, + maxScreenshotDimension: undefined, + }, + networkPolicy: { 
enabled: true, rules: [] }, + viewport: { width: 800, height: 600 }, + loadDelay: 2000, + zoom: 1, + maxAttempts: 1, + } as CaptureConfig; const binaryPath = '/usr/local/share/common/secure/'; - const captureConfig = { networkPolicy: {}, timeouts: {} } as CaptureConfig; - - const mockBrowserDriverFactory = await createDriverFactory( - binaryPath, - logger, - browserConfig, - captureConfig - ); - + const mockBrowserDriverFactory = await createDriverFactory(binaryPath, logger, captureConfig); const mockPage = {} as Page; const mockBrowserDriver = new HeadlessChromiumDriver(mockPage, { inspect: true, diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts index 0250e6c0a9afdb..be60b56dcc0c17 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_layoutinstance.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createLayout } from '../export_types/common/layouts'; import { LayoutTypes } from '../export_types/common/constants'; +import { createLayout } from '../export_types/common/layouts'; import { LayoutInstance } from '../export_types/common/layouts/layout'; -import { ServerFacade } from '../types'; +import { CaptureConfig } from '../server/types'; -export const createMockLayoutInstance = (__LEGACY: ServerFacade) => { - const mockLayout = createLayout(__LEGACY, { +export const createMockLayoutInstance = (captureConfig: CaptureConfig) => { + const mockLayout = createLayout(captureConfig, { id: LayoutTypes.PRESERVE_LAYOUT, dimensions: { height: 12, width: 12 }, }) as LayoutInstance; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts index 2cd129d47b3f96..34ff91d1972a08 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts @@ -16,24 +16,26 @@ jest.mock('../log_configuration'); import { EventEmitter } from 'events'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { coreMock } from 'src/core/server/mocks'; -import { ReportingPlugin, ReportingCore } from '../server'; +import { ReportingPlugin, ReportingCore, ReportingConfig } from '../server'; import { ReportingSetupDeps, ReportingStartDeps } from '../server/types'; -export const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => ({ - elasticsearch: setupMock.elasticsearch, - security: setupMock.security, - usageCollection: {} as any, - __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, -}); +const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => { + return { + elasticsearch: setupMock.elasticsearch, + security: setupMock.security, + usageCollection: {} as any, + __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, + }; +}; export const createMockStartDeps = (startMock?: any): ReportingStartDeps => ({ data: startMock.data, - elasticsearch: startMock.elasticsearch, __LEGACY: {} as any, }); -const createMockReportingPlugin = async (config = {}): Promise => { - const plugin = new ReportingPlugin(coreMock.createPluginInitializerContext(config)); +const createMockReportingPlugin = async (config: ReportingConfig): Promise => { + config = config || {}; + const plugin = new 
ReportingPlugin(coreMock.createPluginInitializerContext(config), config); const setupMock = coreMock.createSetup(); const coreStartMock = coreMock.createStart(); const startMock = { @@ -47,7 +49,8 @@ const createMockReportingPlugin = async (config = {}): Promise return plugin; }; -export const createMockReportingCore = async (config = {}): Promise => { +export const createMockReportingCore = async (config: ReportingConfig): Promise => { + config = config || {}; const plugin = await createMockReportingPlugin(config); return plugin.getReportingCore(); }; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts index bb7851ba036a90..531e1dcaf84e0f 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts @@ -3,36 +3,10 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { memoize } from 'lodash'; -import { ServerFacade } from '../types'; - -export const createMockServer = ({ settings = {} }: any): ServerFacade => { - const mockServer = { - config: memoize(() => ({ get: jest.fn() })), - info: { - protocol: 'http', - }, - plugins: { - elasticsearch: { - getCluster: memoize(() => { - return { - callWithRequest: jest.fn(), - }; - }), - }, - }, - }; - const defaultSettings: any = { - 'xpack.reporting.encryptionKey': 'testencryptionkey', - 'server.basePath': '/sbp', - 'server.host': 'localhost', - 'server.port': 5601, - 'xpack.reporting.kibanaServer': {}, - }; - mockServer.config().get.mockImplementation((key: any) => { - return key in settings ? settings[key] : defaultSettings[key]; - }); +import { ServerFacade } from '../types'; - return (mockServer as unknown) as ServerFacade; +export const createMockServer = (): ServerFacade => { + const mockServer = {}; + return mockServer as any; }; diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index 238079ba92a291..09d53278941c91 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -7,14 +7,11 @@ import { EventEmitter } from 'events'; import { ResponseObject } from 'hapi'; import { Legacy } from 'kibana'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { CallCluster } from '../../../../src/legacy/core_plugins/elasticsearch'; import { CancellationToken } from './common/cancellation_token'; -import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; -import { BrowserType } from './server/browsers/types'; -import { LevelLogger } from './server/lib/level_logger'; import { ReportingCore } from './server/core'; -import { LegacySetup, ReportingStartDeps, ReportingSetup, ReportingStart } from './server/types'; +import { LevelLogger } from './server/lib/level_logger'; +import { LegacySetup } from './server/types'; export type Job = EventEmitter & { id: string; @@ -25,8 +22,8 @@ export type Job = EventEmitter & { export interface NetworkPolicyRule { allow: boolean; - protocol: string; - host: string; + protocol?: string; + host?: string; } export interface NetworkPolicy { @@ -93,51 +90,6 @@ export type ReportingResponseToolkit = Legacy.ResponseToolkit; export type ESCallCluster = CallCluster; -/* - * Reporting Config - */ - -export interface CaptureConfig { - browser: { - type: BrowserType; - autoDownload: 
boolean; - chromium: BrowserConfig; - }; - maxAttempts: number; - networkPolicy: NetworkPolicy; - loadDelay: number; - timeouts: { - openUrl: number; - waitForElements: number; - renderComplet: number; - }; -} - -export interface BrowserConfig { - inspect: boolean; - userDataDir: string; - viewport: { width: number; height: number }; - disableSandbox: boolean; - proxy: { - enabled: boolean; - server: string; - bypass?: string[]; - }; -} - -export interface QueueConfig { - indexInterval: string; - pollEnabled: boolean; - pollInterval: number; - pollIntervalErrorMultiplier: number; - timeout: number; -} - -export interface ScrollConfig { - duration: string; - size: number; -} - export interface ElementPosition { boundingClientRect: { // modern browsers support x/y, but older ones don't @@ -274,16 +226,12 @@ export interface ESQueueInstance { export type CreateJobFactory = ( reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger ) => CreateJobFnType; export type ExecuteJobFactory = ( reporting: ReportingCore, - server: ServerFacade, - elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger -) => Promise; +) => Promise; // FIXME: does not "need" to be async export interface ExportTypeDefinition< JobParamsType, diff --git a/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts b/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts index 646132c3f88ebb..f38cb2285b4803 100644 --- a/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts +++ b/x-pack/legacy/plugins/siem/cypress/integration/detections.spec.ts @@ -42,16 +42,15 @@ describe('Detections', () => { cy.get(NUMBER_OF_SIGNALS) .invoke('text') .then(numberOfSignals => { - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${numberOfSignals} signals`); + cy.get(SHOWING_SIGNALS).should('have.text', `Showing ${numberOfSignals} signals`); const numberOfSignalsToBeClosed = 3; selectNumberOfSignals(numberOfSignalsToBeClosed); - cy.get(SELECTED_SIGNALS) - .invoke('text') - .should('eql', `Selected ${numberOfSignalsToBeClosed} signals`); + cy.get(SELECTED_SIGNALS).should( + 'have.text', + `Selected ${numberOfSignalsToBeClosed} signals` + ); closeSignals(); waitForSignals(); @@ -59,30 +58,33 @@ describe('Detections', () => { waitForSignals(); const expectedNumberOfSignalsAfterClosing = +numberOfSignals - numberOfSignalsToBeClosed; - cy.get(NUMBER_OF_SIGNALS) - .invoke('text') - .should('eq', expectedNumberOfSignalsAfterClosing.toString()); - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${expectedNumberOfSignalsAfterClosing.toString()} signals`); + cy.get(NUMBER_OF_SIGNALS).should( + 'have.text', + expectedNumberOfSignalsAfterClosing.toString() + ); + + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${expectedNumberOfSignalsAfterClosing.toString()} signals` + ); goToClosedSignals(); waitForSignals(); - cy.get(NUMBER_OF_SIGNALS) - .invoke('text') - .should('eql', numberOfSignalsToBeClosed.toString()); - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${numberOfSignalsToBeClosed.toString()} signals`); + cy.get(NUMBER_OF_SIGNALS).should('have.text', numberOfSignalsToBeClosed.toString()); + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${numberOfSignalsToBeClosed.toString()} signals` + ); cy.get(SIGNALS).should('have.length', numberOfSignalsToBeClosed); const numberOfSignalsToBeOpened = 1; selectNumberOfSignals(numberOfSignalsToBeOpened); - cy.get(SELECTED_SIGNALS) - 
.invoke('text') - .should('eql', `Selected ${numberOfSignalsToBeOpened} signal`); + cy.get(SELECTED_SIGNALS).should( + 'have.text', + `Selected ${numberOfSignalsToBeOpened} signal` + ); openSignals(); waitForSignals(); @@ -93,15 +95,14 @@ describe('Detections', () => { waitForSignals(); const expectedNumberOfClosedSignalsAfterOpened = 2; - cy.get(NUMBER_OF_SIGNALS) - .invoke('text') - .should('eql', expectedNumberOfClosedSignalsAfterOpened.toString()); - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should( - 'eql', - `Showing ${expectedNumberOfClosedSignalsAfterOpened.toString()} signals` - ); + cy.get(NUMBER_OF_SIGNALS).should( + 'have.text', + expectedNumberOfClosedSignalsAfterOpened.toString() + ); + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${expectedNumberOfClosedSignalsAfterOpened.toString()} signals` + ); cy.get(SIGNALS).should('have.length', expectedNumberOfClosedSignalsAfterOpened); goToOpenedSignals(); @@ -109,13 +110,15 @@ describe('Detections', () => { const expectedNumberOfOpenedSignals = +numberOfSignals - expectedNumberOfClosedSignalsAfterOpened; - cy.get(SHOWING_SIGNALS) - .invoke('text') - .should('eql', `Showing ${expectedNumberOfOpenedSignals.toString()} signals`); - - cy.get('[data-test-subj="server-side-event-count"]') - .invoke('text') - .should('eql', expectedNumberOfOpenedSignals.toString()); + cy.get(SHOWING_SIGNALS).should( + 'have.text', + `Showing ${expectedNumberOfOpenedSignals.toString()} signals` + ); + + cy.get('[data-test-subj="server-side-event-count"]').should( + 'have.text', + expectedNumberOfOpenedSignals.toString() + ); }); }); diff --git a/x-pack/legacy/plugins/siem/cypress/screens/detections.ts b/x-pack/legacy/plugins/siem/cypress/screens/detections.ts index f388ac1215d01f..cb776be8d7b6bb 100644 --- a/x-pack/legacy/plugins/siem/cypress/screens/detections.ts +++ b/x-pack/legacy/plugins/siem/cypress/screens/detections.ts @@ -10,7 +10,7 @@ export const LOADING_SIGNALS_PANEL = '[data-test-subj="loading-signals-panel"]'; export const MANAGE_SIGNAL_DETECTION_RULES_BTN = '[data-test-subj="manage-signal-detection-rules"]'; -export const NUMBER_OF_SIGNALS = '[data-test-subj="server-side-event-count"]'; +export const NUMBER_OF_SIGNALS = '[data-test-subj="server-side-event-count"] .euiBadge__text'; export const OPEN_CLOSE_SIGNAL_BTN = '[data-test-subj="update-signal-status-button"]'; diff --git a/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx b/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx index 05dfd561b1f5ec..b00eef79ee480c 100644 --- a/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx +++ b/x-pack/legacy/plugins/siem/public/components/ml_popover/ml_popover.tsx @@ -7,13 +7,13 @@ import { EuiButtonEmpty, EuiCallOut, EuiPopover, EuiPopoverTitle, EuiSpacer } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; import moment from 'moment'; -import React, { useReducer, useState } from 'react'; +import React, { Dispatch, useCallback, useReducer, useState } from 'react'; import styled from 'styled-components'; import { useKibana } from '../../lib/kibana'; import { METRIC_TYPE, TELEMETRY_EVENT, track } from '../../lib/telemetry'; import { hasMlAdminPermissions } from '../ml/permissions/has_ml_admin_permissions'; -import { errorToToaster, useStateToaster } from '../toasters'; +import { errorToToaster, useStateToaster, ActionToaster } from '../toasters'; import { setupMlJob, startDatafeeds, stopDatafeeds } from './api'; import { filterJobs } from './helpers'; 
import { useSiemJobs } from './hooks/use_siem_jobs'; @@ -22,7 +22,7 @@ import { JobsTable } from './jobs_table/jobs_table'; import { ShowingCount } from './jobs_table/showing_count'; import { PopoverDescription } from './popover_description'; import * as i18n from './translations'; -import { JobsFilters, JobSummary, SiemJob } from './types'; +import { JobsFilters, SiemJob } from './types'; import { UpgradeContents } from './upgrade_contents'; import { useMlCapabilities } from './hooks/use_ml_capabilities'; @@ -34,15 +34,10 @@ PopoverContentsDiv.displayName = 'PopoverContentsDiv'; interface State { isLoading: boolean; - jobs: JobSummary[]; refreshToggle: boolean; } -type Action = - | { type: 'refresh' } - | { type: 'loading' } - | { type: 'success'; results: JobSummary[] } - | { type: 'failure' }; +type Action = { type: 'refresh' } | { type: 'loading' } | { type: 'success' } | { type: 'failure' }; function mlPopoverReducer(state: State, action: Action): State { switch (action.type) { @@ -62,14 +57,12 @@ function mlPopoverReducer(state: State, action: Action): State { return { ...state, isLoading: false, - jobs: action.results, }; } case 'failure': { return { ...state, isLoading: false, - jobs: [], }; } default: @@ -79,7 +72,6 @@ function mlPopoverReducer(state: State, action: Action): State { const initialState: State = { isLoading: false, - jobs: [], refreshToggle: true, }; @@ -91,7 +83,7 @@ const defaultFilterProps: JobsFilters = { }; export const MlPopover = React.memo(() => { - const [{ refreshToggle }, dispatch] = useReducer(mlPopoverReducer, initialState); + const [{ isLoading, refreshToggle }, dispatch] = useReducer(mlPopoverReducer, initialState); const [isPopoverOpen, setIsPopoverOpen] = useState(false); const [filterProperties, setFilterProperties] = useState(defaultFilterProps); @@ -99,50 +91,11 @@ export const MlPopover = React.memo(() => { const [, dispatchToaster] = useStateToaster(); const capabilities = useMlCapabilities(); const docLinks = useKibana().services.docLinks; - - // Enable/Disable Job & Datafeed -- passed to JobsTable for use as callback on JobSwitch - const enableDatafeed = async (job: SiemJob, latestTimestampMs: number, enable: boolean) => { - submitTelemetry(job, enable); - - if (!job.isInstalled) { - try { - await setupMlJob({ - configTemplate: job.moduleId, - indexPatternName: job.defaultIndexPattern, - jobIdErrorFilter: [job.id], - groups: job.groups, - }); - } catch (error) { - errorToToaster({ title: i18n.CREATE_JOB_FAILURE, error, dispatchToaster }); - dispatch({ type: 'refresh' }); - return; - } - } - - // Max start time for job is no more than two weeks ago to ensure job performance - const maxStartTime = moment - .utc() - .subtract(14, 'days') - .valueOf(); - - if (enable) { - const startTime = Math.max(latestTimestampMs, maxStartTime); - try { - await startDatafeeds({ datafeedIds: [`datafeed-${job.id}`], start: startTime }); - } catch (error) { - track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_ENABLE_FAILURE); - errorToToaster({ title: i18n.START_JOB_FAILURE, error, dispatchToaster }); - } - } else { - try { - await stopDatafeeds({ datafeedIds: [`datafeed-${job.id}`] }); - } catch (error) { - track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_DISABLE_FAILURE); - errorToToaster({ title: i18n.STOP_JOB_FAILURE, error, dispatchToaster }); - } - } - dispatch({ type: 'refresh' }); - }; + const handleJobStateChange = useCallback( + (job: SiemJob, latestTimestampMs: number, enable: boolean) => + enableDatafeed(job, latestTimestampMs, enable, dispatch, 
dispatchToaster), + [dispatch, dispatchToaster] + ); const filteredJobs = filterJobs({ jobs: siemJobs, @@ -239,9 +192,9 @@ export const MlPopover = React.memo(() => { )} @@ -252,6 +205,59 @@ export const MlPopover = React.memo(() => { } }); +// Enable/Disable Job & Datafeed -- passed to JobsTable for use as callback on JobSwitch +const enableDatafeed = async ( + job: SiemJob, + latestTimestampMs: number, + enable: boolean, + dispatch: Dispatch, + dispatchToaster: Dispatch +) => { + submitTelemetry(job, enable); + + if (!job.isInstalled) { + dispatch({ type: 'loading' }); + try { + await setupMlJob({ + configTemplate: job.moduleId, + indexPatternName: job.defaultIndexPattern, + jobIdErrorFilter: [job.id], + groups: job.groups, + }); + dispatch({ type: 'success' }); + } catch (error) { + errorToToaster({ title: i18n.CREATE_JOB_FAILURE, error, dispatchToaster }); + dispatch({ type: 'failure' }); + dispatch({ type: 'refresh' }); + return; + } + } + + // Max start time for job is no more than two weeks ago to ensure job performance + const maxStartTime = moment + .utc() + .subtract(14, 'days') + .valueOf(); + + if (enable) { + const startTime = Math.max(latestTimestampMs, maxStartTime); + try { + await startDatafeeds({ datafeedIds: [`datafeed-${job.id}`], start: startTime }); + } catch (error) { + track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_ENABLE_FAILURE); + errorToToaster({ title: i18n.START_JOB_FAILURE, error, dispatchToaster }); + } + } else { + try { + await stopDatafeeds({ datafeedIds: [`datafeed-${job.id}`] }); + } catch (error) { + track(METRIC_TYPE.COUNT, TELEMETRY_EVENT.JOB_DISABLE_FAILURE); + errorToToaster({ title: i18n.STOP_JOB_FAILURE, error, dispatchToaster }); + } + } + dispatch({ type: 'refresh' }); +}; + const submitTelemetry = (job: SiemJob, enabled: boolean) => { // Report type of job enabled/disabled track( diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx index 426a1ab9238dc4..4d9e283bfb9cc2 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.test.tsx @@ -22,7 +22,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: null, hasIndexWrite: null, - hasManageApiKey: null, isAuthenticated: null, hasEncryptionKey: null, isSignalIndexExists: null, @@ -50,7 +49,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: null, hasIndexWrite: null, - hasManageApiKey: null, isAuthenticated: null, hasEncryptionKey: null, isSignalIndexExists: null, @@ -79,7 +77,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -116,7 +113,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -139,7 +135,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: false, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -161,29 +156,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: false, - hasManageApiKey: true, - 
isAuthenticated: true, - hasEncryptionKey: true, - isSignalIndexExists: true, - }) - ); - await waitForNextUpdate(); - await waitForNextUpdate(); - let resp = null; - if (result.current.createPrePackagedRules) { - resp = await result.current.createPrePackagedRules(); - } - expect(resp).toEqual(false); - }); - }); - - test('can NOT createPrePackagedRules because hasManageApiKey === false', async () => { - await act(async () => { - const { result, waitForNextUpdate } = renderHook(() => - usePrePackagedRules({ - canUserCRUD: true, - hasIndexWrite: true, - hasManageApiKey: false, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: true, @@ -205,7 +177,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: false, hasEncryptionKey: true, isSignalIndexExists: true, @@ -227,7 +198,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: false, isSignalIndexExists: true, @@ -249,7 +219,6 @@ describe('usePersistRule', () => { usePrePackagedRules({ canUserCRUD: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, hasEncryptionKey: true, isSignalIndexExists: false, diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx index 0dd95bea8a0b24..44d5de10e361a0 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_pre_packaged_rules.tsx @@ -26,7 +26,6 @@ export interface ReturnPrePackagedRules { interface UsePrePackagedRuleProps { canUserCRUD: boolean | null; hasIndexWrite: boolean | null; - hasManageApiKey: boolean | null; isAuthenticated: boolean | null; hasEncryptionKey: boolean | null; isSignalIndexExists: boolean | null; @@ -36,7 +35,6 @@ interface UsePrePackagedRuleProps { * Hook for using to get status about pre-packaged Rules from the Detection Engine API * * @param hasIndexWrite boolean - * @param hasManageApiKey boolean * @param isAuthenticated boolean * @param hasEncryptionKey boolean * @param isSignalIndexExists boolean @@ -45,7 +43,6 @@ interface UsePrePackagedRuleProps { export const usePrePackagedRules = ({ canUserCRUD, hasIndexWrite, - hasManageApiKey, isAuthenticated, hasEncryptionKey, isSignalIndexExists, @@ -117,7 +114,6 @@ export const usePrePackagedRules = ({ if ( canUserCRUD && hasIndexWrite && - hasManageApiKey && isAuthenticated && hasEncryptionKey && isSignalIndexExists @@ -185,14 +181,7 @@ export const usePrePackagedRules = ({ isSubscribed = false; abortCtrl.abort(); }; - }, [ - canUserCRUD, - hasIndexWrite, - hasManageApiKey, - isAuthenticated, - hasEncryptionKey, - isSignalIndexExists, - ]); + }, [canUserCRUD, hasIndexWrite, isAuthenticated, hasEncryptionKey, isSignalIndexExists]); return { loading, diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx index 0d37cce1fd85ca..412fc0706b1517 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/rules/use_rule_status.tsx @@ -16,7 +16,7 @@ type Func = (ruleId: string) => void; 
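
In the ml_popover.tsx hunks above, enableDatafeed moves out of the MlPopover component body to module scope and is re-attached through useCallback, with dispatch and dispatchToaster passed in explicitly. The sketch below condenses that pattern into a custom hook for brevity; toggleJob and useJobToggle are illustrative names rather than the SIEM ones, and the async body is a stand-in for the setupMlJob / startDatafeeds / stopDatafeeds calls.

import { Dispatch, useCallback, useReducer } from 'react';

type JobAction = { type: 'refresh' } | { type: 'loading' } | { type: 'failure' };

// Module-scope async handler: it no longer closes over component state, so the dispatcher
// is handed to it, just as enableDatafeed now receives dispatch and dispatchToaster.
const toggleJob = async (jobId: string, enable: boolean, dispatch: Dispatch<JobAction>) => {
  dispatch({ type: 'loading' });
  try {
    await Promise.resolve({ jobId, enable }); // stand-in for the ML API calls
    dispatch({ type: 'refresh' });
  } catch {
    dispatch({ type: 'failure' });
  }
};

export const useJobToggle = () => {
  const [state, dispatch] = useReducer(
    (_prev: { isLoading: boolean }, action: JobAction) => ({ isLoading: action.type === 'loading' }),
    { isLoading: false }
  );
  // The memoized wrapper is what gets passed down (the handler given to JobsTable in the
  // real code); its identity only changes when dispatch does.
  const handleToggle = useCallback(
    (jobId: string, enable: boolean) => toggleJob(jobId, enable, dispatch),
    [dispatch]
  );
  return { ...state, handleToggle };
};
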
export type ReturnRuleStatus = [boolean, RuleStatus | null, Func | null]; export interface ReturnRulesStatuses { loading: boolean; - rulesStatuses: RuleStatusRowItemType[] | null; + rulesStatuses: RuleStatusRowItemType[]; } /** diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts index 37e93b1481e15f..6b0c7e0078268e 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/mock.ts @@ -992,7 +992,6 @@ export const mockUserPrivilege: Privilege = { monitor_watcher: true, monitor_transform: true, read_ilm: true, - manage_api_key: true, manage_security: true, manage_own_api_key: false, manage_saml: true, diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts index d90f94d32001da..4e97c597546a7a 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/types.ts @@ -54,7 +54,6 @@ export interface Privilege { monitor_watcher: boolean; monitor_transform: boolean; read_ilm: boolean; - manage_api_key: boolean; manage_security: boolean; manage_own_api_key: boolean; manage_saml: boolean; diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx index 26827429604421..c248223c6b81be 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.test.tsx @@ -21,7 +21,6 @@ describe('usePrivilegeUser', () => { hasEncryptionKey: null, hasIndexManage: null, hasIndexWrite: null, - hasManageApiKey: null, isAuthenticated: null, loading: true, }); @@ -39,7 +38,6 @@ describe('usePrivilegeUser', () => { hasEncryptionKey: true, hasIndexManage: true, hasIndexWrite: true, - hasManageApiKey: true, isAuthenticated: true, loading: false, }); @@ -61,7 +59,6 @@ describe('usePrivilegeUser', () => { hasEncryptionKey: false, hasIndexManage: false, hasIndexWrite: false, - hasManageApiKey: false, isAuthenticated: false, loading: false, }); diff --git a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx index c58e62c062faec..140dd1544b12b0 100644 --- a/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx +++ b/x-pack/legacy/plugins/siem/public/containers/detection_engine/signals/use_privilege_user.tsx @@ -15,7 +15,6 @@ export interface ReturnPrivilegeUser { isAuthenticated: boolean | null; hasEncryptionKey: boolean | null; hasIndexManage: boolean | null; - hasManageApiKey: boolean | null; hasIndexWrite: boolean | null; } /** @@ -27,17 +26,12 @@ export const usePrivilegeUser = (): ReturnPrivilegeUser => { const [privilegeUser, setPrivilegeUser] = useState< Pick< ReturnPrivilegeUser, - | 'isAuthenticated' - | 'hasEncryptionKey' - | 'hasIndexManage' - | 'hasManageApiKey' - | 'hasIndexWrite' + 'isAuthenticated' | 'hasEncryptionKey' | 'hasIndexManage' | 'hasIndexWrite' > >({ isAuthenticated: null, hasEncryptionKey: null, 
hasIndexManage: null, - hasManageApiKey: null, hasIndexWrite: null, }); const [, dispatchToaster] = useStateToaster(); @@ -65,10 +59,6 @@ export const usePrivilegeUser = (): ReturnPrivilegeUser => { privilege.index[indexName].create_doc || privilege.index[indexName].index || privilege.index[indexName].write, - hasManageApiKey: - privilege.cluster.manage_security || - privilege.cluster.manage_api_key || - privilege.cluster.manage_own_api_key, }); } } @@ -78,7 +68,6 @@ export const usePrivilegeUser = (): ReturnPrivilegeUser => { isAuthenticated: false, hasEncryptionKey: false, hasIndexManage: false, - hasManageApiKey: false, hasIndexWrite: false, }); errorToToaster({ title: i18n.PRIVILEGE_FETCH_FAILURE, error, dispatchToaster }); diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx index a96913f2ad541f..9e45371fb6058e 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/components/user_info/index.tsx @@ -15,7 +15,6 @@ export interface State { canUserCRUD: boolean | null; hasIndexManage: boolean | null; hasIndexWrite: boolean | null; - hasManageApiKey: boolean | null; isSignalIndexExists: boolean | null; isAuthenticated: boolean | null; hasEncryptionKey: boolean | null; @@ -27,7 +26,6 @@ const initialState: State = { canUserCRUD: null, hasIndexManage: null, hasIndexWrite: null, - hasManageApiKey: null, isSignalIndexExists: null, isAuthenticated: null, hasEncryptionKey: null, @@ -37,10 +35,6 @@ const initialState: State = { export type Action = | { type: 'updateLoading'; loading: boolean } - | { - type: 'updateHasManageApiKey'; - hasManageApiKey: boolean | null; - } | { type: 'updateHasIndexManage'; hasIndexManage: boolean | null; @@ -90,12 +84,6 @@ export const userInfoReducer = (state: State, action: Action): State => { hasIndexWrite: action.hasIndexWrite, }; } - case 'updateHasManageApiKey': { - return { - ...state, - hasManageApiKey: action.hasManageApiKey, - }; - } case 'updateIsSignalIndexExists': { return { ...state, @@ -151,7 +139,6 @@ export const useUserInfo = (): State => { canUserCRUD, hasIndexManage, hasIndexWrite, - hasManageApiKey, isSignalIndexExists, isAuthenticated, hasEncryptionKey, @@ -166,7 +153,6 @@ export const useUserInfo = (): State => { hasEncryptionKey: isApiEncryptionKey, hasIndexManage: hasApiIndexManage, hasIndexWrite: hasApiIndexWrite, - hasManageApiKey: hasApiManageApiKey, } = usePrivilegeUser(); const { loading: indexNameLoading, @@ -197,12 +183,6 @@ export const useUserInfo = (): State => { } }, [loading, hasIndexWrite, hasApiIndexWrite]); - useEffect(() => { - if (!loading && hasManageApiKey !== hasApiManageApiKey && hasApiManageApiKey != null) { - dispatch({ type: 'updateHasManageApiKey', hasManageApiKey: hasApiManageApiKey }); - } - }, [loading, hasManageApiKey, hasApiManageApiKey]); - useEffect(() => { if ( !loading && @@ -258,7 +238,6 @@ export const useUserInfo = (): State => { canUserCRUD, hasIndexManage, hasIndexWrite, - hasManageApiKey, signalIndexName, }; }; diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx index 5157bd81403e28..9a84d33ab5fdf4 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx +++ 
b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/columns.tsx @@ -81,8 +81,8 @@ export type RuleStatusRowItemType = RuleStatus & { name: string; id: string; }; -type RulesColumns = EuiBasicTableColumn | EuiTableActionsColumnType; -type RulesStatusesColumns = EuiBasicTableColumn; +export type RulesColumns = EuiBasicTableColumn | EuiTableActionsColumnType; +export type RulesStatusesColumns = EuiBasicTableColumn; interface GetColumns { dispatch: React.Dispatch; diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx index 1a982725464402..ccdfd1ed1be38d 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/all/index.tsx @@ -31,7 +31,7 @@ import { Loader } from '../../../../components/loader'; import { Panel } from '../../../../components/panel'; import { PrePackagedRulesPrompt } from '../components/pre_packaged_rules/load_empty_prompt'; import { GenericDownloader } from '../../../../components/generic_downloader'; -import { AllRulesTables } from '../components/all_rules_tables'; +import { AllRulesTables, SortingType } from '../components/all_rules_tables'; import { getPrePackagedRuleStatus } from '../helpers'; import * as i18n from '../translations'; import { EuiBasicTableOnChange } from '../types'; @@ -128,7 +128,7 @@ export const AllRules = React.memo( }); const sorting = useMemo( - () => ({ sort: { field: 'enabled', direction: filterOptions.sortOrder } }), + (): SortingType => ({ sort: { field: 'enabled', direction: filterOptions.sortOrder } }), [filterOptions.sortOrder] ); @@ -330,7 +330,7 @@ export const AllRules = React.memo( euiBasicTableSelectionProps={euiBasicTableSelectionProps} hasNoPermissions={hasNoPermissions} monitoringColumns={monitoringColumns} - paginationMemo={paginationMemo} + pagination={paginationMemo} rules={rules} rulesColumns={rulesColumns} rulesStatuses={rulesStatuses} diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx index 0fd07f30a00b67..31aaa426e4f3b8 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/all_rules_tables/index.tsx @@ -4,30 +4,59 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { EuiBasicTable, EuiTab, EuiTabs, EuiEmptyPrompt } from '@elastic/eui'; +import { + EuiBasicTable, + EuiBasicTableColumn, + EuiTab, + EuiTabs, + EuiEmptyPrompt, + Direction, + EuiTableSelectionType, +} from '@elastic/eui'; import React, { useMemo, memo, useState } from 'react'; import styled from 'styled-components'; +import { EuiBasicTableOnChange } from '../../types'; import * as i18n from '../../translations'; -import { RuleStatusRowItemType } from '../../../../../pages/detection_engine/rules/all/columns'; -import { Rules } from '../../../../../containers/detection_engine/rules'; +import { + RulesColumns, + RuleStatusRowItemType, +} from '../../../../../pages/detection_engine/rules/all/columns'; +import { Rule, Rules } from '../../../../../containers/detection_engine/rules'; // EuiBasicTable give me a hardtime with adding the ref attributes so I went the easy way // after few hours of fight with typescript !!!! I lost :( // eslint-disable-next-line @typescript-eslint/no-explicit-any const MyEuiBasicTable = styled(EuiBasicTable as any)`` as any; +export interface SortingType { + sort: { + field: 'enabled'; + direction: Direction; + }; +} + interface AllRulesTablesProps { - euiBasicTableSelectionProps: unknown; + euiBasicTableSelectionProps: EuiTableSelectionType; hasNoPermissions: boolean; - monitoringColumns: unknown; - paginationMemo: unknown; + monitoringColumns: Array>; + pagination: { + pageIndex: number; + pageSize: number; + totalItemCount: number; + pageSizeOptions: number[]; + }; rules: Rules; - rulesColumns: unknown; - rulesStatuses: RuleStatusRowItemType[] | null; - sorting: unknown; - tableOnChangeCallback: unknown; - tableRef?: unknown; + rulesColumns: RulesColumns[]; + rulesStatuses: RuleStatusRowItemType[]; + sorting: { + sort: { + field: 'enabled'; + direction: Direction; + }; + }; + tableOnChangeCallback: ({ page, sort }: EuiBasicTableOnChange) => void; + tableRef?: React.MutableRefObject; } enum AllRulesTabs { @@ -52,7 +81,7 @@ const AllRulesTablesComponent: React.FC = ({ euiBasicTableSelectionProps, hasNoPermissions, monitoringColumns, - paginationMemo, + pagination, rules, rulesColumns, rulesStatuses, @@ -95,7 +124,7 @@ const AllRulesTablesComponent: React.FC = ({ items={rules ?? []} noItemsMessage={emptyPrompt} onChange={tableOnChangeCallback} - pagination={paginationMemo} + pagination={pagination} ref={tableRef} sorting={sorting} selection={hasNoPermissions ? 
undefined : euiBasicTableSelectionProps} @@ -110,7 +139,7 @@ const AllRulesTablesComponent: React.FC = ({ items={rulesStatuses} noItemsMessage={emptyPrompt} onChange={tableOnChangeCallback} - pagination={paginationMemo} + pagination={pagination} sorting={sorting} /> )} diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap index 65a606604d4a7c..1bee36ed9e1850 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/__snapshots__/index.test.tsx.snap @@ -1,6 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`RuleActionsOverflow renders correctly against snapshot 1`] = ` +exports[`RuleActionsOverflow snapshots renders correctly against snapshot 1`] = ` } closePopover={[Function]} + data-test-subj="rules-details-popover" display="inlineBlock" hasArrow={true} id="ruleActionsOverflow" @@ -27,24 +29,28 @@ exports[`RuleActionsOverflow renders correctly against snapshot 1`] = ` panelPaddingSize="none" > Duplicate rule… , Export rule , ({ }), })); +jest.mock('../../all/actions', () => ({ + deleteRulesAction: jest.fn(), + duplicateRulesAction: jest.fn(), +})); + describe('RuleActionsOverflow', () => { - test('renders correctly against snapshot', () => { - const wrapper = shallow( - - ); - expect(wrapper).toMatchSnapshot(); + describe('snapshots', () => { + test('renders correctly against snapshot', () => { + const wrapper = shallow( + + ); + expect(wrapper).toMatchSnapshot(); + }); + }); + + describe('rules details menu panel', () => { + test('there is at least one item when there is a rule within the rules-details-menu-panel', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + const items: unknown[] = wrapper + .find('[data-test-subj="rules-details-menu-panel"]') + .first() + .prop('items'); + + expect(items.length).toBeGreaterThan(0); + }); + + test('items are empty when there is a null rule within the rules-details-menu-panel', () => { + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-menu-panel"]') + .first() + .prop('items') + ).toEqual([]); + }); + + test('items are empty when there is an undefined rule within the rules-details-menu-panel', () => { + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-menu-panel"]') + .first() + .prop('items') + ).toEqual([]); + }); + + test('it opens the popover when rules-details-popover-button-icon is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(true); + }); + }); + + describe('rules details pop over button icon', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked when 
the user does not have permission', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + }); + + describe('rules details duplicate rule', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked and the user does not have permission', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect(wrapper.find('[data-test-subj="rules-details-delete-rule"] button').exists()).toEqual( + false + ); + }); + + test('it opens the popover when rules-details-popover-button-icon is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(true); + }); + + test('it closes the popover when rules-details-duplicate-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-duplicate-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + + test('it calls duplicateRulesAction when rules-details-duplicate-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-duplicate-rule"] button').simulate('click'); + wrapper.update(); + expect(duplicateRulesAction).toHaveBeenCalled(); + }); + + test('it calls duplicateRulesAction with the rule and rule.id when rules-details-duplicate-rule is clicked', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-duplicate-rule"] button').simulate('click'); + wrapper.update(); + expect(duplicateRulesAction).toHaveBeenCalledWith( + [rule], + [rule.id], + expect.anything(), + expect.anything() + ); + }); + }); + + describe('rules details export rule', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked and the user does not have permission', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect(wrapper.find('[data-test-subj="rules-details-export-rule"] button').exists()).toEqual( + false + ); + }); + + test('it closes the popover when rules-details-export-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + + 
test('it sets the rule.rule_id on the generic downloader when rules-details-export-rule is clicked', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper.find('[data-test-subj="rules-details-generic-downloader"]').prop('ids') + ).toEqual([rule.rule_id]); + }); + + test('it does not close the pop over on rules-details-export-rule when the rule is an immutable rule and the user does a click', () => { + const rule = mockRule('id'); + rule.immutable = true; + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(true); + }); + + test('it does not set the rule.rule_id on rules-details-export-rule when the rule is an immutable rule', () => { + const rule = mockRule('id'); + rule.immutable = true; + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-export-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper.find('[data-test-subj="rules-details-generic-downloader"]').prop('ids') + ).toEqual([]); + }); + }); + + describe('rules details delete rule', () => { + test('it does not open the popover when rules-details-popover-button-icon is clicked and the user does not have permission', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + expect(wrapper.find('[data-test-subj="rules-details-delete-rule"] button').exists()).toEqual( + false + ); + }); + + test('it closes the popover when rules-details-delete-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-delete-rule"] button').simulate('click'); + wrapper.update(); + expect( + wrapper + .find('[data-test-subj="rules-details-popover"]') + .first() + .prop('isOpen') + ).toEqual(false); + }); + + test('it calls deleteRulesAction when rules-details-delete-rule is clicked', () => { + const wrapper = mount( + + ); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-delete-rule"] button').simulate('click'); + wrapper.update(); + expect(deleteRulesAction).toHaveBeenCalled(); + }); + + test('it calls deleteRulesAction with the rule.id when rules-details-delete-rule is clicked', () => { + const rule = mockRule('id'); + const wrapper = mount(); + wrapper.find('[data-test-subj="rules-details-popover-button-icon"] button').simulate('click'); + wrapper.update(); + wrapper.find('[data-test-subj="rules-details-delete-rule"] button').simulate('click'); + wrapper.update(); + expect(deleteRulesAction).toHaveBeenCalledWith( + [rule.id], + expect.anything(), + expect.anything(), + expect.anything() + ); + }); }); }); diff --git 
a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx index e1ca84ed8cc642..a7ce0c85ffdcf7 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/components/rule_actions_overflow/index.tsx @@ -62,8 +62,9 @@ const RuleActionsOverflowComponent = ({ ? [ { setIsPopoverOpen(false); await duplicateRulesAction([rule], [rule.id], noop, dispatchToaster); @@ -73,11 +74,12 @@ const RuleActionsOverflowComponent = ({ , { setIsPopoverOpen(false); - setRulesToExport([rule.id]); + setRulesToExport([rule.rule_id]); }} > {i18nActions.EXPORT_RULE} @@ -86,6 +88,7 @@ const RuleActionsOverflowComponent = ({ key={i18nActions.DELETE_RULE} icon="trash" disabled={userHasNoPermissions} + data-test-subj="rules-details-delete-rule" onClick={async () => { setIsPopoverOpen(false); await deleteRulesAction([rule.id], noop, dispatchToaster, onRuleDeletedCallback); @@ -109,6 +112,7 @@ const RuleActionsOverflowComponent = ({ iconType="boxesHorizontal" aria-label={i18n.ALL_ACTIONS} isDisabled={userHasNoPermissions} + data-test-subj="rules-details-popover-button-icon" onClick={handlePopoverOpen} /> @@ -124,15 +128,17 @@ const RuleActionsOverflowComponent = ({ closePopover={() => setIsPopoverOpen(false)} id="ruleActionsOverflow" isOpen={isPopoverOpen} + data-test-subj="rules-details-popover" ownFocus={true} panelPaddingSize="none" > - + { displaySuccessToast( i18nActions.SUCCESSFULLY_EXPORTED_RULES(exportCount), diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx index 03352166729154..2686bb47925b6c 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/create/index.tsx @@ -24,7 +24,7 @@ import { StepScheduleRule } from '../components/step_schedule_rule'; import { StepRuleActions } from '../components/step_rule_actions'; import { DetectionEngineHeaderPage } from '../../components/detection_engine_header_page'; import * as RuleI18n from '../translations'; -import { redirectToDetections, getActionMessageParams } from '../helpers'; +import { redirectToDetections, getActionMessageParams, userHasNoPermissions } from '../helpers'; import { AboutStepRule, DefineStepRule, @@ -85,7 +85,6 @@ const CreateRulePageComponent: React.FC = () => { isAuthenticated, hasEncryptionKey, canUserCRUD, - hasManageApiKey, } = useUserInfo(); const [, dispatchToaster] = useStateToaster(); const [openAccordionId, setOpenAccordionId] = useState(RuleStep.defineRule); @@ -117,8 +116,6 @@ const CreateRulePageComponent: React.FC = () => { getActionMessageParams((stepsData.current['define-rule'].data as DefineStepRule).ruleType), [stepsData.current['define-rule'].data] ); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? 
!canUserCRUD || !hasManageApiKey : false; const setStepData = useCallback( (step: RuleStep, data: unknown, isValid: boolean) => { @@ -274,7 +271,7 @@ const CreateRulePageComponent: React.FC = () => { if (redirectToDetections(isSignalIndexExists, isAuthenticated, hasEncryptionKey)) { return ; - } else if (userHasNoPermissions) { + } else if (userHasNoPermissions(canUserCRUD)) { return ; } diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx index b8e2310ef06146..cb4d88a8bb539c 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/details/index.tsx @@ -53,7 +53,7 @@ import * as detectionI18n from '../../translations'; import { ReadOnlyCallOut } from '../components/read_only_callout'; import { RuleSwitch } from '../components/rule_switch'; import { StepPanel } from '../components/step_panel'; -import { getStepsData, redirectToDetections } from '../helpers'; +import { getStepsData, redirectToDetections, userHasNoPermissions } from '../helpers'; import * as ruleI18n from '../translations'; import * as i18n from './translations'; import { GlobalTime } from '../../../../containers/global_time'; @@ -96,7 +96,6 @@ const RuleDetailsPageComponent: FC = ({ isAuthenticated, hasEncryptionKey, canUserCRUD, - hasManageApiKey, hasIndexWrite, signalIndexName, } = useUserInfo(); @@ -115,8 +114,6 @@ const RuleDetailsPageComponent: FC = ({ scheduleRuleData: null, }; const [lastSignals] = useSignalInfo({ ruleId }); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? !canUserCRUD || !hasManageApiKey : false; const title = isLoading === true || rule === null ? : rule.name; const subTitle = useMemo( @@ -227,7 +224,7 @@ const RuleDetailsPageComponent: FC = ({ return ( <> {hasIndexWrite != null && !hasIndexWrite && } - {userHasNoPermissions && } + {userHasNoPermissions(canUserCRUD) && } {({ indicesExist, indexPattern }) => { return indicesExistOrDataTemporarilyUnavailable(indicesExist) ? 
( @@ -264,7 +261,7 @@ const RuleDetailsPageComponent: FC = ({ = ({ {ruleI18n.EDIT_RULE_SETTINGS} @@ -285,7 +282,7 @@ const RuleDetailsPageComponent: FC = ({ diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx index 60d6158987a1db..c42e7b902cd5c5 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/edit/index.tsx @@ -33,7 +33,12 @@ import { StepDefineRule } from '../components/step_define_rule'; import { StepScheduleRule } from '../components/step_schedule_rule'; import { StepRuleActions } from '../components/step_rule_actions'; import { formatRule } from '../create/helpers'; -import { getStepsData, redirectToDetections, getActionMessageParams } from '../helpers'; +import { + getStepsData, + redirectToDetections, + getActionMessageParams, + userHasNoPermissions, +} from '../helpers'; import * as ruleI18n from '../translations'; import { RuleStep, @@ -69,14 +74,10 @@ const EditRulePageComponent: FC = () => { isAuthenticated, hasEncryptionKey, canUserCRUD, - hasManageApiKey, } = useUserInfo(); const { detailName: ruleId } = useParams(); const [loading, rule] = useRule(ruleId); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? !canUserCRUD || !hasManageApiKey : false; - const [initForm, setInitForm] = useState(false); const [myAboutRuleForm, setMyAboutRuleForm] = useState({ data: null, @@ -346,7 +347,7 @@ const EditRulePageComponent: FC = () => { if (redirectToDetections(isSignalIndexExists, isAuthenticated, hasEncryptionKey)) { return ; - } else if (userHasNoPermissions) { + } else if (userHasNoPermissions(canUserCRUD)) { return ; } diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx index 522464d585ccae..443dbd2c93a35d 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.test.tsx @@ -14,6 +14,7 @@ import { getHumanizedDuration, getModifiedAboutDetailsData, determineDetailsValue, + userHasNoPermissions, } from './helpers'; import { mockRuleWithEverything, mockRule } from './all/__mocks__/mock'; import { esFilters } from '../../../../../../../../src/plugins/data/public'; @@ -337,4 +338,27 @@ describe('rule helpers', () => { expect(result).toEqual(aboutRuleDetailsData); }); }); + + describe('userHasNoPermissions', () => { + test("returns false when user's CRUD operations are null", () => { + const result: boolean = userHasNoPermissions(null); + const userHasNoPermissionsExpectedResult = false; + + expect(result).toEqual(userHasNoPermissionsExpectedResult); + }); + + test('returns true when user cannot CRUD', () => { + const result: boolean = userHasNoPermissions(false); + const userHasNoPermissionsExpectedResult = true; + + expect(result).toEqual(userHasNoPermissionsExpectedResult); + }); + + test('returns false when user can CRUD', () => { + const result: boolean = userHasNoPermissions(true); + const userHasNoPermissionsExpectedResult = false; + + expect(result).toEqual(userHasNoPermissionsExpectedResult); + }); + }); }); diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx index 
b6afba527ccdcf..db1f2298b5ea78 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/helpers.tsx @@ -267,3 +267,7 @@ export const getActionMessageParams = memoizeOne((ruleType: RuleType | undefined ...actionMessageRuleParams.map(param => `context.rule.${param}`), ]; }); + +// typed as null not undefined as the initial state for this value is null. +export const userHasNoPermissions = (canUserCRUD: boolean | null): boolean => + canUserCRUD != null ? !canUserCRUD : false; diff --git a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx index 2b93ec8b101120..8831bc77691fa7 100644 --- a/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx +++ b/x-pack/legacy/plugins/siem/public/pages/detection_engine/rules/index.tsx @@ -23,7 +23,7 @@ import { AllRules } from './all'; import { ImportDataModal } from '../../../components/import_data_modal'; import { ReadOnlyCallOut } from './components/read_only_callout'; import { UpdatePrePackagedRulesCallOut } from './components/pre_packaged_rules/update_callout'; -import { getPrePackagedRuleStatus, redirectToDetections } from './helpers'; +import { getPrePackagedRuleStatus, redirectToDetections, userHasNoPermissions } from './helpers'; import * as i18n from './translations'; type Func = (refreshPrePackagedRule?: boolean) => void; @@ -38,7 +38,6 @@ const RulesPageComponent: React.FC = () => { hasEncryptionKey, canUserCRUD, hasIndexWrite, - hasManageApiKey, } = useUserInfo(); const { createPrePackagedRules, @@ -52,7 +51,6 @@ const RulesPageComponent: React.FC = () => { } = usePrePackagedRules({ canUserCRUD, hasIndexWrite, - hasManageApiKey, isSignalIndexExists, isAuthenticated, hasEncryptionKey, @@ -63,9 +61,6 @@ const RulesPageComponent: React.FC = () => { rulesNotUpdated ); - const userHasNoPermissions = - canUserCRUD != null && hasManageApiKey != null ? !canUserCRUD || !hasManageApiKey : false; - const handleRefreshRules = useCallback(async () => { if (refreshRulesData.current != null) { refreshRulesData.current(true); @@ -95,7 +90,7 @@ const RulesPageComponent: React.FC = () => { return ( <> - {userHasNoPermissions && } + {userHasNoPermissions(canUserCRUD) && } setShowImportModal(false)} @@ -125,7 +120,7 @@ const RulesPageComponent: React.FC = () => { {i18n.LOAD_PREPACKAGED_RULES} @@ -138,7 +133,7 @@ const RulesPageComponent: React.FC = () => { data-test-subj="reloadPrebuiltRulesBtn" iconType="plusInCircle" isLoading={loadingCreatePrePackagedRules} - isDisabled={userHasNoPermissions || loading} + isDisabled={userHasNoPermissions(canUserCRUD) || loading} onClick={handleCreatePrePackagedRules} > {i18n.RELOAD_MISSING_PREPACKAGED_RULES(rulesNotInstalled ?? 
0)} @@ -148,7 +143,7 @@ const RulesPageComponent: React.FC = () => { { setShowImportModal(true); }} @@ -162,7 +157,7 @@ const RulesPageComponent: React.FC = () => { fill href={getCreateRuleUrl()} iconType="plusInCircle" - isDisabled={userHasNoPermissions || loading} + isDisabled={userHasNoPermissions(canUserCRUD) || loading} > {i18n.ADD_NEW_RULE} @@ -180,7 +175,7 @@ const RulesPageComponent: React.FC = () => { createPrePackagedRules={createPrePackagedRules} loading={loading || prePackagedRuleLoading} loadingCreatePrePackagedRules={loadingCreatePrePackagedRules} - hasNoPermissions={userHasNoPermissions} + hasNoPermissions={userHasNoPermissions(canUserCRUD)} refetchPrePackagedRulesStatus={handleRefetchPrePackagedRulesStatus} rulesCustomInstalled={rulesCustomInstalled} rulesInstalled={rulesInstalled} diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts index 8e4b5ce3c99242..bdbb6ff7d1052b 100644 --- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts +++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.test.ts @@ -28,25 +28,23 @@ describe('buildRuleMessageFactory', () => { expect(message).toEqual(expect.stringContaining('signals index: "index"')); }); - it('joins message parts with newlines', () => { + it('joins message parts with spaces', () => { const buildMessage = buildRuleMessageFactory(factoryParams); const message = buildMessage('my message'); - const messageParts = message.split('\n'); - expect(messageParts).toContain('my message'); - expect(messageParts).toContain('name: "name"'); - expect(messageParts).toContain('id: "id"'); - expect(messageParts).toContain('rule id: "ruleId"'); - expect(messageParts).toContain('signals index: "index"'); + expect(message).toEqual(expect.stringContaining('my message ')); + expect(message).toEqual(expect.stringContaining(' name: "name" ')); + expect(message).toEqual(expect.stringContaining(' id: "id" ')); + expect(message).toEqual(expect.stringContaining(' rule id: "ruleId" ')); + expect(message).toEqual(expect.stringContaining(' signals index: "index"')); }); - it('joins multiple arguments with newlines', () => { + it('joins multiple arguments with spaces', () => { const buildMessage = buildRuleMessageFactory(factoryParams); const message = buildMessage('my message', 'here is more'); - const messageParts = message.split('\n'); - expect(messageParts).toContain('my message'); - expect(messageParts).toContain('here is more'); + expect(message).toEqual(expect.stringContaining('my message ')); + expect(message).toEqual(expect.stringContaining(' here is more')); }); it('defaults the rule ID if not provided ', () => { diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts index d5f9d332bbcddb..cc97a1f8a9f0b2 100644 --- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts +++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/rule_messages.ts @@ -24,4 +24,4 @@ export const buildRuleMessageFactory = ({ `id: "${id}"`, `rule id: "${ruleId ?? 
'(unknown rule id)'}"`, `signals index: "${index}"`, - ].join('\n'); + ].join(' '); diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts index 91905722fbca31..246701e94c99a2 100644 --- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts +++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals/signal_rule_alert_type.ts @@ -55,6 +55,7 @@ export const signalRulesAlertType = ({ index, filters, language, + maxSignals, meta, machineLearningJobId, outputIndex, @@ -63,6 +64,14 @@ export const signalRulesAlertType = ({ to, type, } = params; + const searchAfterSize = Math.min(maxSignals, DEFAULT_SEARCH_AFTER_PAGE_SIZE); + let hasError: boolean = false; + let result: SearchAfterAndBulkCreateReturnType = { + success: false, + bulkCreateTimes: [], + searchAfterTimes: [], + lastLookBackDate: null, + }; const ruleStatusClient = ruleStatusSavedObjectsClientFactory(services.savedObjectsClient); const ruleStatusService = await ruleStatusServiceFactory({ alertId, @@ -104,17 +113,10 @@ export const signalRulesAlertType = ({ ); logger.warn(gapMessage); + hasError = true; await ruleStatusService.error(gapMessage, { gap: gapString }); } - const searchAfterSize = Math.min(params.maxSignals, DEFAULT_SEARCH_AFTER_PAGE_SIZE); - let result: SearchAfterAndBulkCreateReturnType = { - success: false, - bulkCreateTimes: [], - searchAfterTimes: [], - lastLookBackDate: null, - }; - try { if (isMlRule(type)) { if (ml == null) { @@ -126,7 +128,7 @@ export const signalRulesAlertType = ({ 'Machine learning rule is missing job id and/or anomaly threshold:', `job id: "${machineLearningJobId}"`, `anomaly threshold: "${anomalyThreshold}"`, - ].join('\n') + ].join(' ') ); } @@ -143,6 +145,7 @@ export const signalRulesAlertType = ({ `datafeed status: "${jobSummary?.datafeedState}"` ); logger.warn(errorMessage); + hasError = true; await ruleStatusService.error(errorMessage); } @@ -270,11 +273,13 @@ export const signalRulesAlertType = ({ } logger.debug(buildRuleMessage('[+] Signal Rule execution completed.')); - await ruleStatusService.success('succeeded', { - bulkCreateTimeDurations: result.bulkCreateTimes, - searchAfterTimeDurations: result.searchAfterTimes, - lastLookBackDate: result.lastLookBackDate?.toISOString(), - }); + if (!hasError) { + await ruleStatusService.success('succeeded', { + bulkCreateTimeDurations: result.bulkCreateTimes, + searchAfterTimeDurations: result.searchAfterTimes, + lastLookBackDate: result.lastLookBackDate?.toISOString(), + }); + } } else { const errorMessage = buildRuleMessage( 'Bulk Indexing of signals failed. Check logs for further details.' 
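A quick illustration of the rule-message change above (a minimal sketch, not code from this patch; the parameter shape { name, id, ruleId, index } is assumed from the test's expectations): with the parts joined by single spaces instead of newlines, every rule status/log message becomes a single line.

// Sketch only, not part of the diff: mirrors the `.join(' ')` behaviour of
// buildRuleMessageFactory in rule_messages.ts. The parameter shape is an
// assumption taken from the test above ({ name, id, ruleId, index }).
const buildRuleMessageFactorySketch = ({
  name,
  id,
  ruleId,
  index,
}: {
  name: string;
  id: string;
  ruleId: string | null;
  index: string;
}) => (...messages: string[]): string =>
  [
    ...messages,
    `name: "${name}"`,
    `id: "${id}"`,
    `rule id: "${ruleId ?? '(unknown rule id)'}"`,
    `signals index: "${index}"`,
  ].join(' ');

const buildMessage = buildRuleMessageFactorySketch({
  name: 'name',
  id: 'id',
  ruleId: 'ruleId',
  index: 'index',
});

// Produces one line:
// 'my message here is more name: "name" id: "id" rule id: "ruleId" signals index: "index"'
buildMessage('my message', 'here is more');
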
diff --git a/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts b/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts index 5596d0c70f5ea3..f69a715f9b2c99 100644 --- a/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts +++ b/x-pack/legacy/plugins/siem/server/lib/timeline/routes/utils/import_timelines.ts @@ -127,7 +127,7 @@ export const saveNotes = ( existingNoteIds?: string[], newNotes?: NoteResult[] ) => { - return ( + return Promise.all( newNotes?.map(note => { const newNote: SavedNote = { eventId: note.eventId, diff --git a/x-pack/legacy/plugins/spaces/index.ts b/x-pack/legacy/plugins/spaces/index.ts index 757c1eb557c543..8d44c170182551 100644 --- a/x-pack/legacy/plugins/spaces/index.ts +++ b/x-pack/legacy/plugins/spaces/index.ts @@ -12,9 +12,7 @@ import { SpacesServiceSetup } from '../../../plugins/spaces/server'; import { SpacesPluginSetup } from '../../../plugins/spaces/server'; // @ts-ignore import { AuditLogger } from '../../server/lib/audit_logger'; -import mappings from './mappings.json'; import { wrapError } from './server/lib/errors'; -import { migrateToKibana660 } from './server/lib/migrations'; // @ts-ignore import { watchStatusAndLicenseToInitialize } from '../../server/lib/watch_status_and_license_to_initialize'; import { initEnterSpaceView } from './server/routes/views'; @@ -39,18 +37,6 @@ export const spaces = (kibana: Record) => managementSections: [], apps: [], hacks: ['plugins/spaces/legacy'], - mappings, - migrations: { - space: { - '6.6.0': migrateToKibana660, - }, - }, - savedObjectSchemas: { - space: { - isNamespaceAgnostic: true, - hidden: true, - }, - }, home: [], injectDefaultVars(server: Server) { return { @@ -100,7 +86,6 @@ export const spaces = (kibana: Record) => const { registerLegacyAPI, createDefaultSpace } = spacesPlugin.__legacyCompat; registerLegacyAPI({ - savedObjects: server.savedObjects, auditLogger: { create: (pluginId: string) => new AuditLogger(server, pluginId, server.config(), server.plugins.xpack_main.info), diff --git a/x-pack/legacy/plugins/spaces/mappings.json b/x-pack/legacy/plugins/spaces/mappings.json deleted file mode 100644 index dc73dc28718854..00000000000000 --- a/x-pack/legacy/plugins/spaces/mappings.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "space": { - "properties": { - "name": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 2048 - } - } - }, - "description": { - "type": "text" - }, - "initials": { - "type": "keyword" - }, - "color": { - "type": "keyword" - }, - "disabledFeatures": { - "type": "keyword" - }, - "imageUrl": { - "type": "text", - "index": false - }, - "_reserved": { - "type": "boolean" - } - } - } -} diff --git a/x-pack/legacy/plugins/spaces/server/lib/migrations/migrate_6x.test.ts b/x-pack/legacy/plugins/spaces/server/lib/migrations/migrate_6x.test.ts deleted file mode 100644 index 964eb8137685f6..00000000000000 --- a/x-pack/legacy/plugins/spaces/server/lib/migrations/migrate_6x.test.ts +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { migrateToKibana660 } from './migrate_6x'; - -describe('migrateTo660', () => { - it('adds a "disabledFeatures" attribute initialized as an empty array', () => { - expect( - migrateToKibana660({ - id: 'space:foo', - attributes: {}, - }) - ).toEqual({ - id: 'space:foo', - attributes: { - disabledFeatures: [], - }, - }); - }); - - it('does not initialize "disabledFeatures" if the property already exists', () => { - // This scenario shouldn't happen organically. Protecting against defects in the migration. - expect( - migrateToKibana660({ - id: 'space:foo', - attributes: { - disabledFeatures: ['foo', 'bar', 'baz'], - }, - }) - ).toEqual({ - id: 'space:foo', - attributes: { - disabledFeatures: ['foo', 'bar', 'baz'], - }, - }); - }); -}); diff --git a/x-pack/legacy/plugins/spaces/server/lib/migrations/migrate_6x.ts b/x-pack/legacy/plugins/spaces/server/lib/migrations/migrate_6x.ts deleted file mode 100644 index 0c080a8dabb0a7..00000000000000 --- a/x-pack/legacy/plugins/spaces/server/lib/migrations/migrate_6x.ts +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -export function migrateToKibana660(doc: Record) { - if (!doc.attributes.hasOwnProperty('disabledFeatures')) { - doc.attributes.disabledFeatures = []; - } - return doc; -} diff --git a/x-pack/package.json b/x-pack/package.json index bbab1a96f52f43..24b23256bf18ea 100644 --- a/x-pack/package.json +++ b/x-pack/package.json @@ -315,6 +315,7 @@ "react-portal": "^3.2.0", "react-redux": "^7.1.3", "react-reverse-portal": "^1.0.4", + "react-router": "^5.1.2", "react-router-dom": "^5.1.2", "react-shortcuts": "^2.0.0", "react-sticky": "^6.0.3", diff --git a/x-pack/plugins/endpoint/common/generate_data.test.ts b/x-pack/plugins/endpoint/common/generate_data.test.ts index dfb906c7af6064..88e1c66ea3e821 100644 --- a/x-pack/plugins/endpoint/common/generate_data.test.ts +++ b/x-pack/plugins/endpoint/common/generate_data.test.ts @@ -86,7 +86,7 @@ describe('data generator', () => { let events: Event[]; beforeEach(() => { - events = generator.generateAlertEventAncestry(3); + events = generator.createAlertEventAncestry(3); }); it('with n-1 process events', () => { @@ -153,7 +153,7 @@ describe('data generator', () => { const timestamp = new Date().getTime(); const root = generator.generateEvent({ timestamp }); const generations = 2; - const events = [root, ...generator.generateDescendantsTree(root, generations)]; + const events = [root, ...generator.descendantsTreeGenerator(root, generations)]; const rootNode = buildResolverTree(events); const visitedEvents = countResolverEvents(rootNode, generations); expect(visitedEvents).toEqual(events.length); @@ -162,7 +162,7 @@ describe('data generator', () => { it('creates full resolver tree', () => { const alertAncestors = 3; const generations = 2; - const events = generator.generateFullResolverTree(alertAncestors, generations); + const events = [...generator.fullResolverTreeGenerator(alertAncestors, generations)]; const rootNode = buildResolverTree(events); const visitedEvents = countResolverEvents(rootNode, alertAncestors + generations); expect(visitedEvents).toEqual(events.length); diff --git a/x-pack/plugins/endpoint/common/generate_data.ts b/x-pack/plugins/endpoint/common/generate_data.ts index 430ba1d422b96e..0ec105129b7ac4 100644 --- 
a/x-pack/plugins/endpoint/common/generate_data.ts +++ b/x-pack/plugins/endpoint/common/generate_data.ts @@ -100,19 +100,30 @@ interface HostInfo { }; } +interface NodeState { + event: Event; + childrenCreated: number; + maxChildren: number; +} + export class EndpointDocGenerator { commonInfo: HostInfo; random: seedrandom.prng; - constructor(seed = Math.random().toString()) { - this.random = seedrandom(seed); + constructor(seed: string | seedrandom.prng = Math.random().toString()) { + if (typeof seed === 'string') { + this.random = seedrandom(seed); + } else { + this.random = seed; + } this.commonInfo = this.createHostData(); } - // This function will create new values for all the host fields, so documents from a different host can be created - // This provides a convenient way to make documents from multiple hosts that are all tied to a single seed value - public randomizeHostData() { - this.commonInfo = this.createHostData(); + /** + * Creates new random IP addresses for the host to simulate new DHCP assignment + */ + public updateHostData() { + this.commonInfo.host.ip = this.randomArray(3, () => this.randomIP()); } private createHostData(): HostInfo { @@ -139,6 +150,10 @@ export class EndpointDocGenerator { }; } + /** + * Creates a host metadata document + * @param ts - Timestamp to put in the event + */ public generateHostMetadata(ts = new Date().getTime()): HostMetadata { return { '@timestamp': ts, @@ -149,6 +164,12 @@ export class EndpointDocGenerator { }; } + /** + * Creates an alert from the simulated host represented by this EndpointDocGenerator + * @param ts - Timestamp to put in the event + * @param entityID - entityID of the originating process + * @param parentEntityID - optional entityID of the parent process, if it exists + */ public generateAlert( ts = new Date().getTime(), entityID = this.randomString(10), @@ -183,7 +204,7 @@ export class EndpointDocGenerator { trusted: false, subject_name: 'bad signer', }, - malware_classifier: { + malware_classification: { identifier: 'endpointpe', score: 1, threshold: 0.66, @@ -241,7 +262,7 @@ export class EndpointDocGenerator { sha1: 'ca85243c0af6a6471bdaa560685c51eefd6dbc0d', sha256: '8ad40c90a611d36eb8f9eb24fa04f7dbca713db383ff55a03aa0f382e92061a2', }, - malware_classifier: { + malware_classification: { identifier: 'Whitelisted', score: 0, threshold: 0, @@ -255,6 +276,10 @@ export class EndpointDocGenerator { }; } + /** + * Creates an event, customized by the options parameter + * @param options - Allows event field values to be specified + */ public generateEvent(options: EventOptions = {}): EndpointEvent { return { '@timestamp': options.timestamp ? options.timestamp : new Date().getTime(), @@ -277,17 +302,31 @@ export class EndpointDocGenerator { }; } - public generateFullResolverTree( + /** + * Generator function that creates the full set of events needed to render resolver. + * The number of nodes grows exponentially with the number of generations and children per node. + * Each node is logically a process, and will have 1 or more process events associated with it. 
+ * @param alertAncestors - number of ancestor generations to create relative to the alert + * @param childGenerations - number of child generations to create relative to the alert + * @param maxChildrenPerNode - maximum number of children for any given node in the tree + * @param relatedEventsPerNode - number of related events (file, registry, etc) to create for each process event in the tree + * @param percentNodesWithRelated - percent of nodes which should have related events + * @param percentChildrenTerminated - percent of nodes which will have process termination events + */ + public *fullResolverTreeGenerator( alertAncestors?: number, childGenerations?: number, maxChildrenPerNode?: number, relatedEventsPerNode?: number, percentNodesWithRelated?: number, percentChildrenTerminated?: number - ): Event[] { - const ancestry = this.generateAlertEventAncestry(alertAncestors); + ) { + const ancestry = this.createAlertEventAncestry(alertAncestors); + for (let i = 0; i < ancestry.length; i++) { + yield ancestry[i]; + } // ancestry will always have at least 2 elements, and the second to last element will be the process associated with the alert - const descendants = this.generateDescendantsTree( + yield* this.descendantsTreeGenerator( ancestry[ancestry.length - 2], childGenerations, maxChildrenPerNode, @@ -295,10 +334,13 @@ export class EndpointDocGenerator { percentNodesWithRelated, percentChildrenTerminated ); - return ancestry.concat(descendants); } - public generateAlertEventAncestry(alertAncestors = 3): Event[] { + /** + * Creates an alert event and associated process ancestry. The alert event will always be the last event in the return array. + * @param alertAncestors - number of ancestor generations to create + */ + public createAlertEventAncestry(alertAncestors = 3): Event[] { const events = []; const startDate = new Date().getTime(); const root = this.generateEvent({ timestamp: startDate + 1000 }); @@ -321,75 +363,93 @@ export class EndpointDocGenerator { return events; } - public generateDescendantsTree( + /** + * Creates the child generations of a process. The number of returned events grows exponentially with generations and maxChildrenPerNode. + * @param root - The process event to use as the root node of the tree + * @param generations - number of child generations to create. The root node is not counted as a generation. 
+ * @param maxChildrenPerNode - maximum number of children for any given node in the tree + * @param relatedEventsPerNode - number of related events (file, registry, etc) to create for each process event in the tree + * @param percentNodesWithRelated - percent of nodes which should have related events + * @param percentChildrenTerminated - percent of nodes which will have process termination events + */ + public *descendantsTreeGenerator( root: Event, generations = 2, maxChildrenPerNode = 2, relatedEventsPerNode = 3, percentNodesWithRelated = 100, percentChildrenTerminated = 100 - ): Event[] { - let events: Event[] = []; - let parents = [root]; + ) { + const rootState: NodeState = { + event: root, + childrenCreated: 0, + maxChildren: this.randomN(maxChildrenPerNode + 1), + }; + const lineage: NodeState[] = [rootState]; let timestamp = root['@timestamp']; - for (let i = 0; i < generations; i++) { - const newParents: EndpointEvent[] = []; - parents.forEach(element => { - const numChildren = this.randomN(maxChildrenPerNode + 1); - for (let j = 0; j < numChildren; j++) { - timestamp = timestamp + 1000; - const child = this.generateEvent({ - timestamp, - parentEntityID: element.process.entity_id, - }); - newParents.push(child); - } + while (lineage.length > 0) { + const currentState = lineage[lineage.length - 1]; + // If we get to a state node and it has made all the children, move back up a level + if ( + currentState.childrenCreated === currentState.maxChildren || + lineage.length === generations + 1 + ) { + lineage.pop(); + continue; + } + // Otherwise, add a child and any nodes associated with it + currentState.childrenCreated++; + timestamp = timestamp + 1000; + const child = this.generateEvent({ + timestamp, + parentEntityID: currentState.event.process.entity_id, }); - events = events.concat(newParents); - parents = newParents; - } - const terminationEvents: EndpointEvent[] = []; - let relatedEvents: EndpointEvent[] = []; - events.forEach(element => { + lineage.push({ + event: child, + childrenCreated: 0, + maxChildren: this.randomN(maxChildrenPerNode + 1), + }); + yield child; + let processDuration: number = 6 * 3600; if (this.randomN(100) < percentChildrenTerminated) { - timestamp = timestamp + 1000; - terminationEvents.push( - this.generateEvent({ - timestamp, - entityID: element.process.entity_id, - parentEntityID: element.process.parent?.entity_id, - eventCategory: 'process', - eventType: 'end', - }) - ); + processDuration = this.randomN(1000000); // This lets termination events be up to 1 million seconds after the creation event (~11 days) + yield this.generateEvent({ + timestamp: timestamp + processDuration * 1000, + entityID: child.process.entity_id, + parentEntityID: child.process.parent?.entity_id, + eventCategory: 'process', + eventType: 'end', + }); } if (this.randomN(100) < percentNodesWithRelated) { - relatedEvents = relatedEvents.concat( - this.generateRelatedEvents(element, relatedEventsPerNode) - ); + yield* this.relatedEventsGenerator(child, relatedEventsPerNode, processDuration); } - }); - events = events.concat(terminationEvents); - events = events.concat(relatedEvents); - return events; + } } - public generateRelatedEvents(node: Event, numRelatedEvents = 10): EndpointEvent[] { - const ts = node['@timestamp'] + 1000; - const relatedEvents: EndpointEvent[] = []; + /** + * Creates related events for a process event + * @param node - process event to relate events to by entityID + * @param numRelatedEvents - number of related events to generate + * @param processDuration 
- maximum number of seconds after process event that related event timestamp can be + */ + public *relatedEventsGenerator( + node: Event, + numRelatedEvents = 10, + processDuration: number = 6 * 3600 + ) { for (let i = 0; i < numRelatedEvents; i++) { const eventInfo = this.randomChoice(OTHER_EVENT_CATEGORIES); - relatedEvents.push( - this.generateEvent({ - timestamp: ts, - entityID: node.process.entity_id, - parentEntityID: node.process.parent?.entity_id, - eventCategory: eventInfo.category, - eventType: eventInfo.creationType, - }) - ); + + const ts = node['@timestamp'] + this.randomN(processDuration) * 1000; + yield this.generateEvent({ + timestamp: ts, + entityID: node.process.entity_id, + parentEntityID: node.process.parent?.entity_id, + eventCategory: eventInfo.category, + eventType: eventInfo.creationType, + }); } - return relatedEvents; } private randomN(n: number): number { diff --git a/x-pack/plugins/endpoint/common/types.ts b/x-pack/plugins/endpoint/common/types.ts index 565f47e7a0d6fb..e8e1281a889253 100644 --- a/x-pack/plugins/endpoint/common/types.ts +++ b/x-pack/plugins/endpoint/common/types.ts @@ -113,7 +113,7 @@ export interface HashFields { sha1: string; sha256: string; } -export interface MalwareClassifierFields { +export interface MalwareClassificationFields { identifier: string; score: number; threshold: number; @@ -142,7 +142,7 @@ export interface DllFields { }; compile_time: number; hash: HashFields; - malware_classifier: MalwareClassifierFields; + malware_classification: MalwareClassificationFields; mapped_address: number; mapped_size: number; path: string; @@ -194,7 +194,7 @@ export type AlertEvent = Immutable<{ executable: string; sid?: string; start: number; - malware_classifier?: MalwareClassifierFields; + malware_classification?: MalwareClassificationFields; token: { domain: string; type: string; @@ -224,7 +224,7 @@ export type AlertEvent = Immutable<{ trusted: boolean; subject_name: string; }; - malware_classifier: MalwareClassifierFields; + malware_classification: MalwareClassificationFields; temp_file_path: string; }; host: HostFields; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/models/policy.ts b/x-pack/plugins/endpoint/public/applications/endpoint/models/policy.ts index e1ac9defc858eb..9ac53f9be609ff 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/models/policy.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/models/policy.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { PolicyConfig } from '../types'; +import { PolicyConfig, ProtectionModes } from '../types'; /** * Generate a new Policy model. 
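A usage sketch for the generate_data.ts refactor shown earlier (illustrative only; the relative import path and the seed string are assumptions, not part of this patch): because the tree builders are now generator functions rather than array-returning methods, callers can stream events one at a time or spread them into an array, as the updated tests do.

// Sketch only, not part of the diff: consuming the generator-based API.
// './generate_data' and 'example-seed' are assumed for illustration.
import { EndpointDocGenerator } from './generate_data';

const generator = new EndpointDocGenerator('example-seed'); // same seed => same documents

// Stream the full resolver tree (3 ancestor generations, 2 child generations)
// without materializing every event up front.
let totalEvents = 0;
for (const resolverEvent of generator.fullResolverTreeGenerator(3, 2)) {
  totalEvents += 1; // each yielded `resolverEvent` is one endpoint event document
}

// Or collect everything into an array, as the updated tests do.
const allEvents = [...generator.fullResolverTreeGenerator(3, 2)];
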
@@ -19,7 +19,7 @@ export const generatePolicy = (): PolicyConfig => { network: true, }, malware: { - mode: 'prevent', + mode: ProtectionModes.prevent, }, logging: { stdout: 'debug', @@ -44,7 +44,7 @@ export const generatePolicy = (): PolicyConfig => { process: true, }, malware: { - mode: 'detect', + mode: ProtectionModes.detect, }, logging: { stdout: 'debug', diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/types.ts b/x-pack/plugins/endpoint/public/applications/endpoint/types.ts index 4215edb4d68108..d4f6d2457254e2 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/types.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/types.ts @@ -123,10 +123,8 @@ export interface PolicyConfig { process: boolean; network: boolean; }; - /** malware mode can be detect, prevent or prevent and notify user */ - malware: { - mode: string; - }; + /** malware mode can be off, detect, prevent or prevent and notify user */ + malware: MalwareFields; logging: { stdout: string; file: string; @@ -137,9 +135,7 @@ export interface PolicyConfig { events: { process: boolean; }; - malware: { - mode: string; - }; + malware: MalwareFields; logging: { stdout: string; file: string; @@ -209,6 +205,44 @@ export enum EventingFields { network = 'network', } +/** + * Returns the keys of an object whose values meet a criteria. + * Ex) interface largeNestedObject = { + * a: { + * food: Foods; + * toiletPaper: true; + * }; + * b: { + * food: Foods; + * streamingServices: Streams; + * }; + * c: {}; + * } + * + * type hasFoods = KeysByValueCriteria; + * The above type will be: [a, b] only, and will not include c. + * + */ +export type KeysByValueCriteria = { + [K in keyof O]: O[K] extends Criteria ? K : never; +}[keyof O]; + +/** Returns an array of the policy OSes that have a malware protection field */ + +export type MalwareProtectionOSes = KeysByValueCriteria; +/** Policy: Malware protection fields */ +export interface MalwareFields { + mode: ProtectionModes; +} + +/** Policy protection mode options */ +export enum ProtectionModes { + detect = 'detect', + prevent = 'prevent', + preventNotify = 'preventNotify', + off = 'off', +} + export interface GlobalState { readonly hostList: HostListState; readonly alertList: AlertListState; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx index 0183e9663bb444..79cb61693056cf 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/alerts/details/metadata/general_accordion.tsx @@ -40,7 +40,7 @@ export const GeneralAccordion = memo(({ alertData }: { alertData: Immutable { } else if (columnId === 'archived') { return null; } else if (columnId === 'malware_score') { - return row.file.malware_classifier.score; + return row.file.malware_classification.score; } return null; }; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_details.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_details.tsx index f2c79155f3c23f..2dba301bf45376 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_details.tsx +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_details.tsx @@ -35,6 +35,7 @@ import { AppAction } from '../../types'; import { useKibana } from 
'../../../../../../../../src/plugins/kibana_react/public'; import { AgentsSummary } from './agents_summary'; import { VerticalDivider } from './vertical_divider'; +import { MalwareProtections } from './policy_forms/protections/malware'; export const PolicyDetails = React.memo(() => { const dispatch = useDispatch<(action: AppAction) => void>(); @@ -181,6 +182,17 @@ export const PolicyDetails = React.memo(() => { headerLeft={headerLeftContent} headerRight={headerRightContent} > + +

+ +

+
+ + +

= React.memo(({ type, supportedOss, children, id, selectedEventing, totalEventing }) => { + /** Takes a react component to be put on the right corner of the card */ + rightCorner: React.ReactNode; +}> = React.memo(({ type, supportedOss, children, id, rightCorner }) => { const typeTitle = () => { return ( @@ -63,32 +62,11 @@ export const ConfigForm: React.FC<{ {supportedOss.join(', ')} - - - - - + {rightCorner} ); }; - const events = () => { - return ( - -
- -
-
- ); - }; - return ( - {events()} - {children} } diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/eventing/windows.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/eventing/windows.tsx index e92e22fc97fe6e..7bec2c4c742d2b 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/eventing/windows.tsx +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/eventing/windows.tsx @@ -6,6 +6,8 @@ import React, { useMemo } from 'react'; import { i18n } from '@kbn/i18n'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { EuiTitle, EuiText, EuiSpacer } from '@elastic/eui'; import { EventingCheckbox } from './checkbox'; import { OS, EventingFields } from '../../../../types'; import { usePolicyDetailsSelector } from '../../policy_hooks'; @@ -16,6 +18,9 @@ import { import { ConfigForm } from '../config_form'; export const WindowsEventing = React.memo(() => { + const selected = usePolicyDetailsSelector(selectedWindowsEventing); + const total = usePolicyDetailsSelector(totalWindowsEventing); + const checkboxes = useMemo( () => [ { @@ -37,21 +42,43 @@ export const WindowsEventing = React.memo(() => { ); const renderCheckboxes = () => { - return checkboxes.map((item, index) => { - return ( - - ); - }); + return ( + <> + +
+ +
+
+ + {checkboxes.map((item, index) => { + return ( + + ); + })} + + ); }; - const selected = usePolicyDetailsSelector(selectedWindowsEventing); - const total = usePolicyDetailsSelector(totalWindowsEventing); + const collectionsEnabled = () => { + return ( + + + + ); + }; return ( { i18n.translate('xpack.endpoint.policy.details.windows', { defaultMessage: 'Windows' }), ]} id="windowsEventingForm" + rightCorner={collectionsEnabled()} children={renderCheckboxes()} - selectedEventing={selected} - totalEventing={total} /> ); }); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/protections/malware.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/protections/malware.tsx new file mode 100644 index 00000000000000..66b22178607b94 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_forms/protections/malware.tsx @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useCallback, useMemo } from 'react'; +import { useDispatch } from 'react-redux'; +import styled from 'styled-components'; +import { EuiRadio, EuiSwitch, EuiTitle, EuiSpacer } from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { htmlIdGenerator } from '@elastic/eui'; +import { Immutable } from '../../../../../../../common/types'; +import { OS, ProtectionModes, MalwareProtectionOSes } from '../../../../types'; +import { ConfigForm } from '../config_form'; +import { policyConfig } from '../../../../store/policy_details/selectors'; +import { usePolicyDetailsSelector } from '../../policy_hooks'; +import { clone } from '../../../../models/policy_details_config'; + +const ProtectionRadioGroup = styled.div` + display: flex; + .policyDetailsProtectionRadio { + margin-right: ${props => props.theme.eui.euiSizeXXL}; + } +`; + +const OSes: Immutable = [OS.windows, OS.mac]; +const protection = 'malware'; + +const ProtectionRadio = React.memo(({ id, label }: { id: ProtectionModes; label: string }) => { + const policyDetailsConfig = usePolicyDetailsSelector(policyConfig); + const dispatch = useDispatch(); + // currently just taking windows.malware, but both windows.malware and mac.malware should be the same value + const selected = policyDetailsConfig && policyDetailsConfig.windows.malware.mode; + + const handleRadioChange = useCallback(() => { + if (policyDetailsConfig) { + const newPayload = clone(policyDetailsConfig); + for (const os of OSes) { + newPayload[os][protection].mode = id; + } + dispatch({ + type: 'userChangedPolicyConfig', + payload: { policyConfig: newPayload }, + }); + } + }, [dispatch, id, policyDetailsConfig]); + + /** + * Passing an arbitrary id because EuiRadio + * requires an id if label is passed + */ + + return ( + htmlIdGenerator()(), [])} + checked={selected === id} + onChange={handleRadioChange} + disabled={selected === ProtectionModes.off} + /> + ); +}); + +/** The Malware Protections form for policy details + * which will configure for all relevant OSes. 
+ */ +export const MalwareProtections = React.memo(() => { + const policyDetailsConfig = usePolicyDetailsSelector(policyConfig); + const dispatch = useDispatch(); + // currently just taking windows.malware, but both windows.malware and mac.malware should be the same value + const selected = policyDetailsConfig && policyDetailsConfig.windows.malware.mode; + + const radios: Array<{ + id: ProtectionModes; + label: string; + protection: 'malware'; + }> = useMemo(() => { + return [ + { + id: ProtectionModes.detect, + label: i18n.translate('xpack.endpoint.policy.details.detect', { defaultMessage: 'Detect' }), + protection: 'malware', + }, + { + id: ProtectionModes.prevent, + label: i18n.translate('xpack.endpoint.policy.details.prevent', { + defaultMessage: 'Prevent', + }), + protection: 'malware', + }, + { + id: ProtectionModes.preventNotify, + label: i18n.translate('xpack.endpoint.policy.details.preventAndNotify', { + defaultMessage: 'Prevent and notify user', + }), + protection: 'malware', + }, + ]; + }, []); + + const handleSwitchChange = useCallback( + event => { + if (policyDetailsConfig) { + const newPayload = clone(policyDetailsConfig); + if (event.target.checked === false) { + for (const os of OSes) { + newPayload[os][protection].mode = ProtectionModes.off; + } + } else { + for (const os of OSes) { + newPayload[os][protection].mode = ProtectionModes.prevent; + } + } + dispatch({ + type: 'userChangedPolicyConfig', + payload: { policyConfig: newPayload }, + }); + } + }, + [dispatch, policyDetailsConfig] + ); + + const RadioButtons = () => { + return ( + <> + +
+ +
+
+ + + {radios.map(radio => { + return ( + + ); + })} + + + ); + }; + + const ProtectionSwitch = () => { + return ( + + ); + }; + + return ( + + ); +}); diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts index 0860c9c62aca47..a26f43e1f8cc08 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/actions.ts @@ -45,7 +45,11 @@ interface AppRequestedResolverData { } /** - * When the user switches the active descendent of the Resolver. + * When the user switches the "active descendant" of the Resolver. + * The "active descendant" (from the point of view of the parent element) + * corresponds to the "current" child element. "active" or "current" here meaning + * the element that is focused on by the user's interactions with the UI, but + * not necessarily "selected" (see UserSelectedResolverNode below) */ interface UserFocusedOnResolverNode { readonly type: 'userFocusedOnResolverNode'; @@ -57,10 +61,27 @@ interface UserFocusedOnResolverNode { }; } +/** + * When the user "selects" a node in the Resolver + * "Selected" refers to the state of being the element that the + * user most recently "picked" (by e.g. pressing a button corresponding + * to the element in a list) as opposed to "active" or "current" (see UserFocusedOnResolverNode above). + */ +interface UserSelectedResolverNode { + readonly type: 'userSelectedResolverNode'; + readonly payload: { + /** + * Used to identify the process node that the user selected + */ + readonly nodeId: string; + }; +} + export type ResolverAction = | CameraAction | DataAction | UserBroughtProcessIntoView | UserChangedSelectedEvent | AppRequestedResolverData - | UserFocusedOnResolverNode; + | UserFocusedOnResolverNode + | UserSelectedResolverNode; diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts index 1c66a998a4c228..82206d77f83490 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/reducer.ts @@ -4,18 +4,44 @@ * you may not use this file except in compliance with the Elastic License. */ import { Reducer, combineReducers } from 'redux'; +import { htmlIdGenerator } from '@elastic/eui'; import { animateProcessIntoView } from './methods'; import { cameraReducer } from './camera/reducer'; import { dataReducer } from './data/reducer'; import { ResolverState, ResolverAction, ResolverUIState } from '../types'; +import { uniquePidForProcess } from '../models/process_event'; + +/** + * Despite the name "generator", this function is entirely determinant + * (i.e. 
it will return the same html id given the same prefix 'resolverNode' + * and nodeId) + */ +const resolverNodeIdGenerator = htmlIdGenerator('resolverNode'); const uiReducer: Reducer = ( - uiState = { activeDescendentId: null }, + uiState = { activeDescendantId: null, selectedDescendantId: null }, action ) => { if (action.type === 'userFocusedOnResolverNode') { return { - activeDescendentId: action.payload.nodeId, + ...uiState, + activeDescendantId: action.payload.nodeId, + }; + } else if (action.type === 'userSelectedResolverNode') { + return { + ...uiState, + selectedDescendantId: action.payload.nodeId, + }; + } else if (action.type === 'userBroughtProcessIntoView') { + /** + * This action has a process payload (instead of a processId), so we use + * `uniquePidForProcess` and `resolverNodeIdGenerator` to resolve the deterministic + * html id of the node being brought into view. + */ + const processNodeId = resolverNodeIdGenerator(uniquePidForProcess(action.payload.process)); + return { + ...uiState, + activeDescendantId: processNodeId, }; } else { return uiState; diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts index 37482916496e75..e8ae3d08e5cb61 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/selectors.ts @@ -6,6 +6,7 @@ import * as cameraSelectors from './camera/selectors'; import * as dataSelectors from './data/selectors'; +import * as uiSelectors from './ui/selectors'; import { ResolverState } from '../types'; /** @@ -59,6 +60,22 @@ export const processAdjacencies = composeSelectors( dataSelectors.processAdjacencies ); +/** + * Returns the id of the "current" tree node (fake-focused) + */ +export const uiActiveDescendantId = composeSelectors( + uiStateSelector, + uiSelectors.activeDescendantId +); + +/** + * Returns the id of the "selected" tree node (the node that is currently "pressed" and possibly controlling other popups / components) + */ +export const uiSelectedDescendantId = composeSelectors( + uiStateSelector, + uiSelectors.selectedDescendantId +); + /** * Returns the camera state from within ResolverState */ @@ -73,6 +90,13 @@ function dataStateSelector(state: ResolverState) { return state.data; } +/** + * Returns the ui state from within ResolverState + */ +function uiStateSelector(state: ResolverState) { + return state.ui; +} + /** * Whether or not the resolver is pending fetching data */ diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/store/ui/selectors.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/store/ui/selectors.ts new file mode 100644 index 00000000000000..196e834c406b31 --- /dev/null +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/store/ui/selectors.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ + +import { createSelector } from 'reselect'; +import { ResolverUIState } from '../../types'; + +/** + * id of the "current" tree node (fake-focused) + */ +export const activeDescendantId = createSelector( + (uiState: ResolverUIState) => uiState, + /* eslint-disable no-shadow */ + ({ activeDescendantId }) => { + return activeDescendantId; + } +); + +/** + * id of the currently "selected" tree node + */ +export const selectedDescendantId = createSelector( + (uiState: ResolverUIState) => uiState, + /* eslint-disable no-shadow */ + ({ selectedDescendantId }) => { + return selectedDescendantId; + } +); diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts b/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts index 674553aba09372..d370bda0d18424 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/types.ts @@ -37,7 +37,11 @@ export interface ResolverUIState { /** * The ID attribute of the resolver's aria-activedescendent. */ - readonly activeDescendentId: string | null; + readonly activeDescendantId: string | null; + /** + * The ID attribute of the resolver's currently selected descendant. + */ + readonly selectedDescendantId: string | null; } /** diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx b/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx index 911cda1be65170..8ee9bfafc630e6 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/view/defs.tsx @@ -193,6 +193,7 @@ export const SymbolIds = { runningTriggerCube: idGenerator('runningTriggerCube'), terminatedProcessCube: idGenerator('terminatedCube'), terminatedTriggerCube: idGenerator('terminatedTriggerCube'), + processCubeActiveBacking: idGenerator('activeBacking'), }; /** @@ -393,6 +394,15 @@ const SymbolsAndShapes = memo(() => ( /> + + resolver active backing + + )); diff --git a/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx b/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx index 58ce9b963de5d8..36155ece57a9c4 100644 --- a/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx +++ b/x-pack/plugins/endpoint/public/embeddables/resolver/view/index.tsx @@ -59,6 +59,7 @@ export const Resolver = styled( const { projectionMatrix, ref, onMouseDown } = useCamera(); const isLoading = useSelector(selectors.isLoading); + const activeDescendantId = useSelector(selectors.uiActiveDescendantId); useLayoutEffect(() => { dispatch({ @@ -66,6 +67,7 @@ export const Resolver = styled( payload: { selectedEvent }, }); }, [dispatch, selectedEvent]); + return (
{isLoading ? ( @@ -79,6 +81,7 @@ export const Resolver = styled( ref={ref} role="tree" tabIndex={0} + aria-activedescendant={activeDescendantId || undefined} > {edgeLineSegments.map(([startPosition, endPosition], index) => ( ({ left: `${left}px`, @@ -143,6 +148,9 @@ export const ProcessEventDot = styled( const labelId = useMemo(() => resolverNodeIdGenerator(), [resolverNodeIdGenerator]); const descriptionId = useMemo(() => resolverNodeIdGenerator(), [resolverNodeIdGenerator]); + const isActiveDescendant = nodeId === activeDescendantId; + const isSelectedDescendant = nodeId === selectedDescendantId; + const dispatch = useResolverDispatch(); const handleFocus = useCallback( @@ -153,16 +161,24 @@ export const ProcessEventDot = styled( nodeId, }, }); - focusEvent.currentTarget.setAttribute('aria-current', 'true'); }, [dispatch, nodeId] ); - const handleClick = useCallback(() => { - if (animationTarget.current !== null) { - animationTarget.current.beginElement(); - } - }, [animationTarget]); + const handleClick = useCallback( + (clickEvent: React.MouseEvent) => { + if (animationTarget.current !== null) { + (animationTarget.current as any).beginElement(); + } + dispatch({ + type: 'userSelectedResolverNode', + payload: { + nodeId, + }, + }); + }, + [animationTarget, dispatch, nodeId] + ); return ( @@ -179,6 +195,8 @@ export const ProcessEventDot = styled( aria-labelledby={labelId} aria-describedby={descriptionId} aria-haspopup={'true'} + aria-current={isActiveDescendant ? 'true' : undefined} + aria-selected={isSelectedDescendant ? 'true' : undefined} style={nodeViewportStyle} id={nodeId} onClick={handleClick} @@ -186,6 +204,15 @@ export const ProcessEventDot = styled( tabIndex={-1} > + + = { diff --git a/x-pack/plugins/endpoint/scripts/mapping.json b/x-pack/plugins/endpoint/scripts/mapping.json index 34c039d6435171..5878e01b52a47d 100644 --- a/x-pack/plugins/endpoint/scripts/mapping.json +++ b/x-pack/plugins/endpoint/scripts/mapping.json @@ -90,7 +90,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -452,7 +452,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -849,7 +849,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -1494,7 +1494,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -1687,7 +1687,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { diff --git a/x-pack/plugins/endpoint/scripts/resolver_generator.ts b/x-pack/plugins/endpoint/scripts/resolver_generator.ts index 3d11ccaad005d4..aebf92eff6cb81 100644 --- a/x-pack/plugins/endpoint/scripts/resolver_generator.ts +++ b/x-pack/plugins/endpoint/scripts/resolver_generator.ts @@ -4,9 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
*/ import * as yargs from 'yargs'; +import seedrandom from 'seedrandom'; import { Client, ClientOptions } from '@elastic/elasticsearch'; import { ResponseError } from '@elastic/elasticsearch/lib/errors'; -import { EndpointDocGenerator } from '../common/generate_data'; +import { EndpointDocGenerator, Event } from '../common/generate_data'; import { default as mapping } from './mapping.json'; main(); @@ -137,14 +138,24 @@ async function main() { // eslint-disable-next-line no-console console.log('No seed supplied, using random seed: ' + seed); } - const generator = new EndpointDocGenerator(seed); + const random = seedrandom(seed); for (let i = 0; i < argv.numHosts; i++) { - await client.index({ - index: argv.metadataIndex, - body: generator.generateHostMetadata(), - }); + const generator = new EndpointDocGenerator(random); + const timeBetweenDocs = 6 * 3600 * 1000; // 6 hours between metadata documents + const numMetadataDocs = 5; + const timestamp = new Date().getTime(); + for (let j = 0; j < numMetadataDocs; j++) { + generator.updateHostData(); + await client.index({ + index: argv.metadataIndex, + body: generator.generateHostMetadata( + timestamp - timeBetweenDocs * (numMetadataDocs - j - 1) + ), + }); + } + for (let j = 0; j < argv.alertsPerHost; j++) { - const resolverDocs = generator.generateFullResolverTree( + const resolverDocGenerator = generator.fullResolverTreeGenerator( argv.ancestors, argv.generations, argv.children, @@ -152,15 +163,23 @@ async function main() { argv.percentWithRelated, argv.percentTerminated ); - const body = resolverDocs.reduce( - (array: Array>, doc) => ( - array.push({ index: { _index: argv.eventIndex } }, doc), array - ), - [] - ); - - await client.bulk({ body }); + let result = resolverDocGenerator.next(); + while (!result.done) { + let k = 0; + const resolverDocs: Event[] = []; + while (k < 1000 && !result.done) { + resolverDocs.push(result.value); + result = resolverDocGenerator.next(); + k++; + } + const body = resolverDocs.reduce( + (array: Array>, doc) => ( + array.push({ index: { _index: argv.eventIndex } }, doc), array + ), + [] + ); + await client.bulk({ body }); + } } - generator.randomizeHostData(); } } diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts index 663017e2e47afd..cc4c17c5c63a37 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/index.ts @@ -63,6 +63,10 @@ export * from './max_shingle_size_parameter'; export * from './relations_parameter'; +export * from './other_type_name_parameter'; + +export * from './other_type_json_parameter'; + export const PARAMETER_SERIALIZERS = [relationsSerializer, dynamicSerializer]; export const PARAMETER_DESERIALIZERS = [relationsDeserializer, dynamicDeserializer]; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_json_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_json_parameter.tsx new file mode 100644 index 00000000000000..64e50f711a249a --- /dev/null +++ 
b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_json_parameter.tsx @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; +import { i18n } from '@kbn/i18n'; + +import { + UseField, + JsonEditorField, + ValidationFuncArg, + fieldValidators, + FieldConfig, +} from '../../../shared_imports'; + +const { isJsonField } = fieldValidators; + +/** + * This is a special component that does not have an explicit entry in {@link PARAMETERS_DEFINITION}. + * + * We use it to store custom defined parameters in a field called "otherTypeJson". + */ + +const fieldConfig: FieldConfig = { + label: i18n.translate('xpack.idxMgmt.mappingsEditor.otherTypeJsonFieldLabel', { + defaultMessage: 'Type Parameters JSON', + }), + defaultValue: {}, + validations: [ + { + validator: isJsonField( + i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeJsonInvalidJSONErrorMessage', + { + defaultMessage: 'Invalid JSON.', + } + ) + ), + }, + { + validator: ({ value }: ValidationFuncArg) => { + const json = JSON.parse(value); + if (Array.isArray(json)) { + return { + message: i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeJsonArrayNotAllowedErrorMessage', + { + defaultMessage: 'Arrays are not allowed.', + } + ), + }; + } + }, + }, + { + validator: ({ value }: ValidationFuncArg) => { + const json = JSON.parse(value); + if (json.type) { + return { + code: 'ERR_CUSTOM_TYPE_OVERRIDDEN', + message: i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeJsonTypeFieldErrorMessage', + { + defaultMessage: 'Cannot override the "type" field.', + } + ), + }; + } + }, + }, + ], + deserializer: (value: any) => { + if (value === '') { + return value; + } + return JSON.stringify(value, null, 2); + }, + serializer: (value: string) => { + try { + return JSON.parse(value); + } catch (error) { + // swallow error and return non-parsed value; + return value; + } + }, +}; + +export const OtherTypeJsonParameter = () => ( + +); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_name_parameter.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_name_parameter.tsx new file mode 100644 index 00000000000000..6004e484323a14 --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/other_type_name_parameter.tsx @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; + +import { i18n } from '@kbn/i18n'; +import { UseField, TextField, FieldConfig } from '../../../shared_imports'; +import { fieldValidators } from '../../../shared_imports'; + +const { emptyField } = fieldValidators; + +/** + * This is a special component that does not have an explicit entry in {@link PARAMETERS_DEFINITION}. 
+ * + * We use it to store the name of types unknown to the mappings editor in the "subType" path. + */ + +const fieldConfig: FieldConfig = { + label: i18n.translate('xpack.idxMgmt.mappingsEditor.otherTypeNameFieldLabel', { + defaultMessage: 'Type Name', + }), + defaultValue: '', + validations: [ + { + validator: emptyField( + i18n.translate( + 'xpack.idxMgmt.mappingsEditor.parameters.validations.otherTypeNameIsRequiredErrorMessage', + { + defaultMessage: 'The type name is required.', + } + ) + ), + }, + ], +}; + +export const OtherTypeNameParameter = () => ( + +); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx index 60b025ce644efe..b41f35b9838851 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/create_field.tsx @@ -5,6 +5,7 @@ */ import React, { useEffect, useCallback } from 'react'; import classNames from 'classnames'; +import * as _ from 'lodash'; import { i18n } from '@kbn/i18n'; @@ -31,7 +32,7 @@ import { filterTypesForNonRootFields, } from '../../../../lib'; import { Field, MainType, SubType, NormalizedFields, ComboBoxOption } from '../../../../types'; -import { NameParameter, TypeParameter } from '../../field_parameters'; +import { NameParameter, TypeParameter, OtherTypeNameParameter } from '../../field_parameters'; import { getParametersFormForType } from './required_parameters_forms'; const formWrapper = (props: any) =>
; @@ -155,9 +156,9 @@ export const CreateField = React.memo(function CreateFieldComponent({ }, [form, getSubTypeMeta] ); - const renderFormFields = useCallback( ({ type }) => { + const isOtherType = type === 'other'; const { subTypeOptions, subTypeLabel } = getSubTypeMeta(type); const docLink = documentationService.getTypeDocLink(type) as string; @@ -178,7 +179,13 @@ export const CreateField = React.memo(function CreateFieldComponent({ docLink={docLink} /> - {/* Field sub type (if any) */} + {/* Other type */} + {isOtherType && ( + + + + )} + {/* Field sub type (if any) - will never be the case if we have an "other" type */} {subTypeOptions && ( {/* Documentation link */} - - - {i18n.translate( - 'xpack.idxMgmt.mappingsEditor.editField.typeDocumentation', - { - defaultMessage: '{type} documentation', - values: { - type: subTypeDefinition - ? subTypeDefinition.label - : typeDefinition.label, - }, - } - )} - - + {linkDocumentation && ( + + + {i18n.translate( + 'xpack.idxMgmt.mappingsEditor.editField.typeDocumentation', + { + defaultMessage: '{type} documentation', + values: { + type: subTypeDefinition + ? subTypeDefinition.label + : typeDefinition.label, + }, + } + )} + + + )} {/* Field path */} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx index ddb808094428d9..75a083d64b6db5 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/edit_field/edit_field_header_form.tsx @@ -17,7 +17,7 @@ import { } from '../../../../lib'; import { TYPE_DEFINITION } from '../../../../constants'; -import { NameParameter, TypeParameter } from '../../field_parameters'; +import { NameParameter, TypeParameter, OtherTypeNameParameter } from '../../field_parameters'; import { FieldDescriptionSection } from './field_description_section'; interface Props { @@ -80,9 +80,17 @@ export const EditFieldHeaderForm = React.memo( /> - {/* Field sub type (if any) */} + {/* Other type */} + {type === 'other' && ( + + + + )} + + {/* Field sub type (if any) - will never be the case if we have an "other" type */} {hasSubType && ( + {' '} } = { shape: ShapeType, dense_vector: DenseVectorType, object: ObjectType, + other: OtherType, nested: NestedType, join: JoinType, }; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/field_types/other_type.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/field_types/other_type.tsx new file mode 100644 index 00000000000000..c403bbfb79056e --- /dev/null +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/field_types/other_type.tsx @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +import React from 'react'; + +import { OtherTypeJsonParameter } from '../../field_parameters'; +import { BasicParametersSection } from '../edit_field'; + +export const OtherType = () => { + return ( + + + + ); +}; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx index 4c1c8bc1da1143..f274159bd6c308 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/fields/fields_list_item.tsx @@ -16,11 +16,13 @@ import { import { i18n } from '@kbn/i18n'; import { NormalizedField, NormalizedFields } from '../../../types'; +import { getTypeLabelFromType } from '../../../lib'; import { TYPE_DEFINITION, CHILD_FIELD_INDENT_SIZE, LEFT_PADDING_SIZE_FIELD_ITEM_WRAPPER, } from '../../../constants'; + import { FieldsList } from './fields_list'; import { CreateField } from './create_field'; import { DeleteFieldProvider } from './delete_field_provider'; @@ -265,7 +267,7 @@ function FieldListItemComponent( dataType: TYPE_DEFINITION[source.type].label, }, }) - : TYPE_DEFINITION[source.type].label} + : getTypeLabelFromType(source.type)} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx index dbb8a788514bcb..614b7cb56bef64 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/components/document_fields/search_fields/search_result_item.tsx @@ -11,6 +11,7 @@ import { i18n } from '@kbn/i18n'; import { SearchResult } from '../../../types'; import { TYPE_DEFINITION } from '../../../constants'; import { useDispatch } from '../../../mappings_state'; +import { getTypeLabelFromType } from '../../../lib'; import { DeleteFieldProvider } from '../fields/delete_field_provider'; interface Props { @@ -115,7 +116,7 @@ export const SearchResultItem = React.memo(function FieldListItemFlatComponent({ dataType: TYPE_DEFINITION[source.type].label, }, }) - : TYPE_DEFINITION[source.type].label} + : getTypeLabelFromType(source.type)} diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx index f904281181c485..4206fe8b696da7 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/data_types_definition.tsx @@ -784,6 +784,20 @@ export const TYPE_DEFINITION: { [key in DataType]: DataTypeDefinition } = {

), }, + other: { + label: i18n.translate('xpack.idxMgmt.mappingsEditor.dataType.otherDescription', { + defaultMessage: 'Other', + }), + value: 'other', + description: () => ( +

+ +

+ ), + }, }; export const MAIN_TYPES: MainType[] = [ @@ -811,6 +825,7 @@ export const MAIN_TYPES: MainType[] = [ 'shape', 'text', 'token_count', + 'other', ]; export const MAIN_DATA_TYPE_DEFINITION: { diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx index 732449f382f93c..1b9372e4b50c4a 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/constants/parameters_definition.tsx @@ -504,7 +504,7 @@ export const PARAMETERS_DEFINITION: { [key in ParameterName]: ParameterDefinitio fieldConfig: { defaultValue: '', type: FIELD_TYPES.NUMBER, - deserializer: (value: string | number) => +value, + deserializer: (value: string | number) => (value === '' ? value : +value), formatters: [toInt], label: i18n.translate('xpack.idxMgmt.mappingsEditor.parameters.scalingFactorLabel', { defaultMessage: 'Scaling factor', diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx index 5a277073c5f1a3..618d106b0e7a11 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/search_fields.tsx @@ -185,8 +185,6 @@ const getSearchMetadata = (searchData: SearchData, fieldData: FieldData): Search const score = calculateScore(metadata); const display = getJSXdisplayFromMeta(searchData, fieldData, metadata); - // console.log(fieldData.path, score, metadata); - return { ...metadata, display, diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts index 131d886ff05d95..6b817c829251f6 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/serializers.ts @@ -45,16 +45,19 @@ const runParametersDeserializers = (field: Field): Field => ); export const fieldSerializer: SerializerFunc = (field: Field) => { + const { otherTypeJson, ...rest } = field; + const updatedField: Field = Boolean(otherTypeJson) ? 
{ ...otherTypeJson, ...rest } : { ...rest }; + // If a subType is present, use it as type for ES - if ({}.hasOwnProperty.call(field, 'subType')) { - field.type = field.subType as DataType; - delete field.subType; + if ({}.hasOwnProperty.call(updatedField, 'subType')) { + updatedField.type = updatedField.subType as DataType; + delete updatedField.subType; } // Delete temp fields - delete (field as any).useSameAnalyzerForSearch; + delete (updatedField as any).useSameAnalyzerForSearch; - return sanitizeField(runParametersSerializers(field)); + return sanitizeField(runParametersSerializers(updatedField)); }; export const fieldDeserializer: SerializerFunc = (field: Field): Field => { @@ -70,8 +73,18 @@ export const fieldDeserializer: SerializerFunc = (field: Field): Field => field.type = type; } - (field as any).useSameAnalyzerForSearch = - {}.hasOwnProperty.call(field, 'search_analyzer') === false; + if (field.type === 'other') { + const { type, subType, name, ...otherTypeJson } = field; + /** + * For "other" type (type we don't support through a form) + * we grab all the parameters and put them in the "otherTypeJson" object + * that we will render in a JSON editor. + */ + field.otherTypeJson = otherTypeJson; + } else { + (field as any).useSameAnalyzerForSearch = + {}.hasOwnProperty.call(field, 'search_analyzer') === false; + } return runParametersDeserializers(field); }; diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts index 337554ab5fa5a1..cece26618ced87 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/lib/utils.ts @@ -25,6 +25,7 @@ import { PARAMETERS_DEFINITION, TYPE_NOT_ALLOWED_MULTIFIELD, TYPE_ONLY_ALLOWED_AT_ROOT_LEVEL, + TYPE_DEFINITION, } from '../constants'; import { State } from '../reducer'; @@ -71,6 +72,9 @@ export const getFieldMeta = (field: Field, isMultiField?: boolean): FieldMeta => }; }; +export const getTypeLabelFromType = (type: DataType) => + TYPE_DEFINITION[type] ? TYPE_DEFINITION[type].label : `${TYPE_DEFINITION.other.label}: ${type}`; + export const getFieldConfig = (param: ParameterName, prop?: string): FieldConfig => { if (prop !== undefined) { if ( @@ -122,7 +126,7 @@ const replaceAliasPathByAliasId = ( }; export const getMainTypeFromSubType = (subType: SubType): MainType => - SUB_TYPE_MAP_TO_MAIN[subType] as MainType; + (SUB_TYPE_MAP_TO_MAIN[subType] ?? 'other') as MainType; /** * In order to better work with the recursive pattern of the mappings `properties`, this method flatten the fields @@ -287,7 +291,9 @@ export const deNormalize = ({ rootLevelFields, byId, aliases }: NormalizedFields const { source, childFields, childFieldsName } = serializedFieldsById[id]; const { name, ...normalizedField } = source; const field: Omit = normalizedField; + to[name] = field; + if (childFields) { field[childFieldsName!] 
= {}; return deNormalizePaths(childFields, field[childFieldsName!]); diff --git a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts b/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts index dbbffe5a0bd316..5b18af68ed55b9 100644 --- a/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts +++ b/x-pack/plugins/index_management/public/application/components/mappings_editor/types.ts @@ -56,7 +56,12 @@ export type MainType = | 'date_nanos' | 'geo_point' | 'geo_shape' - | 'token_count'; + | 'token_count' + /** + * 'other' is a special type that only exists inside of MappingsEditor as a placeholder + * for undocumented field types. + */ + | 'other'; export type SubType = NumericType | RangeType; @@ -156,6 +161,10 @@ interface FieldBasic { subType?: SubType; properties?: { [key: string]: Omit }; fields?: { [key: string]: Omit }; + + // other* exist together as a holder of types that the mappings editor does not yet know about but + // enables the user to create mappings with them. + otherTypeJson?: GenericObject; } type FieldParams = { diff --git a/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx b/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx index 0909a3c2ed569f..cd3ba43c3607c7 100644 --- a/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx +++ b/x-pack/plugins/infra/public/components/alerting/metrics/expression.tsx @@ -89,7 +89,7 @@ export const Expressions: React.FC = props => { const defaultExpression = useMemo( () => ({ - aggType: AGGREGATION_TYPES.MAX, + aggType: AGGREGATION_TYPES.AVERAGE, comparator: '>', threshold: [], timeSize: 1, diff --git a/x-pack/plugins/infra/public/compose_libs.ts b/x-pack/plugins/infra/public/compose_libs.ts new file mode 100644 index 00000000000000..debd83f43d52cf --- /dev/null +++ b/x-pack/plugins/infra/public/compose_libs.ts @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { InMemoryCache, IntrospectionFragmentMatcher } from 'apollo-cache-inmemory'; +import ApolloClient from 'apollo-client'; +import { ApolloLink } from 'apollo-link'; +import { createHttpLink } from 'apollo-link-http'; +import { withClientState } from 'apollo-link-state'; +import { CoreStart, HttpFetchOptions } from 'src/core/public'; +import { InfraFrontendLibs } from './lib/lib'; +import introspectionQueryResultData from './graphql/introspection.json'; +import { InfraKibanaObservableApiAdapter } from './lib/adapters/observable_api/kibana_observable_api'; + +export function composeLibs(core: CoreStart) { + const cache = new InMemoryCache({ + addTypename: false, + fragmentMatcher: new IntrospectionFragmentMatcher({ + introspectionQueryResultData, + }), + }); + + const observableApi = new InfraKibanaObservableApiAdapter({ + basePath: core.http.basePath.get(), + }); + + const wrappedFetch = (path: string, options: HttpFetchOptions) => { + return new Promise(async (resolve, reject) => { + // core.http.fetch isn't 100% compatible with the Fetch API and will + // throw Errors on 401s. This top level try / catch handles those scenarios. 
+ try { + core.http + .fetch(path, { + ...options, + // Set headers to undefined due to this bug: https://github.com/apollographql/apollo-link/issues/249, + // Apollo will try to set a "content-type" header which will conflict with the "Content-Type" header that + // core.http.fetch correctly sets. + headers: undefined, + asResponse: true, + }) + .then(res => { + if (!res.response) { + return reject(); + } + // core.http.fetch will parse the Response and set a body before handing it back. As such .text() / .json() + // will have already been called on the Response instance. However, Apollo will also want to call + // .text() / .json() on the instance, as it expects the raw Response instance, rather than core's wrapper. + // .text() / .json() can only be called once, and an Error will be thrown if those methods are accessed again. + // This hacks around that by setting up a new .text() method that will restringify the JSON response we already have. + // This does result in an extra stringify / parse cycle, which isn't ideal, but as we only have a few endpoints left using + // GraphQL this shouldn't create excessive overhead. + // Ref: https://github.com/apollographql/apollo-link/blob/master/packages/apollo-link-http/src/httpLink.ts#L134 + // and + // https://github.com/apollographql/apollo-link/blob/master/packages/apollo-link-http-common/src/index.ts#L125 + return resolve({ + ...res.response, + text: () => { + return new Promise(async (resolveText, rejectText) => { + if (res.body) { + return resolveText(JSON.stringify(res.body)); + } else { + return rejectText(); + } + }); + }, + }); + }); + } catch (error) { + reject(error); + } + }); + }; + + const HttpLink = createHttpLink({ + fetch: wrappedFetch, + uri: `/api/infra/graphql`, + }); + + const graphQLOptions = { + cache, + link: ApolloLink.from([ + withClientState({ + cache, + resolvers: {}, + }), + HttpLink, + ]), + }; + + const apolloClient = new ApolloClient(graphQLOptions); + + const libs: InfraFrontendLibs = { + apolloClient, + observableApi, + }; + return libs; +} diff --git a/x-pack/plugins/infra/public/plugin.ts b/x-pack/plugins/infra/public/plugin.ts index 15796f35856bdb..3b6647b9bfbbeb 100644 --- a/x-pack/plugins/infra/public/plugin.ts +++ b/x-pack/plugins/infra/public/plugin.ts @@ -12,23 +12,14 @@ import { PluginInitializerContext, AppMountParameters, } from 'kibana/public'; -import { InMemoryCache, IntrospectionFragmentMatcher } from 'apollo-cache-inmemory'; -import ApolloClient from 'apollo-client'; -import { ApolloLink } from 'apollo-link'; -import { createHttpLink } from 'apollo-link-http'; -import { withClientState } from 'apollo-link-state'; -import { HttpFetchOptions } from 'src/core/public'; import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/utils'; -import { InfraFrontendLibs } from './lib/lib'; -import introspectionQueryResultData from './graphql/introspection.json'; -import { InfraKibanaObservableApiAdapter } from './lib/adapters/observable_api/kibana_observable_api'; import { registerStartSingleton } from './legacy_singletons'; import { registerFeatures } from './register_feature'; import { HomePublicPluginSetup } from '../../../../src/plugins/home/public'; import { DataPublicPluginSetup, DataPublicPluginStart } from '../../../../src/plugins/data/public'; import { UsageCollectionSetup } from '../../../../src/plugins/usage_collection/public'; import { DataEnhancedSetup, DataEnhancedStart } from '../../data_enhanced/public'; -import { LogsRouter, MetricsRouter } from './routers'; + import { 
TriggersAndActionsUIPublicPluginSetup } from '../../../plugins/triggers_actions_ui/public'; import { getAlertType } from './components/alerting/metrics/metric_threshold_alert_type'; @@ -75,9 +66,10 @@ export class Plugin mount: async (params: AppMountParameters) => { const [coreStart, pluginsStart] = await core.getStartServices(); const plugins = getMergedPlugins(pluginsSetup, pluginsStart as ClientPluginsStart); - const { startApp } = await import('./apps/start_app'); + const { startApp, composeLibs, LogsRouter } = await this.downloadAssets(); + return startApp( - this.composeLibs(coreStart, plugins), + composeLibs(coreStart), coreStart, plugins, params, @@ -99,9 +91,10 @@ export class Plugin mount: async (params: AppMountParameters) => { const [coreStart, pluginsStart] = await core.getStartServices(); const plugins = getMergedPlugins(pluginsSetup, pluginsStart as ClientPluginsStart); - const { startApp } = await import('./apps/start_app'); + const { startApp, composeLibs, MetricsRouter } = await this.downloadAssets(); + return startApp( - this.composeLibs(coreStart, plugins), + composeLibs(coreStart), coreStart, plugins, params, @@ -129,87 +122,18 @@ export class Plugin registerStartSingleton(core); } - composeLibs(core: CoreStart, plugins: ClientPluginsStart) { - const cache = new InMemoryCache({ - addTypename: false, - fragmentMatcher: new IntrospectionFragmentMatcher({ - introspectionQueryResultData, - }), - }); - - const observableApi = new InfraKibanaObservableApiAdapter({ - basePath: core.http.basePath.get(), - }); - - const wrappedFetch = (path: string, options: HttpFetchOptions) => { - return new Promise(async (resolve, reject) => { - // core.http.fetch isn't 100% compatible with the Fetch API and will - // throw Errors on 401s. This top level try / catch handles those scenarios. - try { - core.http - .fetch(path, { - ...options, - // Set headers to undefined due to this bug: https://github.com/apollographql/apollo-link/issues/249, - // Apollo will try to set a "content-type" header which will conflict with the "Content-Type" header that - // core.http.fetch correctly sets. - headers: undefined, - asResponse: true, - }) - .then(res => { - if (!res.response) { - return reject(); - } - // core.http.fetch will parse the Response and set a body before handing it back. As such .text() / .json() - // will have already been called on the Response instance. However, Apollo will also want to call - // .text() / .json() on the instance, as it expects the raw Response instance, rather than core's wrapper. - // .text() / .json() can only be called once, and an Error will be thrown if those methods are accessed again. - // This hacks around that by setting up a new .text() method that will restringify the JSON response we already have. - // This does result in an extra stringify / parse cycle, which isn't ideal, but as we only have a few endpoints left using - // GraphQL this shouldn't create excessive overhead. 
- // Ref: https://github.com/apollographql/apollo-link/blob/master/packages/apollo-link-http/src/httpLink.ts#L134 - // and - // https://github.com/apollographql/apollo-link/blob/master/packages/apollo-link-http-common/src/index.ts#L125 - return resolve({ - ...res.response, - text: () => { - return new Promise(async (resolveText, rejectText) => { - if (res.body) { - return resolveText(JSON.stringify(res.body)); - } else { - return rejectText(); - } - }); - }, - }); - }); - } catch (error) { - reject(error); - } - }); - }; - - const HttpLink = createHttpLink({ - fetch: wrappedFetch, - uri: `/api/infra/graphql`, - }); - - const graphQLOptions = { - cache, - link: ApolloLink.from([ - withClientState({ - cache, - resolvers: {}, - }), - HttpLink, - ]), - }; - - const apolloClient = new ApolloClient(graphQLOptions); - - const libs: InfraFrontendLibs = { - apolloClient, - observableApi, + private async downloadAssets() { + const [{ startApp }, { composeLibs }, { LogsRouter, MetricsRouter }] = await Promise.all([ + import('./apps/start_app'), + import('./compose_libs'), + import('./routers'), + ]); + + return { + startApp, + composeLibs, + LogsRouter, + MetricsRouter, }; - return libs; } } diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx index 8ba597a0d377e1..de0dd75f635cf4 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/components/package_icon.tsx @@ -3,78 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import React, { useEffect, useMemo, useState } from 'react'; -import { ICON_TYPES, EuiIcon, EuiIconProps } from '@elastic/eui'; -import { PackageInfo, PackageListItem } from '../../../../common/types/models'; -import { useLinks } from '../sections/epm/hooks'; -import { epmRouteService } from '../../../../common/services'; -import { sendRequest } from '../hooks/use_request'; -import { GetInfoResponse } from '../types'; -type Package = PackageInfo | PackageListItem; +import React from 'react'; +import { EuiIcon, EuiIconProps } from '@elastic/eui'; +import { usePackageIconType, UsePackageIconType } from '../hooks'; -const CACHED_ICONS = new Map(); - -export const PackageIcon: React.FunctionComponent<{ - packageName: string; - version?: string; - icons?: Package['icons']; -} & Omit> = ({ packageName, version, icons, ...euiIconProps }) => { - const iconType = usePackageIcon(packageName, version, icons); +export const PackageIcon: React.FunctionComponent> = ({ packageName, version, icons, tryApi, ...euiIconProps }) => { + const iconType = usePackageIconType({ packageName, version, icons, tryApi }); return ; }; - -const usePackageIcon = (packageName: string, version?: string, icons?: Package['icons']) => { - const { toImage } = useLinks(); - const [iconType, setIconType] = useState(''); // FIXME: use `empty` icon during initialization - see: https://github.com/elastic/kibana/issues/60622 - const pkgKey = `${packageName}-${version ?? 
''}`; - - // Generates an icon path or Eui Icon name based on an icon list from the package - // or by using the package name against logo icons from Eui - const fromInput = useMemo(() => { - return (iconList?: Package['icons']) => { - const svgIcons = iconList?.filter(iconDef => iconDef.type === 'image/svg+xml'); - const localIconSrc = Array.isArray(svgIcons) && svgIcons[0]?.src; - if (localIconSrc) { - CACHED_ICONS.set(pkgKey, toImage(localIconSrc)); - setIconType(CACHED_ICONS.get(pkgKey) as string); - return; - } - - const euiLogoIcon = ICON_TYPES.find(key => key.toLowerCase() === `logo${packageName}`); - if (euiLogoIcon) { - CACHED_ICONS.set(pkgKey, euiLogoIcon); - setIconType(euiLogoIcon); - return; - } - - CACHED_ICONS.set(pkgKey, 'package'); - setIconType('package'); - }; - }, [packageName, pkgKey, toImage]); - - useEffect(() => { - if (CACHED_ICONS.has(pkgKey)) { - setIconType(CACHED_ICONS.get(pkgKey) as string); - return; - } - - // Use API to see if package has icons defined - if (!icons && version) { - fromPackageInfo(pkgKey) - .catch(() => undefined) // ignore API errors - .then(fromInput); - } else { - fromInput(icons); - } - }, [icons, toImage, packageName, version, fromInput, pkgKey]); - - return iconType; -}; - -const fromPackageInfo = async (pkgKey: string) => { - const { data } = await sendRequest({ - path: epmRouteService.getInfoPath(pkgKey), - method: 'get', - }); - return data?.response?.icons; -}; diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts index 5e0695bd3e305a..66c7333150fb7b 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/index.ts @@ -9,6 +9,7 @@ export { useCore, CoreContext } from './use_core'; export { useConfig, ConfigContext } from './use_config'; export { useSetupDeps, useStartDeps, DepsContext } from './use_deps'; export { useLink } from './use_link'; +export { usePackageIconType, UsePackageIconType } from './use_package_icon_type'; export { usePagination, Pagination } from './use_pagination'; export { useDebounce } from './use_debounce'; export * from './use_request'; diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_package_icon_type.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_package_icon_type.ts new file mode 100644 index 00000000000000..5f231b5cc9ec99 --- /dev/null +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_package_icon_type.ts @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useEffect, useState } from 'react'; +import { ICON_TYPES } from '@elastic/eui'; +import { PackageInfo, PackageListItem } from '../../../../common/types/models'; +import { useLinks } from '../sections/epm/hooks'; +import { sendGetPackageInfoByKey } from './index'; + +type Package = PackageInfo | PackageListItem; + +export interface UsePackageIconType { + packageName: Package['name']; + version: Package['version']; + icons?: Package['icons']; + tryApi?: boolean; // should it call API to try to find missing icons? 
+} + +const CACHED_ICONS = new Map(); + +export const usePackageIconType = ({ + packageName, + version, + icons: paramIcons, + tryApi = false, +}: UsePackageIconType) => { + const { toImage } = useLinks(); + const [iconList, setIconList] = useState(); + const [iconType, setIconType] = useState(''); // FIXME: use `empty` icon during initialization - see: https://github.com/elastic/kibana/issues/60622 + const pkgKey = `${packageName}-${version}`; + + // Generates an icon path or Eui Icon name based on an icon list from the package + // or by using the package name against logo icons from Eui + useEffect(() => { + if (CACHED_ICONS.has(pkgKey)) { + setIconType(CACHED_ICONS.get(pkgKey) || ''); + return; + } + const svgIcons = (paramIcons || iconList)?.filter(iconDef => iconDef.type === 'image/svg+xml'); + const localIconSrc = Array.isArray(svgIcons) && svgIcons[0]?.src; + if (localIconSrc) { + CACHED_ICONS.set(pkgKey, toImage(localIconSrc)); + setIconType(CACHED_ICONS.get(pkgKey) || ''); + return; + } + + const euiLogoIcon = ICON_TYPES.find(key => key.toLowerCase() === `logo${packageName}`); + if (euiLogoIcon) { + CACHED_ICONS.set(pkgKey, euiLogoIcon); + setIconType(euiLogoIcon); + return; + } + + if (tryApi && !paramIcons && !iconList) { + sendGetPackageInfoByKey(pkgKey) + .catch(error => undefined) // Ignore API errors + .then(res => { + CACHED_ICONS.delete(pkgKey); + setIconList(res?.data?.response?.icons); + }); + } + + CACHED_ICONS.set(pkgKey, 'package'); + setIconType('package'); + }, [paramIcons, pkgKey, toImage, iconList, packageName, iconType, tryApi]); + + return iconType; +}; diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx index 0b48020c3cac1a..cc7fc89ab8a80d 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_datasource_page/step_select_package.tsx @@ -130,7 +130,15 @@ export const StepSelectPackage: React.FunctionComponent<{ return { label: title || name, key: pkgkey, - prepend: , + prepend: ( + + ), checked: selectedPkgKey === pkgkey ? 
'on' : undefined, }; })} diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx index 49285707457e13..87155afdc21be9 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/details_page/components/datasources/datasources_table.tsx @@ -150,6 +150,7 @@ export const DatasourcesTable: React.FunctionComponent = ({ packageName={datasource.package.name} version={datasource.package.version} size="m" + tryApi={true} /> )} diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx index 7ce386ed56f5f2..684b158b5da86e 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/components/icon_panel.tsx @@ -16,7 +16,8 @@ export function IconPanel({ iconType }: { iconType: IconType }) { text-align: center; vertical-align: middle; padding: ${props => props.theme.eui.spacerSizes.xl}; - svg { + svg, + img { height: ${props => props.theme.eui.euiKeyPadMenuSize}; width: ${props => props.theme.eui.euiKeyPadMenuSize}; } diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx index 4bc90c6a0f8fd8..3239d7b90e3c3c 100644 --- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx +++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/epm/screens/detail/index.tsx @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { EuiPage, EuiPageBody, EuiPageProps, ICON_TYPES } from '@elastic/eui'; +import { EuiPage, EuiPageBody, EuiPageProps } from '@elastic/eui'; import React, { Fragment, useEffect, useState } from 'react'; import { useParams } from 'react-router-dom'; import styled from 'styled-components'; @@ -12,7 +12,7 @@ import { PackageInfo } from '../../../../types'; import { useSetPackageInstallStatus } from '../../hooks'; import { Content } from './content'; import { Header } from './header'; -import { sendGetPackageInfoByKey } from '../../../../hooks'; +import { sendGetPackageInfoByKey, usePackageIconType } from '../../../../hooks'; export const DEFAULT_PANEL: DetailViewPanelName = 'overview'; @@ -62,8 +62,8 @@ const FullWidthContent = styled(EuiPage)` type LayoutProps = PackageInfo & Pick & Pick; export function DetailLayout(props: LayoutProps) { - const { name, restrictWidth } = props; - const iconType = ICON_TYPES.find(key => key.toLowerCase() === `logo${name}`); + const { name: packageName, version, icons, restrictWidth } = props; + const iconType = usePackageIconType({ packageName, version, icons }); return ( diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts b/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts index 53b677bb1389ea..13dcea75f31d0e 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/acks_handlers.ts @@ -24,7 +24,7 @@ export const postAgentAcksHandlerBuilder = function( return async (context, request, response) => { try { const soClient = ackService.getSavedObjectsClientContract(request); - const res = APIKeyService.parseApiKey(request.headers); + const res = APIKeyService.parseApiKeyFromHeaders(request.headers); const agent = await ackService.getAgentByAccessAPIKeyId(soClient, res.apiKeyId as string); const agentEvents = request.body.events as AgentEvent[]; diff --git a/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts b/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts index 7d991f5ad2cc25..adff1fda11200d 100644 --- a/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/agent/handlers.ts @@ -175,7 +175,7 @@ export const postAgentCheckinHandler: RequestHandler< > = async (context, request, response) => { try { const soClient = getInternalUserSOClient(request); - const res = APIKeyService.parseApiKey(request.headers); + const res = APIKeyService.parseApiKeyFromHeaders(request.headers); const agent = await AgentService.getAgentByAccessAPIKeyId(soClient, res.apiKeyId); const { actions } = await AgentService.agentCheckin( soClient, @@ -216,7 +216,7 @@ export const postAgentEnrollHandler: RequestHandler< > = async (context, request, response) => { try { const soClient = getInternalUserSOClient(request); - const { apiKeyId } = APIKeyService.parseApiKey(request.headers); + const { apiKeyId } = APIKeyService.parseApiKeyFromHeaders(request.headers); const enrollmentAPIKey = await APIKeyService.getEnrollmentAPIKeyById(soClient, apiKeyId); if (!enrollmentAPIKey || !enrollmentAPIKey.active) { diff --git a/x-pack/plugins/ingest_manager/server/services/agents/crud.ts b/x-pack/plugins/ingest_manager/server/services/agents/crud.ts index 41bd2476c99a12..ec270884e62b4d 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/crud.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/crud.ts @@ -14,6 +14,7 @@ import { } from '../../constants'; import { 
AgentSOAttributes, Agent, AgentEventSOAttributes } from '../../types'; import { savedObjectToAgent } from './saved_objects'; +import { escapeSearchQueryPhrase } from '../saved_object'; export async function listAgents( soClient: SavedObjectsClientContract, @@ -72,14 +73,16 @@ export async function getAgentByAccessAPIKeyId( const response = await soClient.find({ type: AGENT_SAVED_OBJECT_TYPE, searchFields: ['access_api_key_id'], - search: accessAPIKeyId, + search: escapeSearchQueryPhrase(accessAPIKeyId), }); - const [agent] = response.saved_objects.map(savedObjectToAgent); if (!agent) { throw Boom.notFound('Agent not found'); } + if (agent.access_api_key_id !== accessAPIKeyId) { + throw new Error('Agent api key id is not matching'); + } if (!agent.active) { throw Boom.forbidden('Agent inactive'); } diff --git a/x-pack/plugins/ingest_manager/server/services/agents/unenroll.ts b/x-pack/plugins/ingest_manager/server/services/agents/unenroll.ts index bf6f6526be0696..18af9fd4de73f2 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/unenroll.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/unenroll.ts @@ -7,6 +7,8 @@ import { SavedObjectsClientContract } from 'src/core/server'; import { AgentSOAttributes } from '../../types'; import { AGENT_SAVED_OBJECT_TYPE } from '../../constants'; +import { getAgent } from './crud'; +import * as APIKeyService from '../api_keys'; export async function unenrollAgents( soClient: SavedObjectsClientContract, @@ -15,9 +17,7 @@ export async function unenrollAgents( const response = []; for (const id of toUnenrollIds) { try { - await soClient.update(AGENT_SAVED_OBJECT_TYPE, id, { - active: false, - }); + await unenrollAgent(soClient, id); response.push({ id, success: true, @@ -33,3 +33,22 @@ export async function unenrollAgents( return response; } + +async function unenrollAgent(soClient: SavedObjectsClientContract, agentId: string) { + const agent = await getAgent(soClient, agentId); + + await Promise.all([ + agent.access_api_key_id + ? APIKeyService.invalidateAPIKey(soClient, agent.access_api_key_id) + : undefined, + agent.default_api_key + ? 
APIKeyService.invalidateAPIKey( + soClient, + APIKeyService.parseApiKey(agent.default_api_key).apiKeyId + ) + : undefined, + ]); + await soClient.update(AGENT_SAVED_OBJECT_TYPE, agentId, { + active: false, + }); +} diff --git a/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts b/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts index 7f3f9f5281f0cb..57362e6b4b0deb 100644 --- a/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts +++ b/x-pack/plugins/ingest_manager/server/services/api_keys/index.ts @@ -8,7 +8,9 @@ import { SavedObjectsClientContract, SavedObject, KibanaRequest } from 'src/core import { ENROLLMENT_API_KEYS_SAVED_OBJECT_TYPE } from '../../constants'; import { EnrollmentAPIKeySOAttributes, EnrollmentAPIKey } from '../../types'; import { createAPIKey } from './security'; +import { escapeSearchQueryPhrase } from '../saved_object'; +export { invalidateAPIKey } from './security'; export * from './enrollment_api_key'; export async function generateOutputApiKey( @@ -70,14 +72,18 @@ export async function getEnrollmentAPIKeyById( await soClient.find({ type: ENROLLMENT_API_KEYS_SAVED_OBJECT_TYPE, searchFields: ['api_key_id'], - search: apiKeyId, + search: escapeSearchQueryPhrase(apiKeyId), }) ).saved_objects.map(_savedObjectToEnrollmentApiKey); + if (enrollmentAPIKey?.api_key_id !== apiKeyId) { + throw new Error('find enrollmentKeyById returned an incorrect key'); + } + return enrollmentAPIKey; } -export function parseApiKey(headers: KibanaRequest['headers']) { +export function parseApiKeyFromHeaders(headers: KibanaRequest['headers']) { const authorizationHeader = headers.authorization; if (!authorizationHeader) { @@ -93,9 +99,11 @@ export function parseApiKey(headers: KibanaRequest['headers']) { } const apiKey = authorizationHeader.split(' ')[1]; - if (!apiKey) { - throw new Error('Authorization header is malformed'); - } + + return parseApiKey(apiKey); +} + +export function parseApiKey(apiKey: string) { const apiKeyId = Buffer.from(apiKey, 'base64') .toString('utf8') .split(':')[0]; diff --git a/x-pack/plugins/ingest_manager/server/services/saved_object.test.ts b/x-pack/plugins/ingest_manager/server/services/saved_object.test.ts new file mode 100644 index 00000000000000..9eb5dccb76ac5c --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/saved_object.test.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { escapeSearchQueryPhrase } from './saved_object'; + +describe('Saved object service', () => { + describe('escapeSearchQueryPhrase', () => { + it('should return value between quotes', () => { + const res = escapeSearchQueryPhrase('-test'); + + expect(res).toEqual('"-test"'); + }); + + it('should escape quotes', () => { + const res = escapeSearchQueryPhrase('test1"test2'); + + expect(res).toEqual(`"test1\"test2"`); + }); + }); +}); diff --git a/x-pack/plugins/ingest_manager/server/services/saved_object.ts b/x-pack/plugins/ingest_manager/server/services/saved_object.ts new file mode 100644 index 00000000000000..8fe7ffcdfc8968 --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/saved_object.ts @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
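
As context for the parseApiKey / parseApiKeyFromHeaders split used above (both in the unenroll path and in the agent request handlers), here is a minimal sketch of how the key id is recovered from a credential. It assumes the credential is the base64-encoded id:api_key pair those call sites receive; the sample value is invented.

// Sketch only: decode the id half of a base64 `id:api_key` credential,
// mirroring what the refactored parseApiKey(apiKey) helper does.
const rawApiKey = Buffer.from('my-key-id:my-key-secret', 'utf8').toString('base64');
const apiKeyId = Buffer.from(rawApiKey, 'base64')
  .toString('utf8')
  .split(':')[0];
// apiKeyId === 'my-key-id'
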
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Escape a value with double quote to use with saved object search + * Example: escapeSearchQueryPhrase('-test"toto') => '"-test\"toto""' + * @param val + */ +export function escapeSearchQueryPhrase(val: string): string { + return `"${val.replace(/["]/g, '"')}"`; +} diff --git a/x-pack/plugins/maps/common/constants.ts b/x-pack/plugins/maps/common/constants.ts index bd4406ef5ce634..f3997f741a1bfe 100644 --- a/x-pack/plugins/maps/common/constants.ts +++ b/x-pack/plugins/maps/common/constants.ts @@ -213,3 +213,5 @@ export enum SCALING_TYPES { CLUSTERS = 'CLUSTERS', TOP_HITS = 'TOP_HITS', } + +export const RGBA_0000 = 'rgba(0,0,0,0)'; diff --git a/x-pack/plugins/ml/common/util/es_utils.ts b/x-pack/plugins/ml/common/util/es_utils.ts index bed7ba8bc77367..ff632a60dd5161 100644 --- a/x-pack/plugins/ml/common/util/es_utils.ts +++ b/x-pack/plugins/ml/common/util/es_utils.ts @@ -26,6 +26,7 @@ function isValidIndexNameLength(indexName: string) { // https://github.com/elastic/elasticsearch/blob/master/docs/reference/indices/create-index.asciidoc export function isValidIndexName(indexName: string) { return ( + typeof indexName === 'string' && // Lowercase only indexName === indexName.toLowerCase() && // Cannot include \, /, *, ?, ", <, >, |, space character, comma, #, : diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts index d77f19c0df79d2..511ebb7e1647a5 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/common/analytics.ts @@ -33,6 +33,7 @@ interface OutlierAnalysis { interface Regression { dependent_variable: string; training_percent?: number; + num_top_feature_importance_values?: number; prediction_field_name?: string; } export interface RegressionAnalysis { @@ -44,6 +45,7 @@ interface Classification { dependent_variable: string; training_percent?: number; num_top_classes?: string; + num_top_feature_importance_values?: number; prediction_field_name?: string; } export interface ClassificationAnalysis { @@ -65,6 +67,8 @@ export const SEARCH_SIZE = 1000; export const TRAINING_PERCENT_MIN = 1; export const TRAINING_PERCENT_MAX = 100; +export const NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN = 0; + export const defaultSearchQuery = { match_all: {}, }; @@ -152,7 +156,7 @@ type AnalysisConfig = | ClassificationAnalysis | GenericAnalysis; -export const getAnalysisType = (analysis: AnalysisConfig) => { +export const getAnalysisType = (analysis: AnalysisConfig): string => { const keys = Object.keys(analysis); if (keys.length === 1) { @@ -162,7 +166,11 @@ export const getAnalysisType = (analysis: AnalysisConfig) => { return 'unknown'; }; -export const getDependentVar = (analysis: AnalysisConfig) => { +export const getDependentVar = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['dependent_variable'] + | ClassificationAnalysis['classification']['dependent_variable'] => { let depVar = ''; if (isRegressionAnalysis(analysis)) { @@ -175,7 +183,11 @@ export const getDependentVar = (analysis: AnalysisConfig) => { return depVar; }; -export const getTrainingPercent = (analysis: AnalysisConfig) => { +export const getTrainingPercent = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['training_percent'] + | 
ClassificationAnalysis['classification']['training_percent'] => { let trainingPercent; if (isRegressionAnalysis(analysis)) { @@ -188,7 +200,11 @@ export const getTrainingPercent = (analysis: AnalysisConfig) => { return trainingPercent; }; -export const getPredictionFieldName = (analysis: AnalysisConfig) => { +export const getPredictionFieldName = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['prediction_field_name'] + | ClassificationAnalysis['classification']['prediction_field_name'] => { // If undefined will be defaulted to dependent_variable when config is created let predictionFieldName; if (isRegressionAnalysis(analysis) && analysis.regression.prediction_field_name !== undefined) { @@ -202,6 +218,26 @@ export const getPredictionFieldName = (analysis: AnalysisConfig) => { return predictionFieldName; }; +export const getNumTopFeatureImportanceValues = ( + analysis: AnalysisConfig +): + | RegressionAnalysis['regression']['num_top_feature_importance_values'] + | ClassificationAnalysis['classification']['num_top_feature_importance_values'] => { + let numTopFeatureImportanceValues; + if ( + isRegressionAnalysis(analysis) && + analysis.regression.num_top_feature_importance_values !== undefined + ) { + numTopFeatureImportanceValues = analysis.regression.num_top_feature_importance_values; + } else if ( + isClassificationAnalysis(analysis) && + analysis.classification.num_top_feature_importance_values !== undefined + ) { + numTopFeatureImportanceValues = analysis.classification.num_top_feature_importance_values; + } + return numTopFeatureImportanceValues; +}; + export const getPredictedFieldName = ( resultsField: string, analysis: AnalysisConfig, diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts index 59b42935a141d7..92d8731959895c 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/common/fields.ts @@ -7,12 +7,13 @@ import { getNestedProperty } from '../../util/object_utils'; import { DataFrameAnalyticsConfig, + getNumTopFeatureImportanceValues, getPredictedFieldName, getDependentVar, getPredictionFieldName, } from './analytics'; import { Field } from '../../../../common/types/fields'; -import { ES_FIELD_TYPES } from '../../../../../../../src/plugins/data/public'; +import { ES_FIELD_TYPES, KBN_FIELD_TYPES } from '../../../../../../../src/plugins/data/public'; import { newJobCapsService } from '../../services/new_job_capabilities_service'; export type EsId = string; @@ -254,6 +255,7 @@ export const getDefaultFieldsFromJobCaps = ( const dependentVariable = getDependentVar(jobConfig.analysis); const type = newJobCapsService.getFieldById(dependentVariable)?.type; const predictionFieldName = getPredictionFieldName(jobConfig.analysis); + const numTopFeatureImportanceValues = getNumTopFeatureImportanceValues(jobConfig.analysis); // default is 'ml' const resultsField = jobConfig.dest.results_field; @@ -261,7 +263,20 @@ export const getDefaultFieldsFromJobCaps = ( const predictedField = `${resultsField}.${ predictionFieldName ? predictionFieldName : defaultPredictionField }`; - // Only need to add these first two fields if we didn't use dest index pattern to get the fields + + const featureImportanceFields = []; + + if ((numTopFeatureImportanceValues ?? 
0) > 0) { + featureImportanceFields.push( + ...fields.map(d => ({ + id: `${resultsField}.feature_importance.${d.id}`, + name: `${resultsField}.feature_importance.${d.name}`, + type: KBN_FIELD_TYPES.NUMBER, + })) + ); + } + + // Only need to add these fields if we didn't use dest index pattern to get the fields const allFields: any = needsDestIndexFields === true ? [ @@ -271,16 +286,20 @@ export const getDefaultFieldsFromJobCaps = ( type: ES_FIELD_TYPES.BOOLEAN, }, { id: predictedField, name: predictedField, type }, + ...featureImportanceFields, ] : []; allFields.push(...fields); - // @ts-ignore - allFields.sort(({ name: a }, { name: b }) => sortRegressionResultsFields(a, b, jobConfig)); - - let selectedFields = allFields - .slice(0, DEFAULT_REGRESSION_COLUMNS * 2) - .filter((field: any) => field.name === predictedField || !field.name.includes('.keyword')); + allFields.sort(({ name: a }: { name: string }, { name: b }: { name: string }) => + sortRegressionResultsFields(a, b, jobConfig) + ); + + let selectedFields = allFields.filter( + (field: any) => + field.name === predictedField || + (!field.name.includes('.keyword') && !field.name.includes('.feature_importance.')) + ); if (selectedFields.length > DEFAULT_REGRESSION_COLUMNS) { selectedFields = selectedFields.slice(0, DEFAULT_REGRESSION_COLUMNS); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts index 6225bca592be39..2463da054d1406 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.test.ts @@ -25,6 +25,7 @@ describe('Analytics job clone action', () => { classification: { dependent_variable: 'y', num_top_classes: 2, + num_top_feature_importance_values: 4, prediction_field_name: 'y_prediction', training_percent: 2, randomize_seed: 6233212276062807000, @@ -90,6 +91,7 @@ describe('Analytics job clone action', () => { prediction_field_name: 'stab_prediction', training_percent: 20, randomize_seed: -2228827740028660200, + num_top_feature_importance_values: 4, }, }, analyzed_fields: { @@ -120,6 +122,7 @@ describe('Analytics job clone action', () => { classification: { dependent_variable: 'y', num_top_classes: 2, + num_top_feature_importance_values: 4, prediction_field_name: 'y_prediction', training_percent: 2, randomize_seed: 6233212276062807000, @@ -188,6 +191,7 @@ describe('Analytics job clone action', () => { prediction_field_name: 'stab_prediction', training_percent: 20, randomize_seed: -2228827740028660200, + num_top_feature_importance_values: 4, }, }, analyzed_fields: { @@ -218,6 +222,7 @@ describe('Analytics job clone action', () => { dependent_variable: 'y', training_percent: 71, max_trees: 1500, + num_top_feature_importance_values: 4, }, }, model_memory_limit: '400mb', @@ -243,6 +248,7 @@ describe('Analytics job clone action', () => { dependent_variable: 'y', training_percent: 71, maximum_number_trees: 1500, + num_top_feature_importance_values: 4, }, }, model_memory_limit: '400mb', diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx 
b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx index 3a0f98fc5acaac..eb1871c98764b3 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/analytics_list/action_clone.tsx @@ -11,7 +11,10 @@ import { i18n } from '@kbn/i18n'; import { DeepReadonly } from '../../../../../../../common/types/common'; import { DataFrameAnalyticsConfig, isOutlierAnalysis } from '../../../../common'; import { isClassificationAnalysis, isRegressionAnalysis } from '../../../../common/analytics'; -import { CreateAnalyticsFormProps } from '../../hooks/use_create_analytics_form'; +import { + CreateAnalyticsFormProps, + DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, +} from '../../hooks/use_create_analytics_form'; import { State } from '../../hooks/use_create_analytics_form/state'; import { DataFrameAnalyticsListRow } from './common'; @@ -97,6 +100,8 @@ const getAnalyticsJobMeta = (config: CloneDataFrameAnalyticsConfig): AnalyticsJo }, num_top_feature_importance_values: { optional: true, + defaultValue: DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, + formKey: 'numTopFeatureImportanceValues', }, class_assignment_objective: { optional: true, @@ -164,6 +169,8 @@ const getAnalyticsJobMeta = (config: CloneDataFrameAnalyticsConfig): AnalyticsJo }, num_top_feature_importance_values: { optional: true, + defaultValue: DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, + formKey: 'numTopFeatureImportanceValues', }, randomize_seed: { optional: true, diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx index 044bb9f5170010..e5f30a50ed8f0d 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/components/create_analytics_form/create_analytics_form.tsx @@ -10,6 +10,7 @@ import { EuiComboBox, EuiComboBoxOptionOption, EuiForm, + EuiFieldNumber, EuiFieldText, EuiFormRow, EuiLink, @@ -41,6 +42,7 @@ import { ANALYSIS_CONFIG_TYPE, DfAnalyticsExplainResponse, FieldSelectionItem, + NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN, TRAINING_PERCENT_MIN, TRAINING_PERCENT_MAX, } from '../../../../common/analytics'; @@ -83,6 +85,8 @@ export const CreateAnalyticsForm: FC = ({ actions, sta maxDistinctValuesError, modelMemoryLimit, modelMemoryLimitValidationResult, + numTopFeatureImportanceValues, + numTopFeatureImportanceValuesValid, previousJobType, previousSourceIndex, sourceIndex, @@ -645,6 +649,54 @@ export const CreateAnalyticsForm: FC = ({ actions, sta data-test-subj="mlAnalyticsCreateJobFlyoutTrainingPercentSlider" /> + {/* num_top_feature_importance_values */} + + {i18n.translate( + 'xpack.ml.dataframe.analytics.create.numTopFeatureImportanceValuesErrorText', + { + defaultMessage: + 'Invalid maximum number of feature importance values.', + } + )} + , + ] + : []), + ]} + > + setFormState({ numTopFeatureImportanceValues: +e.target.value })} + step={1} + value={numTopFeatureImportanceValues} + /> + )} merge(getInitialState(), { form: { @@ -34,7 +41,11 @@ const 
getMockState = ({ source: { index }, dest: { index: 'the-destination-index' }, analysis: { - classification: { dependent_variable: 'the-variable', training_percent: trainingPercent }, + classification: { + dependent_variable: 'the-variable', + num_top_feature_importance_values: numTopFeatureImportanceValues, + training_percent: trainingPercent, + }, }, model_memory_limit: modelMemoryLimit, }, @@ -173,6 +184,27 @@ describe('useCreateAnalyticsForm', () => { .isValid ).toBe(false); }); + + test('validateAdvancedEditor(): check num_top_feature_importance_values validation', () => { + // valid num_top_feature_importance_values value + expect( + validateAdvancedEditor( + getMockState({ index: 'the-source-index', numTopFeatureImportanceValues: 1 }) + ).isValid + ).toBe(true); + // invalid num_top_feature_importance_values numeric value + expect( + validateAdvancedEditor( + getMockState({ index: 'the-source-index', numTopFeatureImportanceValues: -1 }) + ).isValid + ).toBe(false); + // invalid training_percent numeric value if not an integer + expect( + validateAdvancedEditor( + getMockState({ index: 'the-source-index', numTopFeatureImportanceValues: 1.1 }) + ).isValid + ).toBe(false); + }); }); describe('validateMinMML', () => { @@ -194,3 +226,24 @@ describe('validateMinMML', () => { expect(validateMinMML((undefined as unknown) as string)('')).toEqual(null); }); }); + +describe('validateNumTopFeatureImportanceValues()', () => { + test('should not allow below 0', () => { + expect(validateNumTopFeatureImportanceValues(-1)).toBe(false); + }); + + test('should not allow strings', () => { + expect(validateNumTopFeatureImportanceValues('1')).toBe(false); + }); + + test('should not allow floats', () => { + expect(validateNumTopFeatureImportanceValues(0.1)).toBe(false); + expect(validateNumTopFeatureImportanceValues(1.1)).toBe(false); + expect(validateNumTopFeatureImportanceValues(-1.1)).toBe(false); + }); + + test('should allow 0 and higher', () => { + expect(validateNumTopFeatureImportanceValues(0)).toBe(true); + expect(validateNumTopFeatureImportanceValues(1)).toBe(true); + }); +}); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts index 28d8afbcd88cc3..ded6e509470350 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts @@ -31,10 +31,12 @@ import { } from '../../../../../../../common/constants/validation'; import { getDependentVar, + getNumTopFeatureImportanceValues, getTrainingPercent, isRegressionAnalysis, isClassificationAnalysis, ANALYSIS_CONFIG_TYPE, + NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN, TRAINING_PERCENT_MIN, TRAINING_PERCENT_MAX, } from '../../../../common/analytics'; @@ -100,6 +102,19 @@ const getSourceIndexString = (state: State) => { return ''; }; +/** + * Validates num_top_feature_importance_values. Must be an integer >= 0. 
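
To make the new num_top_feature_importance_values option concrete, a hypothetical classification analysis section carrying it, with values mirroring the test fixtures above; the getter name comes from common/analytics.ts earlier in this diff, everything else is illustrative.

// Sketch: an analysis config with the new option set (values are made up).
const analysis = {
  classification: {
    dependent_variable: 'y',
    training_percent: 71,
    // must be an integer >= NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN (0)
    num_top_feature_importance_values: 4,
  },
};
// With the typed getter added above: getNumTopFeatureImportanceValues(analysis) === 4
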
+ */ +export const validateNumTopFeatureImportanceValues = ( + numTopFeatureImportanceValues: any +): boolean => { + return ( + typeof numTopFeatureImportanceValues === 'number' && + numTopFeatureImportanceValues >= NUM_TOP_FEATURE_IMPORTANCE_VALUES_MIN && + Number.isInteger(numTopFeatureImportanceValues) + ); +}; + export const validateAdvancedEditor = (state: State): State => { const { jobIdEmpty, @@ -147,6 +162,7 @@ export const validateAdvancedEditor = (state: State): State => { let dependentVariableEmpty = false; let excludesValid = true; let trainingPercentValid = true; + let numTopFeatureImportanceValuesValid = true; if ( jobConfig.analysis === undefined && @@ -180,6 +196,7 @@ export const validateAdvancedEditor = (state: State): State => { if ( trainingPercent !== undefined && (isNaN(trainingPercent) || + typeof trainingPercent !== 'number' || trainingPercent < TRAINING_PERCENT_MIN || trainingPercent > TRAINING_PERCENT_MAX) ) { @@ -189,7 +206,7 @@ export const validateAdvancedEditor = (state: State): State => { error: i18n.translate( 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.trainingPercentInvalid', { - defaultMessage: 'The training percent must be a value between {min} and {max}.', + defaultMessage: 'The training percent must be a number between {min} and {max}.', values: { min: TRAINING_PERCENT_MIN, max: TRAINING_PERCENT_MAX, @@ -199,6 +216,28 @@ export const validateAdvancedEditor = (state: State): State => { message: '', }); } + + const numTopFeatureImportanceValues = getNumTopFeatureImportanceValues(jobConfig.analysis); + if (numTopFeatureImportanceValues !== undefined) { + numTopFeatureImportanceValuesValid = validateNumTopFeatureImportanceValues( + numTopFeatureImportanceValues + ); + if (numTopFeatureImportanceValuesValid === false) { + state.advancedEditorMessages.push({ + error: i18n.translate( + 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.numTopFeatureImportanceValuesInvalid', + { + defaultMessage: + 'The value for num_top_feature_importance_values must be an integer of {min} or higher.', + values: { + min: 0, + }, + } + ), + message: '', + }); + } + } } if (sourceIndexNameEmpty) { @@ -233,6 +272,17 @@ export const validateAdvancedEditor = (state: State): State => { ), message: '', }); + } else if (destinationIndexPatternTitleExists && !createIndexPattern) { + state.advancedEditorMessages.push({ + error: i18n.translate( + 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.destinationIndexNameExistsWarn', + { + defaultMessage: + 'An index with this destination index name already exists. 
Be aware that running this analytics job will modify this destination index.', + } + ), + message: '', + }); } else if (!destinationIndexNameValid) { state.advancedEditorMessages.push({ error: i18n.translate( @@ -276,6 +326,8 @@ export const validateAdvancedEditor = (state: State): State => { }); } + state.form.destinationIndexPatternTitleExists = destinationIndexPatternTitleExists; + state.isValid = maxDistinctValuesError === undefined && excludesValid && @@ -290,6 +342,7 @@ export const validateAdvancedEditor = (state: State): State => { destinationIndexNameValid && !dependentVariableEmpty && !modelMemoryLimitEmpty && + numTopFeatureImportanceValuesValid && (!destinationIndexPatternTitleExists || !createIndexPattern); return state; @@ -343,6 +396,7 @@ const validateForm = (state: State): State => { dependentVariable, maxDistinctValuesError, modelMemoryLimit, + numTopFeatureImportanceValuesValid, } = state.form; const { estimatedModelMemoryLimit } = state; @@ -368,6 +422,7 @@ const validateForm = (state: State): State => { !destinationIndexNameEmpty && destinationIndexNameValid && !dependentVariableEmpty && + numTopFeatureImportanceValuesValid && (!destinationIndexPatternTitleExists || !createIndexPattern); return state; @@ -443,6 +498,12 @@ export function reducer(state: State, action: Action): State { newFormState.sourceIndexNameValid = Object.keys(validationMessages).length === 0; } + if (action.payload.numTopFeatureImportanceValues !== undefined) { + newFormState.numTopFeatureImportanceValuesValid = validateNumTopFeatureImportanceValues( + newFormState?.numTopFeatureImportanceValues + ); + } + return state.isAdvancedEditorEnabled ? validateAdvancedEditor({ ...state, form: newFormState }) : validateForm({ ...state, form: newFormState }); diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts index fe741fe9a92d46..01a39d2ef9f3b6 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/state.ts @@ -25,6 +25,8 @@ export enum DEFAULT_MODEL_MEMORY_LIMIT { classification = '100mb', } +export const DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES = 2; + export type EsIndexName = string; export type DependentVariable = string; export type IndexPatternTitle = string; @@ -69,6 +71,8 @@ export interface State { modelMemoryLimit: string | undefined; modelMemoryLimitUnitValid: boolean; modelMemoryLimitValidationResult: any; + numTopFeatureImportanceValues: number | undefined; + numTopFeatureImportanceValuesValid: boolean; previousJobType: null | AnalyticsJobType; previousSourceIndex: EsIndexName | undefined; sourceIndex: EsIndexName; @@ -124,6 +128,8 @@ export const getInitialState = (): State => ({ modelMemoryLimit: undefined, modelMemoryLimitUnitValid: true, modelMemoryLimitValidationResult: null, + numTopFeatureImportanceValues: DEFAULT_NUM_TOP_FEATURE_IMPORTANCE_VALUES, + numTopFeatureImportanceValuesValid: true, previousJobType: null, previousSourceIndex: undefined, sourceIndex: '', @@ -184,6 +190,7 @@ export const getJobConfigFromFormState = ( jobConfig.analysis = { [formState.jobType]: { dependent_variable: formState.dependentVariable, + num_top_feature_importance_values: 
formState.numTopFeatureImportanceValues, training_percent: formState.trainingPercent, }, }; @@ -218,6 +225,7 @@ export function getCloneFormStateFromJobConfig( const analysisConfig = analyticsJobConfig.analysis[jobType]; resultState.dependentVariable = analysisConfig.dependent_variable; + resultState.numTopFeatureImportanceValues = analysisConfig.num_top_feature_importance_values; resultState.trainingPercent = analysisConfig.training_percent; } diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts index 44bfc0c5a472ce..2478dbf7cf63de 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/use_create_analytics_form.ts @@ -47,7 +47,8 @@ export const useCreateAnalyticsForm = (): CreateAnalyticsFormProps => { const { refresh } = useRefreshAnalyticsList(); const { form, jobConfig, isAdvancedEditorEnabled } = state; - const { createIndexPattern, destinationIndex, jobId } = form; + const { createIndexPattern, jobId } = form; + let { destinationIndex } = form; const addRequestMessage = (requestMessage: FormMessage) => dispatch({ type: ACTION.ADD_REQUEST_MESSAGE, requestMessage }); @@ -90,9 +91,13 @@ export const useCreateAnalyticsForm = (): CreateAnalyticsFormProps => { resetRequestMessages(); setIsModalButtonDisabled(true); - const analyticsJobConfig = isAdvancedEditorEnabled + const analyticsJobConfig = (isAdvancedEditorEnabled ? 
jobConfig - : getJobConfigFromFormState(form); + : getJobConfigFromFormState(form)) as DataFrameAnalyticsConfig; + + if (isAdvancedEditorEnabled) { + destinationIndex = analyticsJobConfig.dest.index; + } try { await ml.dataFrameAnalytics.createDataFrameAnalytics(jobId, analyticsJobConfig); diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/advanced_job_creator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/advanced_job_creator.ts index e170b08949f40f..9fa0eb901c61fb 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/advanced_job_creator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/advanced_job_creator.ts @@ -45,6 +45,8 @@ export class AdvancedJobCreator extends JobCreator { super(indexPattern, savedSearch, query); this._queryString = JSON.stringify(this._datafeed_config.query); + + this._wizardInitialized$.next(true); } public addDetector( diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/categorization_job_creator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/categorization_job_creator.ts index 95fd9df892cab9..852810275139b2 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/categorization_job_creator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/categorization_job_creator.ts @@ -118,6 +118,9 @@ export class CategorizationJobCreator extends JobCreator { this._categoryFieldExamples = examples; this._validationChecks = validationChecks; this._overallValidStatus = overallValidStatus; + + this._wizardInitialized$.next(true); + return { examples, sampleSize, overallValidStatus, validationChecks }; } diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/job_creator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/job_creator.ts index 0b45209ca4f37d..ca982304bd4f30 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/job_creator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/job_creator.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
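
The job creators above start signalling wizard readiness through a wizardInitialized$ subject, and the model memory estimator further down gates its requests on it. A minimal sketch of that gating pattern with RxJS follows; the stream names and payload values here are illustrative, not the plugin's API.

import { BehaviorSubject, Subject, combineLatest } from 'rxjs';
import { map, skipWhile } from 'rxjs/operators';

// Gate a payload stream on a readiness flag: nothing runs downstream until
// the wizard has reported that it is initialized.
const wizardInitialized$ = new BehaviorSubject<boolean>(false);
const payload$ = new Subject<string>();

combineLatest([wizardInitialized$.pipe(skipWhile(ready => !ready)), payload$])
  .pipe(map(([, payload]) => payload))
  .subscribe(payload => console.log('estimate requested for', payload));

payload$.next('early');        // held back: the gate has not opened yet
wizardInitialized$.next(true); // gate opens; the latest payload ('early') is emitted
payload$.next('later');        // emitted immediately
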
*/ +import { BehaviorSubject } from 'rxjs'; import { SavedSearchSavedObject } from '../../../../../../common/types/kibana'; import { UrlConfig } from '../../../../../../common/types/custom_urls'; import { IndexPatternTitle } from '../../../../../../common/types/kibana'; @@ -57,6 +58,9 @@ export class JobCreator { stop: boolean; } = { stop: false }; + protected _wizardInitialized$ = new BehaviorSubject(false); + public wizardInitialized$ = this._wizardInitialized$.asObservable(); + constructor( indexPattern: IndexPattern, savedSearch: SavedSearchSavedObject | null, diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/multi_metric_job_creator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/multi_metric_job_creator.ts index 035af2d81adbcc..6c2030daec39d5 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/multi_metric_job_creator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/multi_metric_job_creator.ts @@ -32,6 +32,7 @@ export class MultiMetricJobCreator extends JobCreator { ) { super(indexPattern, savedSearch, query); this.createdBy = CREATED_BY_LABEL.MULTI_METRIC; + this._wizardInitialized$.next(true); } // set the split field, applying it to each detector diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/population_job_creator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/population_job_creator.ts index 319e66912ce64d..276f16c9e76b74 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/population_job_creator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/population_job_creator.ts @@ -32,6 +32,7 @@ export class PopulationJobCreator extends JobCreator { ) { super(indexPattern, savedSearch, query); this.createdBy = CREATED_BY_LABEL.POPULATION; + this._wizardInitialized$.next(true); } // add a by field to a specific detector diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/single_metric_job_creator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/single_metric_job_creator.ts index ad3aa7eae7291c..febfc5ca3eb9e5 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/single_metric_job_creator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/single_metric_job_creator.ts @@ -33,6 +33,7 @@ export class SingleMetricJobCreator extends JobCreator { ) { super(indexPattern, savedSearch, query); this.createdBy = CREATED_BY_LABEL.SINGLE_METRIC; + this._wizardInitialized$.next(true); } // only a single detector exists for this job type diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.test.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.test.ts index f85223db653991..6ca14b544ecfa5 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.test.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.test.ts @@ -7,8 +7,9 @@ import { useFakeTimers, SinonFakeTimers } from 'sinon'; import { CalculatePayload, modelMemoryEstimatorProvider } from './model_memory_estimator'; import { JobValidator } from '../../job_validator'; -import { DEFAULT_MODEL_MEMORY_LIMIT } from '../../../../../../../common/constants/new_job'; import { ml } from 
'../../../../../services/ml_api_service'; +import { JobCreator } from '../job_creator'; +import { BehaviorSubject } from 'rxjs'; jest.mock('../../../../../services/ml_api_service', () => { return { @@ -25,6 +26,8 @@ jest.mock('../../../../../services/ml_api_service', () => { describe('delay', () => { let clock: SinonFakeTimers; let modelMemoryEstimator: ReturnType; + let mockJobCreator: JobCreator; + let wizardInitialized$: BehaviorSubject; let mockJobValidator: JobValidator; beforeEach(() => { @@ -32,60 +35,74 @@ describe('delay', () => { mockJobValidator = { isModelMemoryEstimationPayloadValid: true, } as JobValidator; - modelMemoryEstimator = modelMemoryEstimatorProvider(mockJobValidator); + wizardInitialized$ = new BehaviorSubject(false); + mockJobCreator = ({ + wizardInitialized$, + } as unknown) as JobCreator; + modelMemoryEstimator = modelMemoryEstimatorProvider(mockJobCreator, mockJobValidator); }); afterEach(() => { clock.restore(); jest.clearAllMocks(); }); - test('should emit a default value first', () => { + test('should not proceed further if the wizard has not been initialized yet', () => { const spy = jest.fn(); modelMemoryEstimator.updates$.subscribe(spy); - expect(spy).toHaveBeenCalledWith(DEFAULT_MODEL_MEMORY_LIMIT); + + modelMemoryEstimator.update({ analysisConfig: { detectors: [{}] } } as CalculatePayload); + clock.tick(601); + + expect(ml.calculateModelMemoryLimit$).not.toHaveBeenCalled(); + expect(spy).not.toHaveBeenCalled(); }); - test('should debounce it for 600 ms', () => { + test('should not emit any value on subscription initialization', () => { const spy = jest.fn(); - modelMemoryEstimator.updates$.subscribe(spy); + wizardInitialized$.next(true); + expect(spy).not.toHaveBeenCalled(); + }); + test('should debounce it for 600 ms', () => { + // arrange + const spy = jest.fn(); + modelMemoryEstimator.updates$.subscribe(spy); + // act modelMemoryEstimator.update({ analysisConfig: { detectors: [{}] } } as CalculatePayload); - + wizardInitialized$.next(true); clock.tick(601); + // assert expect(spy).toHaveBeenCalledWith('15MB'); }); test('should not proceed further if the payload has not been changed', () => { const spy = jest.fn(); - modelMemoryEstimator.updates$.subscribe(spy); - modelMemoryEstimator.update({ - analysisConfig: { detectors: [{ by_field_name: 'test' }] }, - } as CalculatePayload); - - clock.tick(601); + wizardInitialized$.next(true); + // first emitted modelMemoryEstimator.update({ analysisConfig: { detectors: [{ by_field_name: 'test' }] }, } as CalculatePayload); - clock.tick(601); + // second emitted with the same configuration modelMemoryEstimator.update({ analysisConfig: { detectors: [{ by_field_name: 'test' }] }, } as CalculatePayload); - clock.tick(601); expect(ml.calculateModelMemoryLimit$).toHaveBeenCalledTimes(1); - expect(spy).toHaveBeenCalledTimes(2); + expect(spy).toHaveBeenCalledTimes(1); }); - test('should call the endpoint only with a valid payload', () => { + test('should call the endpoint only with a valid configuration', () => { const spy = jest.fn(); + wizardInitialized$.next(true); + modelMemoryEstimator.updates$.subscribe(spy); modelMemoryEstimator.update(({ @@ -93,7 +110,6 @@ describe('delay', () => { } as unknown) as CalculatePayload); // @ts-ignore mockJobValidator.isModelMemoryEstimationPayloadValid = false; - clock.tick(601); expect(ml.calculateModelMemoryLimit$).not.toHaveBeenCalled(); diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.ts 
b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.ts index 501a63492da56c..eb563e8b361078 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_creator/util/model_memory_estimator.ts @@ -5,7 +5,7 @@ */ import { i18n } from '@kbn/i18n'; -import { Observable, of, Subject, Subscription } from 'rxjs'; +import { combineLatest, Observable, of, Subject, Subscription } from 'rxjs'; import { isEqual, cloneDeep } from 'lodash'; import { catchError, @@ -16,8 +16,10 @@ import { switchMap, map, pairwise, + filter, + skipWhile, } from 'rxjs/operators'; -import { useEffect, useState } from 'react'; +import { useEffect, useMemo } from 'react'; import { DEFAULT_MODEL_MEMORY_LIMIT } from '../../../../../../../common/constants/new_job'; import { ml } from '../../../../../services/ml_api_service'; import { JobValidator, VALIDATION_DELAY_MS } from '../../job_validator/job_validator'; @@ -27,7 +29,12 @@ import { JobCreator } from '../job_creator'; export type CalculatePayload = Parameters[0]; -export const modelMemoryEstimatorProvider = (jobValidator: JobValidator) => { +type ModelMemoryEstimator = ReturnType; + +export const modelMemoryEstimatorProvider = ( + jobCreator: JobCreator, + jobValidator: JobValidator +) => { const modelMemoryCheck$ = new Subject(); const error$ = new Subject(); @@ -36,29 +43,33 @@ export const modelMemoryEstimatorProvider = (jobValidator: JobValidator) => { return error$.asObservable(); }, get updates$(): Observable { - return modelMemoryCheck$.pipe( + return combineLatest([ + jobCreator.wizardInitialized$.pipe( + skipWhile(wizardInitialized => wizardInitialized === false) + ), + modelMemoryCheck$, + ]).pipe( + map(([, payload]) => payload), // delay the request, making sure the validation is completed debounceTime(VALIDATION_DELAY_MS + 100), // clone the object to compare payloads and proceed further only // if the configuration has been changed map(cloneDeep), distinctUntilChanged(isEqual), + // don't call the endpoint with invalid payload + filter(() => jobValidator.isModelMemoryEstimationPayloadValid), switchMap(payload => { - const isPayloadValid = jobValidator.isModelMemoryEstimationPayloadValid; - - return isPayloadValid - ? 
ml.calculateModelMemoryLimit$(payload).pipe( - pluck('modelMemoryLimit'), - catchError(error => { - // eslint-disable-next-line no-console - console.error('Model memory limit could not be calculated', error.body); - error$.next(error.body); - return of(DEFAULT_MODEL_MEMORY_LIMIT); - }) - ) - : of(DEFAULT_MODEL_MEMORY_LIMIT); - }), - startWith(DEFAULT_MODEL_MEMORY_LIMIT) + return ml.calculateModelMemoryLimit$(payload).pipe( + pluck('modelMemoryLimit'), + catchError(error => { + // eslint-disable-next-line no-console + console.error('Model memory limit could not be calculated', error.body); + error$.next(error.body); + // fallback to the default in case estimation failed + return of(DEFAULT_MODEL_MEMORY_LIMIT); + }) + ); + }) ); }, update(payload: CalculatePayload) { @@ -78,7 +89,10 @@ export const useModelMemoryEstimator = ( } = useMlKibana(); // Initialize model memory estimator only once - const [modelMemoryEstimator] = useState(modelMemoryEstimatorProvider(jobValidator)); + const modelMemoryEstimator = useMemo( + () => modelMemoryEstimatorProvider(jobCreator, jobValidator), + [] + ); // Listen for estimation results and errors useEffect(() => { @@ -86,7 +100,7 @@ export const useModelMemoryEstimator = ( subscription.add( modelMemoryEstimator.updates$ - .pipe(pairwise()) + .pipe(startWith(jobCreator.modelMemoryLimit), pairwise()) .subscribe(([previousEstimation, currentEstimation]) => { // to make sure we don't overwrite a manual input if ( diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_validator/job_validator.ts b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_validator/job_validator.ts index 2650f89cf25ca3..a942603d7f9d4e 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/common/job_validator/job_validator.ts +++ b/x-pack/plugins/ml/public/application/jobs/new_job/common/job_validator/job_validator.ts @@ -137,6 +137,7 @@ export class JobValidator { const formattedJobConfig = this._jobCreator.formattedJobJson; const formattedDatafeedConfig = this._jobCreator.formattedDatafeedJson; + this._runAdvancedValidation(); // only validate if the config has changed if ( forceValidate || @@ -151,7 +152,6 @@ export class JobValidator { this._lastDatafeedConfig = formattedDatafeedConfig; this._validateTimeout = setTimeout(() => { this._runBasicValidation(); - this._runAdvancedValidation(); this._jobCreatorSubject$.next(this._jobCreator); diff --git a/x-pack/plugins/ml/public/application/jobs/new_job/pages/new_job/wizard.tsx b/x-pack/plugins/ml/public/application/jobs/new_job/pages/new_job/wizard.tsx index 2ca0607f81a1e0..bfb34b977ec979 100644 --- a/x-pack/plugins/ml/public/application/jobs/new_job/pages/new_job/wizard.tsx +++ b/x-pack/plugins/ml/public/application/jobs/new_job/pages/new_job/wizard.tsx @@ -79,8 +79,6 @@ export const Wizard: FC = ({ stringifyConfigs(jobCreator.jobConfig, jobCreator.datafeedConfig) ); - useModelMemoryEstimator(jobCreator, jobValidator, jobCreatorUpdate, jobCreatorUpdated); - useEffect(() => { const subscription = jobValidator.validationResult$.subscribe(() => { setJobValidatorUpdate(jobValidatorUpdated); @@ -123,6 +121,8 @@ export const Wizard: FC = ({ } }, [currentStep]); + useModelMemoryEstimator(jobCreator, jobValidator, jobCreatorUpdate, jobCreatorUpdated); + return ( @@ -47,10 +44,9 @@ export function initManagementSection( defaultMessage: 'Jobs list', }), order: 10, - async mount({ element, setBreadcrumbs }) { - const [coreStart] = await core.getStartServices(); - setBreadcrumbs(getJobsListBreadcrumbs()); - 
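
The estimator subscription above seeds the stream with the currently configured model memory limit and compares consecutive values, so a manual edit is not clobbered by a stale estimate. A small sketch of the startWith + pairwise idiom, with illustrative values:

import { Subject } from 'rxjs';
import { pairwise, startWith } from 'rxjs/operators';

const estimates$ = new Subject<string>();

estimates$
  .pipe(startWith('10MB' /* currently configured limit */), pairwise())
  .subscribe(([previous, current]) => {
    // The consumer can check `previous` against the form value before applying
    // `current`, which is the "don't overwrite a manual input" guard above.
    console.log(previous, '->', current);
  });

estimates$.next('15MB'); // logs: 10MB -> 15MB
estimates$.next('20MB'); // logs: 15MB -> 20MB
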
return renderApp(element, coreStart); + async mount(params) { + const { mountApp } = await import('./jobs_list'); + return mountApp(core, params); }, }); } diff --git a/x-pack/plugins/ml/public/application/management/jobs_list/index.ts b/x-pack/plugins/ml/public/application/management/jobs_list/index.ts index 77fa4b9c35b46b..cfe37ce14bb788 100644 --- a/x-pack/plugins/ml/public/application/management/jobs_list/index.ts +++ b/x-pack/plugins/ml/public/application/management/jobs_list/index.ts @@ -6,13 +6,25 @@ import ReactDOM, { unmountComponentAtNode } from 'react-dom'; import React from 'react'; -import { CoreStart } from 'kibana/public'; +import { CoreSetup, CoreStart } from 'kibana/public'; +import { ManagementAppMountParams } from '../../../../../../../src/plugins/management/public/'; +import { MlStartDependencies } from '../../../plugin'; import { JobsListPage } from './components'; +import { getJobsListBreadcrumbs } from '../breadcrumbs'; -export const renderApp = (element: HTMLElement, coreStart: CoreStart) => { +const renderApp = (element: HTMLElement, coreStart: CoreStart) => { const I18nContext = coreStart.i18n.Context; ReactDOM.render(React.createElement(JobsListPage, { I18nContext }), element); return () => { unmountComponentAtNode(element); }; }; + +export async function mountApp( + core: CoreSetup, + params: ManagementAppMountParams +) { + const [coreStart] = await core.getStartServices(); + params.setBreadcrumbs(getJobsListBreadcrumbs()); + return renderApp(params.element, coreStart); +} diff --git a/x-pack/plugins/reporting/public/lib/reporting_api_client.ts b/x-pack/plugins/reporting/public/lib/reporting_api_client.ts index cddfcd3ec855a6..b6c33860752d61 100644 --- a/x-pack/plugins/reporting/public/lib/reporting_api_client.ts +++ b/x-pack/plugins/reporting/public/lib/reporting_api_client.ts @@ -9,12 +9,7 @@ import rison from 'rison-node'; import { HttpSetup } from 'src/core/public'; import { add } from './job_completion_notifications'; -import { - API_LIST_URL, - API_BASE_URL, - API_BASE_GENERATE, - REPORTING_MANAGEMENT_HOME, -} from '../../constants'; +import { API_LIST_URL, API_BASE_GENERATE, REPORTING_MANAGEMENT_HOME } from '../../constants'; import { JobId, SourceJob } from '../..'; export interface JobQueueEntry { @@ -129,12 +124,17 @@ export class ReportingAPIClient { }); }; + /* + * Return a URL to queue a job, with the job params encoded in the query string of the URL. 
Used for copying POST URL + */ public getReportingJobPath = (exportType: string, jobParams: JobParams) => { const params = stringify({ jobParams: rison.encode(jobParams) }); - - return `${this.http.basePath.prepend(API_BASE_URL)}/${exportType}?${params}`; + return `${this.http.basePath.prepend(API_BASE_GENERATE)}/${exportType}?${params}`; }; + /* + * Sends a request to queue a job, with the job params in the POST body + */ public createReportingJob = async (exportType: string, jobParams: any) => { const jobParamsRison = rison.encode(jobParams); const resp = await this.http.post(`${API_BASE_GENERATE}/${exportType}`, { @@ -154,5 +154,8 @@ export class ReportingAPIClient { public getDownloadLink = (jobId: JobId) => this.http.basePath.prepend(`${API_LIST_URL}/download/${jobId}`); - public getBasePath = () => this.http.basePath.get(); + /* + * provides the raw server basePath to allow it to be stripped out from relativeUrls in job params + */ + public getServerBasePath = () => this.http.basePath.serverBasePath; } diff --git a/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx b/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx index e9eaa9c2ed2a17..2a955ea398bd4e 100644 --- a/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx +++ b/x-pack/plugins/reporting/public/share_context_menu/register_pdf_png_reporting.tsx @@ -58,9 +58,10 @@ export const reportingPDFPNGProvider = ({ } const getReportingJobParams = () => { + // Relative URL must have URL prefix (Spaces ID prefix), but not server basePath // Replace hashes with original RISON values. const relativeUrl = shareableUrl.replace( - window.location.origin + apiClient.getBasePath(), + window.location.origin + apiClient.getServerBasePath(), '' ); @@ -80,7 +81,7 @@ export const reportingPDFPNGProvider = ({ const getPngJobParams = () => { // Replace hashes with original RISON values. const relativeUrl = shareableUrl.replace( - window.location.origin + apiClient.getBasePath(), + window.location.origin + apiClient.getServerBasePath(), '' ); diff --git a/x-pack/plugins/spaces/server/lib/__snapshots__/create_default_space.test.ts.snap b/x-pack/plugins/spaces/server/lib/__snapshots__/create_default_space.test.ts.snap deleted file mode 100644 index bbb3b1918718df..00000000000000 --- a/x-pack/plugins/spaces/server/lib/__snapshots__/create_default_space.test.ts.snap +++ /dev/null @@ -1,5 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`it throws all other errors from the saved objects client when checking for the default space 1`] = `"unit test: unexpected exception condition"`; - -exports[`it throws other errors if there is an error creating the default space 1`] = `"unit test: some other unexpected error"`; diff --git a/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.test.ts b/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.test.ts index 9ef229159a8855..59e157c3fc2dba 100644 --- a/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.test.ts +++ b/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.test.ts @@ -4,20 +4,31 @@ * you may not use this file except in compliance with the Elastic License. 
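
A rough illustration of why the share menu above strips the server basePath rather than basePath.get(): the relative URL stored in the job params must keep the space prefix. The host, basePath, and space id below are made up.

// With a server basePath of '/kbn' and a space 'marketing', a shareable URL
// looks roughly like this (illustrative, not a real deployment):
const shareableUrl = 'https://example.org/kbn/s/marketing/app/kibana#/dashboard/abc';
const serverBasePath = '/kbn'; // what getServerBasePath() would return

// Strip only the server basePath; the space prefix stays in the relative URL.
const relativeUrl = shareableUrl.replace('https://example.org' + serverBasePath, '');
// relativeUrl === '/s/marketing/app/kibana#/dashboard/abc'
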
*/ import { - SavedObjectsSchema, - SavedObjectsLegacyService, - SavedObjectsClientContract, SavedObjectsImportResponse, SavedObjectsImportOptions, SavedObjectsExportOptions, } from 'src/core/server'; import { copySavedObjectsToSpacesFactory } from './copy_to_spaces'; import { Readable } from 'stream'; +import { coreMock, savedObjectsTypeRegistryMock, httpServerMock } from 'src/core/server/mocks'; + +jest.mock('../../../../../../src/core/server', () => { + return { + exportSavedObjectsToStream: jest.fn(), + importSavedObjectsFromStream: jest.fn(), + }; +}); +import { + exportSavedObjectsToStream, + importSavedObjectsFromStream, +} from '../../../../../../src/core/server'; interface SetupOpts { objects: Array<{ type: string; id: string; attributes: Record }>; - getSortedObjectsForExportImpl?: (opts: SavedObjectsExportOptions) => Promise; - importSavedObjectsImpl?: (opts: SavedObjectsImportOptions) => Promise; + exportSavedObjectsToStreamImpl?: (opts: SavedObjectsExportOptions) => Promise; + importSavedObjectsFromStreamImpl?: ( + opts: SavedObjectsImportOptions + ) => Promise; } const expectStreamToContainObjects = async ( @@ -40,49 +51,75 @@ const expectStreamToContainObjects = async ( describe('copySavedObjectsToSpaces', () => { const setup = (setupOpts: SetupOpts) => { - const savedObjectsClient = (null as unknown) as SavedObjectsClientContract; + const coreStart = coreMock.createStart(); + + const typeRegistry = savedObjectsTypeRegistryMock.create(); + typeRegistry.getAllTypes.mockReturnValue([ + { + name: 'dashboard', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'visualization', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'globaltype', + namespaceAgnostic: true, + hidden: false, + mappings: { properties: {} }, + }, + ]); - const savedObjectsService: SavedObjectsLegacyService = ({ - importExport: { - objectLimit: 1000, - getSortedObjectsForExport: - setupOpts.getSortedObjectsForExportImpl || - jest.fn().mockResolvedValue( - new Readable({ - objectMode: true, - read() { - setupOpts.objects.forEach(o => this.push(o)); + typeRegistry.isNamespaceAgnostic.mockImplementation((type: string) => + typeRegistry.getAllTypes().some(t => t.name === type && t.namespaceAgnostic) + ); - this.push(null); - }, - }) - ), - importSavedObjects: - setupOpts.importSavedObjectsImpl || - jest.fn().mockImplementation(async (importOpts: SavedObjectsImportOptions) => { - await expectStreamToContainObjects(importOpts.readStream, setupOpts.objects); - const response: SavedObjectsImportResponse = { - success: true, - successCount: setupOpts.objects.length, - }; + coreStart.savedObjects.getTypeRegistry.mockReturnValue(typeRegistry); - return Promise.resolve(response); - }), - }, - types: ['dashboard', 'visualization', 'globalType'], - schema: new SavedObjectsSchema({ - globalType: { isNamespaceAgnostic: true }, - }), - } as unknown) as SavedObjectsLegacyService; + (exportSavedObjectsToStream as jest.Mock).mockImplementation( + async (opts: SavedObjectsExportOptions) => { + return ( + setupOpts.exportSavedObjectsToStreamImpl?.(opts) ?? 
+ new Readable({ + objectMode: true, + read() { + setupOpts.objects.forEach(o => this.push(o)); + + this.push(null); + }, + }) + ); + } + ); + + (importSavedObjectsFromStream as jest.Mock).mockImplementation( + async (opts: SavedObjectsImportOptions) => { + const defaultImpl = async () => { + await expectStreamToContainObjects(opts.readStream, setupOpts.objects); + const response: SavedObjectsImportResponse = { + success: true, + successCount: setupOpts.objects.length, + }; + + return Promise.resolve(response); + }; + + return setupOpts.importSavedObjectsFromStreamImpl?.(opts) ?? defaultImpl(); + } + ); return { - savedObjectsClient, - savedObjectsService, + savedObjects: coreStart.savedObjects, }; }; it('uses the Saved Objects Service to perform an export followed by a series of imports', async () => { - const { savedObjectsClient, savedObjectsService } = setup({ + const { savedObjects } = setup({ objects: [ { type: 'dashboard', @@ -102,9 +139,12 @@ describe('copySavedObjectsToSpaces', () => { ], }); + const request = httpServerMock.createKibanaRequest(); + const copySavedObjectsToSpaces = copySavedObjectsToSpacesFactory( - savedObjectsClient, - savedObjectsService + savedObjects, + () => 1000, + request ); const result = await copySavedObjectsToSpaces('sourceSpace', ['destination1', 'destination2'], { @@ -133,8 +173,7 @@ describe('copySavedObjectsToSpaces', () => { } `); - expect((savedObjectsService.importExport.getSortedObjectsForExport as jest.Mock).mock.calls) - .toMatchInlineSnapshot(` + expect((exportSavedObjectsToStream as jest.Mock).mock.calls).toMatchInlineSnapshot(` Array [ Array [ Object { @@ -148,14 +187,23 @@ describe('copySavedObjectsToSpaces', () => { "type": "dashboard", }, ], - "savedObjectsClient": null, + "savedObjectsClient": Object { + "bulkCreate": [MockFunction], + "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], + "create": [MockFunction], + "delete": [MockFunction], + "errors": [Function], + "find": [MockFunction], + "get": [MockFunction], + "update": [MockFunction], + }, }, ], ] `); - expect((savedObjectsService.importExport.importSavedObjects as jest.Mock).mock.calls) - .toMatchInlineSnapshot(` + expect((importSavedObjectsFromStream as jest.Mock).mock.calls).toMatchInlineSnapshot(` Array [ Array [ Object { @@ -203,7 +251,17 @@ describe('copySavedObjectsToSpaces', () => { }, "readable": false, }, - "savedObjectsClient": null, + "savedObjectsClient": Object { + "bulkCreate": [MockFunction], + "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], + "create": [MockFunction], + "delete": [MockFunction], + "errors": [Function], + "find": [MockFunction], + "get": [MockFunction], + "update": [MockFunction], + }, "supportedTypes": Array [ "dashboard", "visualization", @@ -256,7 +314,17 @@ describe('copySavedObjectsToSpaces', () => { }, "readable": false, }, - "savedObjectsClient": null, + "savedObjectsClient": Object { + "bulkCreate": [MockFunction], + "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], + "create": [MockFunction], + "delete": [MockFunction], + "errors": [Function], + "find": [MockFunction], + "get": [MockFunction], + "update": [MockFunction], + }, "supportedTypes": Array [ "dashboard", "visualization", @@ -285,9 +353,10 @@ describe('copySavedObjectsToSpaces', () => { attributes: {}, }, ]; - const { savedObjectsClient, savedObjectsService } = setup({ + + const { savedObjects } = setup({ objects, - importSavedObjectsImpl: async opts => { + importSavedObjectsFromStreamImpl: async opts => { if (opts.namespace === 'failure-space') { 
throw new Error(`Some error occurred!`); } @@ -299,9 +368,12 @@ describe('copySavedObjectsToSpaces', () => { }, }); + const request = httpServerMock.createKibanaRequest(); + const copySavedObjectsToSpaces = copySavedObjectsToSpacesFactory( - savedObjectsClient, - savedObjectsService + savedObjects, + () => 1000, + request ); const result = await copySavedObjectsToSpaces( @@ -343,7 +415,7 @@ describe('copySavedObjectsToSpaces', () => { }); it(`handles stream read errors`, async () => { - const { savedObjectsClient, savedObjectsService } = setup({ + const { savedObjects } = setup({ objects: [ { type: 'dashboard', @@ -361,7 +433,7 @@ describe('copySavedObjectsToSpaces', () => { attributes: {}, }, ], - getSortedObjectsForExportImpl: opts => { + exportSavedObjectsToStreamImpl: opts => { return Promise.resolve( new Readable({ objectMode: true, @@ -373,9 +445,12 @@ describe('copySavedObjectsToSpaces', () => { }, }); + const request = httpServerMock.createKibanaRequest(); + const copySavedObjectsToSpaces = copySavedObjectsToSpacesFactory( - savedObjectsClient, - savedObjectsService + savedObjects, + () => 1000, + request ); await expect( diff --git a/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.ts b/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.ts index 04b09b5e05b83f..dca6f2a6206ab9 100644 --- a/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.ts +++ b/x-pack/plugins/spaces/server/lib/copy_to_spaces/copy_to_spaces.ts @@ -4,42 +4,42 @@ * you may not use this file except in compliance with the Elastic License. */ -import { - SavedObjectsClientContract, - SavedObjectsLegacyService, - SavedObject, -} from 'src/core/server'; +import { SavedObject, KibanaRequest, CoreStart } from 'src/core/server'; import { Readable } from 'stream'; -import { SavedObjectsClientProviderOptions } from 'src/core/server'; +import { + exportSavedObjectsToStream, + importSavedObjectsFromStream, +} from '../../../../../../src/core/server'; import { spaceIdToNamespace } from '../utils/namespace'; import { CopyOptions, CopyResponse } from './types'; import { getEligibleTypes } from './lib/get_eligible_types'; import { createReadableStreamFromArray } from './lib/readable_stream_from_array'; import { createEmptyFailureResponse } from './lib/create_empty_failure_response'; import { readStreamToCompletion } from './lib/read_stream_to_completion'; - -export const COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS: SavedObjectsClientProviderOptions = { - excludedWrappers: ['spaces'], -}; +import { COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS } from './lib/saved_objects_client_opts'; export function copySavedObjectsToSpacesFactory( - savedObjectsClient: SavedObjectsClientContract, - savedObjectsService: SavedObjectsLegacyService + savedObjects: CoreStart['savedObjects'], + getImportExportObjectLimit: () => number, + request: KibanaRequest ) { - const { importExport, types, schema } = savedObjectsService; - const eligibleTypes = getEligibleTypes({ types, schema }); + const { getTypeRegistry, getScopedClient } = savedObjects; + + const savedObjectsClient = getScopedClient(request, COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS); + + const eligibleTypes = getEligibleTypes(getTypeRegistry()); const exportRequestedObjects = async ( sourceSpaceId: string, options: Pick ) => { - const objectStream = await importExport.getSortedObjectsForExport({ + const objectStream = await exportSavedObjectsToStream({ namespace: spaceIdToNamespace(sourceSpaceId), includeReferencesDeep: options.includeReferences, 
excludeExportDetails: true, objects: options.objects, savedObjectsClient, - exportSizeLimit: importExport.objectLimit, + exportSizeLimit: getImportExportObjectLimit(), }); return readStreamToCompletion(objectStream); @@ -51,9 +51,9 @@ export function copySavedObjectsToSpacesFactory( options: CopyOptions ) => { try { - const importResponse = await importExport.importSavedObjects({ + const importResponse = await importSavedObjectsFromStream({ namespace: spaceIdToNamespace(spaceId), - objectLimit: importExport.objectLimit, + objectLimit: getImportExportObjectLimit(), overwrite: options.overwrite, savedObjectsClient, supportedTypes: eligibleTypes, diff --git a/x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/get_eligible_types.ts b/x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/get_eligible_types.ts index 76bb374f9eb6d4..2a54921c055688 100644 --- a/x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/get_eligible_types.ts +++ b/x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/get_eligible_types.ts @@ -4,11 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { SavedObjectsLegacyService } from 'src/core/server'; +import { SavedObjectTypeRegistry } from 'src/core/server'; -export function getEligibleTypes({ - types, - schema, -}: Pick<SavedObjectsLegacyService, 'types' | 'schema'>) { - return types.filter(type => !schema.isNamespaceAgnostic(type)); +export function getEligibleTypes( + typeRegistry: Pick<SavedObjectTypeRegistry, 'getAllTypes' | 'isNamespaceAgnostic'> +) { + return typeRegistry + .getAllTypes() + .filter(type => !typeRegistry.isNamespaceAgnostic(type.name)) + .map(type => type.name); } diff --git a/x-pack/legacy/plugins/reporting/export_types/common/layouts/types.d.ts b/x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/saved_objects_client_opts.ts similarity index 55% rename from x-pack/legacy/plugins/reporting/export_types/common/layouts/types.d.ts rename to x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/saved_objects_client_opts.ts index ccfa82ca0ae533..a16cd00fd86605 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/layouts/types.d.ts +++ b/x-pack/plugins/spaces/server/lib/copy_to_spaces/lib/saved_objects_client_opts.ts @@ -4,9 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Size } from './layout'; +import { SavedObjectsClientProviderOptions } from 'src/core/server'; -export interface CaptureConfig { - zoom: number; - viewport: Size; -} +export const COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS: SavedObjectsClientProviderOptions = { + excludedWrappers: ['spaces'], +}; diff --git a/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.test.ts b/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.test.ts index 25ed4dee6d4d05..7809f1f8be66f5 100644 --- a/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.test.ts +++ b/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.test.ts @@ -4,20 +4,29 @@ * you may not use this file except in compliance with the Elastic License.
*/ import { - SavedObjectsSchema, - SavedObjectsLegacyService, - SavedObjectsClientContract, SavedObjectsImportResponse, SavedObjectsResolveImportErrorsOptions, SavedObjectsExportOptions, } from 'src/core/server'; +import { coreMock, savedObjectsTypeRegistryMock, httpServerMock } from 'src/core/server/mocks'; import { Readable } from 'stream'; import { resolveCopySavedObjectsToSpacesConflictsFactory } from './resolve_copy_conflicts'; +jest.mock('../../../../../../src/core/server', () => { + return { + exportSavedObjectsToStream: jest.fn(), + resolveSavedObjectsImportErrors: jest.fn(), + }; +}); +import { + exportSavedObjectsToStream, + resolveSavedObjectsImportErrors, +} from '../../../../../../src/core/server'; + interface SetupOpts { objects: Array<{ type: string; id: string; attributes: Record }>; - getSortedObjectsForExportImpl?: (opts: SavedObjectsExportOptions) => Promise; - resolveImportErrorsImpl?: ( + exportSavedObjectsToStreamImpl?: (opts: SavedObjectsExportOptions) => Promise; + resolveSavedObjectsImportErrorsImpl?: ( opts: SavedObjectsResolveImportErrorsOptions ) => Promise; } @@ -42,52 +51,76 @@ const expectStreamToContainObjects = async ( describe('resolveCopySavedObjectsToSpacesConflicts', () => { const setup = (setupOpts: SetupOpts) => { - const savedObjectsService: SavedObjectsLegacyService = ({ - importExport: { - objectLimit: 1000, - getSortedObjectsForExport: - setupOpts.getSortedObjectsForExportImpl || - jest.fn().mockResolvedValue( - new Readable({ - objectMode: true, - read() { - setupOpts.objects.forEach(o => this.push(o)); - - this.push(null); - }, - }) - ), - resolveImportErrors: - setupOpts.resolveImportErrorsImpl || - jest - .fn() - .mockImplementation(async (resolveOpts: SavedObjectsResolveImportErrorsOptions) => { - await expectStreamToContainObjects(resolveOpts.readStream, setupOpts.objects); - - const response: SavedObjectsImportResponse = { - success: true, - successCount: setupOpts.objects.length, - }; - - return response; - }), + const coreStart = coreMock.createStart(); + + const typeRegistry = savedObjectsTypeRegistryMock.create(); + typeRegistry.getAllTypes.mockReturnValue([ + { + name: 'dashboard', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'visualization', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, }, - types: ['dashboard', 'visualization', 'globalType'], - schema: new SavedObjectsSchema({ - globalType: { isNamespaceAgnostic: true }, - }), - } as unknown) as SavedObjectsLegacyService; + { + name: 'globaltype', + namespaceAgnostic: true, + hidden: false, + mappings: { properties: {} }, + }, + ]); + + typeRegistry.isNamespaceAgnostic.mockImplementation((type: string) => + typeRegistry.getAllTypes().some(t => t.name === type && t.namespaceAgnostic) + ); - const savedObjectsClient = (null as unknown) as SavedObjectsClientContract; + coreStart.savedObjects.getTypeRegistry.mockReturnValue(typeRegistry); + + (exportSavedObjectsToStream as jest.Mock).mockImplementation( + async (opts: SavedObjectsExportOptions) => { + return ( + setupOpts.exportSavedObjectsToStreamImpl?.(opts) ?? 
+ new Readable({ + objectMode: true, + read() { + setupOpts.objects.forEach(o => this.push(o)); + + this.push(null); + }, + }) + ); + } + ); + + (resolveSavedObjectsImportErrors as jest.Mock).mockImplementation( + async (opts: SavedObjectsResolveImportErrorsOptions) => { + const defaultImpl = async () => { + await expectStreamToContainObjects(opts.readStream, setupOpts.objects); + + const response: SavedObjectsImportResponse = { + success: true, + successCount: setupOpts.objects.length, + }; + + return response; + }; + + return setupOpts.resolveSavedObjectsImportErrorsImpl?.(opts) ?? defaultImpl(); + } + ); return { - savedObjectsClient, - savedObjectsService, + savedObjects: coreStart.savedObjects, }; }; it('uses the Saved Objects Service to perform an export followed by a series of conflict resolution calls', async () => { - const { savedObjectsClient, savedObjectsService } = setup({ + const { savedObjects } = setup({ objects: [ { type: 'dashboard', @@ -107,9 +140,12 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { ], }); + const request = httpServerMock.createKibanaRequest(); + const resolveCopySavedObjectsToSpacesConflicts = resolveCopySavedObjectsToSpacesConflictsFactory( - savedObjectsClient, - savedObjectsService + savedObjects, + () => 1000, + request ); const result = await resolveCopySavedObjectsToSpacesConflicts('sourceSpace', { @@ -153,8 +189,7 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { } `); - expect((savedObjectsService.importExport.getSortedObjectsForExport as jest.Mock).mock.calls) - .toMatchInlineSnapshot(` + expect((exportSavedObjectsToStream as jest.Mock).mock.calls).toMatchInlineSnapshot(` Array [ Array [ Object { @@ -168,14 +203,23 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { "type": "dashboard", }, ], - "savedObjectsClient": null, + "savedObjectsClient": Object { + "bulkCreate": [MockFunction], + "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], + "create": [MockFunction], + "delete": [MockFunction], + "errors": [Function], + "find": [MockFunction], + "get": [MockFunction], + "update": [MockFunction], + }, }, ], ] `); - expect((savedObjectsService.importExport.resolveImportErrors as jest.Mock).mock.calls) - .toMatchInlineSnapshot(` + expect((resolveSavedObjectsImportErrors as jest.Mock).mock.calls).toMatchInlineSnapshot(` Array [ Array [ Object { @@ -230,7 +274,17 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { "type": "visualization", }, ], - "savedObjectsClient": null, + "savedObjectsClient": Object { + "bulkCreate": [MockFunction], + "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], + "create": [MockFunction], + "delete": [MockFunction], + "errors": [Function], + "find": [MockFunction], + "get": [MockFunction], + "update": [MockFunction], + }, "supportedTypes": Array [ "dashboard", "visualization", @@ -290,7 +344,17 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { "type": "visualization", }, ], - "savedObjectsClient": null, + "savedObjectsClient": Object { + "bulkCreate": [MockFunction], + "bulkGet": [MockFunction], + "bulkUpdate": [MockFunction], + "create": [MockFunction], + "delete": [MockFunction], + "errors": [Function], + "find": [MockFunction], + "get": [MockFunction], + "update": [MockFunction], + }, "supportedTypes": Array [ "dashboard", "visualization", @@ -320,9 +384,9 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { }, ]; - const { savedObjectsClient, savedObjectsService } = setup({ + const { savedObjects } = setup({ objects, - 
resolveImportErrorsImpl: async opts => { + resolveSavedObjectsImportErrorsImpl: async opts => { if (opts.namespace === 'failure-space') { throw new Error(`Some error occurred!`); } @@ -334,9 +398,12 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { }, }); + const request = httpServerMock.createKibanaRequest(); + const resolveCopySavedObjectsToSpacesConflicts = resolveCopySavedObjectsToSpacesConflictsFactory( - savedObjectsClient, - savedObjectsService + savedObjects, + () => 1000, + request ); const result = await resolveCopySavedObjectsToSpacesConflicts('sourceSpace', { @@ -396,9 +463,9 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { }); it(`handles stream read errors`, async () => { - const { savedObjectsClient, savedObjectsService } = setup({ + const { savedObjects } = setup({ objects: [], - getSortedObjectsForExportImpl: opts => { + exportSavedObjectsToStreamImpl: opts => { return Promise.resolve( new Readable({ objectMode: true, @@ -410,9 +477,12 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => { }, }); + const request = httpServerMock.createKibanaRequest(); + const resolveCopySavedObjectsToSpacesConflicts = resolveCopySavedObjectsToSpacesConflictsFactory( - savedObjectsClient, - savedObjectsService + savedObjects, + () => 1000, + request ); await expect( diff --git a/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.ts b/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.ts index 1ec642c1587741..38668d1b989a03 100644 --- a/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.ts +++ b/x-pack/plugins/spaces/server/lib/copy_to_spaces/resolve_copy_conflicts.ts @@ -4,37 +4,42 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { - SavedObjectsClientContract, - SavedObjectsLegacyService, - SavedObject, -} from 'src/core/server'; import { Readable } from 'stream'; +import { SavedObject, CoreStart, KibanaRequest } from 'src/core/server'; +import { + exportSavedObjectsToStream, + resolveSavedObjectsImportErrors, +} from '../../../../../../src/core/server'; import { spaceIdToNamespace } from '../utils/namespace'; import { CopyOptions, ResolveConflictsOptions, CopyResponse } from './types'; import { getEligibleTypes } from './lib/get_eligible_types'; import { createEmptyFailureResponse } from './lib/create_empty_failure_response'; import { readStreamToCompletion } from './lib/read_stream_to_completion'; import { createReadableStreamFromArray } from './lib/readable_stream_from_array'; +import { COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS } from './lib/saved_objects_client_opts'; export function resolveCopySavedObjectsToSpacesConflictsFactory( - savedObjectsClient: SavedObjectsClientContract, - savedObjectsService: SavedObjectsLegacyService + savedObjects: CoreStart['savedObjects'], + getImportExportObjectLimit: () => number, + request: KibanaRequest ) { - const { importExport, types, schema } = savedObjectsService; - const eligibleTypes = getEligibleTypes({ types, schema }); + const { getTypeRegistry, getScopedClient } = savedObjects; + + const savedObjectsClient = getScopedClient(request, COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS); + + const eligibleTypes = getEligibleTypes(getTypeRegistry()); const exportRequestedObjects = async ( sourceSpaceId: string, options: Pick ) => { - const objectStream = await importExport.getSortedObjectsForExport({ + const objectStream = await exportSavedObjectsToStream({ namespace: spaceIdToNamespace(sourceSpaceId), includeReferencesDeep: options.includeReferences, excludeExportDetails: true, objects: options.objects, savedObjectsClient, - exportSizeLimit: importExport.objectLimit, + exportSizeLimit: getImportExportObjectLimit(), }); return readStreamToCompletion(objectStream); }; @@ -50,9 +55,9 @@ export function resolveCopySavedObjectsToSpacesConflictsFactory( }> ) => { try { - const importResponse = await importExport.resolveImportErrors({ + const importResponse = await resolveSavedObjectsImportErrors({ namespace: spaceIdToNamespace(spaceId), - objectLimit: importExport.objectLimit, + objectLimit: getImportExportObjectLimit(), savedObjectsClient, supportedTypes: eligibleTypes, readStream: objectsStream, diff --git a/x-pack/plugins/spaces/server/lib/create_default_space.test.ts b/x-pack/plugins/spaces/server/lib/create_default_space.test.ts index 8486508c45364d..03e774ce67d2bf 100644 --- a/x-pack/plugins/spaces/server/lib/create_default_space.test.ts +++ b/x-pack/plugins/spaces/server/lib/create_default_space.test.ts @@ -4,9 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import Boom from 'boom'; import { createDefaultSpace } from './create_default_space'; -import { SavedObjectsLegacyService, IClusterClient } from 'src/core/server'; +import { SavedObjectsErrorHelpers } from 'src/core/server'; interface MockServerSettings { defaultExists?: boolean; @@ -23,7 +22,7 @@ const createMockDeps = (settings: MockServerSettings = {}) => { simulateCreateErrorCondition = false, } = settings; - const mockGet = jest.fn().mockImplementation(() => { + const mockGet = jest.fn().mockImplementation((type, id) => { if (simulateGetErrorCondition) { throw new Error('unit test: unexpected exception condition'); } @@ -31,12 +30,14 @@ const createMockDeps = (settings: MockServerSettings = {}) => { if (defaultExists) { return; } - throw Boom.notFound('unit test: default space not found'); + throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); }); const mockCreate = jest.fn().mockImplementation(() => { if (simulateConflict) { - throw new Error('unit test: default space already exists'); + throw SavedObjectsErrorHelpers.decorateConflictError( + new Error('unit test: default space already exists') + ); } if (simulateCreateErrorCondition) { throw new Error('unit test: some other unexpected error'); @@ -45,18 +46,9 @@ const createMockDeps = (settings: MockServerSettings = {}) => { return null; }); - const mockServer = { - config: jest.fn().mockReturnValue({ - get: jest.fn(), - }), + return { savedObjects: { - SavedObjectsClient: { - errors: { - isNotFoundError: (e: Error) => e.message === 'unit test: default space not found', - isConflictError: (e: Error) => e.message === 'unit test: default space already exists', - }, - }, - getSavedObjectsRepository: jest.fn().mockImplementation(() => { + createInternalRepository: jest.fn().mockImplementation(() => { return { get: mockGet, create: mockCreate, @@ -64,18 +56,6 @@ const createMockDeps = (settings: MockServerSettings = {}) => { }), }, }; - - mockServer.config().get.mockImplementation((key: string) => { - return settings[key]; - }); - - return { - config: mockServer.config(), - savedObjects: (mockServer.savedObjects as unknown) as SavedObjectsLegacyService, - esClient: ({ - callAsInternalUser: jest.fn(), - } as unknown) as jest.Mocked, - }; }; test(`it creates the default space when one does not exist`, async () => { @@ -85,7 +65,7 @@ test(`it creates the default space when one does not exist`, async () => { await createDefaultSpace(deps); - const repository = deps.savedObjects.getSavedObjectsRepository(); + const repository = deps.savedObjects.createInternalRepository(); expect(repository.get).toHaveBeenCalledTimes(1); expect(repository.create).toHaveBeenCalledTimes(1); @@ -109,7 +89,7 @@ test(`it does not attempt to recreate the default space if it already exists`, a await createDefaultSpace(deps); - const repository = deps.savedObjects.getSavedObjectsRepository(); + const repository = deps.savedObjects.createInternalRepository(); expect(repository.get).toHaveBeenCalledTimes(1); expect(repository.create).toHaveBeenCalledTimes(0); @@ -121,7 +101,9 @@ test(`it throws all other errors from the saved objects client when checking for simulateGetErrorCondition: true, }); - expect(createDefaultSpace(deps)).rejects.toThrowErrorMatchingSnapshot(); + expect(createDefaultSpace(deps)).rejects.toThrowErrorMatchingInlineSnapshot( + `"unit test: unexpected exception condition"` + ); }); test(`it ignores conflict errors if the default space already exists`, async () => { @@ -132,7 +114,7 @@ test(`it ignores conflict errors if the 
default space already exists`, async () await createDefaultSpace(deps); - const repository = deps.savedObjects.getSavedObjectsRepository(); + const repository = deps.savedObjects.createInternalRepository(); expect(repository.get).toHaveBeenCalledTimes(1); expect(repository.create).toHaveBeenCalledTimes(1); @@ -144,5 +126,7 @@ test(`it throws other errors if there is an error creating the default space`, a simulateCreateErrorCondition: true, }); - expect(createDefaultSpace(deps)).rejects.toThrowErrorMatchingSnapshot(); + expect(createDefaultSpace(deps)).rejects.toThrowErrorMatchingInlineSnapshot( + `"unit test: some other unexpected error"` + ); }); diff --git a/x-pack/plugins/spaces/server/lib/create_default_space.ts b/x-pack/plugins/spaces/server/lib/create_default_space.ts index 0d1a4ddab91bb5..e0cb75c54220a3 100644 --- a/x-pack/plugins/spaces/server/lib/create_default_space.ts +++ b/x-pack/plugins/spaces/server/lib/create_default_space.ts @@ -5,23 +5,20 @@ */ import { i18n } from '@kbn/i18n'; -import { SavedObjectsLegacyService, IClusterClient } from 'src/core/server'; +import { SavedObjectsServiceStart, SavedObjectsRepository } from 'src/core/server'; +import { SavedObjectsErrorHelpers } from '../../../../../src/core/server'; import { DEFAULT_SPACE_ID } from '../../common/constants'; interface Deps { - esClient: IClusterClient; - savedObjects: SavedObjectsLegacyService; + savedObjects: Pick<SavedObjectsServiceStart, 'createInternalRepository'>; } -export async function createDefaultSpace({ esClient, savedObjects }: Deps) { - const { getSavedObjectsRepository, SavedObjectsClient } = savedObjects; +export async function createDefaultSpace({ savedObjects }: Deps) { + const { createInternalRepository } = savedObjects; - const savedObjectsRepository = getSavedObjectsRepository(esClient.callAsInternalUser, ['space']); + const savedObjectsRepository = createInternalRepository(['space']); - const defaultSpaceExists = await doesDefaultSpaceExist( - SavedObjectsClient, - savedObjectsRepository - ); + const defaultSpaceExists = await doesDefaultSpaceExist(savedObjectsRepository); if (defaultSpaceExists) { return; @@ -51,19 +48,19 @@ export async function createDefaultSpace({ esClient, savedObjects }: Deps) { // Ignore conflict errors. // It is possible that another Kibana instance, or another invocation of this function // created the default space in the time it took this to complete.
- if (SavedObjectsClient.errors.isConflictError(error)) { + if (SavedObjectsErrorHelpers.isConflictError(error)) { return; } throw error; } } -async function doesDefaultSpaceExist(SavedObjectsClient: any, savedObjectsRepository: any) { +async function doesDefaultSpaceExist(savedObjectsRepository: Pick) { try { await savedObjectsRepository.get('space', DEFAULT_SPACE_ID); return true; } catch (e) { - if (SavedObjectsClient.errors.isNotFoundError(e)) { + if (SavedObjectsErrorHelpers.isNotFoundError(e)) { return false; } throw e; diff --git a/x-pack/plugins/spaces/server/lib/request_interceptors/on_post_auth_interceptor.test.ts b/x-pack/plugins/spaces/server/lib/request_interceptors/on_post_auth_interceptor.test.ts index 40e35085ea18ac..cf334bb7b34cf3 100644 --- a/x-pack/plugins/spaces/server/lib/request_interceptors/on_post_auth_interceptor.test.ts +++ b/x-pack/plugins/spaces/server/lib/request_interceptors/on_post_auth_interceptor.test.ts @@ -11,7 +11,6 @@ import { kibanaTestUser } from '@kbn/test'; import { initSpacesOnRequestInterceptor } from './on_request_interceptor'; import { CoreSetup, - SavedObjectsLegacyService, SavedObjectsErrorHelpers, IBasePath, IRouter, @@ -19,9 +18,10 @@ import { import { elasticsearchServiceMock, loggingServiceMock, + coreMock, } from '../../../../../../src/core/server/mocks'; import * as kbnTestServer from '../../../../../../src/test_utils/kbn_server'; -import { LegacyAPI, PluginsSetup } from '../../plugin'; +import { PluginsSetup } from '../../plugin'; import { SpacesService } from '../../spaces_service'; import { SpacesAuditLogger } from '../audit_logger'; import { convertSavedObjectToSpace } from '../../routes/lib'; @@ -152,35 +152,30 @@ describe.skip('onPostAuthInterceptor', () => { ] as Feature[], } as PluginsSetup['features']; - const savedObjectsService = { - SavedObjectsClient: { - errors: SavedObjectsErrorHelpers, - }, - getSavedObjectsRepository: jest.fn().mockImplementation(() => { - return { - get: (type: string, id: string) => { - if (type === 'space') { - const space = availableSpaces.find(s => s.id === id); - if (space) { - return space; - } - throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); + const mockRepository = jest.fn().mockImplementation(() => { + return { + get: (type: string, id: string) => { + if (type === 'space') { + const space = availableSpaces.find(s => s.id === id); + if (space) { + return space; } - }, - create: () => null, - }; - }), - }; + throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); + } + }, + create: () => null, + }; + }); - const legacyAPI = { - savedObjects: (savedObjectsService as unknown) as SavedObjectsLegacyService, - } as LegacyAPI; + const coreStart = coreMock.createStart(); + coreStart.savedObjects.createInternalRepository.mockImplementation(mockRepository); + coreStart.savedObjects.createScopedRepository.mockImplementation(mockRepository); - const service = new SpacesService(loggingMock, () => legacyAPI); + const service = new SpacesService(loggingMock); const spacesService = await service.setup({ http: (http as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), diff --git a/x-pack/plugins/spaces/server/lib/spaces_tutorial_context_factory.test.ts b/x-pack/plugins/spaces/server/lib/spaces_tutorial_context_factory.test.ts index 
094ca8a11816ea..1a32e861b22e1a 100644 --- a/x-pack/plugins/spaces/server/lib/spaces_tutorial_context_factory.test.ts +++ b/x-pack/plugins/spaces/server/lib/spaces_tutorial_context_factory.test.ts @@ -8,25 +8,15 @@ import * as Rx from 'rxjs'; import { DEFAULT_SPACE_ID } from '../../common/constants'; import { createSpacesTutorialContextFactory } from './spaces_tutorial_context_factory'; import { SpacesService } from '../spaces_service'; -import { SavedObjectsLegacyService } from 'src/core/server'; import { SpacesAuditLogger } from './audit_logger'; -import { - elasticsearchServiceMock, - coreMock, - loggingServiceMock, -} from '../../../../../src/core/server/mocks'; +import { coreMock, loggingServiceMock } from '../../../../../src/core/server/mocks'; import { spacesServiceMock } from '../spaces_service/spaces_service.mock'; -import { LegacyAPI } from '../plugin'; import { spacesConfig } from './__fixtures__'; import { securityMock } from '../../../security/server/mocks'; const log = loggingServiceMock.createLogger(); -const legacyAPI: LegacyAPI = { - savedObjects: {} as SavedObjectsLegacyService, -} as LegacyAPI; - -const service = new SpacesService(log, () => legacyAPI); +const service = new SpacesService(log); describe('createSpacesTutorialContextFactory', () => { it('should create a valid context factory', async () => { @@ -49,7 +39,7 @@ describe('createSpacesTutorialContextFactory', () => { it('should create context with the current space id for the default space', async () => { const spacesService = await service.setup({ http: coreMock.createSetup().http, - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreMock.createStart(), {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), diff --git a/x-pack/plugins/spaces/server/plugin.test.ts b/x-pack/plugins/spaces/server/plugin.test.ts index 4e3f4f52cbeb49..0b9905d5e9c95f 100644 --- a/x-pack/plugins/spaces/server/plugin.test.ts +++ b/x-pack/plugins/spaces/server/plugin.test.ts @@ -68,5 +68,30 @@ describe('Spaces Plugin', () => { expect(usageCollection.getCollectorByType('spaces')).toBeDefined(); }); + + it('registers the "space" saved object type and client wrapper', async () => { + const initializerContext = coreMock.createPluginInitializerContext({}); + const core = coreMock.createSetup() as CoreSetup; + const features = featuresPluginMock.createSetup(); + const licensing = licensingMock.createSetup(); + + const plugin = new Plugin(initializerContext); + + await plugin.setup(core, { features, licensing }); + + expect(core.savedObjects.registerType).toHaveBeenCalledWith({ + name: 'space', + namespaceAgnostic: true, + hidden: true, + mappings: expect.any(Object), + migrations: expect.any(Object), + }); + + expect(core.savedObjects.addClientWrapper).toHaveBeenCalledWith( + Number.MIN_SAFE_INTEGER, + 'spaces', + expect.any(Function) + ); + }); }); }); diff --git a/x-pack/plugins/spaces/server/plugin.ts b/x-pack/plugins/spaces/server/plugin.ts index d125e0f54e9c1f..a24d626c2a85d3 100644 --- a/x-pack/plugins/spaces/server/plugin.ts +++ b/x-pack/plugins/spaces/server/plugin.ts @@ -7,12 +7,7 @@ import { Observable } from 'rxjs'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; import { HomeServerPluginSetup } from 'src/plugins/home/server'; -import { - SavedObjectsLegacyService, - CoreSetup, - Logger, - PluginInitializerContext, -} from '../../../../src/core/server'; +import { CoreSetup, 
Logger, PluginInitializerContext } from '../../../../src/core/server'; import { PluginSetupContract as FeaturesPluginSetup, PluginStartContract as FeaturesPluginStart, @@ -22,7 +17,6 @@ import { LicensingPluginSetup } from '../../licensing/server'; import { createDefaultSpace } from './lib/create_default_space'; // @ts-ignore import { AuditLogger } from '../../../../server/lib/audit_logger'; -import { spacesSavedObjectsClientWrapperFactory } from './lib/saved_objects_client/saved_objects_client_wrapper_factory'; import { SpacesAuditLogger } from './lib/audit_logger'; import { createSpacesTutorialContextFactory } from './lib/spaces_tutorial_context_factory'; import { registerSpacesUsageCollector } from './usage_collection'; @@ -34,13 +28,13 @@ import { initExternalSpacesApi } from './routes/api/external'; import { initInternalSpacesApi } from './routes/api/internal'; import { initSpacesViewsRoutes } from './routes/views'; import { setupCapabilities } from './capabilities'; +import { SpacesSavedObjectsService } from './saved_objects'; /** * Describes a set of APIs that is available in the legacy platform only and required by this plugin * to function properly. */ export interface LegacyAPI { - savedObjects: SavedObjectsLegacyService; auditLogger: { create: (pluginId: string) => AuditLogger; }; @@ -108,16 +102,19 @@ export class Plugin { core: CoreSetup, plugins: PluginsSetup ): Promise { - const service = new SpacesService(this.log, this.getLegacyAPI); + const service = new SpacesService(this.log); const spacesService = await service.setup({ http: core.http, - elasticsearch: core.elasticsearch, + getStartServices: core.getStartServices, authorization: plugins.security ? plugins.security.authz : null, getSpacesAuditLogger: this.getSpacesAuditLogger, config$: this.config$, }); + const savedObjectsService = new SpacesSavedObjectsService(); + savedObjectsService.setup({ core, spacesService }); + const viewRouter = core.http.createRouter(); initSpacesViewsRoutes({ viewRouter, @@ -128,7 +125,8 @@ export class Plugin { initExternalSpacesApi({ externalRouter, log: this.log, - getSavedObjects: () => this.getLegacyAPI().savedObjects, + getStartServices: core.getStartServices, + getImportExportObjectLimit: core.savedObjects.getImportExportObjectLimit, spacesService, }); @@ -170,12 +168,11 @@ export class Plugin { __legacyCompat: { registerLegacyAPI: (legacyAPI: LegacyAPI) => { this.legacyAPI = legacyAPI; - this.setupLegacyComponents(spacesService); }, createDefaultSpace: async () => { + const [coreStart] = await core.getStartServices(); return await createDefaultSpace({ - esClient: core.elasticsearch.adminClient, - savedObjects: this.getLegacyAPI().savedObjects, + savedObjects: coreStart.savedObjects, }); }, }, @@ -183,14 +180,4 @@ export class Plugin { } public stop() {} - - private setupLegacyComponents(spacesService: SpacesServiceSetup) { - const legacyAPI = this.getLegacyAPI(); - const { addScopedSavedObjectsClientWrapperFactory, types } = legacyAPI.savedObjects; - addScopedSavedObjectsClientWrapperFactory( - Number.MIN_SAFE_INTEGER, - 'spaces', - spacesSavedObjectsClientWrapperFactory(spacesService, types) - ); - } } diff --git a/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_copy_to_space_mocks.ts b/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_copy_to_space_mocks.ts new file mode 100644 index 00000000000000..0e117b3f16e3f4 --- /dev/null +++ b/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_copy_to_space_mocks.ts @@ -0,0 +1,43 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Readable } from 'stream'; +import { createPromiseFromStreams, createConcatStream } from 'src/legacy/utils'; + +async function readStreamToCompletion(stream: Readable) { + return (await (createPromiseFromStreams([stream, createConcatStream([])]) as unknown)) as any[]; +} + +export const createExportSavedObjectsToStreamMock = () => { + return jest.fn().mockResolvedValue( + new Readable({ + objectMode: true, + read() { + this.push(null); + }, + }) + ); +}; + +export const createImportSavedObjectsFromStreamMock = () => { + return jest.fn().mockImplementation(async (opts: Record) => { + const objectsToImport: any[] = await readStreamToCompletion(opts.readStream); + return { + success: true, + successCount: objectsToImport.length, + }; + }); +}; + +export const createResolveSavedObjectsImportErrorsMock = () => { + return jest.fn().mockImplementation(async (opts: Record) => { + const objectsToImport: any[] = await readStreamToCompletion(opts.readStream); + return { + success: true, + successCount: objectsToImport.length, + }; + }); +}; diff --git a/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_legacy_api.ts b/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_legacy_api.ts deleted file mode 100644 index 7765cc3c52e966..00000000000000 --- a/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_legacy_api.ts +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { Readable } from 'stream'; -import { createPromiseFromStreams, createConcatStream } from 'src/legacy/utils/streams'; -import { SavedObjectsSchema, SavedObjectsLegacyService } from 'src/core/server'; -import { LegacyAPI } from '../../../plugin'; -import { Space } from '../../../../common/model/space'; -import { createSpaces } from '.'; - -async function readStreamToCompletion(stream: Readable) { - return (await (createPromiseFromStreams([stream, createConcatStream([])]) as unknown)) as any[]; -} - -interface LegacyAPIOpts { - spaces?: Space[]; -} - -export const createLegacyAPI = ({ - spaces = createSpaces().map(s => ({ id: s.id, ...s.attributes })), -}: LegacyAPIOpts = {}) => { - const mockSavedObjectsClientContract = { - get: jest.fn((type, id) => { - const result = spaces.filter(s => s.id === id); - if (!result.length) { - throw new Error(`not found: [${type}:${id}]`); - } - return result[0]; - }), - find: jest.fn(() => { - return { - total: spaces.length, - saved_objects: spaces, - }; - }), - create: jest.fn((type, attributes, { id }) => { - if (spaces.find(s => s.id === id)) { - throw new Error('conflict'); - } - return {}; - }), - update: jest.fn((type, id) => { - if (!spaces.find(s => s.id === id)) { - throw new Error('not found: during update'); - } - return {}; - }), - delete: jest.fn((type: string, id: string) => { - return {}; - }), - deleteByNamespace: jest.fn(), - }; - - const savedObjectsService = ({ - types: ['visualization', 'dashboard', 'index-pattern', 'globalType'], - schema: new SavedObjectsSchema({ - space: { - isNamespaceAgnostic: true, - hidden: true, - }, - globalType: { - isNamespaceAgnostic: true, - }, - }), - getScopedSavedObjectsClient: jest.fn().mockResolvedValue(mockSavedObjectsClientContract), - importExport: { - objectLimit: 10000, - getSortedObjectsForExport: jest.fn().mockResolvedValue( - new Readable({ - objectMode: true, - read() { - this.push(null); - }, - }) - ), - importSavedObjects: jest.fn().mockImplementation(async (opts: Record) => { - const objectsToImport: any[] = await readStreamToCompletion(opts.readStream); - return { - success: true, - successCount: objectsToImport.length, - }; - }), - resolveImportErrors: jest.fn().mockImplementation(async (opts: Record) => { - const objectsToImport: any[] = await readStreamToCompletion(opts.readStream); - return { - success: true, - successCount: objectsToImport.length, - }; - }), - }, - SavedObjectsClient: { - errors: { - isNotFoundError: jest.fn((e: any) => e.message.startsWith('not found:')), - isConflictError: jest.fn((e: any) => e.message.startsWith('conflict')), - }, - }, - } as unknown) as jest.Mocked; - - const legacyAPI: jest.Mocked = { - auditLogger: {} as any, - savedObjects: savedObjectsService, - }; - - return legacyAPI; -}; diff --git a/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_mock_so_service.ts b/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_mock_so_service.ts new file mode 100644 index 00000000000000..d8c318369834ec --- /dev/null +++ b/x-pack/plugins/spaces/server/routes/api/__fixtures__/create_mock_so_service.ts @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { SavedObjectsClientContract, SavedObjectsErrorHelpers } from 'src/core/server'; +import { coreMock, savedObjectsTypeRegistryMock } from '../../../../../../../src/core/server/mocks'; + +export const createMockSavedObjectsService = (spaces: any[] = []) => { + const mockSavedObjectsClientContract = ({ + get: jest.fn((type, id) => { + const result = spaces.filter(s => s.id === id); + if (!result.length) { + throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); + } + return result[0]; + }), + find: jest.fn(() => { + return { + total: spaces.length, + saved_objects: spaces, + }; + }), + create: jest.fn((type, attributes, { id }) => { + if (spaces.find(s => s.id === id)) { + throw SavedObjectsErrorHelpers.decorateConflictError(new Error(), 'space conflict'); + } + return {}; + }), + update: jest.fn((type, id) => { + if (!spaces.find(s => s.id === id)) { + throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); + } + return {}; + }), + delete: jest.fn((type: string, id: string) => { + return {}; + }), + deleteByNamespace: jest.fn(), + } as unknown) as jest.Mocked; + + const { savedObjects } = coreMock.createStart(); + + const typeRegistry = savedObjectsTypeRegistryMock.create(); + typeRegistry.getAllTypes.mockReturnValue([ + { + name: 'visualization', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'dashboard', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'index-pattern', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'globalType', + namespaceAgnostic: true, + hidden: false, + mappings: { properties: {} }, + }, + { + name: 'space', + namespaceAgnostic: true, + hidden: true, + mappings: { properties: {} }, + }, + ]); + typeRegistry.isNamespaceAgnostic.mockImplementation((type: string) => + typeRegistry.getAllTypes().some(t => t.name === type && t.namespaceAgnostic) + ); + savedObjects.getTypeRegistry.mockReturnValue(typeRegistry); + + savedObjects.getScopedClient.mockReturnValue(mockSavedObjectsClientContract); + + return savedObjects; +}; diff --git a/x-pack/plugins/spaces/server/routes/api/__fixtures__/index.ts b/x-pack/plugins/spaces/server/routes/api/__fixtures__/index.ts index 1f5a5fe2cc91e6..c37db713c4afbd 100644 --- a/x-pack/plugins/spaces/server/routes/api/__fixtures__/index.ts +++ b/x-pack/plugins/spaces/server/routes/api/__fixtures__/index.ts @@ -5,6 +5,11 @@ */ export { createSpaces } from './create_spaces'; -export { createLegacyAPI } from './create_legacy_api'; export { createMockSavedObjectsRepository } from './create_mock_so_repository'; +export { createMockSavedObjectsService } from './create_mock_so_service'; export { mockRouteContext, mockRouteContextWithInvalidLicense } from './route_contexts'; +export { + createExportSavedObjectsToStreamMock, + createImportSavedObjectsFromStreamMock, + createResolveSavedObjectsImportErrorsMock, +} from './create_copy_to_space_mocks'; diff --git a/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.test.ts b/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.test.ts index 74197e6ca7556d..5267f4cb1f1d5b 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.test.ts @@ -6,17 +6,20 @@ import * as Rx from 'rxjs'; import { createSpaces, - createLegacyAPI, createMockSavedObjectsRepository, mockRouteContext, mockRouteContextWithInvalidLicense, + 
createExportSavedObjectsToStreamMock, + createImportSavedObjectsFromStreamMock, + createResolveSavedObjectsImportErrorsMock, + createMockSavedObjectsService, } from '../__fixtures__'; import { CoreSetup, IRouter, kibanaResponseFactory, RouteValidatorConfig } from 'src/core/server'; import { loggingServiceMock, - elasticsearchServiceMock, httpServiceMock, httpServerMock, + coreMock, } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; @@ -25,25 +28,55 @@ import { initCopyToSpacesApi } from './copy_to_space'; import { spacesConfig } from '../../../lib/__fixtures__'; import { securityMock } from '../../../../../security/server/mocks'; import { ObjectType } from '@kbn/config-schema'; +jest.mock('../../../../../../../src/core/server', () => { + return { + exportSavedObjectsToStream: jest.fn(), + importSavedObjectsFromStream: jest.fn(), + resolveSavedObjectsImportErrors: jest.fn(), + kibanaResponseFactory: jest.requireActual('src/core/server').kibanaResponseFactory, + }; +}); +import { + exportSavedObjectsToStream, + importSavedObjectsFromStream, + resolveSavedObjectsImportErrors, +} from '../../../../../../../src/core/server'; describe('copy to space', () => { const spacesSavedObjects = createSpaces(); const spaces = spacesSavedObjects.map(s => ({ id: s.id, ...s.attributes })); + beforeEach(() => { + (exportSavedObjectsToStream as jest.Mock).mockReset(); + (importSavedObjectsFromStream as jest.Mock).mockReset(); + (resolveSavedObjectsImportErrors as jest.Mock).mockReset(); + }); + const setup = async () => { const httpService = httpServiceMock.createSetupContract(); const router = httpService.createRouter('') as jest.Mocked; - const legacyAPI = createLegacyAPI({ spaces }); - const savedObjectsRepositoryMock = createMockSavedObjectsRepository(spacesSavedObjects); + (exportSavedObjectsToStream as jest.Mock).mockImplementation( + createExportSavedObjectsToStreamMock() + ); + (importSavedObjectsFromStream as jest.Mock).mockImplementation( + createImportSavedObjectsFromStreamMock() + ); + (resolveSavedObjectsImportErrors as jest.Mock).mockImplementation( + createResolveSavedObjectsImportErrorsMock() + ); + const log = loggingServiceMock.create().get('spaces'); - const service = new SpacesService(log, () => legacyAPI); + const coreStart = coreMock.createStart(); + coreStart.savedObjects = createMockSavedObjectsService(spaces); + + const service = new SpacesService(log); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), @@ -65,7 +98,8 @@ describe('copy to space', () => { initCopyToSpacesApi({ externalRouter: router, - getSavedObjects: () => legacyAPI.savedObjects, + getStartServices: async () => [coreStart, {}, {}], + getImportExportObjectLimit: () => 1000, log, spacesService, }); @@ -76,6 +110,7 @@ describe('copy to space', () => { ] = router.post.mock.calls; return { + coreStart, copyToSpace: { routeValidation: ctsRouteDefinition.validate as RouteValidatorConfig<{}, {}, {}>, routeHandler: ctsRouteHandler, @@ -85,7 +120,6 @@ describe('copy to space', () => { routeHandler: resolveRouteHandler, }, savedObjectsRepositoryMock, - legacyAPI, }; }; @@ -115,7 +149,7 @@ describe('copy to space', () => { objects: [], }; - const 
{ copyToSpace, legacyAPI } = await setup(); + const { copyToSpace, coreStart } = await setup(); const request = httpServerMock.createKibanaRequest({ body: payload, @@ -124,12 +158,9 @@ describe('copy to space', () => { await copyToSpace.routeHandler(mockRouteContext, request, kibanaResponseFactory); - expect(legacyAPI.savedObjects.getScopedSavedObjectsClient).toHaveBeenCalledWith( - expect.any(Object), - { - excludedWrappers: ['spaces'], - } - ); + expect(coreStart.savedObjects.getScopedClient).toHaveBeenCalledWith(request, { + excludedWrappers: ['spaces'], + }); }); it(`requires space IDs to be unique`, async () => { @@ -185,7 +216,7 @@ describe('copy to space', () => { ], }; - const { copyToSpace, legacyAPI } = await setup(); + const { copyToSpace } = await setup(); const request = httpServerMock.createKibanaRequest({ body: payload, @@ -201,9 +232,8 @@ describe('copy to space', () => { const { status } = response; expect(status).toEqual(200); - expect(legacyAPI.savedObjects.importExport.importSavedObjects).toHaveBeenCalledTimes(1); - const [importCallOptions] = (legacyAPI.savedObjects.importExport - .importSavedObjects as any).mock.calls[0]; + expect(importSavedObjectsFromStream).toHaveBeenCalledTimes(1); + const [importCallOptions] = (importSavedObjectsFromStream as jest.Mock).mock.calls[0]; expect(importCallOptions).toMatchObject({ namespace: 'a-space', @@ -217,7 +247,7 @@ describe('copy to space', () => { objects: [{ type: 'visualization', id: 'bar' }], }; - const { copyToSpace, legacyAPI } = await setup(); + const { copyToSpace } = await setup(); const request = httpServerMock.createKibanaRequest({ body: payload, @@ -233,16 +263,14 @@ describe('copy to space', () => { const { status } = response; expect(status).toEqual(200); - expect(legacyAPI.savedObjects.importExport.importSavedObjects).toHaveBeenCalledTimes(2); - const [firstImportCallOptions] = (legacyAPI.savedObjects.importExport - .importSavedObjects as any).mock.calls[0]; + expect(importSavedObjectsFromStream).toHaveBeenCalledTimes(2); + const [firstImportCallOptions] = (importSavedObjectsFromStream as jest.Mock).mock.calls[0]; expect(firstImportCallOptions).toMatchObject({ namespace: 'a-space', }); - const [secondImportCallOptions] = (legacyAPI.savedObjects.importExport - .importSavedObjects as any).mock.calls[1]; + const [secondImportCallOptions] = (importSavedObjectsFromStream as jest.Mock).mock.calls[1]; expect(secondImportCallOptions).toMatchObject({ namespace: 'b-space', @@ -284,7 +312,7 @@ describe('copy to space', () => { objects: [{ type: 'visualization', id: 'bar' }], }; - const { resolveConflicts, legacyAPI } = await setup(); + const { resolveConflicts, coreStart } = await setup(); const request = httpServerMock.createKibanaRequest({ body: payload, @@ -293,12 +321,9 @@ describe('copy to space', () => { await resolveConflicts.routeHandler(mockRouteContext, request, kibanaResponseFactory); - expect(legacyAPI.savedObjects.getScopedSavedObjectsClient).toHaveBeenCalledWith( - expect.any(Object), - { - excludedWrappers: ['spaces'], - } - ); + expect(coreStart.savedObjects.getScopedClient).toHaveBeenCalledWith(request, { + excludedWrappers: ['spaces'], + }); }); it(`requires objects to be unique`, async () => { @@ -365,7 +390,7 @@ describe('copy to space', () => { ], }; - const { resolveConflicts, legacyAPI } = await setup(); + const { resolveConflicts } = await setup(); const request = httpServerMock.createKibanaRequest({ body: payload, @@ -381,9 +406,10 @@ describe('copy to space', () => { const { status } = response; 
expect(status).toEqual(200); - expect(legacyAPI.savedObjects.importExport.resolveImportErrors).toHaveBeenCalledTimes(1); - const [resolveImportErrorsCallOptions] = (legacyAPI.savedObjects.importExport - .resolveImportErrors as any).mock.calls[0]; + expect(resolveSavedObjectsImportErrors).toHaveBeenCalledTimes(1); + const [ + resolveImportErrorsCallOptions, + ] = (resolveSavedObjectsImportErrors as jest.Mock).mock.calls[0]; expect(resolveImportErrorsCallOptions).toMatchObject({ namespace: 'a-space', @@ -412,7 +438,7 @@ describe('copy to space', () => { }, }; - const { resolveConflicts, legacyAPI } = await setup(); + const { resolveConflicts } = await setup(); const request = httpServerMock.createKibanaRequest({ body: payload, @@ -428,17 +454,19 @@ describe('copy to space', () => { const { status } = response; expect(status).toEqual(200); - expect(legacyAPI.savedObjects.importExport.resolveImportErrors).toHaveBeenCalledTimes(2); - const [resolveImportErrorsFirstCallOptions] = (legacyAPI.savedObjects.importExport - .resolveImportErrors as any).mock.calls[0]; + expect(resolveSavedObjectsImportErrors).toHaveBeenCalledTimes(2); + const [ + resolveImportErrorsFirstCallOptions, + ] = (resolveSavedObjectsImportErrors as jest.Mock).mock.calls[0]; expect(resolveImportErrorsFirstCallOptions).toMatchObject({ namespace: 'a-space', supportedTypes: ['visualization', 'dashboard', 'index-pattern'], }); - const [resolveImportErrorsSecondCallOptions] = (legacyAPI.savedObjects.importExport - .resolveImportErrors as any).mock.calls[1]; + const [ + resolveImportErrorsSecondCallOptions, + ] = (resolveSavedObjectsImportErrors as jest.Mock).mock.calls[1]; expect(resolveImportErrorsSecondCallOptions).toMatchObject({ namespace: 'b-space', diff --git a/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.ts b/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.ts index 040a0552c38be0..a36cdb8c08c933 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/copy_to_space.ts @@ -12,7 +12,6 @@ import { resolveCopySavedObjectsToSpacesConflictsFactory, } from '../../../lib/copy_to_spaces'; import { ExternalRouteDeps } from '.'; -import { COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS } from '../../../lib/copy_to_spaces/copy_to_spaces'; import { SPACE_ID_REGEX } from '../../../lib/space_schema'; import { createLicensedRouteHandler } from '../../lib'; @@ -22,7 +21,7 @@ const areObjectsUnique = (objects: SavedObjectIdentifier[]) => _.uniq(objects, (o: SavedObjectIdentifier) => `${o.type}:${o.id}`).length === objects.length; export function initCopyToSpacesApi(deps: ExternalRouteDeps) { - const { externalRouter, spacesService, getSavedObjects } = deps; + const { externalRouter, spacesService, getImportExportObjectLimit, getStartServices } = deps; externalRouter.post( { @@ -67,13 +66,12 @@ export function initCopyToSpacesApi(deps: ExternalRouteDeps) { }, }, createLicensedRouteHandler(async (context, request, response) => { - const savedObjectsClient = getSavedObjects().getScopedSavedObjectsClient( - request, - COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS - ); + const [startServices] = await getStartServices(); + const copySavedObjectsToSpaces = copySavedObjectsToSpacesFactory( - savedObjectsClient, - getSavedObjects() + startServices.savedObjects, + getImportExportObjectLimit, + request ); const { spaces: destinationSpaceIds, objects, includeReferences, overwrite } = request.body; const sourceSpaceId = spacesService.getSpaceId(request); @@ 
-128,13 +126,12 @@ export function initCopyToSpacesApi(deps: ExternalRouteDeps) { }, }, createLicensedRouteHandler(async (context, request, response) => { - const savedObjectsClient = getSavedObjects().getScopedSavedObjectsClient( - request, - COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS - ); + const [startServices] = await getStartServices(); + const resolveCopySavedObjectsToSpacesConflicts = resolveCopySavedObjectsToSpacesConflictsFactory( - savedObjectsClient, - getSavedObjects() + startServices.savedObjects, + getImportExportObjectLimit, + request ); const { objects, includeReferences, retries } = request.body; const sourceSpaceId = spacesService.getSpaceId(request); diff --git a/x-pack/plugins/spaces/server/routes/api/external/delete.test.ts b/x-pack/plugins/spaces/server/routes/api/external/delete.test.ts index 35f18cf66a57e0..f2ba8785f5a3f3 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/delete.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/delete.test.ts @@ -7,7 +7,6 @@ import * as Rx from 'rxjs'; import { createSpaces, - createLegacyAPI, createMockSavedObjectsRepository, mockRouteContext, mockRouteContextWithInvalidLicense, @@ -15,9 +14,9 @@ import { import { CoreSetup, IRouter, kibanaResponseFactory, RouteValidatorConfig } from 'src/core/server'; import { loggingServiceMock, - elasticsearchServiceMock, httpServiceMock, httpServerMock, + coreMock, } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; @@ -29,22 +28,21 @@ import { ObjectType } from '@kbn/config-schema'; describe('Spaces Public API', () => { const spacesSavedObjects = createSpaces(); - const spaces = spacesSavedObjects.map(s => ({ id: s.id, ...s.attributes })); const setup = async () => { const httpService = httpServiceMock.createSetupContract(); const router = httpService.createRouter('') as jest.Mocked; - const legacyAPI = createLegacyAPI({ spaces }); - const savedObjectsRepositoryMock = createMockSavedObjectsRepository(spacesSavedObjects); const log = loggingServiceMock.create().get('spaces'); - const service = new SpacesService(log, () => legacyAPI); + const coreStart = coreMock.createStart(); + + const service = new SpacesService(log); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), @@ -66,7 +64,8 @@ describe('Spaces Public API', () => { initDeleteSpacesApi({ externalRouter: router, - getSavedObjects: () => legacyAPI.savedObjects, + getStartServices: async () => [coreStart, {}, {}], + getImportExportObjectLimit: () => 1000, log, spacesService, }); diff --git a/x-pack/plugins/spaces/server/routes/api/external/delete.ts b/x-pack/plugins/spaces/server/routes/api/external/delete.ts index 536efdc1de6490..4b7e6b00182acf 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/delete.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/delete.ts @@ -5,13 +5,14 @@ */ import { schema } from '@kbn/config-schema'; +import { SavedObjectsErrorHelpers } from '../../../../../../../src/core/server'; import { wrapError } from '../../../lib/errors'; import { SpacesClient } from '../../../lib/spaces_client'; import { ExternalRouteDeps } from '.'; import { createLicensedRouteHandler } from '../../lib'; export 
function initDeleteSpacesApi(deps: ExternalRouteDeps) { - const { externalRouter, getSavedObjects, spacesService } = deps; + const { externalRouter, spacesService } = deps; externalRouter.delete( { @@ -23,7 +24,6 @@ export function initDeleteSpacesApi(deps: ExternalRouteDeps) { }, }, createLicensedRouteHandler(async (context, request, response) => { - const { SavedObjectsClient } = getSavedObjects(); const spacesClient: SpacesClient = await spacesService.scopedClient(request); const id = request.params.id; @@ -31,7 +31,7 @@ export function initDeleteSpacesApi(deps: ExternalRouteDeps) { try { await spacesClient.delete(id); } catch (error) { - if (SavedObjectsClient.errors.isNotFoundError(error)) { + if (SavedObjectsErrorHelpers.isNotFoundError(error)) { return response.notFound(); } return response.customError(wrapError(error)); diff --git a/x-pack/plugins/spaces/server/routes/api/external/get.test.ts b/x-pack/plugins/spaces/server/routes/api/external/get.test.ts index 3300e308252834..482bf7165919a9 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/get.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/get.test.ts @@ -6,7 +6,6 @@ import * as Rx from 'rxjs'; import { createSpaces, - createLegacyAPI, createMockSavedObjectsRepository, mockRouteContextWithInvalidLicense, mockRouteContext, @@ -15,9 +14,9 @@ import { initGetSpaceApi } from './get'; import { CoreSetup, IRouter, kibanaResponseFactory } from 'src/core/server'; import { loggingServiceMock, - elasticsearchServiceMock, httpServiceMock, httpServerMock, + coreMock, } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; @@ -33,16 +32,16 @@ describe('GET space', () => { const httpService = httpServiceMock.createSetupContract(); const router = httpService.createRouter('') as jest.Mocked; - const legacyAPI = createLegacyAPI({ spaces }); + const coreStart = coreMock.createStart(); const savedObjectsRepositoryMock = createMockSavedObjectsRepository(spacesSavedObjects); const log = loggingServiceMock.create().get('spaces'); - const service = new SpacesService(log, () => legacyAPI); + const service = new SpacesService(log); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), @@ -64,7 +63,8 @@ describe('GET space', () => { initGetSpaceApi({ externalRouter: router, - getSavedObjects: () => legacyAPI.savedObjects, + getStartServices: async () => [coreStart, {}, {}], + getImportExportObjectLimit: () => 1000, log, spacesService, }); diff --git a/x-pack/plugins/spaces/server/routes/api/external/get.ts b/x-pack/plugins/spaces/server/routes/api/external/get.ts index 7643ec811db71e..150c9f05156a2c 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/get.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/get.ts @@ -5,12 +5,13 @@ */ import { schema } from '@kbn/config-schema'; +import { SavedObjectsErrorHelpers } from '../../../../../../../src/core/server'; import { wrapError } from '../../../lib/errors'; import { ExternalRouteDeps } from '.'; import { createLicensedRouteHandler } from '../../lib'; export function initGetSpaceApi(deps: ExternalRouteDeps) { - const { externalRouter, spacesService, getSavedObjects } = deps; + const { 
externalRouter, spacesService } = deps; externalRouter.get( { @@ -23,15 +24,13 @@ export function initGetSpaceApi(deps: ExternalRouteDeps) { }, createLicensedRouteHandler(async (context, request, response) => { const spaceId = request.params.id; - - const { SavedObjectsClient } = getSavedObjects(); const spacesClient = await spacesService.scopedClient(request); try { const space = await spacesClient.get(spaceId); return response.ok({ body: space }); } catch (error) { - if (SavedObjectsClient.errors.isNotFoundError(error)) { + if (SavedObjectsErrorHelpers.isNotFoundError(error)) { return response.notFound(); } return response.customError(wrapError(error)); diff --git a/x-pack/plugins/spaces/server/routes/api/external/get_all.test.ts b/x-pack/plugins/spaces/server/routes/api/external/get_all.test.ts index ca89731f35946f..c2d8abe6b40673 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/get_all.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/get_all.test.ts @@ -6,7 +6,6 @@ import * as Rx from 'rxjs'; import { createSpaces, - createLegacyAPI, createMockSavedObjectsRepository, mockRouteContext, mockRouteContextWithInvalidLicense, @@ -14,9 +13,9 @@ import { import { CoreSetup, kibanaResponseFactory, IRouter } from 'src/core/server'; import { loggingServiceMock, - elasticsearchServiceMock, httpServiceMock, httpServerMock, + coreMock, } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; @@ -33,16 +32,16 @@ describe('GET /spaces/space', () => { const httpService = httpServiceMock.createSetupContract(); const router = httpService.createRouter('') as jest.Mocked; - const legacyAPI = createLegacyAPI({ spaces }); + const coreStart = coreMock.createStart(); const savedObjectsRepositoryMock = createMockSavedObjectsRepository(spacesSavedObjects); const log = loggingServiceMock.create().get('spaces'); - const service = new SpacesService(log, () => legacyAPI); + const service = new SpacesService(log); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), @@ -64,7 +63,8 @@ describe('GET /spaces/space', () => { initGetAllSpacesApi({ externalRouter: router, - getSavedObjects: () => legacyAPI.savedObjects, + getStartServices: async () => [coreStart, {}, {}], + getImportExportObjectLimit: () => 1000, log, spacesService, }); diff --git a/x-pack/plugins/spaces/server/routes/api/external/index.ts b/x-pack/plugins/spaces/server/routes/api/external/index.ts index 60b0170ee04a73..1bdb7ceb8a3f7e 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/index.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Logger, SavedObjectsLegacyService, IRouter } from 'src/core/server'; +import { Logger, IRouter, CoreSetup } from 'src/core/server'; import { initDeleteSpacesApi } from './delete'; import { initGetSpaceApi } from './get'; import { initGetAllSpacesApi } from './get_all'; @@ -15,7 +15,8 @@ import { initCopyToSpacesApi } from './copy_to_space'; export interface ExternalRouteDeps { externalRouter: IRouter; - getSavedObjects: () => SavedObjectsLegacyService; + getStartServices: CoreSetup['getStartServices']; + getImportExportObjectLimit: () => number; spacesService: SpacesServiceSetup; log: Logger; } diff --git a/x-pack/plugins/spaces/server/routes/api/external/post.test.ts b/x-pack/plugins/spaces/server/routes/api/external/post.test.ts index 26ecbf2247e0f2..51fcfbfeaa95dc 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/post.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/post.test.ts @@ -6,7 +6,6 @@ import * as Rx from 'rxjs'; import { createSpaces, - createLegacyAPI, createMockSavedObjectsRepository, mockRouteContext, mockRouteContextWithInvalidLicense, @@ -14,9 +13,9 @@ import { import { CoreSetup, kibanaResponseFactory, IRouter, RouteValidatorConfig } from 'src/core/server'; import { loggingServiceMock, - elasticsearchServiceMock, httpServerMock, httpServiceMock, + coreMock, } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; @@ -28,22 +27,21 @@ import { ObjectType } from '@kbn/config-schema'; describe('Spaces Public API', () => { const spacesSavedObjects = createSpaces(); - const spaces = spacesSavedObjects.map(s => ({ id: s.id, ...s.attributes })); const setup = async () => { const httpService = httpServiceMock.createSetupContract(); const router = httpService.createRouter('') as jest.Mocked; - const legacyAPI = createLegacyAPI({ spaces }); + const coreStart = coreMock.createStart(); const savedObjectsRepositoryMock = createMockSavedObjectsRepository(spacesSavedObjects); const log = loggingServiceMock.create().get('spaces'); - const service = new SpacesService(log, () => legacyAPI); + const service = new SpacesService(log); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), @@ -65,7 +63,8 @@ describe('Spaces Public API', () => { initPostSpacesApi({ externalRouter: router, - getSavedObjects: () => legacyAPI.savedObjects, + getStartServices: async () => [coreStart, {}, {}], + getImportExportObjectLimit: () => 1000, log, spacesService, }); @@ -145,7 +144,7 @@ describe('Spaces Public API', () => { const { status, payload: responsePayload } = response; expect(status).toEqual(409); - expect(responsePayload.message).toEqual('space conflict'); + expect(responsePayload.message).toEqual('A space with the identifier a-space already exists.'); }); it('should not require disabledFeatures to be specified', async () => { diff --git a/x-pack/plugins/spaces/server/routes/api/external/post.ts b/x-pack/plugins/spaces/server/routes/api/external/post.ts index 3a24df8b7270ef..61f90adb300abd 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/post.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/post.ts @@ -4,13 +4,14 @@ * you may not use this file except in compliance 
with the Elastic License. */ import Boom from 'boom'; +import { SavedObjectsErrorHelpers } from '../../../../../../../src/core/server'; import { wrapError } from '../../../lib/errors'; import { spaceSchema } from '../../../lib/space_schema'; import { ExternalRouteDeps } from '.'; import { createLicensedRouteHandler } from '../../lib'; export function initPostSpacesApi(deps: ExternalRouteDeps) { - const { externalRouter, log, spacesService, getSavedObjects } = deps; + const { externalRouter, log, spacesService } = deps; externalRouter.post( { @@ -21,7 +22,6 @@ export function initPostSpacesApi(deps: ExternalRouteDeps) { }, createLicensedRouteHandler(async (context, request, response) => { log.debug(`Inside POST /api/spaces/space`); - const { SavedObjectsClient } = getSavedObjects(); const spacesClient = await spacesService.scopedClient(request); const space = request.body; @@ -31,7 +31,7 @@ export function initPostSpacesApi(deps: ExternalRouteDeps) { const createdSpace = await spacesClient.create(space); return response.ok({ body: createdSpace }); } catch (error) { - if (SavedObjectsClient.errors.isConflictError(error)) { + if (SavedObjectsErrorHelpers.isConflictError(error)) { const { body } = wrapError( Boom.conflict(`A space with the identifier ${space.id} already exists.`) ); diff --git a/x-pack/plugins/spaces/server/routes/api/external/put.test.ts b/x-pack/plugins/spaces/server/routes/api/external/put.test.ts index e6182e027b854d..3575d89b151e84 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/put.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/put.test.ts @@ -7,7 +7,6 @@ import * as Rx from 'rxjs'; import { createSpaces, - createLegacyAPI, createMockSavedObjectsRepository, mockRouteContext, mockRouteContextWithInvalidLicense, @@ -15,9 +14,9 @@ import { import { CoreSetup, IRouter, kibanaResponseFactory, RouteValidatorConfig } from 'src/core/server'; import { loggingServiceMock, - elasticsearchServiceMock, httpServiceMock, httpServerMock, + coreMock, } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; @@ -29,22 +28,21 @@ import { ObjectType } from '@kbn/config-schema'; describe('PUT /api/spaces/space', () => { const spacesSavedObjects = createSpaces(); - const spaces = spacesSavedObjects.map(s => ({ id: s.id, ...s.attributes })); const setup = async () => { const httpService = httpServiceMock.createSetupContract(); const router = httpService.createRouter('') as jest.Mocked; - const legacyAPI = createLegacyAPI({ spaces }); + const coreStart = coreMock.createStart(); const savedObjectsRepositoryMock = createMockSavedObjectsRepository(spacesSavedObjects); const log = loggingServiceMock.create().get('spaces'); - const service = new SpacesService(log, () => legacyAPI); + const service = new SpacesService(log); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), @@ -66,7 +64,8 @@ describe('PUT /api/spaces/space', () => { initPutSpacesApi({ externalRouter: router, - getSavedObjects: () => legacyAPI.savedObjects, + getStartServices: async () => [coreStart, {}, {}], + getImportExportObjectLimit: () => 1000, log, spacesService, }); diff --git 
a/x-pack/plugins/spaces/server/routes/api/external/put.ts b/x-pack/plugins/spaces/server/routes/api/external/put.ts index 4c19b0bd2eddaf..2054cf5d1c8296 100644 --- a/x-pack/plugins/spaces/server/routes/api/external/put.ts +++ b/x-pack/plugins/spaces/server/routes/api/external/put.ts @@ -5,6 +5,7 @@ */ import { schema } from '@kbn/config-schema'; +import { SavedObjectsErrorHelpers } from '../../../../../../../src/core/server'; import { Space } from '../../../../common/model/space'; import { wrapError } from '../../../lib/errors'; import { spaceSchema } from '../../../lib/space_schema'; @@ -12,7 +13,7 @@ import { ExternalRouteDeps } from '.'; import { createLicensedRouteHandler } from '../../lib'; export function initPutSpacesApi(deps: ExternalRouteDeps) { - const { externalRouter, spacesService, getSavedObjects } = deps; + const { externalRouter, spacesService } = deps; externalRouter.put( { @@ -25,7 +26,6 @@ export function initPutSpacesApi(deps: ExternalRouteDeps) { }, }, createLicensedRouteHandler(async (context, request, response) => { - const { SavedObjectsClient } = getSavedObjects(); const spacesClient = await spacesService.scopedClient(request); const space = request.body; @@ -35,7 +35,7 @@ export function initPutSpacesApi(deps: ExternalRouteDeps) { try { result = await spacesClient.update(id, { ...space }); } catch (error) { - if (SavedObjectsClient.errors.isNotFoundError(error)) { + if (SavedObjectsErrorHelpers.isNotFoundError(error)) { return response.notFound(); } return response.customError(wrapError(error)); diff --git a/x-pack/plugins/spaces/server/routes/api/internal/get_active_space.test.ts b/x-pack/plugins/spaces/server/routes/api/internal/get_active_space.test.ts index 461f816ff5019a..82de102e119c73 100644 --- a/x-pack/plugins/spaces/server/routes/api/internal/get_active_space.test.ts +++ b/x-pack/plugins/spaces/server/routes/api/internal/get_active_space.test.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ import * as Rx from 'rxjs'; -import { createLegacyAPI, mockRouteContextWithInvalidLicense } from '../__fixtures__'; +import { mockRouteContextWithInvalidLicense } from '../__fixtures__'; import { CoreSetup, kibanaResponseFactory } from 'src/core/server'; -import { httpServiceMock, httpServerMock, elasticsearchServiceMock } from 'src/core/server/mocks'; +import { httpServiceMock, httpServerMock, coreMock } from 'src/core/server/mocks'; import { SpacesService } from '../../../spaces_service'; import { SpacesAuditLogger } from '../../../lib/audit_logger'; import { spacesConfig } from '../../../lib/__fixtures__'; @@ -17,12 +17,12 @@ describe('GET /internal/spaces/_active_space', () => { const httpService = httpServiceMock.createSetupContract(); const router = httpServiceMock.createRouter(); - const legacyAPI = createLegacyAPI(); + const coreStart = coreMock.createStart(); - const service = new SpacesService(null as any, () => legacyAPI); + const service = new SpacesService(null as any); const spacesService = await service.setup({ http: (httpService as unknown) as CoreSetup['http'], - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], authorization: null, getSpacesAuditLogger: () => ({} as SpacesAuditLogger), config$: Rx.of(spacesConfig), diff --git a/x-pack/plugins/spaces/server/lib/saved_objects_client/__snapshots__/spaces_saved_objects_client.test.ts.snap b/x-pack/plugins/spaces/server/saved_objects/__snapshots__/spaces_saved_objects_client.test.ts.snap similarity index 100% rename from x-pack/plugins/spaces/server/lib/saved_objects_client/__snapshots__/spaces_saved_objects_client.test.ts.snap rename to x-pack/plugins/spaces/server/saved_objects/__snapshots__/spaces_saved_objects_client.test.ts.snap diff --git a/x-pack/plugins/spaces/server/lib/migrations/index.ts b/x-pack/plugins/spaces/server/saved_objects/index.ts similarity index 77% rename from x-pack/plugins/spaces/server/lib/migrations/index.ts rename to x-pack/plugins/spaces/server/saved_objects/index.ts index b303a8489ffb09..fb02c7cb7245ab 100644 --- a/x-pack/plugins/spaces/server/lib/migrations/index.ts +++ b/x-pack/plugins/spaces/server/saved_objects/index.ts @@ -4,4 +4,4 @@ * you may not use this file except in compliance with the Elastic License. */ -export { migrateToKibana660 } from './migrate_6x'; +export { SpacesSavedObjectsService } from './saved_objects_service'; diff --git a/x-pack/plugins/spaces/server/saved_objects/mappings.ts b/x-pack/plugins/spaces/server/saved_objects/mappings.ts new file mode 100644 index 00000000000000..00e1ab732a8a52 --- /dev/null +++ b/x-pack/plugins/spaces/server/saved_objects/mappings.ts @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { deepFreeze } from '../../../../../src/core/utils'; + +export const SpacesSavedObjectMappings = deepFreeze({ + properties: { + name: { + type: 'text', + fields: { + keyword: { + type: 'keyword', + ignore_above: 2048, + }, + }, + }, + description: { + type: 'text', + }, + initials: { + type: 'keyword', + }, + color: { + type: 'keyword', + }, + disabledFeatures: { + type: 'keyword', + }, + imageUrl: { + type: 'text', + index: false, + }, + _reserved: { + type: 'boolean', + }, + }, +}); diff --git a/x-pack/legacy/plugins/spaces/server/lib/migrations/index.ts b/x-pack/plugins/spaces/server/saved_objects/migrations/index.ts similarity index 100% rename from x-pack/legacy/plugins/spaces/server/lib/migrations/index.ts rename to x-pack/plugins/spaces/server/saved_objects/migrations/index.ts diff --git a/x-pack/plugins/spaces/server/lib/migrations/migrate_6x.test.ts b/x-pack/plugins/spaces/server/saved_objects/migrations/migrate_6x.test.ts similarity index 62% rename from x-pack/plugins/spaces/server/lib/migrations/migrate_6x.test.ts rename to x-pack/plugins/spaces/server/saved_objects/migrations/migrate_6x.test.ts index 964eb8137685f6..681e189bd6e658 100644 --- a/x-pack/plugins/spaces/server/lib/migrations/migrate_6x.test.ts +++ b/x-pack/plugins/spaces/server/saved_objects/migrations/migrate_6x.test.ts @@ -5,16 +5,24 @@ */ import { migrateToKibana660 } from './migrate_6x'; +import { SavedObjectMigrationContext } from 'src/core/server'; + +const mockContext = {} as SavedObjectMigrationContext; describe('migrateTo660', () => { it('adds a "disabledFeatures" attribute initialized as an empty array', () => { expect( - migrateToKibana660({ - id: 'space:foo', - attributes: {}, - }) + migrateToKibana660( + { + id: 'space:foo', + type: 'space', + attributes: {}, + }, + mockContext + ) ).toEqual({ id: 'space:foo', + type: 'space', attributes: { disabledFeatures: [], }, @@ -24,14 +32,19 @@ describe('migrateTo660', () => { it('does not initialize "disabledFeatures" if the property already exists', () => { // This scenario shouldn't happen organically. Protecting against defects in the migration. expect( - migrateToKibana660({ - id: 'space:foo', - attributes: { - disabledFeatures: ['foo', 'bar', 'baz'], + migrateToKibana660( + { + id: 'space:foo', + type: 'space', + attributes: { + disabledFeatures: ['foo', 'bar', 'baz'], + }, }, - }) + mockContext + ) ).toEqual({ id: 'space:foo', + type: 'space', attributes: { disabledFeatures: ['foo', 'bar', 'baz'], }, diff --git a/x-pack/plugins/spaces/server/lib/migrations/migrate_6x.ts b/x-pack/plugins/spaces/server/saved_objects/migrations/migrate_6x.ts similarity index 73% rename from x-pack/plugins/spaces/server/lib/migrations/migrate_6x.ts rename to x-pack/plugins/spaces/server/saved_objects/migrations/migrate_6x.ts index 0c080a8dabb0a7..b063404f68e4fb 100644 --- a/x-pack/plugins/spaces/server/lib/migrations/migrate_6x.ts +++ b/x-pack/plugins/spaces/server/saved_objects/migrations/migrate_6x.ts @@ -4,9 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -export function migrateToKibana660(doc: Record) { +import { SavedObjectMigrationFn } from 'src/core/server'; + +export const migrateToKibana660: SavedObjectMigrationFn = doc => { if (!doc.attributes.hasOwnProperty('disabledFeatures')) { doc.attributes.disabledFeatures = []; } return doc; -} +}; diff --git a/x-pack/plugins/spaces/server/lib/saved_objects_client/saved_objects_client_wrapper_factory.ts b/x-pack/plugins/spaces/server/saved_objects/saved_objects_client_wrapper_factory.ts similarity index 55% rename from x-pack/plugins/spaces/server/lib/saved_objects_client/saved_objects_client_wrapper_factory.ts rename to x-pack/plugins/spaces/server/saved_objects/saved_objects_client_wrapper_factory.ts index aa61af07c268ec..e545cccfeadd74 100644 --- a/x-pack/plugins/spaces/server/lib/saved_objects_client/saved_objects_client_wrapper_factory.ts +++ b/x-pack/plugins/spaces/server/saved_objects/saved_objects_client_wrapper_factory.ts @@ -4,19 +4,21 @@ * you may not use this file except in compliance with the Elastic License. */ -import { SavedObjectsClientWrapperFactory } from 'src/core/server'; +import { + SavedObjectsClientWrapperFactory, + SavedObjectsClientWrapperOptions, +} from 'src/core/server'; import { SpacesSavedObjectsClient } from './spaces_saved_objects_client'; -import { SpacesServiceSetup } from '../../spaces_service/spaces_service'; +import { SpacesServiceSetup } from '../spaces_service/spaces_service'; export function spacesSavedObjectsClientWrapperFactory( - spacesService: SpacesServiceSetup, - types: string[] + spacesService: SpacesServiceSetup ): SavedObjectsClientWrapperFactory { - return ({ client, request }) => + return (options: SavedObjectsClientWrapperOptions) => new SpacesSavedObjectsClient({ - baseClient: client, - request, + baseClient: options.client, + request: options.request, spacesService, - types, + typeRegistry: options.typeRegistry, }); } diff --git a/x-pack/plugins/spaces/server/saved_objects/saved_objects_service.test.ts b/x-pack/plugins/spaces/server/saved_objects/saved_objects_service.test.ts new file mode 100644 index 00000000000000..4a9756d9e03f89 --- /dev/null +++ b/x-pack/plugins/spaces/server/saved_objects/saved_objects_service.test.ts @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { coreMock } from 'src/core/server/mocks'; +import { spacesServiceMock } from '../spaces_service/spaces_service.mock'; +import { SpacesSavedObjectsService } from './saved_objects_service'; + +describe('SpacesSavedObjectsService', () => { + describe('#setup', () => { + it('registers the "space" saved object type with appropriate mappings and migrations', () => { + const core = coreMock.createSetup(); + const spacesService = spacesServiceMock.createSetupContract(); + + const service = new SpacesSavedObjectsService(); + service.setup({ core, spacesService }); + + expect(core.savedObjects.registerType).toHaveBeenCalledTimes(1); + expect(core.savedObjects.registerType.mock.calls[0]).toMatchInlineSnapshot(` + Array [ + Object { + "hidden": true, + "mappings": Object { + "properties": Object { + "_reserved": Object { + "type": "boolean", + }, + "color": Object { + "type": "keyword", + }, + "description": Object { + "type": "text", + }, + "disabledFeatures": Object { + "type": "keyword", + }, + "imageUrl": Object { + "index": false, + "type": "text", + }, + "initials": Object { + "type": "keyword", + }, + "name": Object { + "fields": Object { + "keyword": Object { + "ignore_above": 2048, + "type": "keyword", + }, + }, + "type": "text", + }, + }, + }, + "migrations": Object { + "6.6.0": [Function], + }, + "name": "space", + "namespaceAgnostic": true, + }, + ] + `); + }); + + it('registers the client wrapper', () => { + const core = coreMock.createSetup(); + const spacesService = spacesServiceMock.createSetupContract(); + + const service = new SpacesSavedObjectsService(); + service.setup({ core, spacesService }); + + expect(core.savedObjects.addClientWrapper).toHaveBeenCalledTimes(1); + expect(core.savedObjects.addClientWrapper).toHaveBeenCalledWith( + Number.MIN_SAFE_INTEGER, + 'spaces', + expect.any(Function) + ); + }); + }); +}); diff --git a/x-pack/plugins/spaces/server/saved_objects/saved_objects_service.ts b/x-pack/plugins/spaces/server/saved_objects/saved_objects_service.ts new file mode 100644 index 00000000000000..40ea49573e3c17 --- /dev/null +++ b/x-pack/plugins/spaces/server/saved_objects/saved_objects_service.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { CoreSetup } from 'src/core/server'; +import { SpacesSavedObjectMappings } from './mappings'; +import { migrateToKibana660 } from './migrations'; +import { spacesSavedObjectsClientWrapperFactory } from './saved_objects_client_wrapper_factory'; +import { SpacesServiceSetup } from '../spaces_service'; + +interface SetupDeps { + core: Pick; + spacesService: SpacesServiceSetup; +} + +export class SpacesSavedObjectsService { + public setup({ core, spacesService }: SetupDeps) { + core.savedObjects.registerType({ + name: 'space', + hidden: true, + namespaceAgnostic: true, + mappings: SpacesSavedObjectMappings, + migrations: { + '6.6.0': migrateToKibana660, + }, + }); + + core.savedObjects.addClientWrapper( + Number.MIN_SAFE_INTEGER, + 'spaces', + spacesSavedObjectsClientWrapperFactory(spacesService) + ); + } +} diff --git a/x-pack/plugins/spaces/server/lib/saved_objects_client/spaces_saved_objects_client.test.ts b/x-pack/plugins/spaces/server/saved_objects/spaces_saved_objects_client.test.ts similarity index 92% rename from x-pack/plugins/spaces/server/lib/saved_objects_client/spaces_saved_objects_client.test.ts rename to x-pack/plugins/spaces/server/saved_objects/spaces_saved_objects_client.test.ts index c2bc534f742a87..2d6fe36792c403 100644 --- a/x-pack/plugins/spaces/server/lib/saved_objects_client/spaces_saved_objects_client.test.ts +++ b/x-pack/plugins/spaces/server/saved_objects/spaces_saved_objects_client.test.ts @@ -4,12 +4,33 @@ * you may not use this file except in compliance with the Elastic License. */ -import { DEFAULT_SPACE_ID } from '../../../common/constants'; +import { DEFAULT_SPACE_ID } from '../../common/constants'; import { SpacesSavedObjectsClient } from './spaces_saved_objects_client'; -import { spacesServiceMock } from '../../spaces_service/spaces_service.mock'; -import { savedObjectsClientMock } from '../../../../../../src/core/server/mocks'; +import { spacesServiceMock } from '../spaces_service/spaces_service.mock'; +import { savedObjectsClientMock } from '../../../../../src/core/server/mocks'; +import { SavedObjectTypeRegistry } from 'src/core/server'; + +const typeRegistry = new SavedObjectTypeRegistry(); +typeRegistry.registerType({ + name: 'foo', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, +}); + +typeRegistry.registerType({ + name: 'bar', + namespaceAgnostic: false, + hidden: false, + mappings: { properties: {} }, +}); -const types = ['foo', 'bar', 'space']; +typeRegistry.registerType({ + name: 'space', + namespaceAgnostic: true, + hidden: true, + mappings: { properties: {} }, +}); const createMockRequest = () => ({}); @@ -44,7 +65,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -63,7 +84,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const type = Symbol(); const id = Symbol(); @@ -89,7 +110,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -110,7 +131,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const objects = [{ type: 'foo' }]; @@ -136,7 +157,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -160,7 +181,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const options = Object.freeze({ type: 'foo' }); @@ -189,7 +210,7 @@ const 
createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const options = Object.freeze({ type: ['foo', 'bar'] }); @@ -213,7 +234,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -232,7 +253,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const type = Symbol(); @@ -259,7 +280,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -280,7 +301,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const objects = [{ type: 'foo' }]; @@ -306,7 +327,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -326,7 +347,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const type = Symbol(); @@ -358,7 +379,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const actualReturnValue = await client.bulkUpdate([ @@ -390,7 +411,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); await expect( @@ -410,7 +431,7 @@ const createMockResponse = () => ({ request, baseClient, spacesService, - types, + typeRegistry, }); const type = Symbol(); diff --git a/x-pack/plugins/spaces/server/lib/saved_objects_client/spaces_saved_objects_client.ts b/x-pack/plugins/spaces/server/saved_objects/spaces_saved_objects_client.ts similarity index 95% rename from x-pack/plugins/spaces/server/lib/saved_objects_client/spaces_saved_objects_client.ts rename to x-pack/plugins/spaces/server/saved_objects/spaces_saved_objects_client.ts index 534d7971239401..f216d5743cf89e 100644 --- a/x-pack/plugins/spaces/server/lib/saved_objects_client/spaces_saved_objects_client.ts +++ b/x-pack/plugins/spaces/server/saved_objects/spaces_saved_objects_client.ts @@ -13,15 +13,16 @@ import { SavedObjectsCreateOptions, SavedObjectsFindOptions, SavedObjectsUpdateOptions, + ISavedObjectTypeRegistry, } from 'src/core/server'; -import { SpacesServiceSetup } from '../../spaces_service/spaces_service'; -import { spaceIdToNamespace } from '../utils/namespace'; +import { SpacesServiceSetup } from '../spaces_service/spaces_service'; +import { spaceIdToNamespace } from '../lib/utils/namespace'; interface SpacesSavedObjectsClientOptions { baseClient: SavedObjectsClientContract; request: any; spacesService: SpacesServiceSetup; - types: string[]; + typeRegistry: ISavedObjectTypeRegistry; } const coerceToArray = (param: string | string[]) => { @@ -45,11 +46,11 @@ export class SpacesSavedObjectsClient implements SavedObjectsClientContract { public readonly errors: SavedObjectsClientContract['errors']; constructor(options: SpacesSavedObjectsClientOptions) { - const { baseClient, request, spacesService, types } = options; + const { baseClient, request, spacesService, typeRegistry } = options; this.client = baseClient; this.spaceId = spacesService.getSpaceId(request); - this.types = types; + this.types = typeRegistry.getAllTypes().map(t => t.name); this.errors = baseClient.errors; } diff --git a/x-pack/plugins/spaces/server/spaces_service/spaces_service.test.ts b/x-pack/plugins/spaces/server/spaces_service/spaces_service.test.ts index fc5ff397805244..3ea1da1c835b22 100644 --- a/x-pack/plugins/spaces/server/spaces_service/spaces_service.test.ts +++ 
b/x-pack/plugins/spaces/server/spaces_service/spaces_service.test.ts @@ -5,58 +5,53 @@ */ import * as Rx from 'rxjs'; import { SpacesService } from './spaces_service'; -import { - coreMock, - elasticsearchServiceMock, - httpServerMock, - loggingServiceMock, -} from 'src/core/server/mocks'; +import { coreMock, httpServerMock, loggingServiceMock } from 'src/core/server/mocks'; import { SpacesAuditLogger } from '../lib/audit_logger'; import { KibanaRequest, - SavedObjectsLegacyService, SavedObjectsErrorHelpers, HttpServiceSetup, + SavedObjectsRepository, } from 'src/core/server'; import { DEFAULT_SPACE_ID } from '../../common/constants'; import { getSpaceIdFromPath } from '../../common/lib/spaces_url_parser'; -import { LegacyAPI } from '../plugin'; import { spacesConfig } from '../lib/__fixtures__'; import { securityMock } from '../../../security/server/mocks'; const mockLogger = loggingServiceMock.createLogger(); const createService = async (serverBasePath: string = '') => { - const legacyAPI = { - savedObjects: ({ - getSavedObjectsRepository: jest.fn().mockReturnValue({ - get: jest.fn().mockImplementation((type, id) => { - if (type === 'space' && id === 'foo') { - return Promise.resolve({ - id: 'space:foo', - attributes: { - name: 'Foo Space', - disabledFeatures: [], - }, - }); - } - if (type === 'space' && id === 'default') { - return Promise.resolve({ - id: 'space:default', - attributes: { - name: 'Default Space', - disabledFeatures: [], - _reserved: true, - }, - }); - } - throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); - }), - }), - } as unknown) as SavedObjectsLegacyService, - } as LegacyAPI; - - const spacesService = new SpacesService(mockLogger, () => legacyAPI); + const spacesService = new SpacesService(mockLogger); + + const coreStart = coreMock.createStart(); + + const repositoryMock = ({ + get: jest.fn().mockImplementation((type, id) => { + if (type === 'space' && id === 'foo') { + return Promise.resolve({ + id: 'space:foo', + attributes: { + name: 'Foo Space', + disabledFeatures: [], + }, + }); + } + if (type === 'space' && id === 'default') { + return Promise.resolve({ + id: 'space:default', + attributes: { + name: 'Default Space', + disabledFeatures: [], + _reserved: true, + }, + }); + } + throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id); + }), + } as unknown) as SavedObjectsRepository; + + coreStart.savedObjects.createInternalRepository.mockReturnValue(repositoryMock); + coreStart.savedObjects.createScopedRepository.mockReturnValue(repositoryMock); const httpSetup = coreMock.createSetup().http; httpSetup.basePath = { @@ -73,7 +68,7 @@ const createService = async (serverBasePath: string = '') => { const spacesServiceSetup = await spacesService.setup({ http: httpSetup, - elasticsearch: elasticsearchServiceMock.createSetup(), + getStartServices: async () => [coreStart, {}, {}], config$: Rx.of(spacesConfig), authorization: securityMock.createSetup().authz, getSpacesAuditLogger: () => new SpacesAuditLogger({}), diff --git a/x-pack/plugins/spaces/server/spaces_service/spaces_service.ts b/x-pack/plugins/spaces/server/spaces_service/spaces_service.ts index 95bda96d894615..ca8b67ead6d586 100644 --- a/x-pack/plugins/spaces/server/spaces_service/spaces_service.ts +++ b/x-pack/plugins/spaces/server/spaces_service/spaces_service.ts @@ -9,7 +9,6 @@ import { Observable, Subscription } from 'rxjs'; import { Legacy } from 'kibana'; import { Logger, KibanaRequest, CoreSetup } from '../../../../../src/core/server'; import { SecurityPluginSetup } from
'../../../security/server'; -import { LegacyAPI } from '../plugin'; import { SpacesClient } from '../lib/spaces_client'; import { ConfigType } from '../config'; import { getSpaceIdFromPath, addSpaceIdToPath } from '../../common/lib/spaces_url_parser'; @@ -37,7 +36,7 @@ export interface SpacesServiceSetup { interface SpacesServiceDeps { http: CoreSetup['http']; - elasticsearch: CoreSetup['elasticsearch']; + getStartServices: CoreSetup['getStartServices']; authorization: SecurityPluginSetup['authz'] | null; config$: Observable; getSpacesAuditLogger(): any; @@ -46,11 +45,11 @@ interface SpacesServiceDeps { export class SpacesService { private configSubscription$?: Subscription; - constructor(private readonly log: Logger, private readonly getLegacyAPI: () => LegacyAPI) {} + constructor(private readonly log: Logger) {} public async setup({ http, - elasticsearch, + getStartServices, authorization, config$, getSpacesAuditLogger, @@ -69,18 +68,15 @@ export class SpacesService { }; const getScopedClient = async (request: KibanaRequest) => { + const [coreStart] = await getStartServices(); + return config$ .pipe( map(config => { - const internalRepository = this.getLegacyAPI().savedObjects.getSavedObjectsRepository( - elasticsearch.adminClient.callAsInternalUser, - ['space'] - ); - - const callCluster = elasticsearch.adminClient.asScoped(request).callAsCurrentUser; + const internalRepository = coreStart.savedObjects.createInternalRepository(['space']); - const callWithRequestRepository = this.getLegacyAPI().savedObjects.getSavedObjectsRepository( - callCluster, + const callWithRequestRepository = coreStart.savedObjects.createScopedRepository( + request, ['space'] ); diff --git a/x-pack/plugins/transform/public/app/mount_management_section.ts b/x-pack/plugins/transform/public/app/mount_management_section.ts new file mode 100644 index 00000000000000..f3a48975a68e65 --- /dev/null +++ b/x-pack/plugins/transform/public/app/mount_management_section.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +import { CoreSetup } from 'src/core/public'; +import { ManagementAppMountParams } from '../../../../../src/plugins/management/public/'; +import { Storage } from '../../../../../src/plugins/kibana_utils/public'; + +import { PluginsDependencies } from '../plugin'; + +import { AppDependencies } from './app_dependencies'; +import { breadcrumbService } from './services/navigation'; +import { docTitleService } from './services/navigation'; +import { textService } from './services/text'; +import { renderApp } from './app'; + +const localStorage = new Storage(window.localStorage); + +export async function mountManagementSection( + coreSetup: CoreSetup, + params: ManagementAppMountParams +) { + const { element, setBreadcrumbs } = params; + const { http, notifications, getStartServices } = coreSetup; + const startServices = await getStartServices(); + const [core, plugins] = startServices; + const { chrome, docLinks, i18n, overlays, savedObjects, uiSettings } = core; + const { data } = plugins; + const { docTitle } = chrome; + + // Initialize services + textService.init(); + docTitleService.init(docTitle.change); + breadcrumbService.setup(setBreadcrumbs); + + // AppCore/AppPlugins to be passed on as React context + const appDependencies: AppDependencies = { + chrome, + data, + docLinks, + http, + i18n, + notifications, + overlays, + savedObjects, + storage: localStorage, + uiSettings, + }; + + return renderApp(element, appDependencies); +} diff --git a/x-pack/plugins/transform/public/plugin.ts b/x-pack/plugins/transform/public/plugin.ts index 9a83f5b0e05f33..cfe84a5ab693d8 100644 --- a/x-pack/plugins/transform/public/plugin.ts +++ b/x-pack/plugins/transform/public/plugin.ts @@ -9,16 +9,6 @@ import { CoreSetup } from 'src/core/public'; import { DataPublicPluginStart } from 'src/plugins/data/public'; import { ManagementSetup } from 'src/plugins/management/public'; -import { Storage } from '../../../../src/plugins/kibana_utils/public'; - -import { renderApp } from './app/app'; -import { AppDependencies } from './app/app_dependencies'; -import { breadcrumbService } from './app/services/navigation'; -import { docTitleService } from './app/services/navigation'; -import { textService } from './app/services/text'; - -const localStorage = new Storage(window.localStorage); - export interface PluginsDependencies { data: DataPublicPluginStart; management: ManagementSetup; @@ -37,34 +27,9 @@ export class TransformUiPlugin { defaultMessage: 'Transforms', }), order: 3, - mount: async ({ element, setBreadcrumbs }) => { - const { http, notifications, getStartServices } = coreSetup; - const startServices = await getStartServices(); - const [core, plugins] = startServices; - const { chrome, docLinks, i18n, overlays, savedObjects, uiSettings } = core; - const { data } = plugins; - const { docTitle } = chrome; - - // Initialize services - textService.init(); - docTitleService.init(docTitle.change); - breadcrumbService.setup(setBreadcrumbs); - - // AppCore/AppPlugins to be passed on as React context - const appDependencies: AppDependencies = { - chrome, - data, - docLinks, - http, - i18n, - notifications, - overlays, - savedObjects, - storage: localStorage, - uiSettings, - }; - - return renderApp(element, appDependencies); + mount: async params => { + const { mountManagementSection } = await import('./app/mount_management_section'); + return mountManagementSection(coreSetup, params); }, }); } diff --git a/x-pack/test/accessibility/apps/grok_debugger.ts b/x-pack/test/accessibility/apps/grok_debugger.ts new file mode 
100644 index 00000000000000..0b052d39a4db85 --- /dev/null +++ b/x-pack/test/accessibility/apps/grok_debugger.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { FtrProviderContext } from '../ftr_provider_context'; + +export default function({ getService, getPageObjects }: FtrProviderContext) { + const PageObjects = getPageObjects(['common', 'security']); + const a11y = getService('a11y'); + const grokDebugger = getService('grokDebugger'); + + // this test is failing as there is a violation https://github.com/elastic/kibana/issues/62102 + describe.skip('Dev tools grok debugger', () => { + before(async () => { + await PageObjects.common.navigateToApp('grokDebugger'); + await grokDebugger.assertExists(); + }); + + it('Dev tools grok debugger set input', async () => { + await grokDebugger.setEventInput('SegerCommaBob'); + await a11y.testAppSnapshot(); + }); + + it('Dev tools grok debugger set pattern', async () => { + await grokDebugger.setPatternInput('%{USERNAME:u}'); + await a11y.testAppSnapshot(); + }); + + it('Dev tools grok debugger simulate', async () => { + await grokDebugger.clickSimulate(); + await a11y.testAppSnapshot(); + }); + }); +} diff --git a/x-pack/test/accessibility/apps/home.ts b/x-pack/test/accessibility/apps/home.ts new file mode 100644 index 00000000000000..f40976f09f9c88 --- /dev/null +++ b/x-pack/test/accessibility/apps/home.ts @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { FtrProviderContext } from '../ftr_provider_context'; + +export default function({ getService, getPageObjects }: FtrProviderContext) { + const PageObjects = getPageObjects(['common', 'home']); + const a11y = getService('a11y'); + + describe('Kibana Home', () => { + before(async () => { + await PageObjects.common.navigateToApp('home'); + }); + + it('Kibana Home view', async () => { + await a11y.testAppSnapshot(); + }); + + it('all plugins view page meets a11y requirements', async () => { + await PageObjects.home.clickAllKibanaPlugins(); + await a11y.testAppSnapshot(); + }); + + it('visualize & explore details tab meets a11y requirements', async () => { + await PageObjects.home.clickVisualizeExplorePlugins(); + await a11y.testAppSnapshot(); + }); + + it('administrative detail tab meets a11y requirements', async () => { + await PageObjects.home.clickAdminPlugin(); + await a11y.testAppSnapshot(); + }); + + it('navigating to console app from administration tab meets a11y requirements', async () => { + await PageObjects.home.clickOnConsole(); + await a11y.testAppSnapshot(); + }); + + // issue: https://github.com/elastic/kibana/issues/38980 + it.skip('navigating back to home page from console meets a11y requirements', async () => { + await PageObjects.home.clickOnLogo(); + await a11y.testAppSnapshot(); + }); + + // Extra click on logo step here will be removed after the preceding test is fixed.
+ it('click on Add logs panel to open all log examples page meets a11y requirements', async () => { + await PageObjects.home.clickOnLogo(); + await PageObjects.home.ClickOnLogsData(); + await a11y.testAppSnapshot(); + }); + + // issue - logo images are missing alt-text https://github.com/elastic/kibana/issues/62239 + it.skip('click on ActiveMQ logs panel to open tutorial meets a11y requirements', async () => { + await PageObjects.home.clickOnLogsTutorial(); + await a11y.testAppSnapshot(); + }); + + // https://github.com/elastic/kibana/issues/62239 + it.skip('click on cloud tutorial meets a11y requirements', async () => { + await PageObjects.home.clickOnCloudTutorial(); + await a11y.testAppSnapshot(); + }); + }); +} diff --git a/x-pack/test/accessibility/config.ts b/x-pack/test/accessibility/config.ts index a9ac7c71d3e79e..7bf6079cc6487d 100644 --- a/x-pack/test/accessibility/config.ts +++ b/x-pack/test/accessibility/config.ts @@ -13,7 +13,11 @@ export default async function({ readConfigFile }: FtrConfigProviderContext) { return { ...functionalConfig.getAll(), - testFiles: [require.resolve('./apps/login_page')], + testFiles: [ + require.resolve('./apps/login_page'), + require.resolve('./apps/home'), + require.resolve('./apps/grok_debugger'), + ], pageObjects, services, diff --git a/x-pack/test/api_integration/apis/fleet/agents/acks.ts b/x-pack/test/api_integration/apis/fleet/agents/acks.ts index db925813b90c41..a2eba2c23c39d6 100644 --- a/x-pack/test/api_integration/apis/fleet/agents/acks.ts +++ b/x-pack/test/api_integration/apis/fleet/agents/acks.ts @@ -18,8 +18,7 @@ export default function(providerContext: FtrProviderContext) { const supertest = getSupertestWithoutAuth(providerContext); let apiKey: { id: string; api_key: string }; - // FLAKY: https://github.com/elastic/kibana/issues/60471 - describe.skip('fleet_agents_acks', () => { + describe('fleet_agents_acks', () => { before(async () => { await esArchiver.loadIfNeeded('fleet/agents'); diff --git a/x-pack/test/api_integration/apis/fleet/unenroll_agent.ts b/x-pack/test/api_integration/apis/fleet/unenroll_agent.ts index 4b6b28e3d6350f..b484f1f5a8ed2c 100644 --- a/x-pack/test/api_integration/apis/fleet/unenroll_agent.ts +++ b/x-pack/test/api_integration/apis/fleet/unenroll_agent.ts @@ -5,17 +5,58 @@ */ import expect from '@kbn/expect'; +import uuid from 'uuid'; import { FtrProviderContext } from '../../ftr_provider_context'; +import { setupIngest } from './agents/services'; -export default function({ getService }: FtrProviderContext) { +export default function(providerContext: FtrProviderContext) { + const { getService } = providerContext; const esArchiver = getService('esArchiver'); const supertest = getService('supertest'); + const esClient = getService('es'); describe('fleet_unenroll_agent', () => { + let accessAPIKeyId: string; + let outputAPIKeyId: string; before(async () => { await esArchiver.loadIfNeeded('fleet/agents'); }); + setupIngest(providerContext); + beforeEach(async () => { + const { body: accessAPIKeyBody } = await esClient.security.createApiKey({ + body: { + name: `test access api key: ${uuid.v4()}`, + }, + }); + accessAPIKeyId = accessAPIKeyBody.id; + const { body: outputAPIKeyBody } = await esClient.security.createApiKey({ + body: { + name: `test output api key: ${uuid.v4()}`, + }, + }); + outputAPIKeyId = outputAPIKeyBody.id; + const { + body: { _source: agentDoc }, + } = await esClient.get({ + index: '.kibana', + id: 'agents:agent1', + }); + // @ts-ignore + agentDoc.agents.access_api_key_id = accessAPIKeyId;
agentDoc.agents.default_api_key = Buffer.from( + `${outputAPIKeyBody.id}:${outputAPIKeyBody.api_key}` + ).toString('base64'); + + await esClient.update({ + index: '.kibana', + id: 'agents:agent1', + refresh: 'true', + body: { + doc: agentDoc, + }, + }); + }); after(async () => { await esArchiver.unload('fleet/agents'); }); @@ -54,6 +95,31 @@ export default function({ getService }: FtrProviderContext) { expect(body.results[0].success).to.be(true); }); + it('should invalidate related API keys', async () => { + const { body } = await supertest + .post(`/api/ingest_manager/fleet/agents/unenroll`) + .set('kbn-xsrf', 'xxx') + .send({ + ids: ['agent1'], + }) + .expect(200); + + expect(body).to.have.keys('results', 'success'); + expect(body.success).to.be(true); + + const { + body: { api_keys: accessAPIKeys }, + } = await esClient.security.getApiKey({ id: accessAPIKeyId }); + expect(accessAPIKeys).length(1); + expect(accessAPIKeys[0].invalidated).eql(true); + + const { + body: { api_keys: outputAPIKeys }, + } = await esClient.security.getApiKey({ id: outputAPIKeyId }); + expect(outputAPIKeys).length(1); + expect(outputAPIKeys[0].invalidated).eql(true); + }); + it('allow to unenroll using a kibana query', async () => { const { body } = await supertest .post(`/api/ingest_manager/fleet/agents/unenroll`) diff --git a/x-pack/test/functional/apps/endpoint/host_list.ts b/x-pack/test/functional/apps/endpoint/host_list.ts index 6eca8cc3bcce93..2e204775808c97 100644 --- a/x-pack/test/functional/apps/endpoint/host_list.ts +++ b/x-pack/test/functional/apps/endpoint/host_list.ts @@ -8,7 +8,7 @@ import expect from '@kbn/expect'; import { FtrProviderContext } from '../../ftr_provider_context'; export default ({ getPageObjects, getService }: FtrProviderContext) => { - const pageObjects = getPageObjects(['common', 'endpoint']); + const pageObjects = getPageObjects(['common', 'endpoint', 'header']); const esArchiver = getService('esArchiver'); const testSubjects = getService('testSubjects'); @@ -18,6 +18,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { before(async () => { await esArchiver.load('endpoint/metadata/api_feature'); await pageObjects.common.navigateToUrlWithBrowserHistory('endpoint', '/hosts'); + await pageObjects.header.waitUntilLoadingHasFinished(); }); it('finds title', async () => { @@ -114,6 +115,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { // clear out the data and reload the page await esArchiver.unload('endpoint/metadata/api_feature'); await pageObjects.common.navigateToUrlWithBrowserHistory('endpoint', '/hosts'); + await pageObjects.header.waitUntilLoadingHasFinished(); }); after(async () => { // reload the data so the other tests continue to pass @@ -135,6 +137,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { '/hosts', 'selected_host=fc0ff548-feba-41b6-8367-65e8790d0eaf' ); + await pageObjects.header.waitUntilLoadingHasFinished(); }); it('shows a flyout', async () => { diff --git a/x-pack/test/functional/apps/machine_learning/anomaly_detection/advanced_job.ts b/x-pack/test/functional/apps/machine_learning/anomaly_detection/advanced_job.ts index 3669ed3ab579b1..53b1cb83c524ba 100644 --- a/x-pack/test/functional/apps/machine_learning/anomaly_detection/advanced_job.ts +++ b/x-pack/test/functional/apps/machine_learning/anomaly_detection/advanced_job.ts @@ -682,7 +682,9 @@ export default function({ getService }: FtrProviderContext) { await 
ml.jobWizardCommon.assertInfluencerSelection(testData.pickFieldsConfig.influencers); }); - it('job cloning pre-fills the model memory limit', async () => { + // MML during clone has changed in #61589 + // TODO: adjust test code to reflect the new behavior + it.skip('job cloning pre-fills the model memory limit', async () => { await ml.jobWizardCommon.assertModelMemoryLimitInputExists({ withAdvancedSection: false, }); diff --git a/x-pack/test/functional/apps/machine_learning/anomaly_detection/categorization_job.ts b/x-pack/test/functional/apps/machine_learning/anomaly_detection/categorization_job.ts index 9fa53d6e546ba6..6408c6de1f9280 100644 --- a/x-pack/test/functional/apps/machine_learning/anomaly_detection/categorization_job.ts +++ b/x-pack/test/functional/apps/machine_learning/anomaly_detection/categorization_job.ts @@ -328,7 +328,9 @@ export default function({ getService }: FtrProviderContext) { await ml.jobWizardCommon.assertDedicatedIndexSwitchCheckedState(true); }); - it('job cloning pre-fills the model memory limit', async () => { + // MML during clone has changed in #61589 + // TODO: adjust test code to reflect the new behavior + it.skip('job cloning pre-fills the model memory limit', async () => { await ml.jobWizardCommon.assertModelMemoryLimitInputExists(); await ml.jobWizardCommon.assertModelMemoryLimitValue(memoryLimit); }); diff --git a/x-pack/test/functional/apps/machine_learning/anomaly_detection/multi_metric_job.ts b/x-pack/test/functional/apps/machine_learning/anomaly_detection/multi_metric_job.ts index f886453f7c5349..08175b79462597 100644 --- a/x-pack/test/functional/apps/machine_learning/anomaly_detection/multi_metric_job.ts +++ b/x-pack/test/functional/apps/machine_learning/anomaly_detection/multi_metric_job.ts @@ -346,7 +346,9 @@ export default function({ getService }: FtrProviderContext) { await ml.jobWizardCommon.assertDedicatedIndexSwitchCheckedState(true); }); - it('job cloning pre-fills the model memory limit', async () => { + // MML during clone has changed in #61589 + // TODO: adjust test code to reflect the new behavior + it.skip('job cloning pre-fills the model memory limit', async () => { await ml.jobWizardCommon.assertModelMemoryLimitInputExists(); await ml.jobWizardCommon.assertModelMemoryLimitValue(memoryLimit); }); diff --git a/x-pack/test/functional/apps/machine_learning/anomaly_detection/population_job.ts b/x-pack/test/functional/apps/machine_learning/anomaly_detection/population_job.ts index e8f45891ce064b..512d13307ea052 100644 --- a/x-pack/test/functional/apps/machine_learning/anomaly_detection/population_job.ts +++ b/x-pack/test/functional/apps/machine_learning/anomaly_detection/population_job.ts @@ -384,7 +384,9 @@ export default function({ getService }: FtrProviderContext) { await ml.jobWizardCommon.assertDedicatedIndexSwitchCheckedState(true); }); - it('job cloning pre-fills the model memory limit', async () => { + // MML during clone has changed in #61589 + // TODO: adjust test code to reflect the new behavior + it.skip('job cloning pre-fills the model memory limit', async () => { await ml.jobWizardCommon.assertModelMemoryLimitInputExists(); await ml.jobWizardCommon.assertModelMemoryLimitValue(memoryLimit); }); diff --git a/x-pack/test/functional/apps/machine_learning/anomaly_detection/single_metric_job.ts b/x-pack/test/functional/apps/machine_learning/anomaly_detection/single_metric_job.ts index 0d7e87cf6bd38f..4e6d480c12d82a 100644 --- a/x-pack/test/functional/apps/machine_learning/anomaly_detection/single_metric_job.ts +++ 
b/x-pack/test/functional/apps/machine_learning/anomaly_detection/single_metric_job.ts @@ -311,7 +311,9 @@ export default function({ getService }: FtrProviderContext) { await ml.jobWizardCommon.assertDedicatedIndexSwitchCheckedState(true); }); - it('job cloning pre-fills the model memory limit', async () => { + // MML during clone has changed in #61589 + // TODO: adjust test code to reflect the new behavior + it.skip('job cloning pre-fills the model memory limit', async () => { await ml.jobWizardCommon.assertModelMemoryLimitInputExists(); await ml.jobWizardCommon.assertModelMemoryLimitValue(memoryLimit); }); diff --git a/x-pack/test/functional/apps/maps/discover.js b/x-pack/test/functional/apps/maps/discover.js index ce335964767551..43a7a93ad62e4b 100644 --- a/x-pack/test/functional/apps/maps/discover.js +++ b/x-pack/test/functional/apps/maps/discover.js @@ -17,7 +17,6 @@ export default function({ getService, getPageObjects }) { it('should link geo_shape fields to Maps application', async () => { await PageObjects.discover.selectIndexPattern('geo_shapes*'); - await PageObjects.discover.clickFieldListItem('geometry'); await PageObjects.discover.clickFieldListItemVisualize('geometry'); await PageObjects.header.waitUntilLoadingHasFinished(); await PageObjects.maps.waitForLayersToLoad(); @@ -37,7 +36,6 @@ export default function({ getService, getPageObjects }) { await queryBar.submitQuery(); await PageObjects.header.waitUntilLoadingHasFinished(); - await PageObjects.discover.clickFieldListItem('geo.coordinates'); await PageObjects.discover.clickFieldListItemVisualize('geo.coordinates'); await PageObjects.header.waitUntilLoadingHasFinished(); await PageObjects.maps.waitForLayersToLoad(); diff --git a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz index c1a3c44cb8d8d6..feb2af93b0fd18 100644 Binary files a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz and b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/data.json.gz differ diff --git a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json index e0a7068e1149a3..64dc395ab69a43 100644 --- a/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json +++ b/x-pack/test/functional/es_archives/endpoint/alerts/api_feature/mappings.json @@ -94,7 +94,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -454,7 +454,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -851,7 +851,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -1496,7 +1496,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { @@ -1689,7 +1689,7 @@ } } }, - "malware_classifier": { + "malware_classification": { "properties": { "features": { "properties": { diff --git a/x-pack/test/functional/services/machine_learning/job_table.ts b/x-pack/test/functional/services/machine_learning/job_table.ts index dc401ca4548354..0e638963f2367d 100644 --- a/x-pack/test/functional/services/machine_learning/job_table.ts +++ b/x-pack/test/functional/services/machine_learning/job_table.ts @@ -217,6 +217,13 @@ export function MachineLearningJobTableProvider({ getService }: FtrProviderConte 
delete modelSizeStats.rare_category_count; delete modelSizeStats.total_category_count; + // MML during clone has changed in #61589 + // TODO: adjust test code to reflect the new behavior + expect(modelSizeStats).to.have.property('model_bytes_memory_limit'); + delete modelSizeStats.model_bytes_memory_limit; + // @ts-ignore + delete expectedModelSizeStats.model_bytes_memory_limit; + expect(modelSizeStats).to.eql(expectedModelSizeStats); } diff --git a/x-pack/test/functional/services/uptime/navigation.ts b/x-pack/test/functional/services/uptime/navigation.ts index c762ddf34be04f..15ee869da1e6ad 100644 --- a/x-pack/test/functional/services/uptime/navigation.ts +++ b/x-pack/test/functional/services/uptime/navigation.ts @@ -9,11 +9,12 @@ import { FtrProviderContext } from '../../ftr_provider_context'; export function UptimeNavigationProvider({ getService, getPageObjects }: FtrProviderContext) { const retry = getService('retry'); const testSubjects = getService('testSubjects'); - const PageObjects = getPageObjects(['common']); + const PageObjects = getPageObjects(['common', 'header']); const goToUptimeRoot = async () => { await retry.tryForTime(30 * 1000, async () => { await PageObjects.common.navigateToApp('uptime'); + await PageObjects.header.waitUntilLoadingHasFinished(); await testSubjects.existOrFail('uptimeOverviewPage', { timeout: 2000 }); }); }; diff --git a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts index 347eb5e14d0a88..029af1ea06e4fb 100644 --- a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts +++ b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/alerts.ts @@ -38,7 +38,8 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => { return createdAlert; } - describe('alerts', function() { + // FLAKY: https://github.com/elastic/kibana/issues/62472 + describe.skip('alerts', function() { before(async () => { await pageObjects.common.navigateToApp('triggersActions'); await testSubjects.click('alertsTab'); diff --git a/yarn.lock b/yarn.lock index 8176eab436afd9..d9edb55a320395 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9651,7 +9651,7 @@ core-js@^2.2.0, core-js@^2.4.0, core-js@^2.5.0, core-js@^2.5.1, core-js@^2.5.3, resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.9.tgz#6b4b214620c834152e179323727fc19741b084f2" integrity sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A== -core-js@^3.0.1, core-js@^3.0.4, core-js@^3.2.1, core-js@^3.4.1, core-js@^3.6.4: +core-js@^3.0.1, core-js@^3.0.4, core-js@^3.4.1, core-js@^3.6.4: version "3.6.4" resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.6.4.tgz#440a83536b458114b9cb2ac1580ba377dc470647" integrity sha512-4paDGScNgZP2IXXilaffL9X7968RuvwlkK3xWtZRVqgd8SYNiVKRJvkFd1aqqEuPfN7E68ZHEp9hDj6lHj4Hyw== @@ -24284,7 +24284,7 @@ react-router-redux@^4.0.8: resolved "https://registry.yarnpkg.com/react-router-redux/-/react-router-redux-4.0.8.tgz#227403596b5151e182377dab835b5d45f0f8054e" integrity sha1-InQDWWtRUeGCN32rg1tdRfD4BU4= -react-router@5.1.2: +react-router@5.1.2, react-router@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.1.2.tgz#6ea51d789cb36a6be1ba5f7c0d48dd9e817d3418" integrity sha512-yjEuMFy1ONK246B+rsa0cUam5OeAQ8pyclRDgpxuSCrAlJ1qN9uZ5IgyKC7gQg0w8OM50NXHEegPh/ks9YuR2A==