diff --git a/.bazelignore b/.bazelignore
index ac7a2d15a7aa4a..ec61c2f3e7e750 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -8,7 +8,10 @@
.idea
.teamcity
.yarn-local-mirror
-/bazel
+bazel-bin
+bazel-kibana
+bazel-out
+bazel-testlogs
build
node_modules
target
diff --git a/.bazelrc.common b/.bazelrc.common
index fb8e8e86b9ef59..20a41c4cde9a0d 100644
--- a/.bazelrc.common
+++ b/.bazelrc.common
@@ -18,17 +18,16 @@ build --disk_cache=~/.bazel-cache/disk-cache
build --repository_cache=~/.bazel-cache/repository-cache
# Bazel will create symlinks from the workspace directory to output artifacts.
-# Build results will be placed in a directory called "bazel/bin"
+# Build results will be placed in a directory called "bazel-bin"
# This will still create a bazel-out symlink in
# the project directory, which must be excluded from the
# editor's search path.
-build --symlink_prefix=bazel/
# To disable the symlinks altogether (including bazel-out) we can use
# build --symlink_prefix=/
# however this makes it harder to find outputs.
# Prevents the creation of bazel-out dir
-build --experimental_no_product_name_out_symlink
+# build --experimental_no_product_name_out_symlink
# Make direct file system calls to create symlink trees
build --experimental_inprocess_symlink_creation
@@ -83,7 +82,7 @@ test:debug --test_output=streamed --test_strategy=exclusive --test_timeout=9999
run:debug --define=VERBOSE_LOGS=1 -- --node_options=--inspect-brk
# The following option will change the build output of certain rules such as terser and may not be desirable in all cases
# It will also output both the repo cache and action cache to a folder inside the repo
-build:debug --compilation_mode=dbg --show_result=1 --disk_cache=bazel/disk-cache --repository_cache=bazel/repository-cache
+build:debug --compilation_mode=dbg --show_result=1
# Turn off legacy external runfiles
# This prevents accidentally depending on this feature, which Bazel will remove.
diff --git a/.ci/packer_cache.sh b/.ci/packer_cache.sh
index 5317b2c500b493..a63c2825816bdd 100755
--- a/.ci/packer_cache.sh
+++ b/.ci/packer_cache.sh
@@ -2,8 +2,10 @@
set -e
-# cache image used by kibana-load-testing project
-docker pull "maven:3.6.3-openjdk-8-slim"
+if [[ "$(which docker)" != "" && "$(command uname -m)" != "aarch64" ]]; then
+ # cache image used by kibana-load-testing project
+ docker pull "maven:3.6.3-openjdk-8-slim"
+fi
./.ci/packer_cache_for_branch.sh master
./.ci/packer_cache_for_branch.sh 7.x
diff --git a/.eslintignore b/.eslintignore
index 4559711bb9dd31..4058d971b76420 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -21,19 +21,13 @@ snapshots.js
# plugin overrides
/src/core/lib/kbn_internal_native_observable
-/src/legacy/plugin_discovery/plugin_pack/__tests__/fixtures/plugins/broken
/src/plugins/data/common/es_query/kuery/ast/_generated_/**
/src/plugins/vis_type_timelion/common/_generated_/**
-/x-pack/legacy/plugins/**/__tests__/fixtures/**
/x-pack/plugins/apm/e2e/tmp/*
/x-pack/plugins/canvas/canvas_plugin
/x-pack/plugins/canvas/shareable_runtime/build
/x-pack/plugins/canvas/storybook/build
/x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/**
-/x-pack/legacy/plugins/infra/common/graphql/types.ts
-/x-pack/legacy/plugins/infra/public/graphql/types.ts
-/x-pack/legacy/plugins/infra/server/graphql/types.ts
-/x-pack/legacy/plugins/maps/public/vendor/**
# package overrides
/packages/elastic-eslint-config-kibana
@@ -48,4 +42,4 @@ snapshots.js
/packages/kbn-monaco/src/painless/antlr
# Bazel
-/bazel
+/bazel-*
diff --git a/.eslintrc.js b/.eslintrc.js
index a7b45534391c0a..19ba7cacc3c44e 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -410,11 +410,7 @@ module.exports = {
errorMessage: `Common code can not import from server or public, use a common directory.`,
},
{
- target: [
- 'src/legacy/**/*',
- '(src|x-pack)/plugins/**/(public|server)/**/*',
- 'examples/**/*',
- ],
+ target: ['(src|x-pack)/plugins/**/(public|server)/**/*', 'examples/**/*'],
from: [
'src/core/public/**/*',
'!src/core/public/index.ts', // relative import
@@ -428,8 +424,6 @@ module.exports = {
'!src/core/server/mocks{,.ts}',
'!src/core/server/types{,.ts}',
'!src/core/server/test_utils{,.ts}',
- '!src/core/server/utils', // ts alias
- '!src/core/server/utils/**/*',
// for absolute imports until fixed in
// https://github.com/elastic/kibana/issues/36096
'!src/core/server/*.test.mocks{,.ts}',
@@ -442,7 +436,6 @@ module.exports = {
},
{
target: [
- 'src/legacy/**/*',
'(src|x-pack)/plugins/**/(public|server)/**/*',
'examples/**/*',
'!(src|x-pack)/**/*.test.*',
@@ -482,7 +475,7 @@ module.exports = {
},
{
target: ['src/core/**/*'],
- from: ['plugins/**/*', 'src/plugins/**/*', 'src/legacy/ui/**/*'],
+ from: ['plugins/**/*', 'src/plugins/**/*'],
errorMessage: 'The core cannot depend on any plugins.',
},
{
@@ -490,19 +483,6 @@ module.exports = {
from: ['ui/**/*'],
errorMessage: 'Plugins cannot import legacy UI code.',
},
- {
- from: ['src/legacy/ui/**/*', 'ui/**/*'],
- target: [
- 'test/plugin_functional/plugins/**/public/np_ready/**/*',
- 'test/plugin_functional/plugins/**/server/np_ready/**/*',
- ],
- allowSameFolder: true,
- errorMessage:
- 'NP-ready code should not import from /src/legacy/ui/** folder. ' +
- 'Instead of importing from /src/legacy/ui/** deeply within a np_ready folder, ' +
- 'import those things once at the top level of your plugin and pass those down, just ' +
- 'like you pass down `core` and `plugins` objects.',
- },
],
},
],
@@ -1180,7 +1160,7 @@ module.exports = {
pathGroups: [
{
pattern:
- '{../../../../../../,../../../../../,../../../../,../../../,../../,../}{common/,*}__mocks__{*,/**}',
+ '{../../../../../../,../../../../../,../../../../,../../../,../../,../,./}{common/,*}__mocks__{*,/**}',
group: 'unknown',
},
{
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f27885c1e32c34..a8dcafeb7753c0 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -107,7 +107,6 @@
/x-pack/plugins/dashboard_enhanced/ @elastic/kibana-presentation
/x-pack/test/functional/apps/canvas/ @elastic/kibana-presentation
#CC# /src/plugins/kibana_react/public/code_editor/ @elastic/kibana-presentation
-#CC# /x-pack/legacy/plugins/canvas/ @elastic/kibana-presentation
#CC# /x-pack/plugins/dashboard_mode @elastic/kibana-presentation
@@ -146,7 +145,6 @@
/x-pack/test/visual_regression/tests/maps/index.js @elastic/kibana-gis
#CC# /src/plugins/maps_legacy/ @elastic/kibana-gis
#CC# /x-pack/plugins/file_upload @elastic/kibana-gis
-#CC# /x-pack/plugins/maps_legacy_licensing @elastic/kibana-gis
/src/plugins/tile_map/ @elastic/kibana-gis
/src/plugins/region_map/ @elastic/kibana-gis
@@ -165,7 +163,6 @@
/packages/kbn-utils/ @elastic/kibana-operations
/packages/kbn-cli-dev-mode/ @elastic/kibana-operations
/src/cli/keystore/ @elastic/kibana-operations
-/src/legacy/server/warnings/ @elastic/kibana-operations
/.ci/es-snapshots/ @elastic/kibana-operations
/.github/workflows/ @elastic/kibana-operations
/vars/ @elastic/kibana-operations
@@ -202,9 +199,6 @@
/packages/kbn-legacy-logging/ @elastic/kibana-core
/packages/kbn-crypto/ @elastic/kibana-core
/packages/kbn-http-tools/ @elastic/kibana-core
-/src/legacy/server/config/ @elastic/kibana-core
-/src/legacy/server/http/ @elastic/kibana-core
-/src/legacy/server/logging/ @elastic/kibana-core
/src/plugins/status_page/ @elastic/kibana-core
/src/plugins/saved_objects_management/ @elastic/kibana-core
/src/dev/run_check_published_api_changes.ts @elastic/kibana-core
@@ -214,9 +208,6 @@
/src/plugins/kibana_overview/ @elastic/kibana-core
/x-pack/plugins/global_search_bar/ @elastic/kibana-core
#CC# /src/core/server/csp/ @elastic/kibana-core
-#CC# /src/legacy/server/config/ @elastic/kibana-core
-#CC# /src/legacy/server/http/ @elastic/kibana-core
-#CC# /src/legacy/ui/public/documentation_links @elastic/kibana-core
#CC# /src/plugins/legacy_export/ @elastic/kibana-core
#CC# /src/plugins/xpack_legacy/ @elastic/kibana-core
#CC# /src/plugins/saved_objects/ @elastic/kibana-core
@@ -348,6 +339,7 @@
# Security Solution sub teams
/x-pack/plugins/case @elastic/security-threat-hunting
+/x-pack/plugins/timelines @elastic/security-threat-hunting
/x-pack/test/case_api_integration @elastic/security-threat-hunting
/x-pack/plugins/lists @elastic/security-detections-response
diff --git a/.github/ISSUE_TEMPLATE/v8_breaking_change.md b/.github/ISSUE_TEMPLATE/v8_breaking_change.md
index 86e321990d05f5..67d2ee2d3286b4 100644
--- a/.github/ISSUE_TEMPLATE/v8_breaking_change.md
+++ b/.github/ISSUE_TEMPLATE/v8_breaking_change.md
@@ -2,7 +2,7 @@
name: 8.0 Breaking change
about: Breaking changes from 7.x -> 8.0
title: "[Breaking change]"
-labels: Team:Elasticsearch UI, Feature:Upgrade Assistant, Breaking Change
+labels: Feature:Upgrade Assistant, Breaking Change
assignees: ''
---
@@ -12,8 +12,8 @@ assignees: ''
******* LABEL CHANGES NECESSARY ********
****************************************
-Please add a "NeededFor:${TeamName}" label to denote the team that is
-requesting the breaking change to be surfaced in the Upgrade Assistant.
+Please add a team label to denote the team that the
+breaking change is applicable to.
-->
@@ -30,16 +30,14 @@ requesting the breaking change to be surfaced in the Upgrade Assistant.
-**How can we programmatically determine whether the cluster is affected by this breaking change?**
+**Can the change be registered with the [Kibana deprecation service](https://github.com/elastic/kibana/blob/master/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md)?**
-**What can users do to address the change manually?**
+
-
-
-**How could we make migration easier with the Upgrade Assistant?**
-
-
+
**Are there any edge cases?**
diff --git a/.github/workflows/project-assigner.yml b/.github/workflows/project-assigner.yml
index d9d2d6d1ddb8b5..37d04abda7530e 100644
--- a/.github/workflows/project-assigner.yml
+++ b/.github/workflows/project-assigner.yml
@@ -11,7 +11,7 @@ jobs:
uses: elastic/github-actions/project-assigner@v2.0.0
id: project_assigner
with:
- issue-mappings: '[{"label": "Feature:Lens", "projectNumber": 32, "columnName": "Long-term goals"}, {"label": "Feature:Canvas", "projectNumber": 38, "columnName": "Inbox"}, {"label": "Feature:Dashboard", "projectNumber": 68, "columnName": "Inbox"}, {"label": "Feature:Drilldowns", "projectNumber": 68, "columnName": "Inbox"}]'
+        issue-mappings: '[{"label": "Feature:Lens", "projectNumber": 32, "columnName": "Long-term goals"}, {"label": "Feature:Canvas", "projectNumber": 38, "columnName": "Inbox"}, {"label": "Feature:Dashboard", "projectNumber": 68, "columnName": "Inbox"}, {"label": "Feature:Drilldowns", "projectNumber": 68, "columnName": "Inbox"}, {"label": "Feature:Input Controls", "projectNumber": 72, "columnName": "Inbox"}]'
ghToken: ${{ secrets.PROJECT_ASSIGNER_TOKEN }}
diff --git a/.gitignore b/.gitignore
index fbe28b8f1e77cc..ce8fd38b18a929 100644
--- a/.gitignore
+++ b/.gitignore
@@ -75,5 +75,6 @@ report.asciidoc
.yarn-local-mirror
# Bazel
-/bazel
-/.bazelrc.user
+bazel
+bazel-*
+.bazelrc.user
diff --git a/.stylelintignore b/.stylelintignore
index a48b3adfa36321..72d9d5104a0e99 100644
--- a/.stylelintignore
+++ b/.stylelintignore
@@ -1,3 +1,4 @@
x-pack/plugins/canvas/shareable_runtime/**/*.s+(a|c)ss
build
target
+bazel-*
diff --git a/BUILD.bazel b/BUILD.bazel
index 38a478565a4af7..4502daeaacb597 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -2,6 +2,7 @@
# other packages builds and need to be included as inputs
exports_files(
[
+ "tsconfig.base.json",
"tsconfig.json",
"package.json"
],
diff --git a/docs/api/actions-and-connectors/legacy/create.asciidoc b/docs/api/actions-and-connectors/legacy/create.asciidoc
index af4feddcb80fba..0361c4222986b2 100644
--- a/docs/api/actions-and-connectors/legacy/create.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/create.asciidoc
@@ -4,7 +4,7 @@
Legacy Create connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Creates a connector.
diff --git a/docs/api/actions-and-connectors/legacy/delete.asciidoc b/docs/api/actions-and-connectors/legacy/delete.asciidoc
index 170fceba2d157e..9ec2c0d392a969 100644
--- a/docs/api/actions-and-connectors/legacy/delete.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/delete.asciidoc
@@ -4,7 +4,7 @@
Legacy Delete connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Deletes a connector by ID.
diff --git a/docs/api/actions-and-connectors/legacy/execute.asciidoc b/docs/api/actions-and-connectors/legacy/execute.asciidoc
index 200844ab72f17c..f01aa1585b1925 100644
--- a/docs/api/actions-and-connectors/legacy/execute.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/execute.asciidoc
@@ -4,7 +4,7 @@
Legacy Execute connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Executes a connector by ID.
diff --git a/docs/api/actions-and-connectors/legacy/get.asciidoc b/docs/api/actions-and-connectors/legacy/get.asciidoc
index 1b138fb7032e04..6413fce558f5bb 100644
--- a/docs/api/actions-and-connectors/legacy/get.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/get.asciidoc
@@ -4,7 +4,7 @@
Legacy Get connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieves a connector by ID.
diff --git a/docs/api/actions-and-connectors/legacy/get_all.asciidoc b/docs/api/actions-and-connectors/legacy/get_all.asciidoc
index ba235955c005ef..191eccb6f8d39d 100644
--- a/docs/api/actions-and-connectors/legacy/get_all.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/get_all.asciidoc
@@ -4,7 +4,7 @@
Legacy Get all connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieves all connectors.
diff --git a/docs/api/actions-and-connectors/legacy/list.asciidoc b/docs/api/actions-and-connectors/legacy/list.asciidoc
index 8acfd5415af573..d78838dcbe9745 100644
--- a/docs/api/actions-and-connectors/legacy/list.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/list.asciidoc
@@ -4,7 +4,7 @@
Legacy List all connector types
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieves a list of all connector types.
diff --git a/docs/api/actions-and-connectors/legacy/update.asciidoc b/docs/api/actions-and-connectors/legacy/update.asciidoc
index 517daf9a40dca7..6a33e765cf063c 100644
--- a/docs/api/actions-and-connectors/legacy/update.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/update.asciidoc
@@ -4,7 +4,7 @@
Legacy Update connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Updates the attributes for an existing connector.
diff --git a/docs/api/alerting/legacy/create.asciidoc b/docs/api/alerting/legacy/create.asciidoc
index 5c594d64a3f45b..8363569541356d 100644
--- a/docs/api/alerting/legacy/create.asciidoc
+++ b/docs/api/alerting/legacy/create.asciidoc
@@ -4,7 +4,7 @@
Legacy create alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Create {kib} alerts.
diff --git a/docs/api/alerting/legacy/delete.asciidoc b/docs/api/alerting/legacy/delete.asciidoc
index 68851973cab5b9..2af420f2bc34ec 100644
--- a/docs/api/alerting/legacy/delete.asciidoc
+++ b/docs/api/alerting/legacy/delete.asciidoc
@@ -4,7 +4,7 @@
Legacy delete alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Permanently remove an alert.
diff --git a/docs/api/alerting/legacy/disable.asciidoc b/docs/api/alerting/legacy/disable.asciidoc
index 56e06371570c2a..1a9b928bfba78c 100644
--- a/docs/api/alerting/legacy/disable.asciidoc
+++ b/docs/api/alerting/legacy/disable.asciidoc
@@ -4,7 +4,7 @@
Legacy disable alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Disable an alert.
diff --git a/docs/api/alerting/legacy/enable.asciidoc b/docs/api/alerting/legacy/enable.asciidoc
index 913d96a84352bd..da4b466d6fda49 100644
--- a/docs/api/alerting/legacy/enable.asciidoc
+++ b/docs/api/alerting/legacy/enable.asciidoc
@@ -4,7 +4,7 @@
Legacy enable alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Enable an alert.
diff --git a/docs/api/alerting/legacy/find.asciidoc b/docs/api/alerting/legacy/find.asciidoc
index 94d9bc425bd214..7c493e9c8eb5bf 100644
--- a/docs/api/alerting/legacy/find.asciidoc
+++ b/docs/api/alerting/legacy/find.asciidoc
@@ -4,7 +4,7 @@
Legacy find alerts
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve a paginated set of alerts based on condition.
diff --git a/docs/api/alerting/legacy/get.asciidoc b/docs/api/alerting/legacy/get.asciidoc
index f1014d18e87741..ee0f52f51005a2 100644
--- a/docs/api/alerting/legacy/get.asciidoc
+++ b/docs/api/alerting/legacy/get.asciidoc
@@ -4,7 +4,7 @@
Legacy get alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve an alert by ID.
diff --git a/docs/api/alerting/legacy/health.asciidoc b/docs/api/alerting/legacy/health.asciidoc
index b25307fb5efd18..68f04cc715bd7b 100644
--- a/docs/api/alerting/legacy/health.asciidoc
+++ b/docs/api/alerting/legacy/health.asciidoc
@@ -4,7 +4,7 @@
Legacy get Alerting framework health
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve the health status of the Alerting framework.
diff --git a/docs/api/alerting/legacy/list.asciidoc b/docs/api/alerting/legacy/list.asciidoc
index e9ef3bbc27cd9f..be37be36cd0e89 100644
--- a/docs/api/alerting/legacy/list.asciidoc
+++ b/docs/api/alerting/legacy/list.asciidoc
@@ -4,7 +4,7 @@
Legacy list all alert types
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve a list of all alert types.
diff --git a/docs/api/alerting/legacy/mute.asciidoc b/docs/api/alerting/legacy/mute.asciidoc
index dff42f5911e53f..cf7adc446a2fd8 100644
--- a/docs/api/alerting/legacy/mute.asciidoc
+++ b/docs/api/alerting/legacy/mute.asciidoc
@@ -4,7 +4,7 @@
Legacy mute alert instance
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Mute an alert instance.
diff --git a/docs/api/alerting/legacy/mute_all.asciidoc b/docs/api/alerting/legacy/mute_all.asciidoc
index df89fa15d15902..bc865480340e2c 100644
--- a/docs/api/alerting/legacy/mute_all.asciidoc
+++ b/docs/api/alerting/legacy/mute_all.asciidoc
@@ -4,7 +4,7 @@
Legacy mute all alert instances
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Mute all alert instances.
diff --git a/docs/api/alerting/legacy/unmute.asciidoc b/docs/api/alerting/legacy/unmute.asciidoc
index 0be7e40dc1a198..300cf71b57a01d 100644
--- a/docs/api/alerting/legacy/unmute.asciidoc
+++ b/docs/api/alerting/legacy/unmute.asciidoc
@@ -4,7 +4,7 @@
Legacy unmute alert instance
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Unmute an alert instance.
diff --git a/docs/api/alerting/legacy/unmute_all.asciidoc b/docs/api/alerting/legacy/unmute_all.asciidoc
index 8687c2d2fe8bb2..3b0a7afe5f44d6 100644
--- a/docs/api/alerting/legacy/unmute_all.asciidoc
+++ b/docs/api/alerting/legacy/unmute_all.asciidoc
@@ -4,7 +4,7 @@
Legacy unmute all alert instances
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Unmute all alert instances.
diff --git a/docs/api/alerting/legacy/update.asciidoc b/docs/api/alerting/legacy/update.asciidoc
index bffdf26c314001..b9cce995660e6a 100644
--- a/docs/api/alerting/legacy/update.asciidoc
+++ b/docs/api/alerting/legacy/update.asciidoc
@@ -4,7 +4,7 @@
Legacy update alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Update the attributes for an existing alert.
diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc
index e1c2c40a31384b..691d7fb82f3bc4 100644
--- a/docs/developer/plugin-list.asciidoc
+++ b/docs/developer/plugin-list.asciidoc
@@ -452,10 +452,6 @@ using the CURL scripts in the scripts folder.
|Visualize geo data from Elasticsearch or 3rd party geo-services.
-|{kib-repo}blob/{branch}/x-pack/plugins/maps_legacy_licensing/README.md[mapsLegacyLicensing]
-|This plugin provides access to the detailed tile map services from Elastic.
-
-
|{kib-repo}blob/{branch}/x-pack/plugins/ml/readme.md[ml]
|This plugin provides access to the machine learning features provided by
Elastic.
@@ -537,6 +533,10 @@ Documentation: https://www.elastic.co/guide/en/kibana/master/task-manager-produc
|Gathers all usage collection, retrieving them from both: OSS and X-Pack plugins.
+|{kib-repo}blob/{branch}/x-pack/plugins/timelines/README.md[timelines]
+|Timelines is a plugin that provides a grid component with accompanying server side apis to help users identify events of interest and perform root cause analysis within Kibana.
+
+
|{kib-repo}blob/{branch}/x-pack/plugins/transform/readme.md[transform]
|This plugin provides access to the transforms features provided by Elastic.
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
index 6ca7a83ac0a030..860f7c3c748924 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
@@ -144,6 +144,7 @@ readonly links: {
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
+ simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record;
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
index 3847ab0c6183a4..a9cb6729b214e6 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
@@ -17,5 +17,5 @@ export interface DocLinksStart
| --- | --- | --- |
| [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string
| |
| [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string
| |
-| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly dashboard: {
readonly guide: string;
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly elasticsearchModule: string;
readonly startup: string;
readonly exportedFields: string;
};
readonly auditbeat: {
readonly base: string;
};
readonly metricbeat: {
readonly base: string;
readonly configure: string;
readonly httpEndpoint: string;
readonly install: string;
readonly start: string;
};
readonly enterpriseSearch: {
readonly base: string;
readonly appSearchBase: string;
readonly workplaceSearchBase: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly composite: string;
readonly composite_missing_bucket: string;
readonly date_histogram: string;
readonly date_range: string;
readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly runtimeFields: {
readonly mapping: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessLangSpec: string;
readonly painlessSyntax: string;
readonly painlessWalkthrough: string;
readonly luceneExpressions: string;
};
readonly indexPatterns: {
readonly introduction: string;
readonly fieldFormattersNumber: string;
readonly fieldFormattersString: string;
};
readonly addData: string;
readonly kibana: string;
readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
};
readonly query: {
readonly eql: string;
readonly luceneQuerySyntax: string;
readonly queryDsl: string;
readonly kueryQuerySyntax: string;
};
readonly date: {
readonly dateMath: string;
readonly dateMathIndexNames: string;
};
readonly management: Record<string, string>;
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
readonly apis: Readonly<{
createIndex: string;
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
openIndex: string;
putComponentTemplate: string;
painlessExecute: string;
painlessExecuteAPIContexts: string;
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
readonly alerting: Record<string, string>;
readonly maps: Record<string, string>;
readonly monitoring: Record<string, string>;
readonly security: Readonly<{
apiKeyServiceSettings: string;
clusterPrivileges: string;
elasticsearchSettings: string;
elasticsearchEnableSecurity: string;
indicesPrivileges: string;
kibanaTLS: string;
kibanaPrivileges: string;
mappingRoles: string;
mappingRolesFieldRules: string;
runAsPrivilege: string;
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
readonly plugins: Record<string, string>;
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
}
| |
+| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly dashboard: {
readonly guide: string;
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly elasticsearchModule: string;
readonly startup: string;
readonly exportedFields: string;
};
readonly auditbeat: {
readonly base: string;
};
readonly metricbeat: {
readonly base: string;
readonly configure: string;
readonly httpEndpoint: string;
readonly install: string;
readonly start: string;
};
readonly enterpriseSearch: {
readonly base: string;
readonly appSearchBase: string;
readonly workplaceSearchBase: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly composite: string;
readonly composite_missing_bucket: string;
readonly date_histogram: string;
readonly date_range: string;
readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly runtimeFields: {
readonly mapping: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessLangSpec: string;
readonly painlessSyntax: string;
readonly painlessWalkthrough: string;
readonly luceneExpressions: string;
};
readonly indexPatterns: {
readonly introduction: string;
readonly fieldFormattersNumber: string;
readonly fieldFormattersString: string;
};
readonly addData: string;
readonly kibana: string;
readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
};
readonly query: {
readonly eql: string;
readonly luceneQuerySyntax: string;
readonly queryDsl: string;
readonly kueryQuerySyntax: string;
};
readonly date: {
readonly dateMath: string;
readonly dateMathIndexNames: string;
};
readonly management: Record<string, string>;
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
readonly apis: Readonly<{
createIndex: string;
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
openIndex: string;
putComponentTemplate: string;
painlessExecute: string;
painlessExecuteAPIContexts: string;
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
readonly alerting: Record<string, string>;
readonly maps: Record<string, string>;
readonly monitoring: Record<string, string>;
readonly security: Readonly<{
apiKeyServiceSettings: string;
clusterPrivileges: string;
elasticsearchSettings: string;
elasticsearchEnableSecurity: string;
indicesPrivileges: string;
kibanaTLS: string;
kibanaPrivileges: string;
mappingRoles: string;
mappingRolesFieldRules: string;
runAsPrivilege: string;
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
readonly plugins: Record<string, string>;
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
}
| |
diff --git a/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md b/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md
index 395c26a6e4bf65..8ddc0da5f1b285 100644
--- a/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md
+++ b/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md
@@ -10,10 +10,10 @@ Set of helpers used to create `KibanaResponse` to form HTTP response on an incom
```typescript
kibanaResponseFactory: {
- custom: | Error | Buffer | {
+ custom: | Error | Buffer | Stream | {
message: string | Error;
attributes?: Record | undefined;
- } | Stream | undefined>(options: CustomHttpResponseOptions) => KibanaResponse;
+ } | undefined>(options: CustomHttpResponseOptions) => KibanaResponse;
badRequest: (options?: ErrorHttpResponseOptions) => KibanaResponse;
unauthorized: (options?: ErrorHttpResponseOptions) => KibanaResponse;
forbidden: (options?: ErrorHttpResponseOptions) => KibanaResponse;
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.core.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.core.md
deleted file mode 100644
index 67f2cf0cdcc7ca..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.core.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) > [core](./kibana-plugin-core-server.legacyservicesetupdeps.core.md)
-
-## LegacyServiceSetupDeps.core property
-
-Signature:
-
-```typescript
-core: LegacyCoreSetup;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.md
deleted file mode 100644
index a5c1d59be06d35..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.md
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md)
-
-## LegacyServiceSetupDeps interface
-
-> Warning: This API is now obsolete.
->
->
-
-Signature:
-
-```typescript
-export interface LegacyServiceSetupDeps
-```
-
-## Properties
-
-| Property | Type | Description |
-| --- | --- | --- |
-| [core](./kibana-plugin-core-server.legacyservicesetupdeps.core.md) | LegacyCoreSetup
| |
-| [plugins](./kibana-plugin-core-server.legacyservicesetupdeps.plugins.md) | Record<string, unknown>
| |
-| [uiPlugins](./kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md) | UiPlugins
| |
-
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.plugins.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.plugins.md
deleted file mode 100644
index 032762904640b6..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.plugins.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) > [plugins](./kibana-plugin-core-server.legacyservicesetupdeps.plugins.md)
-
-## LegacyServiceSetupDeps.plugins property
-
-Signature:
-
-```typescript
-plugins: Record;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md
deleted file mode 100644
index d19a7dfcbfcfad..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) > [uiPlugins](./kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md)
-
-## LegacyServiceSetupDeps.uiPlugins property
-
-Signature:
-
-```typescript
-uiPlugins: UiPlugins;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.core.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.core.md
deleted file mode 100644
index 17369e00a70684..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.core.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md) > [core](./kibana-plugin-core-server.legacyservicestartdeps.core.md)
-
-## LegacyServiceStartDeps.core property
-
-Signature:
-
-```typescript
-core: LegacyCoreStart;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.md
deleted file mode 100644
index d6f6b38b79f847..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.md
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md)
-
-## LegacyServiceStartDeps interface
-
-> Warning: This API is now obsolete.
->
->
-
-Signature:
-
-```typescript
-export interface LegacyServiceStartDeps
-```
-
-## Properties
-
-| Property | Type | Description |
-| --- | --- | --- |
-| [core](./kibana-plugin-core-server.legacyservicestartdeps.core.md) | LegacyCoreStart
| |
-| [plugins](./kibana-plugin-core-server.legacyservicestartdeps.plugins.md) | Record<string, unknown>
| |
-
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.plugins.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.plugins.md
deleted file mode 100644
index 4634bf21fb42c4..00000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.plugins.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md) > [plugins](./kibana-plugin-core-server.legacyservicestartdeps.plugins.md)
-
-## LegacyServiceStartDeps.plugins property
-
-Signature:
-
-```typescript
-plugins: Record;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.md b/docs/development/core/server/kibana-plugin-core-server.md
index faac8108de8254..3bbdf8c703ab1f 100644
--- a/docs/development/core/server/kibana-plugin-core-server.md
+++ b/docs/development/core/server/kibana-plugin-core-server.md
@@ -110,8 +110,6 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [LegacyCallAPIOptions](./kibana-plugin-core-server.legacycallapioptions.md) | The set of options that defines how API call should be made and result be processed. |
| [LegacyElasticsearchError](./kibana-plugin-core-server.legacyelasticsearcherror.md) | @deprecated. The new elasticsearch client doesn't wrap errors anymore. |
| [LegacyRequest](./kibana-plugin-core-server.legacyrequest.md) | |
-| [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) | |
-| [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md) | |
| [LoggerContextConfigInput](./kibana-plugin-core-server.loggercontextconfiginput.md) | |
| [LoggingServiceSetup](./kibana-plugin-core-server.loggingservicesetup.md) | Provides APIs to plugins for customizing the plugin's logger. |
| [MetricsServiceSetup](./kibana-plugin-core-server.metricsservicesetup.md) | APIs to retrieves metrics gathered and exposed by the core platform. |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md
new file mode 100644
index 00000000000000..4e432b8d365a34
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggFunctionsMapping](./kibana-plugin-plugins-data-public.aggfunctionsmapping.md) > [aggSinglePercentile](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md)
+
+## AggFunctionsMapping.aggSinglePercentile property
+
+Signature:
+
+```typescript
+aggSinglePercentile: ReturnType;
+```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md
index 05388e2b86d7b5..852c6d5f1c00b8 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md
@@ -45,6 +45,7 @@ export interface AggFunctionsMapping
| [aggRange](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggrange.md) | ReturnType<typeof aggRange>
| |
| [aggSerialDiff](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggserialdiff.md) | ReturnType<typeof aggSerialDiff>
| |
| [aggSignificantTerms](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsignificantterms.md) | ReturnType<typeof aggSignificantTerms>
| |
+| [aggSinglePercentile](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md) | ReturnType<typeof aggSinglePercentile>
| |
| [aggStdDeviation](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggstddeviation.md) | ReturnType<typeof aggStdDeviation>
| |
| [aggSum](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsum.md) | ReturnType<typeof aggSum>
| |
| [aggTerms](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggterms.md) | ReturnType<typeof aggTerms>
| |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md
index 3b5cecf1a0b82a..bdae3ec738ac34 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md
@@ -32,6 +32,7 @@ export declare enum METRIC_TYPES
| PERCENTILE\_RANKS | "percentile_ranks"
| |
| PERCENTILES | "percentiles"
| |
| SERIAL\_DIFF | "serial_diff"
| |
+| SINGLE\_PERCENTILE | "single_percentile"
| |
| STD\_DEV | "std_dev"
| |
| SUM | "sum"
| |
| SUM\_BUCKET | "sum_bucket"
| |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md
new file mode 100644
index 00000000000000..d1418d7245d731
--- /dev/null
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [AggFunctionsMapping](./kibana-plugin-plugins-data-server.aggfunctionsmapping.md) > [aggSinglePercentile](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md)
+
+## AggFunctionsMapping.aggSinglePercentile property
+
+Signature:
+
+```typescript
+aggSinglePercentile: ReturnType;
+```
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md
index 86bf797572b09b..6b5f854c155f30 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md
@@ -45,6 +45,7 @@ export interface AggFunctionsMapping
| [aggRange](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggrange.md) | ReturnType<typeof aggRange>
| |
| [aggSerialDiff](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggserialdiff.md) | ReturnType<typeof aggSerialDiff>
| |
| [aggSignificantTerms](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsignificantterms.md) | ReturnType<typeof aggSignificantTerms>
| |
+| [aggSinglePercentile](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md) | ReturnType<typeof aggSinglePercentile>
| |
| [aggStdDeviation](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggstddeviation.md) | ReturnType<typeof aggStdDeviation>
| |
| [aggSum](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsum.md) | ReturnType<typeof aggSum>
| |
| [aggTerms](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggterms.md) | ReturnType<typeof aggTerms>
| |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md
index d408f00e33c9e8..b5c7d8931ad4b8 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md
@@ -14,6 +14,6 @@ export declare class IndexPatternsServiceProvider implements PluginSignature:
```typescript
-setup(core: CoreSetup, { expressions }: IndexPatternsServiceSetupDeps): void;
+setup(core: CoreSetup, { expressions, usageCollection }: IndexPatternsServiceSetupDeps): void;
```
## Parameters
| Parameter | Type | Description |
| --- | --- | --- |
-| core | CoreSetup<DataPluginStartDependencies, DataPluginStart>
| |
-| { expressions } | IndexPatternsServiceSetupDeps
| |
+| core | CoreSetup<IndexPatternsServiceStartDeps, DataPluginStart>
| |
+| { expressions, usageCollection } | IndexPatternsServiceSetupDeps
| |
Returns:
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md
index 98f9310c6d98cc..88079bb2fa3cb6 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md
@@ -8,7 +8,7 @@
```typescript
start(core: CoreStart, { fieldFormats, logger }: IndexPatternsServiceStartDeps): {
- indexPatternsServiceFactory: (savedObjectsClient: SavedObjectsClientContract, elasticsearchClient: ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: ElasticsearchClient) => Promise;
};
```
@@ -22,6 +22,6 @@ start(core: CoreStart, { fieldFormats, logger }: IndexPatternsServiceStartDeps):
Returns:
`{
- indexPatternsServiceFactory: (savedObjectsClient: SavedObjectsClientContract, elasticsearchClient: ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: ElasticsearchClient) => Promise;
}`
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md
index 250173d11a056d..37f53af8971b3c 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md
@@ -32,6 +32,7 @@ export declare enum METRIC_TYPES
| PERCENTILE\_RANKS | "percentile_ranks"
| |
| PERCENTILES | "percentiles"
| |
| SERIAL\_DIFF | "serial_diff"
| |
+| SINGLE\_PERCENTILE | "single_percentile"
| |
| STD\_DEV | "std_dev"
| |
| SUM | "sum"
| |
| SUM\_BUCKET | "sum_bucket"
| |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md
index 025cab9f48c1a5..f4404521561d24 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md
@@ -12,7 +12,7 @@ start(core: CoreStart): {
fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise;
};
indexPatterns: {
- indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
};
search: ISearchStart>;
};
@@ -31,7 +31,7 @@ start(core: CoreStart): {
fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise;
};
indexPatterns: {
- indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
};
search: ISearchStart>;
}`
diff --git a/docs/discover/search.asciidoc b/docs/discover/search.asciidoc
index 9971a6f574f9c6..0306be3eb670d7 100644
--- a/docs/discover/search.asciidoc
+++ b/docs/discover/search.asciidoc
@@ -110,7 +110,7 @@ image::discover/images/read-only-badge.png[Example of Discover's read only acces
==== Save a search
To save the current search:
-. Click *Save* in the Kibana toolbar.
+. Click *Save* in the toolbar.
. Enter a name for the search and click *Save*.
To import, export, and delete saved searches, open the main menu,
@@ -119,7 +119,7 @@ then click *Stack Management > Saved Objects*.
==== Open a saved search
To load a saved search into Discover:
-. Click *Open* in the Kibana toolbar.
+. Click *Open* in the toolbar.
. Select the search you want to open.
If the saved search is associated with a different index pattern than is currently
diff --git a/docs/management/managing-fields.asciidoc b/docs/management/managing-fields.asciidoc
index 5cd5c1ffd6248b..505f6853c79060 100644
--- a/docs/management/managing-fields.asciidoc
+++ b/docs/management/managing-fields.asciidoc
@@ -78,6 +78,7 @@ include::field-formatters/color-formatter.asciidoc[]
[[scripted-fields]]
=== Scripted fields
+deprecated::[7.13,Use {ref}/runtime.html[runtime fields] instead of scripted fields. Runtime fields support Painless scripts and provide greater flexibility.]
Scripted fields compute data on the fly from the data in your {es} indices. The data is shown on
the Discover tab as part of the document data, and you can use scripted fields in your visualizations. You query scripted fields with the <>, and can filter them using the filter bar. The scripted field values are computed at query time, so they aren't indexed and cannot be searched using the {kib} default
@@ -87,7 +88,7 @@ WARNING: Computing data on the fly with scripted fields can be very resource int
{kib} performance. Keep in mind that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated data.
-When you define a scripted field in {kib}, you have a choice of the {ref}/modules-scripting-expression.html[Lucene expressions] or the
+When you define a scripted field in {kib}, you have a choice of the {ref}/modules-scripting-expression.html[Lucene expressions] or the
{ref}/modules-scripting-painless.html[Painless] scripting language.
You can reference any single value numeric field in your expressions, for example:
diff --git a/docs/maps/import-geospatial-data.asciidoc b/docs/maps/import-geospatial-data.asciidoc
index fb4250368086e8..0218bac58815a7 100644
--- a/docs/maps/import-geospatial-data.asciidoc
+++ b/docs/maps/import-geospatial-data.asciidoc
@@ -6,6 +6,30 @@ To import geospatial data into the Elastic Stack, the data must be indexed as {
Geospatial data comes in many formats.
Choose an import tool based on the format of your geospatial data.
+[discrete]
+[[import-geospatial-privileges]]
+=== Security privileges
+
+The {stack-security-features} provide roles and privileges that control which users can upload files.
+You can manage your roles, privileges, and
+spaces in **{stack-manage-app}** in {kib}. For more information, see
+{ref}/security-privileges.html[Security privileges],
+<>, and <>.
+
+To upload GeoJSON files in {kib} with *Maps*, you must have:
+
+* The `all` {kib} privilege for *Maps*.
+* The `all` {kib} privilege for *Index Pattern Management*.
+* The `create` and `create_index` index privileges for destination indices.
+* To use the index in *Maps*, you must also have the `read` and `view_index_metadata` index privileges for destination indices.
+
+To upload CSV files in {kib} with the *{file-data-viz}*, you must have privileges to upload GeoJSON files and:
+
+* The `manage_pipeline` cluster privilege.
+* The `read` {kib} privilege for *Machine Learning*.
+* The `machine_learning_admin` or `machine_learning_user` role.
+
+
[discrete]
=== Upload CSV with latitude and longitude columns
diff --git a/docs/maps/maps-aggregations.asciidoc b/docs/maps/maps-aggregations.asciidoc
index 265bf6bfaea304..7f4af952653e7c 100644
--- a/docs/maps/maps-aggregations.asciidoc
+++ b/docs/maps/maps-aggregations.asciidoc
@@ -76,9 +76,8 @@ then accumulates the most relevant documents based on sort order for each entry
To enable top hits:
-. Click *Add layer*, then select the *Documents* layer.
+. Click *Add layer*, then select the *Top hits per entity* layer.
. Configure *Index pattern* and *Geospatial field*.
-. In *Scaling*, select *Show top hits per entity*.
. Set *Entity* to the field that identifies entities in your documents.
This field will be used in the terms aggregation to group your documents into entity buckets.
. Set *Documents per entity* to configure the maximum number of documents accumulated per entity.
diff --git a/docs/maps/vector-layer.asciidoc b/docs/maps/vector-layer.asciidoc
index 6a2228161845ef..2115c16a889c63 100644
--- a/docs/maps/vector-layer.asciidoc
+++ b/docs/maps/vector-layer.asciidoc
@@ -23,8 +23,6 @@ Select the appropriate *Scaling* option for your use case.
* *Limit results to 10000.* The layer displays features from the first `index.max_result_window` documents.
Results exceeding `index.max_result_window` are not displayed.
-* *Show top hits per entity.* The layer displays the <>.
-
* *Show clusters when results exceed 10000.* When results exceed `index.max_result_window`, the layer uses {ref}/search-aggregations-bucket-geotilegrid-aggregation.html[GeoTile grid aggregation] to group your documents into clusters and displays metrics for each cluster. When results are less than `index.max_result_window`, the layer displays features from individual documents.
* *Use vector tiles.* Vector tiles partition your map into 6 to 8 tiles.
@@ -36,6 +34,9 @@ Tiles exceeding `index.max_result_window` have a visual indicator when there are
*Point to point*:: Aggregated data paths between the source and destination.
The index must contain at least 2 fields mapped as {ref}/geo-point.html[geo_point], source and destination.
+*Top hits per entity*:: The layer displays the <>.
+The index must contain at least one field mapped as {ref}/geo-point.html[geo_point] or {ref}/geo-shape.html[geo_shape].
+
*Tracks*:: Create lines from points.
The index must contain at least one field mapped as {ref}/geo-point.html[geo_point].
diff --git a/docs/migration/migrate_8_0.asciidoc b/docs/migration/migrate_8_0.asciidoc
index f5ebac1ebf02e6..acb343191609df 100644
--- a/docs/migration/migrate_8_0.asciidoc
+++ b/docs/migration/migrate_8_0.asciidoc
@@ -320,6 +320,15 @@ All supported operating systems support using systemd service files. Any system
*Impact:*
Any installations using `.deb` or `.rpm` packages using SysV will need to migrate to systemd.
+[float]
+=== TLS v1.0 and v1.1 are disabled by default
+
+*Details:*
+Support can be re-enabled by setting `--tls-min-1.0` in the `node.options` config file that can be found inside `kibana/config` folder or any other configured with the environment variable `KBN_PATH_CONF` (for example in Debian based system would be `/etc/kibana`).
+
+*Impact:*
+Browser and proxy clients communicating over TLS v1.0 and v1.1.
+
[float]
=== Platform removed from root folder name for `.tar.gz` and `.zip` archives
diff --git a/docs/settings/reporting-settings.asciidoc b/docs/settings/reporting-settings.asciidoc
index cef5a953fded40..9bb11f3f99a156 100644
--- a/docs/settings/reporting-settings.asciidoc
+++ b/docs/settings/reporting-settings.asciidoc
@@ -260,19 +260,21 @@ For information about {kib} memory limits, see <> setting. Defaults to `.reporting`.
-
| `xpack.reporting.capture.networkPolicy`
| Capturing a screenshot from a {kib} page involves sending out requests for all the linked web assets. For example, a Markdown
visualization can show an image from a remote server. You can configure what type of requests to allow or filter by setting a
<> for Reporting.
+| `xpack.reporting.index`
+ | deprecated:[7.11.0,This setting will be removed in 8.0.] Multitenancy by
+ changing `kibana.index` will not be supported starting in 8.0. See
+ https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes] for more
+ details. Reporting uses a weekly index in {es} to store the reporting job and
+ the report content. The index is automatically created if it does not already
+ exist. Configure this to a unique value, beginning with `.reporting-`, for
+ every {kib} instance that has a unique <>
+ setting. Defaults to `.reporting`.
+
| `xpack.reporting.roles.allow`
| Specifies the roles in addition to superusers that can use reporting.
Defaults to `[ "reporting_user" ]`. +
diff --git a/docs/settings/search-sessions-settings.asciidoc b/docs/settings/search-sessions-settings.asciidoc
index c9a9e709ac7f89..cf64d08e4806c1 100644
--- a/docs/settings/search-sessions-settings.asciidoc
+++ b/docs/settings/search-sessions-settings.asciidoc
@@ -11,15 +11,15 @@ Configure the search session settings in your `kibana.yml` configuration file.
[cols="2*<"]
|===
a| `xpack.data_enhanced.`
-`search.sessions:enabled`
- | Set to `true` (default) to enable search sessions.
+`search.sessions.enabled`
+| Set to `true` (default) to enable search sessions.
-a| `xpack.data.enhanced.`
-`search.sessions:trackingInterval`
- | The frequency for updating the state of a search session. The default is 10s.
+a| `xpack.data_enhanced.`
+`search.sessions.trackingInterval`
+| The frequency for updating the state of a search session. The default is 10s.
-a| `xpack.data.enhanced.`
-`search.sessions:defaultExpiration`
- | How long search session results are stored before they are deleted.
- Extending a search session resets the expiration by the same value. The default is 7d.
+a| `xpack.data_enhanced.`
+`search.sessions.defaultExpiration`
+| How long search session results are stored before they are deleted.
+Extending a search session resets the expiration by the same value. The default is 7d.
|===
diff --git a/docs/setup/docker.asciidoc b/docs/setup/docker.asciidoc
index 25883307e69f0d..31e7b25eb66b18 100644
--- a/docs/setup/docker.asciidoc
+++ b/docs/setup/docker.asciidoc
@@ -39,11 +39,13 @@ docker pull {docker-repo}:{version}
=== Run Kibana on Docker for development
Kibana can be quickly started and connected to a local Elasticsearch container for development
or testing use with the following command:
---------------------------------------------
+
+[source,sh,subs="attributes"]
+----
docker run --link YOUR_ELASTICSEARCH_CONTAINER_NAME_OR_ID:elasticsearch -p 5601:5601 {docker-repo}:{version}
---------------------------------------------
-endif::[]
+----
+endif::[]
[float]
[[configuring-kibana-docker]]
=== Configure Kibana on Docker
diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc
index e5cbc2c7ea6db7..73b268e1e48b36 100644
--- a/docs/setup/settings.asciidoc
+++ b/docs/setup/settings.asciidoc
@@ -25,12 +25,14 @@ which may cause a delay before pages start being served.
Set to `false` to disable Console. *Default: `true`*
| `cpu.cgroup.path.override:`
- | *deprecated* This setting has been renamed to <>
-and the old name will no longer be supported as of 8.0.
+ | deprecated:[7.10.0,"This setting will no longer be supported as of 8.0."]
+ This setting has been renamed to
+ <>.
| `cpuacct.cgroup.path.override:`
- | *deprecated* This setting has been renamed to <>
-and the old name will no longer be supported as of 8.0.
+ | deprecated:[7.10.0,"This setting will no longer be supported as of 8.0."]
+ This setting has been renamed to
+ <>.
| `csp.rules:`
| A https://w3c.github.io/webappsec-csp/[content-security-policy] template
@@ -64,10 +66,12 @@ To enable SSL/TLS for outbound connections to {es}, use the `https` protocol
in this setting.
| `elasticsearch.logQueries:`
- | *deprecated* This setting is no longer used and will get removed in Kibana 8.0. Instead, configure the `elasticsearch.query` logger.
-This is useful for seeing the query DSL generated by applications that
-currently do not have an inspector, for example Timelion and Monitoring.
-*Default: `false`*
+ | deprecated:[7.12.0,"This setting is no longer used and will be removed in Kibana 8.0."]
+ Instead, configure the `elasticsearch.query` logger.
+ +
+ This is useful for seeing the query DSL generated by applications that
+ currently do not have an inspector, for example Timelion and Monitoring.
+ *Default: `false`*
The following example shows a valid `elasticsearch.query` logger configuration:
|===
@@ -240,18 +244,22 @@ on the {kib} index at startup. {kib} users still need to authenticate with
| Enables use of interpreter in Visualize. *Default: `true`*
| `kibana.defaultAppId:`
- | *deprecated* This setting is deprecated and will get removed in Kibana 8.0.
-Please use the `defaultRoute` advanced setting instead.
-The default application to load. *Default: `"home"`*
+ | deprecated:[7.9.0,This setting will be removed in Kibana 8.0.]
+ Instead, use the <>.
+ +
+ The default application to load. *Default: `"home"`*
|[[kibana-index]] `kibana.index:`
- | *deprecated* This setting is deprecated and will be removed in 8.0. Multitenancy by changing
-`kibana.index` will not be supported starting in 8.0. See https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes]
-for more details. {kib} uses an index in {es} to store saved searches, visualizations, and
-dashboards. {kib} creates a new index if the index doesn’t already exist.
-If you configure a custom index, the name must be lowercase, and conform to the
-{es} {ref}/indices-create-index.html[index name limitations].
-*Default: `".kibana"`*
+ | deprecated:[7.11.0,This setting will be removed in 8.0.] Multitenancy by
+ changing `kibana.index` will not be supported starting in 8.0. See
+ https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes] for more
+ details.
+ +
+ {kib} uses an index in {es} to store saved searches, visualizations, and
+ dashboards. {kib} creates a new index if the index doesn’t already exist. If
+ you configure a custom index, the name must be lowercase, and conform to the
+ {es} {ref}/indices-create-index.html[index name limitations].
+ *Default: `".kibana"`*
| `kibana.autocompleteTimeout:` {ess-icon}
| Time in milliseconds to wait for autocomplete suggestions from {es}.
diff --git a/docs/user/dashboard/timelion.asciidoc b/docs/user/dashboard/timelion.asciidoc
index 676c46368a6ee4..80ce77f30c75e5 100644
--- a/docs/user/dashboard/timelion.asciidoc
+++ b/docs/user/dashboard/timelion.asciidoc
@@ -4,17 +4,7 @@
Instead of using a visual editor to create charts, you define a graph by chaining functions together, using the *Timelion*-specific syntax.
The syntax enables some features that classical point series charts don't offer, such as pulling data from different indices or data sources into one graph.
-[NOTE]
-====
-Timelion app deprecation
-
-*Timelion* is still supported, the *Timelion app* is deprecated in 7.0, replaced by
-dashboard features. In 8.0 and later, the *Timelion app* is removed from {kib}.
-To prepare for the removal of *Timelion app*, you must migrate *Timelion app* worksheets to a dashboard.
-
-For information on how to migrate *Timelion app* worksheets, refer to the
-link:https://www.elastic.co/guide/en/kibana/7.10/release-notes-7.10.0.html#deprecation-v7.10.0[7.10.0 Release Notes].
-====
+deprecated::[7.0.0,"*Timelion* is still supported. The *Timelion app* is deprecated in 7.0, replaced by dashboard features. In 8.0 and later, the *Timelion app* is removed from {kib}. To prepare for the removal of *Timelion app*, you must migrate *Timelion app* worksheets to a dashboard. For information on how to migrate *Timelion app* worksheets, refer to the link:https://www.elastic.co/guide/en/kibana/7.10/release-notes-7.10.0.html#deprecation-v7.10.0[7.10.0 Release Notes]."]
[float]
==== Timelion expressions
diff --git a/jest.config.js b/jest.config.js
index 03dc832ba170c9..bd1e865a7e64a5 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -12,7 +12,6 @@ module.exports = {
projects: [
'/packages/*/jest.config.js',
'/src/*/jest.config.js',
- '/src/legacy/*/jest.config.js',
'/src/plugins/*/jest.config.js',
'/test/*/jest.config.js',
'/x-pack/plugins/*/jest.config.js',
diff --git a/kibana.d.ts b/kibana.d.ts
index a2c670c96a699e..8a7a531890057f 100644
--- a/kibana.d.ts
+++ b/kibana.d.ts
@@ -13,18 +13,3 @@ import * as Public from 'src/core/public';
import * as Server from 'src/core/server';
export { Public, Server };
-
-/**
- * All exports from TS ambient definitions (where types are added for JS source in a .d.ts file).
- */
-import * as LegacyKibanaServer from './src/legacy/server/kbn_server';
-
-/**
- * Re-export legacy types under a namespace.
- */
-export namespace Legacy {
- export type KibanaConfig = LegacyKibanaServer.KibanaConfig;
- export type Request = LegacyKibanaServer.Request;
- export type ResponseToolkit = LegacyKibanaServer.ResponseToolkit;
- export type Server = LegacyKibanaServer.Server;
-}
diff --git a/package.json b/package.json
index e379123269847f..34e044140d297a 100644
--- a/package.json
+++ b/package.json
@@ -97,8 +97,8 @@
"dependencies": {
"@elastic/apm-rum": "^5.6.1",
"@elastic/apm-rum-react": "^1.2.5",
- "@elastic/charts": "26.0.0",
- "@elastic/datemath": "link:packages/elastic-datemath",
+ "@elastic/charts": "27.0.0",
+ "@elastic/datemath": "link:bazel-bin/packages/elastic-datemath/npm_module",
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.4",
"@elastic/ems-client": "7.12.0",
"@elastic/eui": "31.10.0",
@@ -441,6 +441,7 @@
"@babel/traverse": "^7.12.12",
"@babel/types": "^7.12.12",
"@bazel/ibazel": "^0.14.0",
+ "@bazel/typescript": "^3.2.3",
"@cypress/snapshot": "^2.1.7",
"@cypress/webpack-preprocessor": "^5.5.0",
"@elastic/apm-rum": "^5.6.1",
diff --git a/packages/BUILD.bazel b/packages/BUILD.bazel
index 1f1eba0747ab7f..31894fcb1bb5db 100644
--- a/packages/BUILD.bazel
+++ b/packages/BUILD.bazel
@@ -2,5 +2,7 @@
# targets so we can build them all at once
filegroup(
name = "build",
- srcs = [],
+ srcs = [
+ "//packages/elastic-datemath:build"
+ ],
)
diff --git a/packages/elastic-datemath/.npmignore b/packages/elastic-datemath/.npmignore
index 591be7afd16696..cb8c40d17ea043 100644
--- a/packages/elastic-datemath/.npmignore
+++ b/packages/elastic-datemath/.npmignore
@@ -1,2 +1,3 @@
+/index.test.js
+/jest.config.js
/tsconfig.json
-/__tests__
diff --git a/packages/elastic-datemath/BUILD.bazel b/packages/elastic-datemath/BUILD.bazel
new file mode 100644
index 00000000000000..6a80556d4eed51
--- /dev/null
+++ b/packages/elastic-datemath/BUILD.bazel
@@ -0,0 +1,76 @@
+load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
+
+PKG_BASE_NAME = "elastic-datemath"
+PKG_REQUIRE_NAME = "@elastic/datemath"
+
+SOURCE_FILES = [
+ "src/index.ts",
+]
+
+SRCS = SOURCE_FILES
+
+filegroup(
+ name = "srcs",
+ srcs = glob(SOURCE_FILES),
+)
+
+NPM_MODULE_EXTRA_FILES = [
+ "package.json",
+ "README.md",
+]
+
+SRC_DEPS = [
+ "@npm//moment",
+]
+
+TYPES_DEPS = [
+ "@npm//@types/node",
+]
+
+DEPS = SRC_DEPS + TYPES_DEPS
+
+ts_config(
+ name = "tsconfig",
+ src = "tsconfig.json",
+ deps = [
+ "//:tsconfig.base.json",
+ ],
+)
+
+ts_project(
+ name = "tsc",
+ srcs = SRCS,
+ deps = DEPS,
+ declaration = True,
+ declaration_map = True,
+ incremental = True,
+ out_dir = "target",
+ source_map = True,
+ root_dir = "src",
+ tsconfig = ":tsconfig",
+)
+
+js_library(
+ name = PKG_BASE_NAME,
+ srcs = [],
+ deps = [":tsc"] + DEPS,
+ package_name = PKG_REQUIRE_NAME,
+ visibility = ["//visibility:public"],
+)
+
+pkg_npm(
+ name = "npm_module",
+ srcs = NPM_MODULE_EXTRA_FILES,
+ deps = [
+ ":%s" % PKG_BASE_NAME,
+ ]
+)
+
+filegroup(
+ name = "build",
+ srcs = [
+ ":npm_module",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/elastic-datemath/package.json b/packages/elastic-datemath/package.json
index 4dc9c4f24d5678..67fbb74eb223cb 100644
--- a/packages/elastic-datemath/package.json
+++ b/packages/elastic-datemath/package.json
@@ -5,8 +5,7 @@
"license": "Apache-2.0",
"main": "./target/index.js",
"types": "./target/index.d.ts",
- "scripts": {
- "build": "../../node_modules/.bin/tsc",
- "kbn:bootstrap": "yarn build"
+ "peerDependencies": {
+ "moment": "^2.24.0"
}
}
\ No newline at end of file
diff --git a/packages/elastic-datemath/tsconfig.json b/packages/elastic-datemath/tsconfig.json
index 6f04bee983a9e5..d0fa806ed411b4 100644
--- a/packages/elastic-datemath/tsconfig.json
+++ b/packages/elastic-datemath/tsconfig.json
@@ -1,10 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "incremental": false,
- "outDir": "./target",
"declaration": true,
"declarationMap": true,
+ "outDir": "target",
+ "rootDir": "src",
"sourceMap": true,
"sourceRoot": "../../../../packages/elastic-datemath/src",
"types": [
diff --git a/packages/kbn-cli-dev-mode/src/dev_server.ts b/packages/kbn-cli-dev-mode/src/dev_server.ts
index 3daf298c823249..60a279e456e3df 100644
--- a/packages/kbn-cli-dev-mode/src/dev_server.ts
+++ b/packages/kbn-cli-dev-mode/src/dev_server.ts
@@ -249,5 +249,11 @@ export class DevServer {
)
.subscribe(subscriber)
);
+
+ // complete state subjects when run$ completes
+ subscriber.add(() => {
+ this.phase$.complete();
+ this.ready$.complete();
+ });
});
}
diff --git a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
index ab113b96a5f039..ff25f2a7bf55e6 100644
--- a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
+++ b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
@@ -27,8 +27,6 @@ it('produces the right watch and ignore list', () => {
expect(watchPaths).toMatchInlineSnapshot(`
Array [
/src/core,
- /src/legacy/server,
- /src/legacy/utils,
/config,
/x-pack/test/plugin_functional/plugins/resolver_test,
/src/plugins,
diff --git a/packages/kbn-cli-dev-mode/src/optimizer.test.ts b/packages/kbn-cli-dev-mode/src/optimizer.test.ts
index e3bfb2eb0bb9e9..ee8ea5f38ae84f 100644
--- a/packages/kbn-cli-dev-mode/src/optimizer.test.ts
+++ b/packages/kbn-cli-dev-mode/src/optimizer.test.ts
@@ -180,6 +180,7 @@ it('is ready when optimizer phase is success or issue and logs in familiar forma
"ready: false",
"",
"ready: true",
+ "complete",
]
`);
diff --git a/packages/kbn-cli-dev-mode/src/optimizer.ts b/packages/kbn-cli-dev-mode/src/optimizer.ts
index 750b61140e920a..fab566829f7a6a 100644
--- a/packages/kbn-cli-dev-mode/src/optimizer.ts
+++ b/packages/kbn-cli-dev-mode/src/optimizer.ts
@@ -107,14 +107,26 @@ export class Optimizer {
},
]);
- this.run$ = runOptimizer(config).pipe(
- logOptimizerState(log, config),
- tap(({ state }) => {
- this.phase$.next(state.phase);
- this.ready$.next(state.phase === 'success' || state.phase === 'issue');
- }),
- ignoreElements()
- );
+ this.run$ = new Rx.Observable((subscriber) => {
+ subscriber.add(
+ runOptimizer(config)
+ .pipe(
+ logOptimizerState(log, config),
+ tap(({ state }) => {
+ this.phase$.next(state.phase);
+ this.ready$.next(state.phase === 'success' || state.phase === 'issue');
+ }),
+ ignoreElements()
+ )
+ .subscribe(subscriber)
+ );
+
+ // complete state subjects when run$ completes
+ subscriber.add(() => {
+ this.phase$.complete();
+ this.ready$.complete();
+ });
+ });
}
getPhase$() {
diff --git a/packages/kbn-cli-dev-mode/src/watcher.ts b/packages/kbn-cli-dev-mode/src/watcher.ts
index 8e8d2db1b20bb2..17993326cfcf3a 100644
--- a/packages/kbn-cli-dev-mode/src/watcher.ts
+++ b/packages/kbn-cli-dev-mode/src/watcher.ts
@@ -103,6 +103,11 @@ export class Watcher {
.pipe(ignoreElements())
.subscribe(subscriber)
);
+
+ // complete state subjects when run$ completes
+ subscriber.add(() => {
+ this.restart$.complete();
+ });
});
serverShouldRestart$() {
diff --git a/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap b/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
index 2801e0a0688cc6..17ac75e9f3d9e4 100644
--- a/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
+++ b/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
@@ -1,69 +1,5 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`#get correctly handles server config.: default 1`] = `
-Object {
- "autoListen": true,
- "basePath": "/abc",
- "compression": Object {
- "enabled": true,
- },
- "cors": false,
- "customResponseHeaders": Object {
- "custom-header": "custom-value",
- },
- "host": "host",
- "keepaliveTimeout": 5000,
- "maxPayload": 1000,
- "name": "kibana-hostname",
- "port": 1234,
- "publicBaseUrl": "https://myhost.com/abc",
- "rewriteBasePath": false,
- "socketTimeout": 2000,
- "ssl": Object {
- "enabled": true,
- "keyPassphrase": "some-phrase",
- "someNewValue": "new",
- },
- "uuid": undefined,
- "xsrf": Object {
- "allowlist": Array [],
- "disableProtection": false,
- },
-}
-`;
-
-exports[`#get correctly handles server config.: disabled ssl 1`] = `
-Object {
- "autoListen": true,
- "basePath": "/abc",
- "compression": Object {
- "enabled": true,
- },
- "cors": false,
- "customResponseHeaders": Object {
- "custom-header": "custom-value",
- },
- "host": "host",
- "keepaliveTimeout": 5000,
- "maxPayload": 1000,
- "name": "kibana-hostname",
- "port": 1234,
- "publicBaseUrl": "http://myhost.com/abc",
- "rewriteBasePath": false,
- "socketTimeout": 2000,
- "ssl": Object {
- "certificate": "cert",
- "enabled": false,
- "key": "key",
- },
- "uuid": undefined,
- "xsrf": Object {
- "allowlist": Array [],
- "disableProtection": false,
- },
-}
-`;
-
exports[`#get correctly handles silent logging config. 1`] = `
Object {
"appenders": Object {
@@ -78,6 +14,7 @@ Object {
"root": Object {
"level": "off",
},
+ "silent": true,
}
`;
@@ -93,10 +30,13 @@ Object {
"type": "legacy-appender",
},
},
+ "dest": "/some/path.log",
+ "json": true,
"loggers": undefined,
"root": Object {
"level": "all",
},
+ "verbose": true,
}
`;
diff --git a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
index 5dd1941545708d..47151503e16349 100644
--- a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
+++ b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
@@ -65,59 +65,6 @@ describe('#get', () => {
expect(configAdapter.get('logging')).toMatchSnapshot();
});
-
- test('correctly handles server config.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- server: {
- name: 'kibana-hostname',
- autoListen: true,
- basePath: '/abc',
- cors: false,
- customResponseHeaders: { 'custom-header': 'custom-value' },
- host: 'host',
- maxPayloadBytes: 1000,
- keepaliveTimeout: 5000,
- socketTimeout: 2000,
- port: 1234,
- publicBaseUrl: 'https://myhost.com/abc',
- rewriteBasePath: false,
- ssl: { enabled: true, keyPassphrase: 'some-phrase', someNewValue: 'new' },
- compression: { enabled: true },
- someNotSupportedValue: 'val',
- xsrf: {
- disableProtection: false,
- allowlist: [],
- },
- },
- });
-
- const configAdapterWithDisabledSSL = new LegacyObjectToConfigAdapter({
- server: {
- name: 'kibana-hostname',
- autoListen: true,
- basePath: '/abc',
- cors: false,
- customResponseHeaders: { 'custom-header': 'custom-value' },
- host: 'host',
- maxPayloadBytes: 1000,
- keepaliveTimeout: 5000,
- socketTimeout: 2000,
- port: 1234,
- publicBaseUrl: 'http://myhost.com/abc',
- rewriteBasePath: false,
- ssl: { enabled: false, certificate: 'cert', key: 'key' },
- compression: { enabled: true },
- someNotSupportedValue: 'val',
- xsrf: {
- disableProtection: false,
- allowlist: [],
- },
- },
- });
-
- expect(configAdapter.get('server')).toMatchSnapshot('default');
- expect(configAdapterWithDisabledSSL.get('server')).toMatchSnapshot('disabled ssl');
- });
});
describe('#set', () => {
diff --git a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
index 8ec26ff1f8e71c..bc6fd49e2498a0 100644
--- a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
+++ b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
@@ -9,15 +9,6 @@
import { ConfigPath } from '../config';
import { ObjectToConfigAdapter } from '../object_to_config_adapter';
-// TODO: fix once core schemas are moved to this package
-type LoggingConfigType = any;
-
-/**
- * @internal
- * @deprecated
- */
-export type LegacyVars = Record;
-
/**
* Represents logging config supported by the legacy platform.
*/
@@ -30,7 +21,7 @@ export interface LegacyLoggingConfig {
events?: Record;
}
-type MixedLoggingConfig = LegacyLoggingConfig & Partial;
+type MixedLoggingConfig = LegacyLoggingConfig & Record;
/**
* Represents adapter between config provided by legacy platform and `Config`
@@ -48,6 +39,7 @@ export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter {
},
root: { level: 'info', ...root },
loggers,
+ ...legacyLoggingConfig,
};
if (configValue.silent) {
@@ -61,47 +53,11 @@ export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter {
return loggingConfig;
}
- private static transformServer(configValue: any = {}) {
- // TODO: New platform uses just a subset of `server` config from the legacy platform,
- // new values will be exposed once we need them
- return {
- autoListen: configValue.autoListen,
- basePath: configValue.basePath,
- cors: configValue.cors,
- customResponseHeaders: configValue.customResponseHeaders,
- host: configValue.host,
- maxPayload: configValue.maxPayloadBytes,
- name: configValue.name,
- port: configValue.port,
- publicBaseUrl: configValue.publicBaseUrl,
- rewriteBasePath: configValue.rewriteBasePath,
- ssl: configValue.ssl,
- keepaliveTimeout: configValue.keepaliveTimeout,
- socketTimeout: configValue.socketTimeout,
- compression: configValue.compression,
- uuid: configValue.uuid,
- xsrf: configValue.xsrf,
- };
- }
-
- private static transformPlugins(configValue: LegacyVars = {}) {
- // These properties are the only ones we use from the existing `plugins` config node
- // since `scanDirs` isn't respected by new platform plugin discovery.
- return {
- initialize: configValue.initialize,
- paths: configValue.paths,
- };
- }
-
public get(configPath: ConfigPath) {
const configValue = super.get(configPath);
switch (configPath) {
case 'logging':
return LegacyObjectToConfigAdapter.transformLogging(configValue as LegacyLoggingConfig);
- case 'server':
- return LegacyObjectToConfigAdapter.transformServer(configValue);
- case 'plugins':
- return LegacyObjectToConfigAdapter.transformPlugins(configValue as LegacyVars);
default:
return configValue;
}
diff --git a/packages/kbn-legacy-logging/package.json b/packages/kbn-legacy-logging/package.json
index 9450fd39607ea9..96edeccad6658a 100644
--- a/packages/kbn-legacy-logging/package.json
+++ b/packages/kbn-legacy-logging/package.json
@@ -11,6 +11,7 @@
"kbn:watch": "yarn build --watch"
},
"dependencies": {
- "@kbn/utils": "link:../kbn-utils"
+ "@kbn/utils": "link:../kbn-utils",
+ "@kbn/config-schema": "link:../kbn-config-schema"
}
}
diff --git a/packages/kbn-legacy-logging/src/legacy_logging_server.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.ts
index e1edd06a4b4a26..3ece0f6f1ee478 100644
--- a/packages/kbn-legacy-logging/src/legacy_logging_server.ts
+++ b/packages/kbn-legacy-logging/src/legacy_logging_server.ts
@@ -88,7 +88,7 @@ export class LegacyLoggingServer {
// We set `ops.interval` to max allowed number and `ops` filter to value
// that doesn't exist to avoid logging of ops at all, if turned on it will be
// logged by the "legacy" Kibana.
- const { value: loggingConfig } = legacyLoggingConfigSchema.validate({
+ const loggingConfig = legacyLoggingConfigSchema.validate({
...legacyLoggingConfig,
events: {
...legacyLoggingConfig.events,
diff --git a/packages/kbn-legacy-logging/src/schema.ts b/packages/kbn-legacy-logging/src/schema.ts
index 76d7381ee87284..0330708e746c07 100644
--- a/packages/kbn-legacy-logging/src/schema.ts
+++ b/packages/kbn-legacy-logging/src/schema.ts
@@ -6,11 +6,8 @@
* Side Public License, v 1.
*/
-import Joi from 'joi';
+import { schema } from '@kbn/config-schema';
-const HANDLED_IN_KIBANA_PLATFORM = Joi.any().description(
- 'This key is handled in the new platform ONLY'
-);
/**
* @deprecated
*
@@ -36,46 +33,65 @@ export interface LegacyLoggingConfig {
};
}
-export const legacyLoggingConfigSchema = Joi.object()
- .keys({
- appenders: HANDLED_IN_KIBANA_PLATFORM,
- loggers: HANDLED_IN_KIBANA_PLATFORM,
- root: HANDLED_IN_KIBANA_PLATFORM,
-
- silent: Joi.boolean().default(false),
- quiet: Joi.boolean().when('silent', {
- is: true,
- then: Joi.boolean().default(true).valid(true),
- otherwise: Joi.boolean().default(false),
+export const legacyLoggingConfigSchema = schema.object({
+ silent: schema.boolean({ defaultValue: false }),
+ quiet: schema.conditional(
+ schema.siblingRef('silent'),
+ true,
+ schema.boolean({
+ defaultValue: true,
+ validate: (quiet) => {
+ if (!quiet) {
+ return 'must be true when `silent` is true';
+ }
+ },
+ }),
+ schema.boolean({ defaultValue: false })
+ ),
+ verbose: schema.conditional(
+ schema.siblingRef('quiet'),
+ true,
+ schema.boolean({
+ defaultValue: false,
+ validate: (verbose) => {
+ if (verbose) {
+ return 'must be false when `quiet` is true';
+ }
+ },
+ }),
+ schema.boolean({ defaultValue: false })
+ ),
+ events: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
+ dest: schema.string({ defaultValue: 'stdout' }),
+ filter: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
+ json: schema.conditional(
+ schema.siblingRef('dest'),
+ 'stdout',
+ schema.boolean({
+ defaultValue: !process.stdout.isTTY,
+ }),
+ schema.boolean({
+ defaultValue: true,
+ })
+ ),
+ timezone: schema.maybe(schema.string()),
+ rotate: schema.object({
+ enabled: schema.boolean({ defaultValue: false }),
+ everyBytes: schema.number({
+ min: 1048576, // > 1MB
+ max: 1073741825, // < 1GB
+ defaultValue: 10485760, // 10MB
}),
- verbose: Joi.boolean().when('quiet', {
- is: true,
- then: Joi.valid(false).default(false),
- otherwise: Joi.boolean().default(false),
+ keepFiles: schema.number({
+ min: 2,
+ max: 1024,
+ defaultValue: 7,
}),
- events: Joi.any().default({}),
- dest: Joi.string().default('stdout'),
- filter: Joi.any().default({}),
- json: Joi.boolean().when('dest', {
- is: 'stdout',
- then: Joi.boolean().default(!process.stdout.isTTY),
- otherwise: Joi.boolean().default(true),
+ pollingInterval: schema.number({
+ min: 5000,
+ max: 3600000,
+ defaultValue: 10000,
}),
- timezone: Joi.string(),
- rotate: Joi.object()
- .keys({
- enabled: Joi.boolean().default(false),
- everyBytes: Joi.number()
- // > 1MB
- .greater(1048576)
- // < 1GB
- .less(1073741825)
- // 10MB
- .default(10485760),
- keepFiles: Joi.number().greater(2).less(1024).default(7),
- pollingInterval: Joi.number().greater(5000).less(3600000).default(10000),
- usePolling: Joi.boolean().default(false),
- })
- .default(),
- })
- .default();
+ usePolling: schema.boolean({ defaultValue: false }),
+ }),
+});
diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml
index f93849e011d41c..a027768ad66a0a 100644
--- a/packages/kbn-optimizer/limits.yml
+++ b/packages/kbn-optimizer/limits.yml
@@ -9,7 +9,7 @@ pageLoadAssetSize:
charts: 195358
cloud: 21076
console: 46091
- core: 692106
+ core: 397521
crossClusterReplication: 65408
dashboard: 374194
dashboardEnhanced: 65646
@@ -24,13 +24,13 @@ pageLoadAssetSize:
enterpriseSearch: 35741
esUiShared: 326654
expressions: 224136
- features: 31211
- globalSearch: 43548
- globalSearchBar: 62888
+ features: 21723
+ globalSearch: 29696
+ globalSearchBar: 50403
globalSearchProviders: 25554
graph: 31504
grokdebugger: 26779
- home: 41661
+ home: 30182
indexLifecycleManagement: 107090
indexManagement: 140608
indexPatternManagement: 28222
@@ -45,13 +45,12 @@ pageLoadAssetSize:
kibanaUtils: 198829
lens: 96624
licenseManagement: 41817
- licensing: 39008
+ licensing: 29004
lists: 202261
logstash: 53548
management: 46112
- maps: 183610
+ maps: 80000
mapsLegacy: 87859
- mapsLegacyLicensing: 20214
ml: 82187
monitoring: 80000
navigation: 37269
@@ -73,8 +72,8 @@ pageLoadAssetSize:
share: 99061
snapshotRestore: 79032
spaces: 387915
- telemetry: 91832
- telemetryManagementSection: 52443
+ telemetry: 51957
+ telemetryManagementSection: 38586
tileMap: 65337
timelion: 29920
transform: 41007
@@ -108,3 +107,4 @@ pageLoadAssetSize:
fileUpload: 25664
banners: 17946
mapsEms: 26072
+ timelines: 28613
diff --git a/packages/kbn-optimizer/src/cli.ts b/packages/kbn-optimizer/src/cli.ts
index 6e3106dbc2af79..d5b9996dfb2cd2 100644
--- a/packages/kbn-optimizer/src/cli.ts
+++ b/packages/kbn-optimizer/src/cli.ts
@@ -6,8 +6,6 @@
* Side Public License, v 1.
*/
-import 'source-map-support/register';
-
import Path from 'path';
import { REPO_ROOT } from '@kbn/utils';
diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js
index bcb0b6da2a2f80..509ce89f8c02cb 100644
--- a/packages/kbn-pm/dist/index.js
+++ b/packages/kbn-pm/dist/index.js
@@ -209,7 +209,7 @@ async function run(argv) {
},
default: {
cache: true,
- 'force-install': true,
+ 'force-install': false,
offline: false,
validate: true
},
@@ -8910,8 +8910,11 @@ const BootstrapCommand = {
const nonBazelProjectsOnly = await Object(_utils_projects__WEBPACK_IMPORTED_MODULE_4__["getNonBazelProjectsOnly"])(projects);
const batchedNonBazelProjects = Object(_utils_projects__WEBPACK_IMPORTED_MODULE_4__["topologicallyBatchProjects"])(nonBazelProjectsOnly, projectGraph);
const kibanaProjectPath = ((_projects$get = projects.get('kibana')) === null || _projects$get === void 0 ? void 0 : _projects$get.path) || '';
- const runOffline = (options === null || options === void 0 ? void 0 : options.offline) === true;
- const forceInstall = !!options && options['force-install'] === true; // Ensure we have a `node_modules/.yarn-integrity` file as we depend on it
+ const runOffline = (options === null || options === void 0 ? void 0 : options.offline) === true; // Force install is set in case a flag is passed or
+ // if the `.yarn-integrity` file is not found which
+ // will be indicated by the return of yarnIntegrityFileExists.
+
+ const forceInstall = !!options && options['force-install'] === true || !(await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_9__["yarnIntegrityFileExists"])(Object(path__WEBPACK_IMPORTED_MODULE_0__["resolve"])(kibanaProjectPath, 'node_modules'))); // Ensure we have a `node_modules/.yarn-integrity` file as we depend on it
// for bazel to know it has to re-install the node_modules after a reset or a clean
await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_9__["ensureYarnIntegrityFileExists"])(Object(path__WEBPACK_IMPORTED_MODULE_0__["resolve"])(kibanaProjectPath, 'node_modules')); // Install bazel machinery tools if needed
@@ -8925,9 +8928,6 @@ const BootstrapCommand = {
// That way non bazel projects could depend on bazel projects but not the other way around
// That is only intended during the migration process while non Bazel projects are not removed at all.
//
- // Until we have our first package build within Bazel we will always need to directly call the yarn rule
- // otherwise yarn install won't trigger as we don't have any npm dependency within Bazel
- // TODO: Change CLI default in order to not force install as soon as we have our first Bazel package being built
if (forceInstall) {
await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_9__["runBazel"])(['run', '@nodejs//:yarn'], runOffline);
@@ -9105,6 +9105,7 @@ __webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isDirectory", function() { return isDirectory; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isFile", function() { return isFile; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "createSymlink", function() { return createSymlink; });
+/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tryRealpath", function() { return tryRealpath; });
/* harmony import */ var cmd_shim__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(132);
/* harmony import */ var cmd_shim__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(cmd_shim__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var del__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(143);
@@ -9137,6 +9138,7 @@ const symlink = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPA
const chmod = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPACK_IMPORTED_MODULE_2___default.a.chmod);
const cmdShim = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(cmd_shim__WEBPACK_IMPORTED_MODULE_0___default.a);
const mkdir = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPACK_IMPORTED_MODULE_2___default.a.mkdir);
+const realpathNative = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPACK_IMPORTED_MODULE_2___default.a.realpath.native);
const mkdirp = async path => await mkdir(path, {
recursive: true
});
@@ -9220,6 +9222,20 @@ async function forceCreate(src, dest, type) {
await symlink(src, dest, type);
}
+async function tryRealpath(path) {
+ let calculatedPath = path;
+
+ try {
+ calculatedPath = await realpathNative(path);
+ } catch (error) {
+ if (error.code !== 'ENOENT') {
+ throw error;
+ }
+ }
+
+ return calculatedPath;
+}
+
/***/ }),
/* 132 */
/***/ (function(module, exports, __webpack_require__) {
@@ -22981,11 +22997,11 @@ class Project {
ensureValidProjectDependency(project) {
const relativePathToProject = normalizePath(path__WEBPACK_IMPORTED_MODULE_1___default.a.relative(this.path, project.path));
- const relativePathToProjectIfBazelPkg = normalizePath(path__WEBPACK_IMPORTED_MODULE_1___default.a.relative(this.path, `bazel/bin/packages/${path__WEBPACK_IMPORTED_MODULE_1___default.a.basename(project.path)}`));
+ const relativePathToProjectIfBazelPkg = normalizePath(path__WEBPACK_IMPORTED_MODULE_1___default.a.relative(this.path, `${__dirname}/../../../bazel-bin/packages/${path__WEBPACK_IMPORTED_MODULE_1___default.a.basename(project.path)}/npm_module`));
const versionInPackageJson = this.allDependencies[project.name];
const expectedVersionInPackageJson = `link:${relativePathToProject}`;
const expectedVersionInPackageJsonIfBazelPkg = `link:${relativePathToProjectIfBazelPkg}`; // TODO: after introduce bazel to build all the packages and completely remove the support for kbn packages
- // do not allow child projects to hold dependencies
+ // do not allow child projects to hold dependencies, unless they are meant to be published externally
if (versionInPackageJson === expectedVersionInPackageJson || versionInPackageJson === expectedVersionInPackageJsonIfBazelPkg) {
return;
@@ -23143,7 +23159,7 @@ const createProductionPackageJson = pkgJson => _objectSpread(_objectSpread({}, p
dependencies: transformDependencies(pkgJson.dependencies)
});
const isLinkDependency = depVersion => depVersion.startsWith('link:');
-const isBazelPackageDependency = depVersion => depVersion.startsWith('link:bazel/bin/');
+const isBazelPackageDependency = depVersion => depVersion.startsWith('link:bazel-bin/');
/**
* Replaces `link:` dependencies with `file:` dependencies. When installing
* dependencies, these `file:` dependencies will be copied into `node_modules`
@@ -23153,7 +23169,7 @@ const isBazelPackageDependency = depVersion => depVersion.startsWith('link:bazel
* will then _copy_ the `file:` dependencies into `node_modules` instead of
* symlinking like we do in development.
*
- * Additionally it also taken care of replacing `link:bazel/bin/` with
+ * Additionally it also taken care of replacing `link:bazel-bin/` with
* `file:` so we can also support the copy of the Bazel packages dist already into
* build/packages to be copied into the node_modules
*/
@@ -23170,7 +23186,7 @@ function transformDependencies(dependencies = {}) {
}
if (isBazelPackageDependency(depVersion)) {
- newDeps[name] = depVersion.replace('link:bazel/bin/', 'file:');
+ newDeps[name] = depVersion.replace('link:bazel-bin/', 'file:').replace('/npm_module', '');
continue;
}
@@ -48065,8 +48081,10 @@ function addProjectToTree(tree, pathParts, project) {
"use strict";
__webpack_require__.r(__webpack_exports__);
-/* harmony import */ var _ensure_yarn_integrity_exists__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(373);
-/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "ensureYarnIntegrityFileExists", function() { return _ensure_yarn_integrity_exists__WEBPACK_IMPORTED_MODULE_0__["ensureYarnIntegrityFileExists"]; });
+/* harmony import */ var _yarn_integrity__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(373);
+/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "yarnIntegrityFileExists", function() { return _yarn_integrity__WEBPACK_IMPORTED_MODULE_0__["yarnIntegrityFileExists"]; });
+
+/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "ensureYarnIntegrityFileExists", function() { return _yarn_integrity__WEBPACK_IMPORTED_MODULE_0__["ensureYarnIntegrityFileExists"]; });
/* harmony import */ var _get_cache_folders__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(374);
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "getBazelDiskCacheFolder", function() { return _get_cache_folders__WEBPACK_IMPORTED_MODULE_1__["getBazelDiskCacheFolder"]; });
@@ -48099,6 +48117,7 @@ __webpack_require__.r(__webpack_exports__);
"use strict";
__webpack_require__.r(__webpack_exports__);
+/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "yarnIntegrityFileExists", function() { return yarnIntegrityFileExists; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ensureYarnIntegrityFileExists", function() { return ensureYarnIntegrityFileExists; });
/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(4);
/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(path__WEBPACK_IMPORTED_MODULE_0__);
@@ -48112,9 +48131,27 @@ __webpack_require__.r(__webpack_exports__);
*/
+async function yarnIntegrityFileExists(nodeModulesPath) {
+ try {
+ const nodeModulesRealPath = await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["tryRealpath"])(nodeModulesPath);
+ const yarnIntegrityFilePath = Object(path__WEBPACK_IMPORTED_MODULE_0__["join"])(nodeModulesRealPath, '.yarn-integrity'); // check if the file already exists
+
+ if (await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["isFile"])(yarnIntegrityFilePath)) {
+ return true;
+ }
+ } catch {// no-op
+ }
+
+ return false;
+}
async function ensureYarnIntegrityFileExists(nodeModulesPath) {
try {
- await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["writeFile"])(Object(path__WEBPACK_IMPORTED_MODULE_0__["join"])(nodeModulesPath, '.yarn-integrity'), '', {
+ const nodeModulesRealPath = await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["tryRealpath"])(nodeModulesPath);
+ const yarnIntegrityFilePath = Object(path__WEBPACK_IMPORTED_MODULE_0__["join"])(nodeModulesRealPath, '.yarn-integrity'); // ensure node_modules folder is created
+
+ await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["mkdirp"])(nodeModulesRealPath); // write a blank file in case it doesn't exists
+
+ await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["writeFile"])(yarnIntegrityFilePath, '', {
flag: 'wx'
});
} catch {// no-op
@@ -63656,7 +63693,7 @@ async function buildBazelProductionProjects({
const projectNames = [...projects.values()].map(project => project.name);
_utils_log__WEBPACK_IMPORTED_MODULE_6__["log"].info(`Preparing Bazel projects production build for [${projectNames.join(', ')}]`);
await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_4__["runBazel"])(['build', '//packages:build']);
- _utils_log__WEBPACK_IMPORTED_MODULE_6__["log"].info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete}]`);
+ _utils_log__WEBPACK_IMPORTED_MODULE_6__["log"].info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete`);
for (const project of projects.values()) {
await copyToBuild(project, kibanaRoot, buildRoot);
@@ -63680,7 +63717,7 @@ async function copyToBuild(project, kibanaRoot, buildRoot) {
const relativeProjectPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["relative"])(kibanaRoot, project.path);
const buildProjectPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildRoot, relativeProjectPath);
await cpy__WEBPACK_IMPORTED_MODULE_0___default()(['**/*'], buildProjectPath, {
- cwd: Object(path__WEBPACK_IMPORTED_MODULE_2__["join"])(kibanaRoot, 'bazel', 'bin', 'packages', Object(path__WEBPACK_IMPORTED_MODULE_2__["basename"])(buildProjectPath), 'npm_module'),
+ cwd: Object(path__WEBPACK_IMPORTED_MODULE_2__["join"])(kibanaRoot, 'bazel-bin', 'packages', Object(path__WEBPACK_IMPORTED_MODULE_2__["basename"])(buildProjectPath), 'npm_module'),
dot: true,
onlyFiles: true,
parents: true
@@ -63702,12 +63739,12 @@ async function applyCorrectPermissions(project, kibanaRoot, buildRoot) {
const buildProjectPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildRoot, relativeProjectPath);
const allPluginPaths = await globby__WEBPACK_IMPORTED_MODULE_1___default()([`**/*`], {
onlyFiles: false,
- cwd: Object(path__WEBPACK_IMPORTED_MODULE_2__["join"])(kibanaRoot, 'bazel', 'bin', 'packages', Object(path__WEBPACK_IMPORTED_MODULE_2__["basename"])(buildProjectPath), 'npm_module'),
+ cwd: buildProjectPath,
dot: true
});
for (const pluginPath of allPluginPaths) {
- const resolvedPluginPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildRoot, pluginPath);
+ const resolvedPluginPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildProjectPath, pluginPath);
if (await Object(_utils_fs__WEBPACK_IMPORTED_MODULE_5__["isFile"])(resolvedPluginPath)) {
await Object(_utils_fs__WEBPACK_IMPORTED_MODULE_5__["chmod"])(resolvedPluginPath, 0o644);
diff --git a/packages/kbn-pm/package.json b/packages/kbn-pm/package.json
index 0fa79fff6e0d95..050aadd402d8a5 100644
--- a/packages/kbn-pm/package.json
+++ b/packages/kbn-pm/package.json
@@ -9,7 +9,7 @@
},
"scripts": {
"build": "../../node_modules/.bin/webpack",
- "kbn:watch": "../../node_modules/.bin/webpack --watch --progress",
+ "kbn:watch": "../../node_modules/.bin/webpack --watch",
"prettier": "../../node_modules/.bin/prettier --write './src/**/*.ts'"
},
"devDependencies": {
diff --git a/packages/kbn-pm/src/cli.ts b/packages/kbn-pm/src/cli.ts
index 6d033b4121d992..f6ea4d7124ab27 100644
--- a/packages/kbn-pm/src/cli.ts
+++ b/packages/kbn-pm/src/cli.ts
@@ -75,7 +75,7 @@ export async function run(argv: string[]) {
},
default: {
cache: true,
- 'force-install': true,
+ 'force-install': false,
offline: false,
validate: true,
},
diff --git a/packages/kbn-pm/src/commands/bootstrap.ts b/packages/kbn-pm/src/commands/bootstrap.ts
index 4a6a43ff2d91f3..b383a52be63f50 100644
--- a/packages/kbn-pm/src/commands/bootstrap.ts
+++ b/packages/kbn-pm/src/commands/bootstrap.ts
@@ -17,7 +17,12 @@ import { getAllChecksums } from '../utils/project_checksums';
import { BootstrapCacheFile } from '../utils/bootstrap_cache_file';
import { readYarnLock } from '../utils/yarn_lock';
import { validateDependencies } from '../utils/validate_dependencies';
-import { ensureYarnIntegrityFileExists, installBazelTools, runBazel } from '../utils/bazel';
+import {
+ ensureYarnIntegrityFileExists,
+ installBazelTools,
+ runBazel,
+ yarnIntegrityFileExists,
+} from '../utils/bazel';
export const BootstrapCommand: ICommand = {
description: 'Install dependencies and crosslink projects',
@@ -33,7 +38,13 @@ export const BootstrapCommand: ICommand = {
const batchedNonBazelProjects = topologicallyBatchProjects(nonBazelProjectsOnly, projectGraph);
const kibanaProjectPath = projects.get('kibana')?.path || '';
const runOffline = options?.offline === true;
- const forceInstall = !!options && options['force-install'] === true;
+
+ // Force install is set when the flag is passed or
+ // when the `.yarn-integrity` file is not found, which
+ // is indicated by the return value of yarnIntegrityFileExists.
+ const forceInstall =
+ (!!options && options['force-install'] === true) ||
+ !(await yarnIntegrityFileExists(resolve(kibanaProjectPath, 'node_modules')));
// Ensure we have a `node_modules/.yarn-integrity` file as we depend on it
// for bazel to know it has to re-install the node_modules after a reset or a clean
@@ -51,9 +62,6 @@ export const BootstrapCommand: ICommand = {
// That way non bazel projects could depend on bazel projects but not the other way around
// That is only intended during the migration process while non Bazel projects are not removed at all.
//
- // Until we have our first package build within Bazel we will always need to directly call the yarn rule
- // otherwise yarn install won't trigger as we don't have any npm dependency within Bazel
- // TODO: Change CLI default in order to not force install as soon as we have our first Bazel package being built
if (forceInstall) {
await runBazel(['run', '@nodejs//:yarn'], runOffline);
}
diff --git a/packages/kbn-pm/src/production/build_bazel_production_projects.ts b/packages/kbn-pm/src/production/build_bazel_production_projects.ts
index 313622d44276a4..07c0b651f5ad13 100644
--- a/packages/kbn-pm/src/production/build_bazel_production_projects.ts
+++ b/packages/kbn-pm/src/production/build_bazel_production_projects.ts
@@ -37,7 +37,7 @@ export async function buildBazelProductionProjects({
log.info(`Preparing Bazel projects production build for [${projectNames.join(', ')}]`);
await runBazel(['build', '//packages:build']);
- log.info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete}]`);
+ log.info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete`);
for (const project of projects.values()) {
await copyToBuild(project, kibanaRoot, buildRoot);
@@ -62,7 +62,7 @@ async function copyToBuild(project: Project, kibanaRoot: string, buildRoot: stri
const buildProjectPath = resolve(buildRoot, relativeProjectPath);
await copy(['**/*'], buildProjectPath, {
- cwd: join(kibanaRoot, 'bazel', 'bin', 'packages', basename(buildProjectPath), 'npm_module'),
+ cwd: join(kibanaRoot, 'bazel-bin', 'packages', basename(buildProjectPath), 'npm_module'),
dot: true,
onlyFiles: true,
parents: true,
@@ -88,12 +88,12 @@ async function applyCorrectPermissions(project: Project, kibanaRoot: string, bui
const buildProjectPath = resolve(buildRoot, relativeProjectPath);
const allPluginPaths = await globby([`**/*`], {
onlyFiles: false,
- cwd: join(kibanaRoot, 'bazel', 'bin', 'packages', basename(buildProjectPath), 'npm_module'),
+ cwd: buildProjectPath,
dot: true,
});
for (const pluginPath of allPluginPaths) {
- const resolvedPluginPath = resolve(buildRoot, pluginPath);
+ const resolvedPluginPath = resolve(buildProjectPath, pluginPath);
if (await isFile(resolvedPluginPath)) {
await chmod(resolvedPluginPath, 0o644);
}
diff --git a/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap b/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap
index c037c2a4976b43..8aeae04c265cf5 100644
--- a/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap
+++ b/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap
@@ -11,6 +11,7 @@ Object {
"mkdirp": Array [],
"readFile": Array [],
"rmdirp": Array [],
+ "tryRealpath": Array [],
"unlink": Array [],
"writeFile": Array [],
}
@@ -27,6 +28,7 @@ Object {
"mkdirp": Array [],
"readFile": Array [],
"rmdirp": Array [],
+ "tryRealpath": Array [],
"unlink": Array [],
"writeFile": Array [],
}
diff --git a/packages/kbn-pm/src/utils/bazel/ensure_yarn_integrity_exists.ts b/packages/kbn-pm/src/utils/bazel/ensure_yarn_integrity_exists.ts
deleted file mode 100644
index 90786bc0ea55e8..00000000000000
--- a/packages/kbn-pm/src/utils/bazel/ensure_yarn_integrity_exists.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { join } from 'path';
-import { writeFile } from '../fs';
-
-export async function ensureYarnIntegrityFileExists(nodeModulesPath: string) {
- try {
- await writeFile(join(nodeModulesPath, '.yarn-integrity'), '', { flag: 'wx' });
- } catch {
- // no-op
- }
-}
diff --git a/packages/kbn-pm/src/utils/bazel/index.ts b/packages/kbn-pm/src/utils/bazel/index.ts
index 0b755ba2446a04..a3651039161b86 100644
--- a/packages/kbn-pm/src/utils/bazel/index.ts
+++ b/packages/kbn-pm/src/utils/bazel/index.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-export * from './ensure_yarn_integrity_exists';
+export * from './yarn_integrity';
export * from './get_cache_folders';
export * from './install_tools';
export * from './run';
diff --git a/packages/kbn-pm/src/utils/bazel/yarn_integrity.ts b/packages/kbn-pm/src/utils/bazel/yarn_integrity.ts
new file mode 100644
index 00000000000000..3a72f5ca080b8e
--- /dev/null
+++ b/packages/kbn-pm/src/utils/bazel/yarn_integrity.ts
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { join } from 'path';
+import { isFile, mkdirp, tryRealpath, writeFile } from '../fs';
+
+export async function yarnIntegrityFileExists(nodeModulesPath: string) {
+ try {
+ const nodeModulesRealPath = await tryRealpath(nodeModulesPath);
+ const yarnIntegrityFilePath = join(nodeModulesRealPath, '.yarn-integrity');
+
+ // check if the file already exists
+ if (await isFile(yarnIntegrityFilePath)) {
+ return true;
+ }
+ } catch {
+ // no-op
+ }
+
+ return false;
+}
+
+export async function ensureYarnIntegrityFileExists(nodeModulesPath: string) {
+ try {
+ const nodeModulesRealPath = await tryRealpath(nodeModulesPath);
+ const yarnIntegrityFilePath = join(nodeModulesRealPath, '.yarn-integrity');
+
+ // ensure node_modules folder is created
+ await mkdirp(nodeModulesRealPath);
+
+ // write a blank file in case it doesn't exist
+ await writeFile(yarnIntegrityFilePath, '', { flag: 'wx' });
+ } catch {
+ // no-op
+ }
+}
diff --git a/packages/kbn-pm/src/utils/fs.ts b/packages/kbn-pm/src/utils/fs.ts
index dd961b83214464..5739d319e08e7d 100644
--- a/packages/kbn-pm/src/utils/fs.ts
+++ b/packages/kbn-pm/src/utils/fs.ts
@@ -20,6 +20,7 @@ const symlink = promisify(fs.symlink);
export const chmod = promisify(fs.chmod);
const cmdShim = promisify(cmdShimCb);
const mkdir = promisify(fs.mkdir);
+const realpathNative = promisify(fs.realpath.native);
export const mkdirp = async (path: string) => await mkdir(path, { recursive: true });
export const rmdirp = async (path: string) => await del(path, { force: true });
export const unlink = promisify(fs.unlink);
@@ -96,3 +97,17 @@ async function forceCreate(src: string, dest: string, type: string) {
await symlink(src, dest, type);
}
+
+export async function tryRealpath(path: string): Promise {
+ let calculatedPath = path;
+
+ try {
+ calculatedPath = await realpathNative(path);
+ } catch (error) {
+ if (error.code !== 'ENOENT') {
+ throw error;
+ }
+ }
+
+ return calculatedPath;
+}
diff --git a/packages/kbn-pm/src/utils/package_json.ts b/packages/kbn-pm/src/utils/package_json.ts
index b405b544ab800c..e635c2566e65ac 100644
--- a/packages/kbn-pm/src/utils/package_json.ts
+++ b/packages/kbn-pm/src/utils/package_json.ts
@@ -35,7 +35,7 @@ export const createProductionPackageJson = (pkgJson: IPackageJson) => ({
export const isLinkDependency = (depVersion: string) => depVersion.startsWith('link:');
export const isBazelPackageDependency = (depVersion: string) =>
- depVersion.startsWith('link:bazel/bin/');
+ depVersion.startsWith('link:bazel-bin/');
/**
* Replaces `link:` dependencies with `file:` dependencies. When installing
@@ -46,7 +46,7 @@ export const isBazelPackageDependency = (depVersion: string) =>
* will then _copy_ the `file:` dependencies into `node_modules` instead of
* symlinking like we do in development.
*
- * Additionally it also taken care of replacing `link:bazel/bin/` with
+ * Additionally it also takes care of replacing `link:bazel-bin/` with
* `file:` so we can also support the copy of the Bazel packages dist already into
* build/packages to be copied into the node_modules
*/
@@ -61,7 +61,7 @@ export function transformDependencies(dependencies: IPackageDependencies = {}) {
}
if (isBazelPackageDependency(depVersion)) {
- newDeps[name] = depVersion.replace('link:bazel/bin/', 'file:');
+ newDeps[name] = depVersion.replace('link:bazel-bin/', 'file:').replace('/npm_module', '');
continue;
}
diff --git a/packages/kbn-pm/src/utils/project.ts b/packages/kbn-pm/src/utils/project.ts
index 797a9a36df78f7..5d2a0547b25772 100644
--- a/packages/kbn-pm/src/utils/project.ts
+++ b/packages/kbn-pm/src/utils/project.ts
@@ -92,7 +92,10 @@ export class Project {
public ensureValidProjectDependency(project: Project) {
const relativePathToProject = normalizePath(Path.relative(this.path, project.path));
const relativePathToProjectIfBazelPkg = normalizePath(
- Path.relative(this.path, `bazel/bin/packages/${Path.basename(project.path)}`)
+ Path.relative(
+ this.path,
+ `${__dirname}/../../../bazel-bin/packages/${Path.basename(project.path)}/npm_module`
+ )
);
const versionInPackageJson = this.allDependencies[project.name];
@@ -100,7 +103,7 @@ export class Project {
const expectedVersionInPackageJsonIfBazelPkg = `link:${relativePathToProjectIfBazelPkg}`;
// TODO: after introduce bazel to build all the packages and completely remove the support for kbn packages
- // do not allow child projects to hold dependencies
+ // do not allow child projects to hold dependencies, unless they are meant to be published externally
if (
versionInPackageJson === expectedVersionInPackageJson ||
versionInPackageJson === expectedVersionInPackageJsonIfBazelPkg
diff --git a/packages/kbn-test/jest-preset.js b/packages/kbn-test/jest-preset.js
index 4949d6d1f9fad4..225f93d4878238 100644
--- a/packages/kbn-test/jest-preset.js
+++ b/packages/kbn-test/jest-preset.js
@@ -107,4 +107,7 @@ module.exports = {
'!**/*.d.ts',
'!**/index.{js,ts}',
],
+
+ // A custom resolver to preserve symlinks by default
+ resolver: '/packages/kbn-test/target/jest/setup/preserve_symlinks_resolver.js',
};
diff --git a/packages/kbn-test/src/jest/setup/preserve_symlinks_resolver.js b/packages/kbn-test/src/jest/setup/preserve_symlinks_resolver.js
new file mode 100644
index 00000000000000..711bf2c9aa189c
--- /dev/null
+++ b/packages/kbn-test/src/jest/setup/preserve_symlinks_resolver.js
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+// Inspired by a discussion found at https://github.com/facebook/jest/issues/5356 as Jest currently doesn't
+// offer any other option to preserve symlinks.
+//
+// It will be available once https://github.com/facebook/jest/pull/9976 is merged.
+
+const resolve = require('resolve');
+
+module.exports = (request, options) => {
+ try {
+ return resolve.sync(request, {
+ basedir: options.basedir,
+ extensions: options.extensions,
+ preserveSymlinks: true,
+ });
+ } catch (error) {
+ if (error.code === 'MODULE_NOT_FOUND') {
+ return options.defaultResolver(request, options);
+ }
+
+ throw error;
+ }
+};
diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js
index ede617908fd3d9..f14c793d22a097 100644
--- a/packages/kbn-ui-shared-deps/entry.js
+++ b/packages/kbn-ui-shared-deps/entry.js
@@ -47,3 +47,5 @@ export const LodashFp = require('lodash/fp');
// runtime deps which don't need to be copied across all bundles
export const TsLib = require('tslib');
export const KbnAnalytics = require('@kbn/analytics');
+export const KbnStd = require('@kbn/std');
+export const SaferLodashSet = require('@elastic/safer-lodash-set');
diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js
index d1217dd8db0d4c..0542bc89ff9e48 100644
--- a/packages/kbn-ui-shared-deps/index.js
+++ b/packages/kbn-ui-shared-deps/index.js
@@ -58,5 +58,7 @@ exports.externals = {
*/
tslib: '__kbnSharedDeps__.TsLib',
'@kbn/analytics': '__kbnSharedDeps__.KbnAnalytics',
+ '@kbn/std': '__kbnSharedDeps__.KbnStd',
+ '@elastic/safer-lodash-set': '__kbnSharedDeps__.SaferLodashSet',
};
exports.publicPathLoader = require.resolve('./public_path_loader');
diff --git a/packages/kbn-ui-shared-deps/webpack.config.js b/packages/kbn-ui-shared-deps/webpack.config.js
index 135884fbf13e7e..76e6843bea2f82 100644
--- a/packages/kbn-ui-shared-deps/webpack.config.js
+++ b/packages/kbn-ui-shared-deps/webpack.config.js
@@ -177,22 +177,22 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({
compiler.hooks.emit.tap('MetricsPlugin', (compilation) => {
const metrics = [
{
- group: '@kbn/ui-shared-deps asset size',
- id: 'kbn-ui-shared-deps.js',
+ group: 'page load bundle size',
+ id: 'kbnUiSharedDeps-js',
value: compilation.assets['kbn-ui-shared-deps.js'].size(),
},
{
- group: '@kbn/ui-shared-deps asset size',
- id: 'kbn-ui-shared-deps.@elastic.js',
- value: compilation.assets['kbn-ui-shared-deps.@elastic.js'].size(),
- },
- {
- group: '@kbn/ui-shared-deps asset size',
- id: 'css',
+ group: 'page load bundle size',
+ id: 'kbnUiSharedDeps-css',
value:
compilation.assets['kbn-ui-shared-deps.css'].size() +
compilation.assets['kbn-ui-shared-deps.v7.light.css'].size(),
},
+ {
+ group: 'page load bundle size',
+ id: 'kbnUiSharedDeps-elastic',
+ value: compilation.assets['kbn-ui-shared-deps.@elastic.js'].size(),
+ },
];
compilation.emitAsset(
diff --git a/packages/kbn-utils/src/package_json/index.ts b/packages/kbn-utils/src/package_json/index.ts
index 40ce353780749a..d9304cee2ca386 100644
--- a/packages/kbn-utils/src/package_json/index.ts
+++ b/packages/kbn-utils/src/package_json/index.ts
@@ -14,3 +14,7 @@ export const kibanaPackageJson = {
__dirname: dirname(resolve(REPO_ROOT, 'package.json')),
...require(resolve(REPO_ROOT, 'package.json')),
};
+
+export const isKibanaDistributable = () => {
+ return kibanaPackageJson.build && kibanaPackageJson.build.distributable === true;
+};
diff --git a/scripts/build_kibana_platform_plugins.js b/scripts/build_kibana_platform_plugins.js
index fa630e0bb1808d..9038d08364400c 100644
--- a/scripts/build_kibana_platform_plugins.js
+++ b/scripts/build_kibana_platform_plugins.js
@@ -7,6 +7,7 @@
*/
require('../src/setup_node_env/ensure_node_preserve_symlinks');
+require('source-map-support/register');
require('@kbn/optimizer').runKbnOptimizerCli({
defaultLimitsPath: require.resolve('../packages/kbn-optimizer/limits.yml'),
});
diff --git a/src/cli/cli.js b/src/cli/cli.js
index 4540bf4a3f93c6..d3bff4f492a80b 100644
--- a/src/cli/cli.js
+++ b/src/cli/cli.js
@@ -7,7 +7,7 @@
*/
import _ from 'lodash';
-import { pkg } from '../core/server/utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import Command from './command';
import serveCommand from './serve/serve';
diff --git a/src/cli/serve/serve.js b/src/cli/serve/serve.js
index a494e4538e79a7..ad83965efde338 100644
--- a/src/cli/serve/serve.js
+++ b/src/cli/serve/serve.js
@@ -12,8 +12,7 @@ import { statSync } from 'fs';
import { resolve } from 'path';
import url from 'url';
-import { getConfigPath, fromRoot } from '@kbn/utils';
-import { IS_KIBANA_DISTRIBUTABLE } from '../../legacy/utils';
+import { getConfigPath, fromRoot, isKibanaDistributable } from '@kbn/utils';
import { readKeystore } from '../keystore/read_keystore';
function canRequire(path) {
@@ -65,9 +64,10 @@ function applyConfigOverrides(rawConfig, opts, extraCliOptions) {
delete rawConfig.xpack;
}
- if (opts.dev) {
- set('env', 'development');
+ // only used to set cliArgs.envName, we don't want to inject that into the config
+ delete extraCliOptions.env;
+ if (opts.dev) {
if (!has('elasticsearch.username')) {
set('elasticsearch.username', 'kibana_system');
}
@@ -184,7 +184,7 @@ export default function (program) {
.option('--plugins ', 'an alias for --plugin-dir', pluginDirCollector)
.option('--optimize', 'Deprecated, running the optimizer is no longer required');
- if (!IS_KIBANA_DISTRIBUTABLE) {
+ if (!isKibanaDistributable()) {
command
.option('--oss', 'Start Kibana without X-Pack')
.option(
diff --git a/src/cli_encryption_keys/cli_encryption_keys.js b/src/cli_encryption_keys/cli_encryption_keys.js
index e922b9354d291a..acee81aabb706d 100644
--- a/src/cli_encryption_keys/cli_encryption_keys.js
+++ b/src/cli_encryption_keys/cli_encryption_keys.js
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { pkg } from '../core/server/utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
+
import Command from '../cli/command';
import { EncryptionConfig } from './encryption_config';
diff --git a/src/cli_keystore/cli_keystore.js b/src/cli_keystore/cli_keystore.js
index b325f685766aad..9f44e5d56e9d21 100644
--- a/src/cli_keystore/cli_keystore.js
+++ b/src/cli_keystore/cli_keystore.js
@@ -7,8 +7,8 @@
*/
import _ from 'lodash';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
-import { pkg } from '../core/server/utils';
import Command from '../cli/command';
import { Keystore } from '../cli/keystore';
diff --git a/src/cli_plugin/cli.js b/src/cli_plugin/cli.js
index 24ccba6a233972..5ef142192c5097 100644
--- a/src/cli_plugin/cli.js
+++ b/src/cli_plugin/cli.js
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { pkg } from '../core/server/utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import Command from '../cli/command';
import { listCommand } from './list';
import { installCommand } from './install';
diff --git a/src/cli_plugin/install/index.js b/src/cli_plugin/install/index.js
index c028facc28e2b4..2683dd41d2bb32 100644
--- a/src/cli_plugin/install/index.js
+++ b/src/cli_plugin/install/index.js
@@ -6,8 +6,7 @@
* Side Public License, v 1.
*/
-import { getConfigPath } from '@kbn/utils';
-import { pkg } from '../../core/server/utils';
+import { getConfigPath, kibanaPackageJson as pkg } from '@kbn/utils';
import { install } from './install';
import { Logger } from '../lib/logger';
import { parse, parseMilliseconds } from './settings';
diff --git a/src/cli_plugin/install/kibana.js b/src/cli_plugin/install/kibana.js
index 29cb8df7401b63..1de157b951d035 100644
--- a/src/cli_plugin/install/kibana.js
+++ b/src/cli_plugin/install/kibana.js
@@ -9,7 +9,7 @@
import path from 'path';
import { statSync } from 'fs';
-import { versionSatisfies, cleanVersion } from '../../legacy/utils/version';
+import { versionSatisfies, cleanVersion } from './utils/version';
export function existingInstall(settings, logger) {
try {
diff --git a/src/cli_plugin/install/settings.js b/src/cli_plugin/install/settings.js
index 94473cc12aab22..e1536d66e05293 100644
--- a/src/cli_plugin/install/settings.js
+++ b/src/cli_plugin/install/settings.js
@@ -7,10 +7,8 @@
*/
import { resolve } from 'path';
-
import expiry from 'expiry-js';
-
-import { fromRoot } from '../../core/server/utils';
+import { fromRoot } from '@kbn/utils';
function generateUrls({ version, plugin }) {
return [
diff --git a/src/cli_plugin/install/settings.test.js b/src/cli_plugin/install/settings.test.js
index f06fd7eca79021..c7985763524ed2 100644
--- a/src/cli_plugin/install/settings.test.js
+++ b/src/cli_plugin/install/settings.test.js
@@ -7,8 +7,8 @@
*/
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { fromRoot } from '@kbn/utils';
-import { fromRoot } from '../../core/server/utils';
import { parseMilliseconds, parse } from './settings';
const SECOND = 1000;
diff --git a/src/legacy/utils/version.js b/src/cli_plugin/install/utils/version.js
similarity index 100%
rename from src/legacy/utils/version.js
rename to src/cli_plugin/install/utils/version.js
diff --git a/src/cli_plugin/list/index.js b/src/cli_plugin/list/index.js
index ce55b939b8a4cb..02d1ed19f8445e 100644
--- a/src/cli_plugin/list/index.js
+++ b/src/cli_plugin/list/index.js
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { fromRoot } from '../../core/server/utils';
+import { fromRoot } from '@kbn/utils';
import { list } from './list';
import { Logger } from '../lib/logger';
import { logWarnings } from '../lib/log_warnings';
diff --git a/src/cli_plugin/remove/settings.js b/src/cli_plugin/remove/settings.js
index 333fa7cb0f2e16..2381770ee0a65b 100644
--- a/src/cli_plugin/remove/settings.js
+++ b/src/cli_plugin/remove/settings.js
@@ -7,8 +7,7 @@
*/
import { resolve } from 'path';
-
-import { fromRoot } from '../../core/server/utils';
+import { fromRoot } from '@kbn/utils';
export function parse(command, options) {
const settings = {
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index ef3172b620b232..b179c998f1126f 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -216,6 +216,7 @@ export class DocLinksService {
},
maps: {
guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/maps.html`,
+ importGeospatialPrivileges: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/import-geospatial-data.html#import-geospatial-privileges`,
},
monitoring: {
alertsKibana: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/kibana-alerts.html`,
@@ -271,8 +272,10 @@ export class DocLinksService {
painlessExecute: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-execute-api.html`,
painlessExecuteAPIContexts: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-execute-api.html#_contexts`,
putComponentTemplateMetadata: `${ELASTICSEARCH_DOCS}indices-component-template.html#component-templates-metadata`,
+ putEnrichPolicy: `${ELASTICSEARCH_DOCS}put-enrich-policy-api.html`,
putSnapshotLifecyclePolicy: `${ELASTICSEARCH_DOCS}slm-api-put-policy.html`,
- putWatch: `${ELASTICSEARCH_DOCS}/watcher-api-put-watch.html`,
+ putWatch: `${ELASTICSEARCH_DOCS}watcher-api-put-watch.html`,
+ simulatePipeline: `${ELASTICSEARCH_DOCS}simulate-pipeline-api.html`,
updateTransform: `${ELASTICSEARCH_DOCS}update-transform.html`,
},
plugins: {
@@ -293,9 +296,47 @@ export class DocLinksService {
restoreSnapshotApi: `${ELASTICSEARCH_DOCS}restore-snapshot-api.html#restore-snapshot-api-request-body`,
},
ingest: {
+ append: `${ELASTICSEARCH_DOCS}append-processor.html`,
+ bytes: `${ELASTICSEARCH_DOCS}bytes-processor.html`,
+ circle: `${ELASTICSEARCH_DOCS}ingest-circle-processor.html`,
+ convert: `${ELASTICSEARCH_DOCS}convert-processor.html`,
+ csv: `${ELASTICSEARCH_DOCS}csv-processor.html`,
+ date: `${ELASTICSEARCH_DOCS}date-processor.html`,
+ dateIndexName: `${ELASTICSEARCH_DOCS}date-index-name-processor.html`,
+ dissect: `${ELASTICSEARCH_DOCS}dissect-processor.html`,
+ dissectKeyModifiers: `${ELASTICSEARCH_DOCS}dissect-processor.html#dissect-key-modifiers`,
+ dotExpander: `${ELASTICSEARCH_DOCS}dot-expand-processor.html`,
+ drop: `${ELASTICSEARCH_DOCS}drop-processor.html`,
+ enrich: `${ELASTICSEARCH_DOCS}ingest-enriching-data.html`,
+ fail: `${ELASTICSEARCH_DOCS}fail-processor.html`,
+ foreach: `${ELASTICSEARCH_DOCS}foreach-processor.html`,
+ geoIp: `${ELASTICSEARCH_DOCS}geoip-processor.html`,
+ grok: `${ELASTICSEARCH_DOCS}grok-processor.html`,
+ gsub: `${ELASTICSEARCH_DOCS}gsub-processor.html`,
+ htmlString: `${ELASTICSEARCH_DOCS}htmlstrip-processor.html`,
+ inference: `${ELASTICSEARCH_DOCS}inference-processor.html`,
+ inferenceClassification: `${ELASTICSEARCH_DOCS}inference-processor.html#inference-processor-classification-opt`,
+ inferenceRegression: `${ELASTICSEARCH_DOCS}inference-processor.html#inference-processor-regression-opt`,
+ join: `${ELASTICSEARCH_DOCS}join-processor.html`,
+ json: `${ELASTICSEARCH_DOCS}json-processor.html`,
+ kv: `${ELASTICSEARCH_DOCS}kv-processor.html`,
+ lowercase: `${ELASTICSEARCH_DOCS}lowercase-processor.html`,
+ pipeline: `${ELASTICSEARCH_DOCS}pipeline-processor.html`,
pipelines: `${ELASTICSEARCH_DOCS}ingest.html`,
pipelineFailure: `${ELASTICSEARCH_DOCS}ingest.html#handling-pipeline-failures`,
processors: `${ELASTICSEARCH_DOCS}processors.html`,
+ remove: `${ELASTICSEARCH_DOCS}remove-processor.html`,
+ rename: `${ELASTICSEARCH_DOCS}rename-processor.html`,
+ script: `${ELASTICSEARCH_DOCS}script-processor.html`,
+ set: `${ELASTICSEARCH_DOCS}set-processor.html`,
+ setSecurityUser: `${ELASTICSEARCH_DOCS}ingest-node-set-security-user-processor.html`,
+ sort: `${ELASTICSEARCH_DOCS}sort-processor.html`,
+ split: `${ELASTICSEARCH_DOCS}split-processor.html`,
+ trim: `${ELASTICSEARCH_DOCS}trim-processor.html`,
+ uppercase: `${ELASTICSEARCH_DOCS}uppercase-processor.html`,
+ uriParts: `${ELASTICSEARCH_DOCS}uri-parts-processor.html`,
+ urlDecode: `${ELASTICSEARCH_DOCS}urldecode-processor.html`,
+ userAgent: `${ELASTICSEARCH_DOCS}user-agent-processor.html`,
},
},
});
@@ -443,6 +484,7 @@ export interface DocLinksStart {
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
+ simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record;
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 0a1c7a9b0fa360..8327428991e13b 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -627,6 +627,7 @@ export interface DocLinksStart {
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
+ simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record;
diff --git a/src/core/public/rendering/_base.scss b/src/core/public/rendering/_base.scss
index de13785a17f5b9..ed2d9bc0b3917e 100644
--- a/src/core/public/rendering/_base.scss
+++ b/src/core/public/rendering/_base.scss
@@ -11,6 +11,16 @@
min-height: 100%;
}
+#app-fixed-viewport {
+ pointer-events: none;
+ visibility: hidden;
+ position: fixed;
+ top: 0;
+ right: 0;
+ bottom: 0;
+ left: 0;
+}
+
.app-wrapper {
display: flex;
flex-flow: column nowrap;
@@ -35,6 +45,10 @@
@mixin kbnAffordForHeader($headerHeight) {
padding-top: $headerHeight;
+ #app-fixed-viewport {
+ top: $headerHeight;
+ }
+
.euiFlyout,
.euiCollapsibleNav {
top: $headerHeight;
diff --git a/src/core/public/rendering/rendering_service.tsx b/src/core/public/rendering/rendering_service.tsx
index 843f2a253f33ec..787fa475c7d5f8 100644
--- a/src/core/public/rendering/rendering_service.tsx
+++ b/src/core/public/rendering/rendering_service.tsx
@@ -52,6 +52,7 @@ export class RenderingService {
{chromeHeader}
+
{bannerComponent}
{appComponent}
diff --git a/src/core/server/config/ensure_valid_configuration.test.ts b/src/core/server/config/ensure_valid_configuration.test.ts
new file mode 100644
index 00000000000000..474e8dd59b4c4e
--- /dev/null
+++ b/src/core/server/config/ensure_valid_configuration.test.ts
@@ -0,0 +1,47 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { configServiceMock } from './mocks';
+import { ensureValidConfiguration } from './ensure_valid_configuration';
+import { CriticalError } from '../errors';
+
+describe('ensureValidConfiguration', () => {
+ let configService: ReturnType;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ configService = configServiceMock.create();
+ configService.getUsedPaths.mockReturnValue(Promise.resolve(['core', 'elastic']));
+ });
+
+ it('returns normally when there is no unused keys', async () => {
+ configService.getUnusedPaths.mockResolvedValue([]);
+ await expect(ensureValidConfiguration(configService as any)).resolves.toBeUndefined();
+ });
+
+ it('throws when there are some unused keys', async () => {
+ configService.getUnusedPaths.mockResolvedValue(['some.key', 'some.other.key']);
+
+ await expect(ensureValidConfiguration(configService as any)).rejects.toMatchInlineSnapshot(
+ `[Error: Unknown configuration key(s): "some.key", "some.other.key". Check for spelling errors and ensure that expected plugins are installed.]`
+ );
+ });
+
+ it('throws a `CriticalError` with the correct processExitCode value', async () => {
+ expect.assertions(2);
+
+ configService.getUnusedPaths.mockResolvedValue(['some.key', 'some.other.key']);
+
+ try {
+ await ensureValidConfiguration(configService as any);
+ } catch (e) {
+ expect(e).toBeInstanceOf(CriticalError);
+ expect(e.processExitCode).toEqual(64);
+ }
+ });
+});
diff --git a/src/core/server/legacy/config/ensure_valid_configuration.ts b/src/core/server/config/ensure_valid_configuration.ts
similarity index 62%
rename from src/core/server/legacy/config/ensure_valid_configuration.ts
rename to src/core/server/config/ensure_valid_configuration.ts
index fd3dd29e3d3549..a33625cc0841d0 100644
--- a/src/core/server/legacy/config/ensure_valid_configuration.ts
+++ b/src/core/server/config/ensure_valid_configuration.ts
@@ -6,20 +6,13 @@
* Side Public License, v 1.
*/
-import { getUnusedConfigKeys } from './get_unused_config_keys';
-import { ConfigService } from '../../config';
-import { CriticalError } from '../../errors';
-import { LegacyServiceSetupConfig } from '../types';
+import { ConfigService } from '@kbn/config';
+import { CriticalError } from '../errors';
-export async function ensureValidConfiguration(
- configService: ConfigService,
- { legacyConfig, settings }: LegacyServiceSetupConfig
-) {
- const unusedConfigKeys = await getUnusedConfigKeys({
- coreHandledConfigPaths: await configService.getUsedPaths(),
- settings,
- legacyConfig,
- });
+export async function ensureValidConfiguration(configService: ConfigService) {
+ await configService.validate();
+
+ const unusedConfigKeys = await configService.getUnusedPaths();
if (unusedConfigKeys.length > 0) {
const message = `Unknown configuration key(s): ${unusedConfigKeys
diff --git a/src/core/server/config/index.ts b/src/core/server/config/index.ts
index b1086d4470335f..686564c6d678a0 100644
--- a/src/core/server/config/index.ts
+++ b/src/core/server/config/index.ts
@@ -7,6 +7,7 @@
*/
export { coreDeprecationProvider } from './deprecation';
+export { ensureValidConfiguration } from './ensure_valid_configuration';
export {
ConfigService,
diff --git a/src/core/server/core_app/bundle_routes/register_bundle_routes.test.ts b/src/core/server/core_app/bundle_routes/register_bundle_routes.test.ts
index d51c3691469575..830f4a9a943645 100644
--- a/src/core/server/core_app/bundle_routes/register_bundle_routes.test.ts
+++ b/src/core/server/core_app/bundle_routes/register_bundle_routes.test.ts
@@ -10,7 +10,7 @@ import { registerRouteForBundleMock } from './register_bundle_routes.test.mocks'
import { PackageInfo } from '@kbn/config';
import { httpServiceMock } from '../../http/http_service.mock';
-import { UiPlugins } from '../../plugins';
+import { InternalPluginInfo, UiPlugins } from '../../plugins';
import { registerBundleRoutes } from './register_bundle_routes';
import { FileHashCache } from './file_hash_cache';
@@ -29,9 +29,12 @@ const createUiPlugins = (...ids: string[]): UiPlugins => ({
internal: ids.reduce((map, id) => {
map.set(id, {
publicTargetDir: `/plugins/${id}/public-target-dir`,
+ publicAssetsDir: `/plugins/${id}/public-assets-dir`,
+ version: '8.0.0',
+ requiredBundles: [],
});
return map;
- }, new Map()),
+ }, new Map()),
});
describe('registerBundleRoutes', () => {
@@ -86,16 +89,16 @@ describe('registerBundleRoutes', () => {
fileHashCache: expect.any(FileHashCache),
isDist: true,
bundlesPath: '/plugins/plugin-a/public-target-dir',
- publicPath: '/server-base-path/42/bundles/plugin/plugin-a/',
- routePath: '/42/bundles/plugin/plugin-a/',
+ publicPath: '/server-base-path/42/bundles/plugin/plugin-a/8.0.0/',
+ routePath: '/42/bundles/plugin/plugin-a/8.0.0/',
});
expect(registerRouteForBundleMock).toHaveBeenCalledWith(router, {
fileHashCache: expect.any(FileHashCache),
isDist: true,
bundlesPath: '/plugins/plugin-b/public-target-dir',
- publicPath: '/server-base-path/42/bundles/plugin/plugin-b/',
- routePath: '/42/bundles/plugin/plugin-b/',
+ publicPath: '/server-base-path/42/bundles/plugin/plugin-b/8.0.0/',
+ routePath: '/42/bundles/plugin/plugin-b/8.0.0/',
});
});
});
diff --git a/src/core/server/core_app/bundle_routes/register_bundle_routes.ts b/src/core/server/core_app/bundle_routes/register_bundle_routes.ts
index ee54f8ef34622e..f313f100036317 100644
--- a/src/core/server/core_app/bundle_routes/register_bundle_routes.ts
+++ b/src/core/server/core_app/bundle_routes/register_bundle_routes.ts
@@ -8,10 +8,10 @@
import { join } from 'path';
import { PackageInfo } from '@kbn/config';
+import { fromRoot } from '@kbn/utils';
import { distDir as uiSharedDepsDistDir } from '@kbn/ui-shared-deps';
import { IRouter } from '../../http';
import { UiPlugins } from '../../plugins';
-import { fromRoot } from '../../utils';
import { FileHashCache } from './file_hash_cache';
import { registerRouteForBundle } from './bundles_route';
@@ -27,7 +27,7 @@ import { registerRouteForBundle } from './bundles_route';
*/
export function registerBundleRoutes({
router,
- serverBasePath, // serverBasePath
+ serverBasePath,
uiPlugins,
packageInfo,
}: {
@@ -57,10 +57,10 @@ export function registerBundleRoutes({
isDist,
});
- [...uiPlugins.internal.entries()].forEach(([id, { publicTargetDir }]) => {
+ [...uiPlugins.internal.entries()].forEach(([id, { publicTargetDir, version }]) => {
registerRouteForBundle(router, {
- publicPath: `${serverBasePath}/${buildNum}/bundles/plugin/${id}/`,
- routePath: `/${buildNum}/bundles/plugin/${id}/`,
+ publicPath: `${serverBasePath}/${buildNum}/bundles/plugin/${id}/${version}/`,
+ routePath: `/${buildNum}/bundles/plugin/${id}/${version}/`,
bundlesPath: publicTargetDir,
fileHashCache,
isDist,
diff --git a/src/core/server/core_app/core_app.ts b/src/core/server/core_app/core_app.ts
index dac941767ebb5b..bc1098832bac53 100644
--- a/src/core/server/core_app/core_app.ts
+++ b/src/core/server/core_app/core_app.ts
@@ -7,9 +7,11 @@
*/
import Path from 'path';
+import { stringify } from 'querystring';
import { Env } from '@kbn/config';
+import { schema } from '@kbn/config-schema';
+import { fromRoot } from '@kbn/utils';
-import { fromRoot } from '../utils';
import { InternalCoreSetup } from '../internal_types';
import { CoreContext } from '../core_context';
import { Logger } from '../logging';
@@ -49,6 +51,41 @@ export class CoreApp {
});
});
+ // remove trailing slash catch-all
+ router.get(
+ {
+ path: '/{path*}',
+ validate: {
+ params: schema.object({
+ path: schema.maybe(schema.string()),
+ }),
+ query: schema.maybe(schema.recordOf(schema.string(), schema.any())),
+ },
+ },
+ async (context, req, res) => {
+ const { query, params } = req;
+ const { path } = params;
+ if (!path || !path.endsWith('/')) {
+ return res.notFound();
+ }
+
+ const basePath = httpSetup.basePath.get(req);
+ let rewrittenPath = path.slice(0, -1);
+ if (`/${path}`.startsWith(basePath)) {
+ rewrittenPath = rewrittenPath.substring(basePath.length);
+ }
+
+ const querystring = query ? stringify(query) : undefined;
+ const url = `${basePath}/${rewrittenPath}${querystring ? `?${querystring}` : ''}`;
+
+ return res.redirected({
+ headers: {
+ location: url,
+ },
+ });
+ }
+ );
+
router.get({ path: '/core', validate: false }, async (context, req, res) =>
res.ok({ body: { version: '0.0.1' } })
);
diff --git a/src/core/server/core_app/integration_tests/core_app_routes.test.ts b/src/core/server/core_app/integration_tests/core_app_routes.test.ts
new file mode 100644
index 00000000000000..6b0643f7d1bc7b
--- /dev/null
+++ b/src/core/server/core_app/integration_tests/core_app_routes.test.ts
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as kbnTestServer from '../../../test_helpers/kbn_server';
+import { Root } from '../../root';
+
+describe('Core app routes', () => {
+ let root: Root;
+
+ beforeAll(async function () {
+ root = kbnTestServer.createRoot({
+ plugins: { initialize: false },
+ server: {
+ basePath: '/base-path',
+ },
+ });
+
+ await root.setup();
+ await root.start();
+ });
+
+ afterAll(async function () {
+ await root.shutdown();
+ });
+
+ describe('`/{path*}` route', () => {
+ it('redirects requests to include the basePath', async () => {
+ const response = await kbnTestServer.request.get(root, '/some-path/').expect(302);
+ expect(response.get('location')).toEqual('/base-path/some-path');
+ });
+
+ it('includes the query in the redirect', async () => {
+ const response = await kbnTestServer.request.get(root, '/some-path/?foo=bar').expect(302);
+ expect(response.get('location')).toEqual('/base-path/some-path?foo=bar');
+ });
+
+ it('does not redirect if the path does not end with `/`', async () => {
+ await kbnTestServer.request.get(root, '/some-path').expect(404);
+ });
+
+ it('does not add the basePath if the path already contains it', async () => {
+ const response = await kbnTestServer.request.get(root, '/base-path/foo/').expect(302);
+ expect(response.get('location')).toEqual('/base-path/foo');
+ });
+ });
+
+ describe('`/` route', () => {
+ it('prevails on the `/{path*}` route', async () => {
+ const response = await kbnTestServer.request.get(root, '/').expect(302);
+ expect(response.get('location')).toEqual('/base-path/app/home');
+ });
+ });
+});
diff --git a/src/core/server/http/http_config.ts b/src/core/server/http/http_config.ts
index 356dad201ce95d..daf7424b8f8bd3 100644
--- a/src/core/server/http/http_config.ts
+++ b/src/core/server/http/http_config.ts
@@ -11,6 +11,7 @@ import { IHttpConfig, SslConfig, sslSchema } from '@kbn/server-http-tools';
import { hostname } from 'os';
import url from 'url';
+import { ServiceConfigDescriptor } from '../internal_types';
import { CspConfigType, CspConfig, ICspConfig } from '../csp';
import { ExternalUrlConfig, IExternalUrlConfig } from '../external_url';
@@ -20,141 +21,143 @@ const hostURISchema = schema.uri({ scheme: ['http', 'https'] });
const match = (regex: RegExp, errorMsg: string) => (str: string) =>
regex.test(str) ? undefined : errorMsg;
-// before update to make sure it's in sync with validation rules in Legacy
-// https://github.com/elastic/kibana/blob/master/src/legacy/server/config/schema.js
-export const config = {
- path: 'server' as const,
- schema: schema.object(
- {
- name: schema.string({ defaultValue: () => hostname() }),
- autoListen: schema.boolean({ defaultValue: true }),
- publicBaseUrl: schema.maybe(schema.uri({ scheme: ['http', 'https'] })),
- basePath: schema.maybe(
- schema.string({
- validate: match(validBasePathRegex, "must start with a slash, don't end with one"),
- })
- ),
- cors: schema.object(
- {
- enabled: schema.boolean({ defaultValue: false }),
- allowCredentials: schema.boolean({ defaultValue: false }),
- allowOrigin: schema.oneOf(
- [
- schema.arrayOf(hostURISchema, { minSize: 1 }),
- schema.arrayOf(schema.literal('*'), { minSize: 1, maxSize: 1 }),
- ],
- {
- defaultValue: ['*'],
- }
- ),
+const configSchema = schema.object(
+ {
+ name: schema.string({ defaultValue: () => hostname() }),
+ autoListen: schema.boolean({ defaultValue: true }),
+ publicBaseUrl: schema.maybe(schema.uri({ scheme: ['http', 'https'] })),
+ basePath: schema.maybe(
+ schema.string({
+ validate: match(validBasePathRegex, "must start with a slash, don't end with one"),
+ })
+ ),
+ cors: schema.object(
+ {
+ enabled: schema.boolean({ defaultValue: false }),
+ allowCredentials: schema.boolean({ defaultValue: false }),
+ allowOrigin: schema.oneOf(
+ [
+ schema.arrayOf(hostURISchema, { minSize: 1 }),
+ schema.arrayOf(schema.literal('*'), { minSize: 1, maxSize: 1 }),
+ ],
+ {
+ defaultValue: ['*'],
+ }
+ ),
+ },
+ {
+ validate(value) {
+ if (value.allowCredentials === true && value.allowOrigin.includes('*')) {
+ return 'Cannot specify wildcard origin "*" with "credentials: true". Please provide a list of allowed origins.';
+ }
},
- {
- validate(value) {
- if (value.allowCredentials === true && value.allowOrigin.includes('*')) {
- return 'Cannot specify wildcard origin "*" with "credentials: true". Please provide a list of allowed origins.';
- }
- },
+ }
+ ),
+ customResponseHeaders: schema.recordOf(schema.string(), schema.any(), {
+ defaultValue: {},
+ }),
+ host: schema.string({
+ defaultValue: 'localhost',
+ hostname: true,
+ validate(value) {
+ if (value === '0') {
+ return 'value 0 is not a valid hostname (use "0.0.0.0" to bind to all interfaces)';
}
+ },
+ }),
+ maxPayload: schema.byteSize({
+ defaultValue: '1048576b',
+ }),
+ port: schema.number({
+ defaultValue: 5601,
+ }),
+ rewriteBasePath: schema.boolean({ defaultValue: false }),
+ ssl: sslSchema,
+ keepaliveTimeout: schema.number({
+ defaultValue: 120000,
+ }),
+ socketTimeout: schema.number({
+ defaultValue: 120000,
+ }),
+ compression: schema.object({
+ enabled: schema.boolean({ defaultValue: true }),
+ referrerWhitelist: schema.maybe(
+ schema.arrayOf(
+ schema.string({
+ hostname: true,
+ }),
+ { minSize: 1 }
+ )
+ ),
+ }),
+ uuid: schema.maybe(
+ schema.string({
+ validate: match(uuidRegexp, 'must be a valid uuid'),
+ })
+ ),
+ xsrf: schema.object({
+ disableProtection: schema.boolean({ defaultValue: false }),
+ allowlist: schema.arrayOf(
+ schema.string({ validate: match(/^\//, 'must start with a slash') }),
+ { defaultValue: [] }
),
- customResponseHeaders: schema.recordOf(schema.string(), schema.any(), {
- defaultValue: {},
- }),
- host: schema.string({
- defaultValue: 'localhost',
- hostname: true,
+ }),
+ requestId: schema.object(
+ {
+ allowFromAnyIp: schema.boolean({ defaultValue: false }),
+ ipAllowlist: schema.arrayOf(schema.ip(), { defaultValue: [] }),
+ },
+ {
validate(value) {
- if (value === '0') {
- return 'value 0 is not a valid hostname (use "0.0.0.0" to bind to all interfaces)';
+ if (value.allowFromAnyIp === true && value.ipAllowlist?.length > 0) {
+ return `allowFromAnyIp must be set to 'false' if any values are specified in ipAllowlist`;
}
},
- }),
- maxPayload: schema.byteSize({
- defaultValue: '1048576b',
- }),
- port: schema.number({
- defaultValue: 5601,
- }),
- rewriteBasePath: schema.boolean({ defaultValue: false }),
- ssl: sslSchema,
- keepaliveTimeout: schema.number({
- defaultValue: 120000,
- }),
- socketTimeout: schema.number({
- defaultValue: 120000,
- }),
- compression: schema.object({
- enabled: schema.boolean({ defaultValue: true }),
- referrerWhitelist: schema.maybe(
- schema.arrayOf(
- schema.string({
- hostname: true,
- }),
- { minSize: 1 }
- )
- ),
- }),
- uuid: schema.maybe(
- schema.string({
- validate: match(uuidRegexp, 'must be a valid uuid'),
- })
- ),
- xsrf: schema.object({
- disableProtection: schema.boolean({ defaultValue: false }),
- allowlist: schema.arrayOf(
- schema.string({ validate: match(/^\//, 'must start with a slash') }),
- { defaultValue: [] }
- ),
- }),
- requestId: schema.object(
- {
- allowFromAnyIp: schema.boolean({ defaultValue: false }),
- ipAllowlist: schema.arrayOf(schema.ip(), { defaultValue: [] }),
- },
- {
- validate(value) {
- if (value.allowFromAnyIp === true && value.ipAllowlist?.length > 0) {
- return `allowFromAnyIp must be set to 'false' if any values are specified in ipAllowlist`;
- }
- },
+ }
+ ),
+ },
+ {
+ validate: (rawConfig) => {
+ if (!rawConfig.basePath && rawConfig.rewriteBasePath) {
+ return 'cannot use [rewriteBasePath] when [basePath] is not specified';
+ }
+
+ if (rawConfig.publicBaseUrl) {
+ const parsedUrl = url.parse(rawConfig.publicBaseUrl);
+ if (parsedUrl.query || parsedUrl.hash || parsedUrl.auth) {
+ return `[publicBaseUrl] may only contain a protocol, host, port, and pathname`;
}
- ),
- },
- {
- validate: (rawConfig) => {
- if (!rawConfig.basePath && rawConfig.rewriteBasePath) {
- return 'cannot use [rewriteBasePath] when [basePath] is not specified';
+ if (parsedUrl.path !== (rawConfig.basePath ?? '/')) {
+ return `[publicBaseUrl] must contain the [basePath]: ${parsedUrl.path} !== ${rawConfig.basePath}`;
}
+ }
- if (rawConfig.publicBaseUrl) {
- const parsedUrl = url.parse(rawConfig.publicBaseUrl);
- if (parsedUrl.query || parsedUrl.hash || parsedUrl.auth) {
- return `[publicBaseUrl] may only contain a protocol, host, port, and pathname`;
- }
- if (parsedUrl.path !== (rawConfig.basePath ?? '/')) {
- return `[publicBaseUrl] must contain the [basePath]: ${parsedUrl.path} !== ${rawConfig.basePath}`;
- }
- }
+ if (!rawConfig.compression.enabled && rawConfig.compression.referrerWhitelist) {
+ return 'cannot use [compression.referrerWhitelist] when [compression.enabled] is set to false';
+ }
- if (!rawConfig.compression.enabled && rawConfig.compression.referrerWhitelist) {
- return 'cannot use [compression.referrerWhitelist] when [compression.enabled] is set to false';
- }
+ if (
+ rawConfig.ssl.enabled &&
+ rawConfig.ssl.redirectHttpFromPort !== undefined &&
+ rawConfig.ssl.redirectHttpFromPort === rawConfig.port
+ ) {
+ return (
+ 'Kibana does not accept http traffic to [port] when ssl is ' +
+ 'enabled (only https is allowed), so [ssl.redirectHttpFromPort] ' +
+ `cannot be configured to the same value. Both are [${rawConfig.port}].`
+ );
+ }
+ },
+ }
+);
- if (
- rawConfig.ssl.enabled &&
- rawConfig.ssl.redirectHttpFromPort !== undefined &&
- rawConfig.ssl.redirectHttpFromPort === rawConfig.port
- ) {
- return (
- 'Kibana does not accept http traffic to [port] when ssl is ' +
- 'enabled (only https is allowed), so [ssl.redirectHttpFromPort] ' +
- `cannot be configured to the same value. Both are [${rawConfig.port}].`
- );
- }
- },
- }
- ),
+export type HttpConfigType = TypeOf;
+
+export const config: ServiceConfigDescriptor = {
+ path: 'server' as const,
+ schema: configSchema,
+ deprecations: ({ rename }) => [rename('maxPayloadBytes', 'maxPayload')],
};
-export type HttpConfigType = TypeOf;
export class HttpConfig implements IHttpConfig {
public name: string;
diff --git a/src/core/server/http/integration_tests/core_services.test.ts b/src/core/server/http/integration_tests/core_services.test.ts
index af358caae8bfc5..5433f0d3c3e31c 100644
--- a/src/core/server/http/integration_tests/core_services.test.ts
+++ b/src/core/server/http/integration_tests/core_services.test.ts
@@ -12,8 +12,6 @@ import {
legacyClusterClientInstanceMock,
} from './core_service.test.mocks';
-import Boom from '@hapi/boom';
-import { Request } from '@hapi/hapi';
import { errors as esErrors } from 'elasticsearch';
import { LegacyElasticsearchErrorHelpers } from '../../elasticsearch/legacy';
@@ -22,16 +20,6 @@ import { ResponseError } from '@elastic/elasticsearch/lib/errors';
import * as kbnTestServer from '../../../test_helpers/kbn_server';
import { InternalElasticsearchServiceStart } from '../../elasticsearch';
-interface User {
- id: string;
- roles?: string[];
-}
-
-interface StorageData {
- value: User;
- expires: number;
-}
-
const cookieOptions = {
name: 'sid',
encryptionKey: 'something_at_least_32_characters',
@@ -197,172 +185,6 @@ describe('http service', () => {
});
});
- describe('legacy server', () => {
- describe('#registerAuth()', () => {
- const sessionDurationMs = 1000;
-
- let root: ReturnType;
- beforeEach(async () => {
- root = kbnTestServer.createRoot({ plugins: { initialize: false } });
- }, 30000);
-
- afterEach(async () => {
- MockLegacyScopedClusterClient.mockClear();
- await root.shutdown();
- });
-
- it('runs auth for legacy routes and proxy request to legacy server route handlers', async () => {
- const { http } = await root.setup();
- const sessionStorageFactory = await http.createCookieSessionStorageFactory(
- cookieOptions
- );
- http.registerAuth((req, res, toolkit) => {
- if (req.headers.authorization) {
- const user = { id: '42' };
- const sessionStorage = sessionStorageFactory.asScoped(req);
- sessionStorage.set({ value: user, expires: Date.now() + sessionDurationMs });
- return toolkit.authenticated({ state: user });
- } else {
- return res.unauthorized();
- }
- });
- await root.start();
-
- const legacyUrl = '/legacy';
- const kbnServer = kbnTestServer.getKbnServer(root);
- kbnServer.server.route({
- method: 'GET',
- path: legacyUrl,
- handler: () => 'ok from legacy server',
- });
-
- const response = await kbnTestServer.request
- .get(root, legacyUrl)
- .expect(200, 'ok from legacy server');
-
- expect(response.header['set-cookie']).toHaveLength(1);
- });
-
- it('passes authHeaders as request headers to the legacy platform', async () => {
- const token = 'Basic: name:password';
- const { http } = await root.setup();
- const sessionStorageFactory = await http.createCookieSessionStorageFactory(
- cookieOptions
- );
- http.registerAuth((req, res, toolkit) => {
- if (req.headers.authorization) {
- const user = { id: '42' };
- const sessionStorage = sessionStorageFactory.asScoped(req);
- sessionStorage.set({ value: user, expires: Date.now() + sessionDurationMs });
- return toolkit.authenticated({
- state: user,
- requestHeaders: {
- authorization: token,
- },
- });
- } else {
- return res.unauthorized();
- }
- });
- await root.start();
-
- const legacyUrl = '/legacy';
- const kbnServer = kbnTestServer.getKbnServer(root);
- kbnServer.server.route({
- method: 'GET',
- path: legacyUrl,
- handler: (req: Request) => ({
- authorization: req.headers.authorization,
- custom: req.headers.custom,
- }),
- });
-
- await kbnTestServer.request
- .get(root, legacyUrl)
- .set({ custom: 'custom-header' })
- .expect(200, { authorization: token, custom: 'custom-header' });
- });
-
- it('attach security header to a successful response handled by Legacy platform', async () => {
- const authResponseHeader = {
- 'www-authenticate': 'Negotiate ade0234568a4209af8bc0280289eca',
- };
- const { http } = await root.setup();
- const { registerAuth } = http;
-
- registerAuth((req, res, toolkit) => {
- return toolkit.authenticated({ responseHeaders: authResponseHeader });
- });
-
- await root.start();
-
- const kbnServer = kbnTestServer.getKbnServer(root);
- kbnServer.server.route({
- method: 'GET',
- path: '/legacy',
- handler: () => 'ok',
- });
-
- const response = await kbnTestServer.request.get(root, '/legacy').expect(200);
- expect(response.header['www-authenticate']).toBe(authResponseHeader['www-authenticate']);
- });
-
- it('attach security header to an error response handled by Legacy platform', async () => {
- const authResponseHeader = {
- 'www-authenticate': 'Negotiate ade0234568a4209af8bc0280289eca',
- };
- const { http } = await root.setup();
- const { registerAuth } = http;
-
- registerAuth((req, res, toolkit) => {
- return toolkit.authenticated({ responseHeaders: authResponseHeader });
- });
-
- await root.start();
-
- const kbnServer = kbnTestServer.getKbnServer(root);
- kbnServer.server.route({
- method: 'GET',
- path: '/legacy',
- handler: () => {
- throw Boom.badRequest();
- },
- });
-
- const response = await kbnTestServer.request.get(root, '/legacy').expect(400);
- expect(response.header['www-authenticate']).toBe(authResponseHeader['www-authenticate']);
- });
- });
-
- describe('#basePath()', () => {
- let root: ReturnType;
- beforeEach(async () => {
- root = kbnTestServer.createRoot({ plugins: { initialize: false } });
- }, 30000);
-
- afterEach(async () => await root.shutdown());
- it('basePath information for an incoming request is available in legacy server', async () => {
- const reqBasePath = '/requests-specific-base-path';
- const { http } = await root.setup();
- http.registerOnPreRouting((req, res, toolkit) => {
- http.basePath.set(req, reqBasePath);
- return toolkit.next();
- });
-
- await root.start();
-
- const legacyUrl = '/legacy';
- const kbnServer = kbnTestServer.getKbnServer(root);
- kbnServer.server.route({
- method: 'GET',
- path: legacyUrl,
- handler: kbnServer.newPlatform.setup.core.http.basePath.get,
- });
-
- await kbnTestServer.request.get(root, legacyUrl).expect(200, reqBasePath);
- });
- });
- });
describe('legacy elasticsearch client', () => {
let root: ReturnType;
beforeEach(async () => {
diff --git a/src/core/server/i18n/get_kibana_translation_files.test.ts b/src/core/server/i18n/get_kibana_translation_files.test.ts
index 7ca0fe0e79337b..45e1a8dfec9cb2 100644
--- a/src/core/server/i18n/get_kibana_translation_files.test.ts
+++ b/src/core/server/i18n/get_kibana_translation_files.test.ts
@@ -14,7 +14,7 @@ const mockGetTranslationPaths = getTranslationPaths as jest.Mock;
jest.mock('./get_translation_paths', () => ({
getTranslationPaths: jest.fn().mockResolvedValue([]),
}));
-jest.mock('../utils', () => ({
+jest.mock('@kbn/utils', () => ({
fromRoot: jest.fn().mockImplementation((path: string) => path),
}));
diff --git a/src/core/server/i18n/get_kibana_translation_files.ts b/src/core/server/i18n/get_kibana_translation_files.ts
index 7b5ada2a25f4f5..4e7ee718113ce7 100644
--- a/src/core/server/i18n/get_kibana_translation_files.ts
+++ b/src/core/server/i18n/get_kibana_translation_files.ts
@@ -7,7 +7,7 @@
*/
import { basename } from 'path';
-import { fromRoot } from '../utils';
+import { fromRoot } from '@kbn/utils';
import { getTranslationPaths } from './get_translation_paths';
export const getKibanaTranslationFiles = async (
diff --git a/src/core/server/index.ts b/src/core/server/index.ts
index 963b69eac4f7f8..2c6fa74cb54a0c 100644
--- a/src/core/server/index.ts
+++ b/src/core/server/index.ts
@@ -406,8 +406,6 @@ export type {
SavedObjectsMigrationVersion,
} from './types';
-export type { LegacyServiceSetupDeps, LegacyServiceStartDeps, LegacyConfig } from './legacy';
-
export { ServiceStatusLevels } from './status';
export type { CoreStatus, ServiceStatus, ServiceStatusLevel, StatusServiceSetup } from './status';
diff --git a/src/core/server/legacy/__snapshots__/legacy_service.test.ts.snap b/src/core/server/legacy/__snapshots__/legacy_service.test.ts.snap
deleted file mode 100644
index 69b7f9fc783154..00000000000000
--- a/src/core/server/legacy/__snapshots__/legacy_service.test.ts.snap
+++ /dev/null
@@ -1,27 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`once LegacyService is set up with connection info reconfigures logging configuration if new config is received.: applyLoggingConfiguration params 1`] = `
-Array [
- Array [
- Object {
- "logging": Object {
- "verbose": true,
- },
- "path": Object {},
- },
- ],
-]
-`;
-
-exports[`once LegacyService is set up without connection info reconfigures logging configuration if new config is received.: applyLoggingConfiguration params 1`] = `
-Array [
- Array [
- Object {
- "logging": Object {
- "verbose": true,
- },
- "path": Object {},
- },
- ],
-]
-`;
diff --git a/src/core/server/legacy/config/ensure_valid_configuration.test.ts b/src/core/server/legacy/config/ensure_valid_configuration.test.ts
deleted file mode 100644
index febf91625378d3..00000000000000
--- a/src/core/server/legacy/config/ensure_valid_configuration.test.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { ensureValidConfiguration } from './ensure_valid_configuration';
-import { getUnusedConfigKeys } from './get_unused_config_keys';
-import { configServiceMock } from '../../config/mocks';
-
-jest.mock('./get_unused_config_keys');
-
-describe('ensureValidConfiguration', () => {
- let configService: ReturnType;
-
- beforeEach(() => {
- jest.clearAllMocks();
- configService = configServiceMock.create();
- configService.getUsedPaths.mockReturnValue(Promise.resolve(['core', 'elastic']));
-
- (getUnusedConfigKeys as any).mockImplementation(() => []);
- });
-
- it('calls getUnusedConfigKeys with correct parameters', async () => {
- await ensureValidConfiguration(
- configService as any,
- {
- settings: 'settings',
- legacyConfig: 'pluginExtendedConfig',
- } as any
- );
- expect(getUnusedConfigKeys).toHaveBeenCalledTimes(1);
- expect(getUnusedConfigKeys).toHaveBeenCalledWith({
- coreHandledConfigPaths: ['core', 'elastic'],
- settings: 'settings',
- legacyConfig: 'pluginExtendedConfig',
- });
- });
-
- it('returns normally when there is no unused keys', async () => {
- await expect(
- ensureValidConfiguration(configService as any, {} as any)
- ).resolves.toBeUndefined();
-
- expect(getUnusedConfigKeys).toHaveBeenCalledTimes(1);
- });
-
- it('throws when there are some unused keys', async () => {
- (getUnusedConfigKeys as any).mockImplementation(() => ['some.key', 'some.other.key']);
-
- await expect(
- ensureValidConfiguration(configService as any, {} as any)
- ).rejects.toMatchInlineSnapshot(
- `[Error: Unknown configuration key(s): "some.key", "some.other.key". Check for spelling errors and ensure that expected plugins are installed.]`
- );
- });
-});
diff --git a/src/core/server/legacy/config/get_unused_config_keys.test.ts b/src/core/server/legacy/config/get_unused_config_keys.test.ts
deleted file mode 100644
index 86b4e0aeeea597..00000000000000
--- a/src/core/server/legacy/config/get_unused_config_keys.test.ts
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { LegacyConfig, LegacyVars } from '../types';
-import { getUnusedConfigKeys } from './get_unused_config_keys';
-
-describe('getUnusedConfigKeys', () => {
- beforeEach(() => {
- jest.resetAllMocks();
- });
-
- const getConfig = (values: LegacyVars = {}): LegacyConfig =>
- ({
- get: () => values as any,
- } as LegacyConfig);
-
- describe('not using core or plugin specs', () => {
- it('should return an empty list for empty parameters', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {},
- legacyConfig: getConfig(),
- })
- ).toEqual([]);
- });
-
- it('returns empty list when config and settings have the same properties', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {
- presentInBoth: true,
- alsoInBoth: 'someValue',
- },
- legacyConfig: getConfig({
- presentInBoth: true,
- alsoInBoth: 'someValue',
- }),
- })
- ).toEqual([]);
- });
-
- it('returns empty list when config has entries not present in settings', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {
- presentInBoth: true,
- },
- legacyConfig: getConfig({
- presentInBoth: true,
- onlyInConfig: 'someValue',
- }),
- })
- ).toEqual([]);
- });
-
- it('returns the list of properties from settings not present in config', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {
- presentInBoth: true,
- onlyInSetting: 'value',
- },
- legacyConfig: getConfig({
- presentInBoth: true,
- }),
- })
- ).toEqual(['onlyInSetting']);
- });
-
- it('correctly handle nested properties', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {
- elasticsearch: {
- username: 'foo',
- password: 'bar',
- },
- },
- legacyConfig: getConfig({
- elasticsearch: {
- username: 'foo',
- onlyInConfig: 'default',
- },
- }),
- })
- ).toEqual(['elasticsearch.password']);
- });
-
- it('correctly handle "env" specific case', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {
- env: 'development',
- },
- legacyConfig: getConfig({
- env: {
- name: 'development',
- },
- }),
- })
- ).toEqual([]);
- });
-
- it('correctly handle array properties', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: [],
- settings: {
- prop: ['a', 'b', 'c'],
- },
- legacyConfig: getConfig({
- prop: ['a'],
- }),
- })
- ).toEqual([]);
- });
- });
-
- it('ignores properties managed by the new platform', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: ['core', 'foo.bar'],
- settings: {
- core: {
- prop: 'value',
- },
- foo: {
- bar: true,
- dolly: true,
- },
- },
- legacyConfig: getConfig({}),
- })
- ).toEqual(['foo.dolly']);
- });
-
- it('handles array values', async () => {
- expect(
- await getUnusedConfigKeys({
- coreHandledConfigPaths: ['core', 'array'],
- settings: {
- core: {
- prop: 'value',
- array: [1, 2, 3],
- },
- array: ['some', 'values'],
- },
- legacyConfig: getConfig({}),
- })
- ).toEqual([]);
- });
-});
diff --git a/src/core/server/legacy/config/get_unused_config_keys.ts b/src/core/server/legacy/config/get_unused_config_keys.ts
deleted file mode 100644
index a2da6dc97225ed..00000000000000
--- a/src/core/server/legacy/config/get_unused_config_keys.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { difference } from 'lodash';
-import { getFlattenedObject } from '@kbn/std';
-import { hasConfigPathIntersection } from '../../config';
-import { LegacyConfig, LegacyVars } from '../types';
-
-const getFlattenedKeys = (object: object) => Object.keys(getFlattenedObject(object));
-
-export async function getUnusedConfigKeys({
- coreHandledConfigPaths,
- settings,
- legacyConfig,
-}: {
- coreHandledConfigPaths: string[];
- settings: LegacyVars;
- legacyConfig: LegacyConfig;
-}) {
- const inputKeys = getFlattenedKeys(settings);
- const appliedKeys = getFlattenedKeys(legacyConfig.get());
-
- if (inputKeys.includes('env')) {
- // env is a special case key, see https://github.com/elastic/kibana/blob/848bf17b/src/legacy/server/config/config.js#L74
- // where it is deleted from the settings before being injected into the schema via context and
- // then renamed to `env.name` https://github.com/elastic/kibana/blob/848bf17/src/legacy/server/config/schema.js#L17
- inputKeys[inputKeys.indexOf('env')] = 'env.name';
- }
-
- // Filter out keys that are marked as used in the core (e.g. by new core plugins).
- return difference(inputKeys, appliedKeys).filter(
- (unusedConfigKey) =>
- !coreHandledConfigPaths.some((usedInCoreConfigKey) =>
- hasConfigPathIntersection(unusedConfigKey, usedInCoreConfigKey)
- )
- );
-}
diff --git a/src/core/server/legacy/config/index.ts b/src/core/server/legacy/config/index.ts
deleted file mode 100644
index b674b1386b786b..00000000000000
--- a/src/core/server/legacy/config/index.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export { ensureValidConfiguration } from './ensure_valid_configuration';
diff --git a/src/core/server/legacy/index.ts b/src/core/server/legacy/index.ts
index 8614265e4375d4..39ffef501a9ec0 100644
--- a/src/core/server/legacy/index.ts
+++ b/src/core/server/legacy/index.ts
@@ -6,16 +6,6 @@
* Side Public License, v 1.
*/
-/** @internal */
-export { ensureValidConfiguration } from './config';
/** @internal */
export type { ILegacyService } from './legacy_service';
export { LegacyService } from './legacy_service';
-/** @internal */
-export type {
- LegacyVars,
- LegacyConfig,
- LegacyServiceSetupDeps,
- LegacyServiceStartDeps,
- LegacyServiceSetupConfig,
-} from './types';
diff --git a/src/core/server/legacy/integration_tests/legacy_service.test.ts b/src/core/server/legacy/integration_tests/legacy_service.test.ts
deleted file mode 100644
index 715749c6ef0cb4..00000000000000
--- a/src/core/server/legacy/integration_tests/legacy_service.test.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import * as kbnTestServer from '../../../test_helpers/kbn_server';
-
-describe('legacy service', () => {
- describe('http server', () => {
- let root: ReturnType;
- beforeEach(() => {
- root = kbnTestServer.createRoot({
- migrations: { skip: true },
- plugins: { initialize: false },
- });
- }, 30000);
-
- afterEach(async () => await root.shutdown());
-
- it("handles http request in Legacy platform if New platform doesn't handle it", async () => {
- const { http } = await root.setup();
- const rootUrl = '/route';
- const router = http.createRouter(rootUrl);
- router.get({ path: '/new-platform', validate: false }, (context, req, res) =>
- res.ok({ body: 'from-new-platform' })
- );
-
- await root.start();
-
- const legacyPlatformUrl = `${rootUrl}/legacy-platform`;
- const kbnServer = kbnTestServer.getKbnServer(root);
- kbnServer.server.route({
- method: 'GET',
- path: legacyPlatformUrl,
- handler: () => 'ok from legacy server',
- });
-
- await kbnTestServer.request.get(root, '/route/new-platform').expect(200, 'from-new-platform');
-
- await kbnTestServer.request.get(root, legacyPlatformUrl).expect(200, 'ok from legacy server');
- });
- it('throws error if Legacy and New platforms register handler for the same route', async () => {
- const { http } = await root.setup();
- const rootUrl = '/route';
- const router = http.createRouter(rootUrl);
- router.get({ path: '', validate: false }, (context, req, res) =>
- res.ok({ body: 'from-new-platform' })
- );
-
- await root.start();
-
- const kbnServer = kbnTestServer.getKbnServer(root);
- expect(() =>
- kbnServer.server.route({
- method: 'GET',
- path: rootUrl,
- handler: () => 'ok from legacy server',
- })
- ).toThrowErrorMatchingInlineSnapshot(`"New route /route conflicts with existing /route"`);
- });
- });
-});
diff --git a/src/core/server/legacy/legacy_service.mock.ts b/src/core/server/legacy/legacy_service.mock.ts
index 1f4c308be0107e..0d72318a630e08 100644
--- a/src/core/server/legacy/legacy_service.mock.ts
+++ b/src/core/server/legacy/legacy_service.mock.ts
@@ -8,26 +8,14 @@
import type { PublicMethodsOf } from '@kbn/utility-types';
import { LegacyService } from './legacy_service';
-import { LegacyConfig, LegacyServiceSetupDeps } from './types';
-type LegacyServiceMock = jest.Mocked & { legacyId: symbol }>;
+type LegacyServiceMock = jest.Mocked>;
const createLegacyServiceMock = (): LegacyServiceMock => ({
- legacyId: Symbol(),
- setupLegacyConfig: jest.fn(),
setup: jest.fn(),
- start: jest.fn(),
stop: jest.fn(),
});
-const createLegacyConfigMock = (): jest.Mocked => ({
- get: jest.fn(),
- has: jest.fn(),
- set: jest.fn(),
-});
-
export const legacyServiceMock = {
create: createLegacyServiceMock,
- createSetupContract: (deps: LegacyServiceSetupDeps) => createLegacyServiceMock().setup(deps),
- createLegacyConfig: createLegacyConfigMock,
};
diff --git a/src/core/server/legacy/legacy_service.test.mocks.ts b/src/core/server/legacy/legacy_service.test.mocks.ts
new file mode 100644
index 00000000000000..506f0fd6f96d3d
--- /dev/null
+++ b/src/core/server/legacy/legacy_service.test.mocks.ts
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export const reconfigureLoggingMock = jest.fn();
+export const setupLoggingMock = jest.fn();
+export const setupLoggingRotateMock = jest.fn();
+
+jest.doMock('@kbn/legacy-logging', () => ({
+ ...(jest.requireActual('@kbn/legacy-logging') as any),
+ reconfigureLogging: reconfigureLoggingMock,
+ setupLogging: setupLoggingMock,
+ setupLoggingRotate: setupLoggingRotateMock,
+}));
diff --git a/src/core/server/legacy/legacy_service.test.ts b/src/core/server/legacy/legacy_service.test.ts
index d0a02b9859960b..6b20bd7434baf5 100644
--- a/src/core/server/legacy/legacy_service.test.ts
+++ b/src/core/server/legacy/legacy_service.test.ts
@@ -6,35 +6,22 @@
* Side Public License, v 1.
*/
-jest.mock('../../../legacy/server/kbn_server');
-
-import { BehaviorSubject, throwError } from 'rxjs';
+import {
+ setupLoggingMock,
+ setupLoggingRotateMock,
+ reconfigureLoggingMock,
+} from './legacy_service.test.mocks';
+
+import { BehaviorSubject } from 'rxjs';
+import moment from 'moment';
import { REPO_ROOT } from '@kbn/dev-utils';
-import KbnServer from '../../../legacy/server/kbn_server';
import { Config, Env, ObjectToConfigAdapter } from '../config';
-import { DiscoveredPlugin } from '../plugins';
import { getEnvOptions, configServiceMock } from '../config/mocks';
import { loggingSystemMock } from '../logging/logging_system.mock';
-import { contextServiceMock } from '../context/context_service.mock';
import { httpServiceMock } from '../http/http_service.mock';
-import { uiSettingsServiceMock } from '../ui_settings/ui_settings_service.mock';
-import { savedObjectsServiceMock } from '../saved_objects/saved_objects_service.mock';
-import { capabilitiesServiceMock } from '../capabilities/capabilities_service.mock';
-import { httpResourcesMock } from '../http_resources/http_resources_service.mock';
-import { setupMock as renderingServiceMock } from '../rendering/__mocks__/rendering_service';
-import { environmentServiceMock } from '../environment/environment_service.mock';
-import { LegacyServiceSetupDeps, LegacyServiceStartDeps } from './types';
-import { LegacyService } from './legacy_service';
-import { coreMock } from '../mocks';
-import { statusServiceMock } from '../status/status_service.mock';
-import { loggingServiceMock } from '../logging/logging_service.mock';
-import { metricsServiceMock } from '../metrics/metrics_service.mock';
-import { i18nServiceMock } from '../i18n/i18n_service.mock';
-import { deprecationsServiceMock } from '../deprecations/deprecations_service.mock';
-
-const MockKbnServer: jest.Mock = KbnServer as any;
+import { LegacyService, LegacyServiceSetupDeps } from './legacy_service';
let coreId: symbol;
let env: Env;
@@ -42,70 +29,16 @@ let config$: BehaviorSubject;
let setupDeps: LegacyServiceSetupDeps;
-let startDeps: LegacyServiceStartDeps;
-
const logger = loggingSystemMock.create();
let configService: ReturnType;
-let environmentSetup: ReturnType;
beforeEach(() => {
coreId = Symbol();
env = Env.createDefault(REPO_ROOT, getEnvOptions());
configService = configServiceMock.create();
- environmentSetup = environmentServiceMock.createSetupContract();
-
- MockKbnServer.prototype.ready = jest.fn().mockReturnValue(Promise.resolve());
- MockKbnServer.prototype.listen = jest.fn();
setupDeps = {
- core: {
- capabilities: capabilitiesServiceMock.createSetupContract(),
- context: contextServiceMock.createSetupContract(),
- elasticsearch: { legacy: {} } as any,
- i18n: i18nServiceMock.createSetupContract(),
- uiSettings: uiSettingsServiceMock.createSetupContract(),
- http: {
- ...httpServiceMock.createInternalSetupContract(),
- auth: {
- getAuthHeaders: () => undefined,
- } as any,
- },
- httpResources: httpResourcesMock.createSetupContract(),
- savedObjects: savedObjectsServiceMock.createInternalSetupContract(),
- plugins: {
- initialized: true,
- contracts: new Map([['plugin-id', 'plugin-value']]),
- },
- rendering: renderingServiceMock,
- environment: environmentSetup,
- status: statusServiceMock.createInternalSetupContract(),
- logging: loggingServiceMock.createInternalSetupContract(),
- metrics: metricsServiceMock.createInternalSetupContract(),
- deprecations: deprecationsServiceMock.createInternalSetupContract(),
- },
- plugins: { 'plugin-id': 'plugin-value' },
- uiPlugins: {
- public: new Map([['plugin-id', {} as DiscoveredPlugin]]),
- internal: new Map([
- [
- 'plugin-id',
- {
- requiredBundles: [],
- publicTargetDir: 'path/to/target/public',
- publicAssetsDir: '/plugins/name/assets/',
- },
- ],
- ]),
- browserConfigs: new Map(),
- },
- };
-
- startDeps = {
- core: {
- ...coreMock.createInternalStart(),
- plugins: { contracts: new Map() },
- },
- plugins: {},
+ http: httpServiceMock.createInternalSetupContract(),
};
config$ = new BehaviorSubject(
@@ -116,98 +49,78 @@ beforeEach(() => {
);
configService.getConfig$.mockReturnValue(config$);
- configService.getUsedPaths.mockResolvedValue(['foo.bar']);
});
afterEach(() => {
jest.clearAllMocks();
+ setupLoggingMock.mockReset();
+ setupLoggingRotateMock.mockReset();
+ reconfigureLoggingMock.mockReset();
});
-describe('once LegacyService is set up with connection info', () => {
- test('creates legacy kbnServer and calls `listen`.', async () => {
- configService.atPath.mockReturnValue(new BehaviorSubject({ autoListen: true }));
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService,
+describe('#setup', () => {
+ it('initializes legacy logging', async () => {
+ const opsConfig = {
+ interval: moment.duration(5, 'second'),
+ };
+ const opsConfig$ = new BehaviorSubject(opsConfig);
+
+ const loggingConfig = {
+ foo: 'bar',
+ };
+ const loggingConfig$ = new BehaviorSubject(loggingConfig);
+
+ configService.atPath.mockImplementation((path) => {
+ if (path === 'ops') {
+ return opsConfig$;
+ }
+ if (path === 'logging') {
+ return loggingConfig$;
+ }
+ return new BehaviorSubject({});
});
- await legacyService.setupLegacyConfig();
- await legacyService.setup(setupDeps);
- await legacyService.start(startDeps);
-
- expect(MockKbnServer).toHaveBeenCalledTimes(1);
- expect(MockKbnServer).toHaveBeenCalledWith(
- { path: { autoListen: true }, server: { autoListen: true } }, // Because of the mock, path also gets the value
- expect.objectContaining({ get: expect.any(Function) }),
- expect.any(Object)
- );
- expect(MockKbnServer.mock.calls[0][1].get()).toEqual(
- expect.objectContaining({
- path: expect.objectContaining({ autoListen: true }),
- server: expect.objectContaining({ autoListen: true }),
- })
- );
-
- const [mockKbnServer] = MockKbnServer.mock.instances;
- expect(mockKbnServer.listen).toHaveBeenCalledTimes(1);
- expect(mockKbnServer.close).not.toHaveBeenCalled();
- });
-
- test('creates legacy kbnServer but does not call `listen` if `autoListen: false`.', async () => {
- configService.atPath.mockReturnValue(new BehaviorSubject({ autoListen: false }));
-
const legacyService = new LegacyService({
coreId,
env,
logger,
configService: configService as any,
});
- await legacyService.setupLegacyConfig();
+
await legacyService.setup(setupDeps);
- await legacyService.start(startDeps);
- expect(MockKbnServer).toHaveBeenCalledTimes(1);
- expect(MockKbnServer).toHaveBeenCalledWith(
- { path: { autoListen: false }, server: { autoListen: true } },
- expect.objectContaining({ get: expect.any(Function) }),
- expect.any(Object)
+ expect(setupLoggingMock).toHaveBeenCalledTimes(1);
+ expect(setupLoggingMock).toHaveBeenCalledWith(
+ setupDeps.http.server,
+ loggingConfig,
+ opsConfig.interval.asMilliseconds()
);
- const legacyConfig = MockKbnServer.mock.calls[0][1].get();
- expect(legacyConfig.path.autoListen).toBe(false);
- expect(legacyConfig.server.autoListen).toBe(true);
-
- const [mockKbnServer] = MockKbnServer.mock.instances;
- expect(mockKbnServer.ready).toHaveBeenCalledTimes(1);
- expect(mockKbnServer.listen).not.toHaveBeenCalled();
- expect(mockKbnServer.close).not.toHaveBeenCalled();
+ expect(setupLoggingRotateMock).toHaveBeenCalledTimes(1);
+ expect(setupLoggingRotateMock).toHaveBeenCalledWith(setupDeps.http.server, loggingConfig);
});
- test('creates legacy kbnServer and closes it if `listen` fails.', async () => {
- configService.atPath.mockReturnValue(new BehaviorSubject({ autoListen: true }));
- MockKbnServer.prototype.listen.mockRejectedValue(new Error('something failed'));
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
+ it('reloads the logging config when the config changes', async () => {
+ const opsConfig = {
+ interval: moment.duration(5, 'second'),
+ };
+ const opsConfig$ = new BehaviorSubject(opsConfig);
+
+ const loggingConfig = {
+ foo: 'bar',
+ };
+ const loggingConfig$ = new BehaviorSubject(loggingConfig);
+
+ configService.atPath.mockImplementation((path) => {
+ if (path === 'ops') {
+ return opsConfig$;
+ }
+ if (path === 'logging') {
+ return loggingConfig$;
+ }
+ return new BehaviorSubject({});
});
- await legacyService.setupLegacyConfig();
- await legacyService.setup(setupDeps);
- await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"something failed"`
- );
-
- const [mockKbnServer] = MockKbnServer.mock.instances;
- expect(mockKbnServer.listen).toHaveBeenCalled();
- expect(mockKbnServer.close).toHaveBeenCalled();
- });
-
- test('throws if fails to retrieve initial config.', async () => {
- configService.getConfig$.mockReturnValue(throwError(new Error('something failed')));
const legacyService = new LegacyService({
coreId,
env,
@@ -215,150 +128,70 @@ describe('once LegacyService is set up with connection info', () => {
configService: configService as any,
});
- await expect(legacyService.setupLegacyConfig()).rejects.toThrowErrorMatchingInlineSnapshot(
- `"something failed"`
- );
- await expect(legacyService.setup(setupDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Legacy config not initialized yet. Ensure LegacyService.setupLegacyConfig() is called before LegacyService.setup()"`
- );
- await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Legacy service is not setup yet."`
- );
-
- expect(MockKbnServer).not.toHaveBeenCalled();
- });
-
- test('reconfigures logging configuration if new config is received.', async () => {
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
- });
- await legacyService.setupLegacyConfig();
await legacyService.setup(setupDeps);
- await legacyService.start(startDeps);
-
- const [mockKbnServer] = MockKbnServer.mock.instances as Array>;
- expect(mockKbnServer.applyLoggingConfiguration).not.toHaveBeenCalled();
-
- config$.next(new ObjectToConfigAdapter({ logging: { verbose: true } }));
- expect(mockKbnServer.applyLoggingConfiguration.mock.calls).toMatchSnapshot(
- `applyLoggingConfiguration params`
+ expect(reconfigureLoggingMock).toHaveBeenCalledTimes(1);
+ expect(reconfigureLoggingMock).toHaveBeenCalledWith(
+ setupDeps.http.server,
+ loggingConfig,
+ opsConfig.interval.asMilliseconds()
);
- });
- test('logs error if re-configuring fails.', async () => {
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
+ loggingConfig$.next({
+ foo: 'changed',
});
- await legacyService.setupLegacyConfig();
- await legacyService.setup(setupDeps);
- await legacyService.start(startDeps);
- const [mockKbnServer] = MockKbnServer.mock.instances as Array>;
- expect(mockKbnServer.applyLoggingConfiguration).not.toHaveBeenCalled();
- expect(loggingSystemMock.collect(logger).error).toEqual([]);
+ expect(reconfigureLoggingMock).toHaveBeenCalledTimes(2);
+ expect(reconfigureLoggingMock).toHaveBeenCalledWith(
+ setupDeps.http.server,
+ { foo: 'changed' },
+ opsConfig.interval.asMilliseconds()
+ );
+ });
- const configError = new Error('something went wrong');
- mockKbnServer.applyLoggingConfiguration.mockImplementation(() => {
- throw configError;
+ it('stops reloading logging config once the service is stopped', async () => {
+ const opsConfig = {
+ interval: moment.duration(5, 'second'),
+ };
+ const opsConfig$ = new BehaviorSubject(opsConfig);
+
+ const loggingConfig = {
+ foo: 'bar',
+ };
+ const loggingConfig$ = new BehaviorSubject(loggingConfig);
+
+ configService.atPath.mockImplementation((path) => {
+ if (path === 'ops') {
+ return opsConfig$;
+ }
+ if (path === 'logging') {
+ return loggingConfig$;
+ }
+ return new BehaviorSubject({});
});
- config$.next(new ObjectToConfigAdapter({ logging: { verbose: true } }));
-
- expect(loggingSystemMock.collect(logger).error).toEqual([[configError]]);
- });
-
- test('logs error if config service fails.', async () => {
const legacyService = new LegacyService({
coreId,
env,
logger,
configService: configService as any,
});
- await legacyService.setupLegacyConfig();
- await legacyService.setup(setupDeps);
- await legacyService.start(startDeps);
-
- const [mockKbnServer] = MockKbnServer.mock.instances;
- expect(mockKbnServer.applyLoggingConfiguration).not.toHaveBeenCalled();
- expect(loggingSystemMock.collect(logger).error).toEqual([]);
-
- const configError = new Error('something went wrong');
- config$.error(configError);
-
- expect(mockKbnServer.applyLoggingConfiguration).not.toHaveBeenCalled();
- expect(loggingSystemMock.collect(logger).error).toEqual([[configError]]);
- });
-});
-describe('once LegacyService is set up without connection info', () => {
- let legacyService: LegacyService;
- beforeEach(async () => {
- legacyService = new LegacyService({ coreId, env, logger, configService: configService as any });
- await legacyService.setupLegacyConfig();
await legacyService.setup(setupDeps);
- await legacyService.start(startDeps);
- });
- test('creates legacy kbnServer with `autoListen: false`.', () => {
- expect(MockKbnServer).toHaveBeenCalledTimes(1);
- expect(MockKbnServer).toHaveBeenCalledWith(
- { path: {}, server: { autoListen: true } },
- expect.objectContaining({ get: expect.any(Function) }),
- expect.any(Object)
- );
- expect(MockKbnServer.mock.calls[0][1].get()).toEqual(
- expect.objectContaining({
- server: expect.objectContaining({ autoListen: true }),
- })
+ expect(reconfigureLoggingMock).toHaveBeenCalledTimes(1);
+ expect(reconfigureLoggingMock).toHaveBeenCalledWith(
+ setupDeps.http.server,
+ loggingConfig,
+ opsConfig.interval.asMilliseconds()
);
- });
-
- test('reconfigures logging configuration if new config is received.', async () => {
- const [mockKbnServer] = MockKbnServer.mock.instances as Array>;
- expect(mockKbnServer.applyLoggingConfiguration).not.toHaveBeenCalled();
- config$.next(new ObjectToConfigAdapter({ logging: { verbose: true } }));
+ await legacyService.stop();
- expect(mockKbnServer.applyLoggingConfiguration.mock.calls).toMatchSnapshot(
- `applyLoggingConfiguration params`
- );
- });
-});
-
-describe('start', () => {
- test('Cannot start without setup phase', async () => {
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
+ loggingConfig$.next({
+ foo: 'changed',
});
- await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Legacy service is not setup yet."`
- );
- });
-});
-test('Sets the server.uuid property on the legacy configuration', async () => {
- configService.atPath.mockReturnValue(new BehaviorSubject({ autoListen: true }));
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
+ expect(reconfigureLoggingMock).toHaveBeenCalledTimes(1);
});
-
- environmentSetup.instanceUuid = 'UUID_FROM_SERVICE';
-
- const { legacyConfig } = await legacyService.setupLegacyConfig();
- await legacyService.setup(setupDeps);
-
- expect(legacyConfig.get('server.uuid')).toBe('UUID_FROM_SERVICE');
});
diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts
index 43b348a5ff4a24..1d5343ff5311d9 100644
--- a/src/core/server/legacy/legacy_service.ts
+++ b/src/core/server/legacy/legacy_service.ts
@@ -6,141 +6,61 @@
* Side Public License, v 1.
*/
-import { combineLatest, ConnectableObservable, Observable, Subscription } from 'rxjs';
-import { first, map, publishReplay, tap } from 'rxjs/operators';
+import { combineLatest, Observable, Subscription } from 'rxjs';
+import { first } from 'rxjs/operators';
+import { Server } from '@hapi/hapi';
import type { PublicMethodsOf } from '@kbn/utility-types';
-import { PathConfigType } from '@kbn/utils';
+import {
+ reconfigureLogging,
+ setupLogging,
+ setupLoggingRotate,
+ LegacyLoggingConfig,
+} from '@kbn/legacy-logging';
-import type { RequestHandlerContext } from 'src/core/server';
-// @ts-expect-error legacy config class
-import { Config as LegacyConfigClass } from '../../../legacy/server/config';
-import { CoreService } from '../../types';
-import { Config } from '../config';
import { CoreContext } from '../core_context';
-import { CspConfigType, config as cspConfig } from '../csp';
-import {
- HttpConfig,
- HttpConfigType,
- config as httpConfig,
- IRouter,
- RequestHandlerContextProvider,
-} from '../http';
+import { config as loggingConfig } from '../logging';
+import { opsConfig, OpsConfigType } from '../metrics';
import { Logger } from '../logging';
-import { LegacyServiceSetupDeps, LegacyServiceStartDeps, LegacyConfig, LegacyVars } from './types';
-import { ExternalUrlConfigType, config as externalUrlConfig } from '../external_url';
-import { CoreSetup, CoreStart } from '..';
-
-interface LegacyKbnServer {
- applyLoggingConfiguration: (settings: Readonly) => void;
- listen: () => Promise;
- ready: () => Promise;
- close: () => Promise;
-}
+import { InternalHttpServiceSetup } from '../http';
-function getLegacyRawConfig(config: Config, pathConfig: PathConfigType) {
- const rawConfig = config.toRaw();
-
- // Elasticsearch config is solely handled by the core and legacy platform
- // shouldn't have direct access to it.
- if (rawConfig.elasticsearch !== undefined) {
- delete rawConfig.elasticsearch;
- }
-
- return {
- ...rawConfig,
- // We rely heavily in the default value of 'path.data' in the legacy world and,
- // since it has been moved to NP, it won't show up in RawConfig.
- path: pathConfig,
- };
+export interface LegacyServiceSetupDeps {
+ http: InternalHttpServiceSetup;
}
/** @internal */
export type ILegacyService = PublicMethodsOf;
/** @internal */
-export class LegacyService implements CoreService {
- /** Symbol to represent the legacy platform as a fake "plugin". Used by the ContextService */
- public readonly legacyId = Symbol();
+export class LegacyService {
private readonly log: Logger;
- private readonly httpConfig$: Observable;
- private kbnServer?: LegacyKbnServer;
+ private readonly opsConfig$: Observable;
+ private readonly legacyLoggingConfig$: Observable;
private configSubscription?: Subscription;
- private setupDeps?: LegacyServiceSetupDeps;
- private update$?: ConnectableObservable<[Config, PathConfigType]>;
- private legacyRawConfig?: LegacyConfig;
- private settings?: LegacyVars;
- constructor(private readonly coreContext: CoreContext) {
+ constructor(coreContext: CoreContext) {
const { logger, configService } = coreContext;
this.log = logger.get('legacy-service');
- this.httpConfig$ = combineLatest(
- configService.atPath(httpConfig.path),
- configService.atPath(cspConfig.path),
- configService.atPath(externalUrlConfig.path)
- ).pipe(map(([http, csp, externalUrl]) => new HttpConfig(http, csp, externalUrl)));
- }
-
- public async setupLegacyConfig() {
- this.update$ = combineLatest([
- this.coreContext.configService.getConfig$(),
- this.coreContext.configService.atPath('path'),
- ]).pipe(
- tap(([config, pathConfig]) => {
- if (this.kbnServer !== undefined) {
- this.kbnServer.applyLoggingConfiguration(getLegacyRawConfig(config, pathConfig));
- }
- }),
- tap({ error: (err) => this.log.error(err) }),
- publishReplay(1)
- ) as ConnectableObservable<[Config, PathConfigType]>;
-
- this.configSubscription = this.update$.connect();
-
- this.settings = await this.update$
- .pipe(
- first(),
- map(([config, pathConfig]) => getLegacyRawConfig(config, pathConfig))
- )
- .toPromise();
-
- this.legacyRawConfig = LegacyConfigClass.withDefaultSchema(this.settings);
-
- return {
- settings: this.settings,
- legacyConfig: this.legacyRawConfig!,
- };
+ this.legacyLoggingConfig$ = configService.atPath(loggingConfig.path);
+ this.opsConfig$ = configService.atPath(opsConfig.path);
}
public async setup(setupDeps: LegacyServiceSetupDeps) {
this.log.debug('setting up legacy service');
-
- if (!this.legacyRawConfig) {
- throw new Error(
- 'Legacy config not initialized yet. Ensure LegacyService.setupLegacyConfig() is called before LegacyService.setup()'
- );
- }
-
- // propagate the instance uuid to the legacy config, as it was the legacy way to access it.
- this.legacyRawConfig!.set('server.uuid', setupDeps.core.environment.instanceUuid);
-
- this.setupDeps = setupDeps;
+ await this.setupLegacyLogging(setupDeps.http.server);
}
- public async start(startDeps: LegacyServiceStartDeps) {
- const { setupDeps } = this;
-
- if (!setupDeps || !this.legacyRawConfig) {
- throw new Error('Legacy service is not setup yet.');
- }
+ private async setupLegacyLogging(server: Server) {
+ const legacyLoggingConfig = await this.legacyLoggingConfig$.pipe(first()).toPromise();
+ const currentOpsConfig = await this.opsConfig$.pipe(first()).toPromise();
- this.log.debug('starting legacy service');
+ await setupLogging(server, legacyLoggingConfig, currentOpsConfig.interval.asMilliseconds());
+ await setupLoggingRotate(server, legacyLoggingConfig);
- this.kbnServer = await this.createKbnServer(
- this.settings!,
- this.legacyRawConfig!,
- setupDeps,
- startDeps
+ this.configSubscription = combineLatest([this.legacyLoggingConfig$, this.opsConfig$]).subscribe(
+ ([newLoggingConfig, newOpsConfig]) => {
+ reconfigureLogging(server, newLoggingConfig, newOpsConfig.interval.asMilliseconds());
+ }
);
}
@@ -151,156 +71,5 @@ export class LegacyService implements CoreService {
this.configSubscription.unsubscribe();
this.configSubscription = undefined;
}
-
- if (this.kbnServer !== undefined) {
- await this.kbnServer.close();
- this.kbnServer = undefined;
- }
- }
-
- private async createKbnServer(
- settings: LegacyVars,
- config: LegacyConfig,
- setupDeps: LegacyServiceSetupDeps,
- startDeps: LegacyServiceStartDeps
- ) {
- const coreStart: CoreStart = {
- capabilities: startDeps.core.capabilities,
- elasticsearch: startDeps.core.elasticsearch,
- http: {
- auth: startDeps.core.http.auth,
- basePath: startDeps.core.http.basePath,
- getServerInfo: startDeps.core.http.getServerInfo,
- },
- savedObjects: {
- getScopedClient: startDeps.core.savedObjects.getScopedClient,
- createScopedRepository: startDeps.core.savedObjects.createScopedRepository,
- createInternalRepository: startDeps.core.savedObjects.createInternalRepository,
- createSerializer: startDeps.core.savedObjects.createSerializer,
- createExporter: startDeps.core.savedObjects.createExporter,
- createImporter: startDeps.core.savedObjects.createImporter,
- getTypeRegistry: startDeps.core.savedObjects.getTypeRegistry,
- },
- metrics: {
- collectionInterval: startDeps.core.metrics.collectionInterval,
- getOpsMetrics$: startDeps.core.metrics.getOpsMetrics$,
- },
- uiSettings: { asScopedToClient: startDeps.core.uiSettings.asScopedToClient },
- coreUsageData: {
- getCoreUsageData: () => {
- throw new Error('core.start.coreUsageData.getCoreUsageData is unsupported in legacy');
- },
- },
- };
-
- const router = setupDeps.core.http.createRouter('', this.legacyId);
- const coreSetup: CoreSetup = {
- capabilities: setupDeps.core.capabilities,
- context: setupDeps.core.context,
- elasticsearch: {
- legacy: setupDeps.core.elasticsearch.legacy,
- },
- http: {
- createCookieSessionStorageFactory: setupDeps.core.http.createCookieSessionStorageFactory,
- registerRouteHandlerContext: <
- Context extends RequestHandlerContext,
- ContextName extends keyof Context
- >(
- contextName: ContextName,
- provider: RequestHandlerContextProvider
- ) => setupDeps.core.http.registerRouteHandlerContext(this.legacyId, contextName, provider),
- createRouter: () =>
- router as IRouter,
- resources: setupDeps.core.httpResources.createRegistrar(router),
- registerOnPreRouting: setupDeps.core.http.registerOnPreRouting,
- registerOnPreAuth: setupDeps.core.http.registerOnPreAuth,
- registerAuth: setupDeps.core.http.registerAuth,
- registerOnPostAuth: setupDeps.core.http.registerOnPostAuth,
- registerOnPreResponse: setupDeps.core.http.registerOnPreResponse,
- basePath: setupDeps.core.http.basePath,
- auth: {
- get: setupDeps.core.http.auth.get,
- isAuthenticated: setupDeps.core.http.auth.isAuthenticated,
- },
- csp: setupDeps.core.http.csp,
- getServerInfo: setupDeps.core.http.getServerInfo,
- },
- i18n: setupDeps.core.i18n,
- logging: {
- configure: (config$) => setupDeps.core.logging.configure([], config$),
- },
- metrics: {
- collectionInterval: setupDeps.core.metrics.collectionInterval,
- getOpsMetrics$: setupDeps.core.metrics.getOpsMetrics$,
- },
- savedObjects: {
- setClientFactoryProvider: setupDeps.core.savedObjects.setClientFactoryProvider,
- addClientWrapper: setupDeps.core.savedObjects.addClientWrapper,
- registerType: setupDeps.core.savedObjects.registerType,
- },
- status: {
- isStatusPageAnonymous: setupDeps.core.status.isStatusPageAnonymous,
- core$: setupDeps.core.status.core$,
- overall$: setupDeps.core.status.overall$,
- set: () => {
- throw new Error(`core.status.set is unsupported in legacy`);
- },
- // @ts-expect-error
- get dependencies$() {
- throw new Error(`core.status.dependencies$ is unsupported in legacy`);
- },
- // @ts-expect-error
- get derivedStatus$() {
- throw new Error(`core.status.derivedStatus$ is unsupported in legacy`);
- },
- },
- uiSettings: {
- register: setupDeps.core.uiSettings.register,
- },
- deprecations: {
- registerDeprecations: () => {
- throw new Error('core.setup.deprecations.registerDeprecations is unsupported in legacy');
- },
- },
- getStartServices: () => Promise.resolve([coreStart, startDeps.plugins, {}]),
- };
-
- // eslint-disable-next-line @typescript-eslint/no-var-requires
- const KbnServer = require('../../../legacy/server/kbn_server');
- const kbnServer: LegacyKbnServer = new KbnServer(settings, config, {
- env: {
- mode: this.coreContext.env.mode,
- packageInfo: this.coreContext.env.packageInfo,
- },
- setupDeps: {
- core: coreSetup,
- plugins: setupDeps.plugins,
- },
- startDeps: {
- core: coreStart,
- plugins: startDeps.plugins,
- },
- __internals: {
- hapiServer: setupDeps.core.http.server,
- uiPlugins: setupDeps.uiPlugins,
- rendering: setupDeps.core.rendering,
- },
- logger: this.coreContext.logger,
- });
-
- const { autoListen } = await this.httpConfig$.pipe(first()).toPromise();
-
- if (autoListen) {
- try {
- await kbnServer.listen();
- } catch (err) {
- await kbnServer.close();
- throw err;
- }
- } else {
- await kbnServer.ready();
- }
-
- return kbnServer;
}
}
diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.ts b/src/core/server/legacy/logging/appenders/legacy_appender.ts
index a89441a5671b55..7e02d00c7b2342 100644
--- a/src/core/server/legacy/logging/appenders/legacy_appender.ts
+++ b/src/core/server/legacy/logging/appenders/legacy_appender.ts
@@ -9,11 +9,10 @@
import { schema } from '@kbn/config-schema';
import { LegacyLoggingServer } from '@kbn/legacy-logging';
import { DisposableAppender, LogRecord } from '@kbn/logging';
-import { LegacyVars } from '../../types';
export interface LegacyAppenderConfig {
type: 'legacy-appender';
- legacyLoggingConfig?: any;
+ legacyLoggingConfig?: Record<string, any>;
}
/**
@@ -23,7 +22,7 @@ export interface LegacyAppenderConfig {
export class LegacyAppender implements DisposableAppender {
public static configSchema = schema.object({
type: schema.literal('legacy-appender'),
- legacyLoggingConfig: schema.any(),
+ legacyLoggingConfig: schema.recordOf(schema.string(), schema.any()),
});
/**
@@ -34,7 +33,7 @@ export class LegacyAppender implements DisposableAppender {
private readonly loggingServer: LegacyLoggingServer;
- constructor(legacyLoggingConfig: Readonly) {
+ constructor(legacyLoggingConfig: any) {
this.loggingServer = new LegacyLoggingServer(legacyLoggingConfig);
}
diff --git a/src/core/server/legacy/merge_vars.test.ts b/src/core/server/legacy/merge_vars.test.ts
deleted file mode 100644
index e4268a52aa8ca7..00000000000000
--- a/src/core/server/legacy/merge_vars.test.ts
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { mergeVars } from './merge_vars';
-
-describe('mergeVars', () => {
- it('merges two objects together', () => {
- const first = {
- otherName: 'value',
- otherCanFoo: true,
- otherNested: {
- otherAnotherVariable: 'ok',
- },
- };
- const second = {
- name: 'value',
- canFoo: true,
- nested: {
- anotherVariable: 'ok',
- },
- };
-
- expect(mergeVars(first, second)).toEqual({
- name: 'value',
- canFoo: true,
- nested: {
- anotherVariable: 'ok',
- },
- otherName: 'value',
- otherCanFoo: true,
- otherNested: {
- otherAnotherVariable: 'ok',
- },
- });
- });
-
- it('does not mutate the source objects', () => {
- const first = {
- var1: 'first',
- };
- const second = {
- var1: 'second',
- var2: 'second',
- };
- const third = {
- var1: 'third',
- var2: 'third',
- var3: 'third',
- };
- const fourth = {
- var1: 'fourth',
- var2: 'fourth',
- var3: 'fourth',
- var4: 'fourth',
- };
-
- mergeVars(first, second, third, fourth);
-
- expect(first).toEqual({ var1: 'first' });
- expect(second).toEqual({ var1: 'second', var2: 'second' });
- expect(third).toEqual({ var1: 'third', var2: 'third', var3: 'third' });
- expect(fourth).toEqual({ var1: 'fourth', var2: 'fourth', var3: 'fourth', var4: 'fourth' });
- });
-
- it('merges multiple objects together with precedence increasing from left-to-right', () => {
- const first = {
- var1: 'first',
- var2: 'first',
- var3: 'first',
- var4: 'first',
- };
- const second = {
- var1: 'second',
- var2: 'second',
- var3: 'second',
- };
- const third = {
- var1: 'third',
- var2: 'third',
- };
- const fourth = {
- var1: 'fourth',
- };
-
- expect(mergeVars(first, second, third, fourth)).toEqual({
- var1: 'fourth',
- var2: 'third',
- var3: 'second',
- var4: 'first',
- });
- });
-
- it('overwrites the original variable value if a duplicate entry is found', () => {
- const first = {
- nested: {
- otherAnotherVariable: 'ok',
- },
- };
- const second = {
- name: 'value',
- canFoo: true,
- nested: {
- anotherVariable: 'ok',
- },
- };
-
- expect(mergeVars(first, second)).toEqual({
- name: 'value',
- canFoo: true,
- nested: {
- anotherVariable: 'ok',
- },
- });
- });
-
- it('combines entries within "uiCapabilities"', () => {
- const first = {
- uiCapabilities: {
- firstCapability: 'ok',
- sharedCapability: 'shared',
- },
- };
- const second = {
- name: 'value',
- canFoo: true,
- uiCapabilities: {
- secondCapability: 'ok',
- },
- };
- const third = {
- name: 'value',
- canFoo: true,
- uiCapabilities: {
- thirdCapability: 'ok',
- sharedCapability: 'blocked',
- },
- };
-
- expect(mergeVars(first, second, third)).toEqual({
- name: 'value',
- canFoo: true,
- uiCapabilities: {
- firstCapability: 'ok',
- secondCapability: 'ok',
- thirdCapability: 'ok',
- sharedCapability: 'blocked',
- },
- });
- });
-
- it('does not deeply combine entries within "uiCapabilities"', () => {
- const first = {
- uiCapabilities: {
- firstCapability: 'ok',
- nestedCapability: {
- otherNestedProp: 'otherNestedValue',
- },
- },
- };
- const second = {
- name: 'value',
- canFoo: true,
- uiCapabilities: {
- secondCapability: 'ok',
- nestedCapability: {
- nestedProp: 'nestedValue',
- },
- },
- };
-
- expect(mergeVars(first, second)).toEqual({
- name: 'value',
- canFoo: true,
- uiCapabilities: {
- firstCapability: 'ok',
- secondCapability: 'ok',
- nestedCapability: {
- nestedProp: 'nestedValue',
- },
- },
- });
- });
-});
diff --git a/src/core/server/legacy/merge_vars.ts b/src/core/server/legacy/merge_vars.ts
deleted file mode 100644
index cd2cbb0d8cde2e..00000000000000
--- a/src/core/server/legacy/merge_vars.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { LegacyVars } from './types';
-
-const ELIGIBLE_FLAT_MERGE_KEYS = ['uiCapabilities'];
-
-export function mergeVars(...sources: LegacyVars[]): LegacyVars {
- return Object.assign(
- {},
- ...sources,
- ...ELIGIBLE_FLAT_MERGE_KEYS.flatMap((key) =>
- sources.some((source) => key in source)
- ? [{ [key]: Object.assign({}, ...sources.map((source) => source[key] || {})) }]
- : []
- )
- );
-}
diff --git a/src/core/server/legacy/types.ts b/src/core/server/legacy/types.ts
deleted file mode 100644
index 9f562d3da30292..00000000000000
--- a/src/core/server/legacy/types.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { InternalCoreSetup, InternalCoreStart } from '../internal_types';
-import { PluginsServiceSetup, PluginsServiceStart, UiPlugins } from '../plugins';
-import { InternalRenderingServiceSetup } from '../rendering';
-
-/**
- * @internal
- * @deprecated
- */
-export type LegacyVars = Record;
-
-type LegacyCoreSetup = InternalCoreSetup & {
- plugins: PluginsServiceSetup;
- rendering: InternalRenderingServiceSetup;
-};
-type LegacyCoreStart = InternalCoreStart & { plugins: PluginsServiceStart };
-
-/**
- * New platform representation of the legacy configuration (KibanaConfig)
- *
- * @internal
- * @deprecated
- */
-export interface LegacyConfig {
- get(key?: string): T;
- has(key: string): boolean;
- set(key: string, value: any): void;
- set(config: LegacyVars): void;
-}
-
-/**
- * @public
- * @deprecated
- */
-export interface LegacyServiceSetupDeps {
- core: LegacyCoreSetup;
- plugins: Record;
- uiPlugins: UiPlugins;
-}
-
-/**
- * @public
- * @deprecated
- */
-export interface LegacyServiceStartDeps {
- core: LegacyCoreStart;
- plugins: Record;
-}
-
-/**
- * @internal
- * @deprecated
- */
-export interface LegacyServiceSetupConfig {
- legacyConfig: LegacyConfig;
- settings: LegacyVars;
-}
diff --git a/src/core/server/logging/__snapshots__/logging_config.test.ts.snap b/src/core/server/logging/__snapshots__/logging_config.test.ts.snap
deleted file mode 100644
index fe1407563a6351..00000000000000
--- a/src/core/server/logging/__snapshots__/logging_config.test.ts.snap
+++ /dev/null
@@ -1,20 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`\`schema\` creates correct schema with defaults. 1`] = `
-Object {
- "appenders": Map {},
- "loggers": Array [],
- "root": Object {
- "appenders": Array [
- "default",
- ],
- "level": "info",
- },
-}
-`;
-
-exports[`\`schema\` throws if \`root\` logger does not have "default" appender configured. 1`] = `"[root]: \\"default\\" appender required for migration period till the next major release"`;
-
-exports[`\`schema\` throws if \`root\` logger does not have appenders configured. 1`] = `"[root.appenders]: array size is [0], but cannot be smaller than [1]"`;
-
-exports[`fails if loggers use unknown appenders. 1`] = `"Logger \\"some.nested.context\\" contains unsupported appender key \\"unknown\\"."`;
diff --git a/src/core/server/logging/logging_config.test.ts b/src/core/server/logging/logging_config.test.ts
index 83f3c139e371af..e0004ba992c176 100644
--- a/src/core/server/logging/logging_config.test.ts
+++ b/src/core/server/logging/logging_config.test.ts
@@ -9,7 +9,35 @@
import { LoggingConfig, config } from './logging_config';
test('`schema` creates correct schema with defaults.', () => {
- expect(config.schema.validate({})).toMatchSnapshot();
+ expect(config.schema.validate({})).toMatchInlineSnapshot(
+ { json: expect.any(Boolean) }, // default value depends on TTY
+ `
+ Object {
+ "appenders": Map {},
+ "dest": "stdout",
+ "events": Object {},
+ "filter": Object {},
+ "json": Any,
+ "loggers": Array [],
+ "quiet": false,
+ "root": Object {
+ "appenders": Array [
+ "default",
+ ],
+ "level": "info",
+ },
+ "rotate": Object {
+ "enabled": false,
+ "everyBytes": 10485760,
+ "keepFiles": 7,
+ "pollingInterval": 10000,
+ "usePolling": false,
+ },
+ "silent": false,
+ "verbose": false,
+ }
+ `
+ );
});
test('`schema` throws if `root` logger does not have appenders configured.', () => {
@@ -19,7 +47,9 @@ test('`schema` throws if `root` logger does not have appenders configured.', ()
appenders: [],
},
})
- ).toThrowErrorMatchingSnapshot();
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[root.appenders]: array size is [0], but cannot be smaller than [1]"`
+ );
});
test('`schema` throws if `root` logger does not have "default" appender configured.', () => {
@@ -29,7 +59,9 @@ test('`schema` throws if `root` logger does not have "default" appender configur
appenders: ['console'],
},
})
- ).toThrowErrorMatchingSnapshot();
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[root]: \\"default\\" appender required for migration period till the next major release"`
+ );
});
test('`getParentLoggerContext()` returns correct parent context name.', () => {
@@ -157,7 +189,9 @@ test('fails if loggers use unknown appenders.', () => {
],
});
- expect(() => new LoggingConfig(validateConfig)).toThrowErrorMatchingSnapshot();
+ expect(() => new LoggingConfig(validateConfig)).toThrowErrorMatchingInlineSnapshot(
+ `"Logger \\"some.nested.context\\" contains unsupported appender key \\"unknown\\"."`
+ );
});
describe('extend', () => {
diff --git a/src/core/server/logging/logging_config.ts b/src/core/server/logging/logging_config.ts
index 24496289fb4c84..f5b75d7bb739ca 100644
--- a/src/core/server/logging/logging_config.ts
+++ b/src/core/server/logging/logging_config.ts
@@ -7,6 +7,7 @@
*/
import { schema, TypeOf } from '@kbn/config-schema';
+import { legacyLoggingConfigSchema } from '@kbn/legacy-logging';
import { AppenderConfigType, Appenders } from './appenders/appenders';
// We need this helper for the types to be correct
@@ -59,7 +60,7 @@ export const loggerSchema = schema.object({
export type LoggerConfigType = TypeOf;
export const config = {
path: 'logging',
- schema: schema.object({
+ schema: legacyLoggingConfigSchema.extends({
appenders: schema.mapOf(schema.string(), Appenders.configSchema, {
defaultValue: new Map(),
}),
@@ -85,7 +86,7 @@ export const config = {
}),
};
-export type LoggingConfigType = Omit, 'appenders'> & {
+export type LoggingConfigType = Pick<TypeOf<typeof config.schema>, 'loggers' | 'root'> & {
appenders: Map;
};
@@ -105,6 +106,7 @@ export const loggerContextConfigSchema = schema.object({
/** @public */
export type LoggerContextConfigType = TypeOf;
+
/** @public */
export interface LoggerContextConfigInput {
// config-schema knows how to handle either Maps or Records
diff --git a/src/core/server/logging/logging_system.test.ts b/src/core/server/logging/logging_system.test.ts
index 8a6fe71bc62220..b67be384732cb0 100644
--- a/src/core/server/logging/logging_system.test.ts
+++ b/src/core/server/logging/logging_system.test.ts
@@ -16,6 +16,7 @@ jest.mock('fs', () => ({
const dynamicProps = { process: { pid: expect.any(Number) } };
jest.mock('@kbn/legacy-logging', () => ({
+ ...(jest.requireActual('@kbn/legacy-logging') as any),
setupLoggingRotate: jest.fn().mockImplementation(() => Promise.resolve({})),
}));
diff --git a/src/core/server/metrics/index.ts b/src/core/server/metrics/index.ts
index 3e358edf3a01ee..0631bb2b358019 100644
--- a/src/core/server/metrics/index.ts
+++ b/src/core/server/metrics/index.ts
@@ -16,3 +16,4 @@ export type {
export type { OpsProcessMetrics, OpsServerMetrics, OpsOsMetrics } from './collectors';
export { MetricsService } from './metrics_service';
export { opsConfig } from './ops_config';
+export type { OpsConfigType } from './ops_config';
diff --git a/src/core/server/plugins/legacy_config.test.ts b/src/core/server/plugins/legacy_config.test.ts
index 5687c2dd551d22..0ea26f2e0333e0 100644
--- a/src/core/server/plugins/legacy_config.test.ts
+++ b/src/core/server/plugins/legacy_config.test.ts
@@ -13,7 +13,7 @@ import { getGlobalConfig, getGlobalConfig$ } from './legacy_config';
import { REPO_ROOT } from '@kbn/utils';
import { loggingSystemMock } from '../logging/logging_system.mock';
import { duration } from 'moment';
-import { fromRoot } from '../utils';
+import { fromRoot } from '@kbn/utils';
import { ByteSizeValue } from '@kbn/config-schema';
import { Server } from '../server';
diff --git a/src/core/server/plugins/plugin_context.test.ts b/src/core/server/plugins/plugin_context.test.ts
index b10bc47cb825b6..e37d985d423212 100644
--- a/src/core/server/plugins/plugin_context.test.ts
+++ b/src/core/server/plugins/plugin_context.test.ts
@@ -9,6 +9,7 @@
import { duration } from 'moment';
import { first } from 'rxjs/operators';
import { REPO_ROOT } from '@kbn/dev-utils';
+import { fromRoot } from '@kbn/utils';
import { createPluginInitializerContext, InstanceInfo } from './plugin_context';
import { CoreContext } from '../core_context';
import { Env } from '../config';
@@ -16,7 +17,6 @@ import { loggingSystemMock } from '../logging/logging_system.mock';
import { rawConfigServiceMock, getEnvOptions } from '../config/mocks';
import { PluginManifest } from './types';
import { Server } from '../server';
-import { fromRoot } from '../utils';
import { schema, ByteSizeValue } from '@kbn/config-schema';
import { ConfigService } from '@kbn/config';
diff --git a/src/core/server/plugins/plugins_config.ts b/src/core/server/plugins/plugins_config.ts
index d565513ebb35b7..45d80445f376e5 100644
--- a/src/core/server/plugins/plugins_config.ts
+++ b/src/core/server/plugins/plugins_config.ts
@@ -7,20 +7,24 @@
*/
import { schema, TypeOf } from '@kbn/config-schema';
+import { ServiceConfigDescriptor } from '../internal_types';
import { Env } from '../config';
-export type PluginsConfigType = TypeOf;
+const configSchema = schema.object({
+ initialize: schema.boolean({ defaultValue: true }),
-export const config = {
+ /**
+ * Defines an array of directories where another plugin should be loaded from.
+ */
+ paths: schema.arrayOf(schema.string(), { defaultValue: [] }),
+});
+
+export type PluginsConfigType = TypeOf;
+
+export const config: ServiceConfigDescriptor = {
path: 'plugins',
- schema: schema.object({
- initialize: schema.boolean({ defaultValue: true }),
-
- /**
- * Defines an array of directories where another plugin should be loaded from.
- */
- paths: schema.arrayOf(schema.string(), { defaultValue: [] }),
- }),
+ schema: configSchema,
+ deprecations: ({ unusedFromRoot }) => [unusedFromRoot('plugins.scanDirs')],
};
/** @internal */
diff --git a/src/core/server/plugins/plugins_service.test.ts b/src/core/server/plugins/plugins_service.test.ts
index 2d54648d229502..6bf7a1fadb4d3c 100644
--- a/src/core/server/plugins/plugins_service.test.ts
+++ b/src/core/server/plugins/plugins_service.test.ts
@@ -562,12 +562,12 @@ describe('PluginsService', () => {
plugin$: from([
createPlugin('plugin-1', {
path: 'path-1',
- version: 'some-version',
+ version: 'version-1',
configPath: 'plugin1',
}),
createPlugin('plugin-2', {
path: 'path-2',
- version: 'some-version',
+ version: 'version-2',
configPath: 'plugin2',
}),
]),
@@ -577,7 +577,7 @@ describe('PluginsService', () => {
});
describe('uiPlugins.internal', () => {
- it('includes disabled plugins', async () => {
+ it('contains internal properties for plugins', async () => {
config$.next({ plugins: { initialize: true }, plugin1: { enabled: false } });
const { uiPlugins } = await pluginsService.discover({ environment: environmentSetup });
expect(uiPlugins.internal).toMatchInlineSnapshot(`
@@ -586,15 +586,23 @@ describe('PluginsService', () => {
"publicAssetsDir": /path-1/public/assets,
"publicTargetDir": /path-1/target/public,
"requiredBundles": Array [],
+ "version": "version-1",
},
"plugin-2" => Object {
"publicAssetsDir": /path-2/public/assets,
"publicTargetDir": /path-2/target/public,
"requiredBundles": Array [],
+ "version": "version-2",
},
}
`);
});
+
+ it('includes disabled plugins', async () => {
+ config$.next({ plugins: { initialize: true }, plugin1: { enabled: false } });
+ const { uiPlugins } = await pluginsService.discover({ environment: environmentSetup });
+ expect([...uiPlugins.internal.keys()].sort()).toEqual(['plugin-1', 'plugin-2']);
+ });
});
describe('plugin initialization', () => {
diff --git a/src/core/server/plugins/plugins_service.ts b/src/core/server/plugins/plugins_service.ts
index 8b33e2cf4cc6be..09be40ecaf2a2c 100644
--- a/src/core/server/plugins/plugins_service.ts
+++ b/src/core/server/plugins/plugins_service.ts
@@ -222,6 +222,7 @@ export class PluginsService implements CoreService();
diff --git a/src/core/server/plugins/types.ts b/src/core/server/plugins/types.ts
index a6086bd6f17e8e..3a01049c5e1fe3 100644
--- a/src/core/server/plugins/types.ts
+++ b/src/core/server/plugins/types.ts
@@ -224,12 +224,15 @@ export interface DiscoveredPlugin {
*/
export interface InternalPluginInfo {
/**
- * Bundles that must be loaded for this plugoin
+ * Version of the plugin
+ */
+ readonly version: string;
+ /**
+ * Bundles that must be loaded for this plugin
*/
readonly requiredBundles: readonly string[];
/**
- * Path to the target/public directory of the plugin which should be
- * served
+ * Path to the target/public directory of the plugin which should be served
*/
readonly publicTargetDir: string;
/**
@@ -250,7 +253,9 @@ export interface Plugin<
TPluginsStart extends object = object
> {
setup(core: CoreSetup, plugins: TPluginsSetup): TSetup;
+
start(core: CoreStart, plugins: TPluginsStart): TStart;
+
stop?(): void;
}
@@ -267,7 +272,9 @@ export interface AsyncPlugin<
TPluginsStart extends object = object
> {
setup(core: CoreSetup, plugins: TPluginsSetup): TSetup | Promise;
+
start(core: CoreStart, plugins: TPluginsStart): TStart | Promise;
+
stop?(): void;
}
diff --git a/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.test.ts b/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.test.ts
index ea3843884df317..0abd8fd5a00576 100644
--- a/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.test.ts
+++ b/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.test.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { UiPlugins } from '../../plugins';
+import { InternalPluginInfo, UiPlugins } from '../../plugins';
import { getPluginsBundlePaths } from './get_plugin_bundle_paths';
const createUiPlugins = (pluginDeps: Record) => {
@@ -16,12 +16,13 @@ const createUiPlugins = (pluginDeps: Record) => {
browserConfigs: new Map(),
};
- Object.entries(pluginDeps).forEach(([pluginId, deps]) => {
+ const addPlugin = (pluginId: string, deps: string[]) => {
uiPlugins.internal.set(pluginId, {
requiredBundles: deps,
+ version: '8.0.0',
publicTargetDir: '',
publicAssetsDir: '',
- } as any);
+ } as InternalPluginInfo);
uiPlugins.public.set(pluginId, {
id: pluginId,
configPath: 'config-path',
@@ -29,6 +30,12 @@ const createUiPlugins = (pluginDeps: Record) => {
requiredPlugins: [],
requiredBundles: deps,
});
+
+ deps.forEach((dep) => addPlugin(dep, []));
+ };
+
+ Object.entries(pluginDeps).forEach(([pluginId, deps]) => {
+ addPlugin(pluginId, deps);
});
return uiPlugins;
@@ -56,13 +63,13 @@ describe('getPluginsBundlePaths', () => {
});
expect(pluginBundlePaths.get('a')).toEqual({
- bundlePath: '/regular-bundle-path/plugin/a/a.plugin.js',
- publicPath: '/regular-bundle-path/plugin/a/',
+ bundlePath: '/regular-bundle-path/plugin/a/8.0.0/a.plugin.js',
+ publicPath: '/regular-bundle-path/plugin/a/8.0.0/',
});
expect(pluginBundlePaths.get('b')).toEqual({
- bundlePath: '/regular-bundle-path/plugin/b/b.plugin.js',
- publicPath: '/regular-bundle-path/plugin/b/',
+ bundlePath: '/regular-bundle-path/plugin/b/8.0.0/b.plugin.js',
+ publicPath: '/regular-bundle-path/plugin/b/8.0.0/',
});
});
});
diff --git a/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.ts b/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.ts
index c8291b2720a92c..86ffdcf835f7b5 100644
--- a/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.ts
+++ b/src/core/server/rendering/bootstrap/get_plugin_bundle_paths.ts
@@ -25,9 +25,15 @@ export const getPluginsBundlePaths = ({
while (pluginsToProcess.length > 0) {
const pluginId = pluginsToProcess.pop() as string;
+ const plugin = uiPlugins.internal.get(pluginId);
+ if (!plugin) {
+ continue;
+ }
+ const { version } = plugin;
+
pluginBundlePaths.set(pluginId, {
- publicPath: `${regularBundlePath}/plugin/${pluginId}/`,
- bundlePath: `${regularBundlePath}/plugin/${pluginId}/${pluginId}.plugin.js`,
+ publicPath: `${regularBundlePath}/plugin/${pluginId}/${version}/`,
+ bundlePath: `${regularBundlePath}/plugin/${pluginId}/${version}/${pluginId}.plugin.js`,
});
const pluginBundleIds = uiPlugins.internal.get(pluginId)?.requiredBundles ?? [];
diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts
index 61de31e825d33b..530203e659086f 100644
--- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts
+++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts
@@ -35,13 +35,14 @@ const createMigrator = (
) => {
const mockMigrator: jest.Mocked = {
kibanaVersion: '8.0.0-testing',
- savedObjectsConfig: {
+ soMigrationsConfig: {
batchSize: 100,
scrollDuration: '15m',
pollInterval: 1500,
skip: false,
- // TODO migrationsV2: remove/deprecate once we release migrations v2
+ // TODO migrationsV2: remove/deprecate once we remove migrations v1
enableV2: false,
+ retryAttempts: 10,
},
runMigrations: jest.fn(),
getActiveMappings: jest.fn(),
diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts
index 7ead37699980a5..40d18c3b5063a6 100644
--- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts
+++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts
@@ -414,12 +414,13 @@ const mockOptions = ({ enableV2 }: { enableV2: boolean } = { enableV2: false })
enabled: true,
index: '.my-index',
} as KibanaMigratorOptions['kibanaConfig'],
- savedObjectsConfig: {
+ soMigrationsConfig: {
batchSize: 20,
pollInterval: 20000,
scrollDuration: '10m',
skip: false,
enableV2,
+ retryAttempts: 20,
},
client: elasticsearchClientMock.createElasticsearchClient(),
};
diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts
index e5c64914e4c96d..29852f8ac64452 100644
--- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts
+++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.ts
@@ -41,7 +41,7 @@ import { MigrationLogger } from '../core/migration_logger';
export interface KibanaMigratorOptions {
client: ElasticsearchClient;
typeRegistry: ISavedObjectTypeRegistry;
- savedObjectsConfig: SavedObjectsMigrationConfigType;
+ soMigrationsConfig: SavedObjectsMigrationConfigType;
kibanaConfig: KibanaConfigType;
kibanaVersion: string;
logger: Logger;
@@ -72,10 +72,10 @@ export class KibanaMigrator {
});
private readonly activeMappings: IndexMapping;
private migrationsRetryDelay?: number;
- // TODO migrationsV2: make private once we release migrations v2
- public kibanaVersion: string;
- // TODO migrationsV2: make private once we release migrations v2
- public readonly savedObjectsConfig: SavedObjectsMigrationConfigType;
+ // TODO migrationsV2: make private once we remove migrations v1
+ public readonly kibanaVersion: string;
+ // TODO migrationsV2: make private once we remove migrations v1
+ public readonly soMigrationsConfig: SavedObjectsMigrationConfigType;
/**
* Creates an instance of KibanaMigrator.
@@ -84,14 +84,14 @@ export class KibanaMigrator {
client,
typeRegistry,
kibanaConfig,
- savedObjectsConfig,
+ soMigrationsConfig,
kibanaVersion,
logger,
migrationsRetryDelay,
}: KibanaMigratorOptions) {
this.client = client;
this.kibanaConfig = kibanaConfig;
- this.savedObjectsConfig = savedObjectsConfig;
+ this.soMigrationsConfig = soMigrationsConfig;
this.typeRegistry = typeRegistry;
this.serializer = new SavedObjectsSerializer(this.typeRegistry);
this.mappingProperties = mergeTypes(this.typeRegistry.getAllTypes());
@@ -175,7 +175,7 @@ export class KibanaMigrator {
const migrators = Object.keys(indexMap).map((index) => {
// TODO migrationsV2: remove old migrations algorithm
- if (this.savedObjectsConfig.enableV2) {
+ if (this.soMigrationsConfig.enableV2) {
return {
migrate: (): Promise => {
return runResilientMigrator({
@@ -193,20 +193,21 @@ export class KibanaMigrator {
),
migrationVersionPerType: this.documentMigrator.migrationVersion,
indexPrefix: index,
+ migrationsConfig: this.soMigrationsConfig,
});
},
};
} else {
return new IndexMigrator({
- batchSize: this.savedObjectsConfig.batchSize,
+ batchSize: this.soMigrationsConfig.batchSize,
client: createMigrationEsClient(this.client, this.log, this.migrationsRetryDelay),
documentMigrator: this.documentMigrator,
index,
kibanaVersion: this.kibanaVersion,
log: this.log,
mappingProperties: indexMap[index].typeMappings,
- pollInterval: this.savedObjectsConfig.pollInterval,
- scrollDuration: this.savedObjectsConfig.scrollDuration,
+ pollInterval: this.soMigrationsConfig.pollInterval,
+ scrollDuration: this.soMigrationsConfig.scrollDuration,
serializer: this.serializer,
// Only necessary for the migrator of the kibana index.
obsoleteIndexTemplatePattern:
diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.ts b/src/core/server/saved_objects/migrationsv2/actions/index.ts
index 22dfb03815052d..52fa99b7248737 100644
--- a/src/core/server/saved_objects/migrationsv2/actions/index.ts
+++ b/src/core/server/saved_objects/migrationsv2/actions/index.ts
@@ -9,7 +9,7 @@
import * as Either from 'fp-ts/lib/Either';
import * as TaskEither from 'fp-ts/lib/TaskEither';
import * as Option from 'fp-ts/lib/Option';
-import { ElasticsearchClientError } from '@elastic/elasticsearch/lib/errors';
+import { ElasticsearchClientError, ResponseError } from '@elastic/elasticsearch/lib/errors';
import { pipe } from 'fp-ts/lib/pipeable';
import { errors as EsErrors } from '@elastic/elasticsearch';
import { flow } from 'fp-ts/lib/function';
@@ -23,12 +23,6 @@ import {
} from './catch_retryable_es_client_errors';
export type { RetryableEsClientError };
-export const isRetryableEsClientResponse = (
- res: Either.Either
-): res is Either.Left => {
- return Either.isLeft(res) && res.left.type === 'retryable_es_client_error';
-};
-
/**
* Batch size for updateByQuery, reindex & search operations. Smaller batches
* reduce the memory pressure on Elasticsearch and Kibana so are less likely
@@ -45,6 +39,27 @@ const INDEX_NUMBER_OF_SHARDS = 1;
/** Wait for all shards to be active before starting an operation */
const WAIT_FOR_ALL_SHARDS_TO_BE_ACTIVE = 'all';
+// Map of left response 'type' string -> response interface
+export interface ActionErrorTypeMap {
+ wait_for_task_completion_timeout: WaitForTaskCompletionTimeout;
+ retryable_es_client_error: RetryableEsClientError;
+ index_not_found_exception: IndexNotFound;
+ target_index_had_write_block: TargetIndexHadWriteBlock;
+ incompatible_mapping_exception: IncompatibleMappingException;
+ alias_not_found_exception: AliasNotFound;
+ remove_index_not_a_concrete_index: RemoveIndexNotAConcreteIndex;
+}
+
+/**
+ * Type guard for narrowing the type of a left
+ */
+export function isLeftTypeof(
+ res: any,
+ typeString: T
+): res is ActionErrorTypeMap[T] {
+ return res.type === typeString;
+}
+
export type FetchIndexResponse = Record<
string,
{ aliases: Record; mappings: IndexMapping; settings: unknown }
@@ -74,6 +89,10 @@ export const fetchIndices = (
.catch(catchRetryableEsClientErrors);
};
+export interface IndexNotFound {
+ type: 'index_not_found_exception';
+ index: string;
+}
/**
* Sets a write block in place for the given index. If the response includes
* `acknowledged: true` all in-progress writes have drained and no further
@@ -87,7 +106,7 @@ export const setWriteBlock = (
client: ElasticsearchClient,
index: string
): TaskEither.TaskEither<
- { type: 'index_not_found_exception' } | RetryableEsClientError,
+ IndexNotFound | RetryableEsClientError,
'set_write_block_succeeded'
> => () => {
return client.indices
@@ -112,7 +131,7 @@ export const setWriteBlock = (
.catch((e: ElasticsearchClientError) => {
if (e instanceof EsErrors.ResponseError) {
if (e.message === 'index_not_found_exception') {
- return Either.left({ type: 'index_not_found_exception' as const });
+ return Either.left({ type: 'index_not_found_exception' as const, index });
}
}
throw e;
@@ -170,10 +189,11 @@ export const removeWriteBlock = (
*/
const waitForIndexStatusYellow = (
client: ElasticsearchClient,
- index: string
+ index: string,
+ timeout: string
): TaskEither.TaskEither => () => {
return client.cluster
- .health({ index, wait_for_status: 'yellow', timeout: '30s' })
+ .health({ index, wait_for_status: 'yellow', timeout })
.then(() => {
return Either.right({});
})
@@ -189,19 +209,18 @@ export type CloneIndexResponse = AcknowledgeResponse;
* This method adds some additional logic to the ES clone index API:
* - it is idempotent, if it gets called multiple times subsequent calls will
* wait for the first clone operation to complete (up to 60s)
- * - the first call will wait up to 90s for the cluster state and all shards
+ * - the first call will wait up to 120s for the cluster state and all shards
* to be updated.
*/
export const cloneIndex = (
client: ElasticsearchClient,
source: string,
- target: string
-): TaskEither.TaskEither<
- RetryableEsClientError | { type: 'index_not_found_exception'; index: string },
- CloneIndexResponse
-> => {
+ target: string,
+ /** only used for testing */
+ timeout = DEFAULT_TIMEOUT
+): TaskEither.TaskEither => {
const cloneTask: TaskEither.TaskEither<
- RetryableEsClientError | { type: 'index_not_found_exception'; index: string },
+ RetryableEsClientError | IndexNotFound,
AcknowledgeResponse
> = () => {
return client.indices
@@ -227,7 +246,7 @@ export const cloneIndex = (
},
},
},
- timeout: DEFAULT_TIMEOUT,
+ timeout,
},
{ maxRetries: 0 /** handle retry ourselves for now */ }
)
@@ -277,7 +296,7 @@ export const cloneIndex = (
} else {
// Otherwise, wait until the target index has a 'green' status.
return pipe(
- waitForIndexStatusYellow(client, target),
+ waitForIndexStatusYellow(client, target, timeout),
TaskEither.map((value) => {
/** When the index status is 'green' we know that all shards were started */
return { acknowledged: true, shardsAcknowledged: true };
@@ -295,6 +314,38 @@ interface WaitForTaskResponse {
description?: string;
}
+/**
+ * After waiting for the specified timeout, the task has not yet completed.
+ *
+ * When querying the tasks API we use `wait_for_completion=true` to block the
+ * request until the task completes. If after the `timeout`, the task still has
+ * not completed we return this error. This does not mean that the task itelf
+ * has reached a timeout, Elasticsearch will continue to run the task.
+ */
+export interface WaitForTaskCompletionTimeout {
+ /** After waiting for the specified timeout, the task has not yet completed. */
+ readonly type: 'wait_for_task_completion_timeout';
+ readonly message: string;
+ readonly error?: Error;
+}
+
+const catchWaitForTaskCompletionTimeout = (
+ e: ResponseError
+): Either.Either => {
+ if (
+ e.body?.error?.type === 'timeout_exception' ||
+ e.body?.error?.type === 'receive_timeout_transport_exception'
+ ) {
+ return Either.left({
+ type: 'wait_for_task_completion_timeout' as const,
+ message: `[${e.body.error.type}] ${e.body.error.reason}`,
+ error: e,
+ });
+ } else {
+ throw e;
+ }
+};
+
/**
* Blocks for up to 60s or until a task completes.
*
@@ -304,7 +355,10 @@ const waitForTask = (
client: ElasticsearchClient,
taskId: string,
timeout: string
-): TaskEither.TaskEither => () => {
+): TaskEither.TaskEither<
+ RetryableEsClientError | WaitForTaskCompletionTimeout,
+ WaitForTaskResponse
+> => () => {
return client.tasks
.get({
task_id: taskId,
@@ -322,6 +376,7 @@ const waitForTask = (
description: body.task.description,
});
})
+ .catch(catchWaitForTaskCompletionTimeout)
.catch(catchRetryableEsClientErrors);
};
@@ -424,7 +479,15 @@ export const reindex = (
};
interface WaitForReindexTaskFailure {
- cause: { type: string; reason: string };
+ readonly cause: { type: string; reason: string };
+}
+
+export interface TargetIndexHadWriteBlock {
+ type: 'target_index_had_write_block';
+}
+
+export interface IncompatibleMappingException {
+ type: 'incompatible_mapping_exception';
}
export const waitForReindexTask = flow(
@@ -433,10 +496,11 @@ export const waitForReindexTask = flow(
(
res
): TaskEither.TaskEither<
- | { type: 'index_not_found_exception'; index: string }
- | { type: 'target_index_had_write_block' }
- | { type: 'incompatible_mapping_exception' }
- | RetryableEsClientError,
+ | IndexNotFound
+ | TargetIndexHadWriteBlock
+ | IncompatibleMappingException
+ | RetryableEsClientError
+ | WaitForTaskCompletionTimeout,
'reindex_succeeded'
> => {
const failureIsAWriteBlock = ({ cause: { type, reason } }: WaitForReindexTaskFailure) =>
@@ -507,7 +571,12 @@ export const verifyReindex = (
export const waitForPickupUpdatedMappingsTask = flow(
waitForTask,
TaskEither.chain(
- (res): TaskEither.TaskEither => {
+ (
+ res
+ ): TaskEither.TaskEither<
+ RetryableEsClientError | WaitForTaskCompletionTimeout,
+ 'pickup_updated_mappings_succeeded'
+ > => {
// We don't catch or type failures/errors because they should never
// occur in our migration algorithm and we don't have any business logic
// for dealing with it. If something happens we'll just crash and try
@@ -529,6 +598,14 @@ export const waitForPickupUpdatedMappingsTask = flow(
)
);
+export interface AliasNotFound {
+ type: 'alias_not_found_exception';
+}
+
+export interface RemoveIndexNotAConcreteIndex {
+ type: 'remove_index_not_a_concrete_index';
+}
+
export type AliasAction =
| { remove_index: { index: string } }
| { remove: { index: string; alias: string; must_exist: boolean } }
@@ -541,10 +618,7 @@ export const updateAliases = (
client: ElasticsearchClient,
aliasActions: AliasAction[]
): TaskEither.TaskEither<
- | { type: 'index_not_found_exception'; index: string }
- | { type: 'alias_not_found_exception' }
- | { type: 'remove_index_not_a_concrete_index' }
- | RetryableEsClientError,
+ IndexNotFound | AliasNotFound | RemoveIndexNotAConcreteIndex | RetryableEsClientError,
'update_aliases_succeeded'
> => () => {
return client.indices
@@ -698,11 +772,11 @@ export const createIndex = (
// If the cluster state was updated and all shards ackd we're done
return TaskEither.right('create_index_succeeded');
} else {
- // Otherwise, wait until the target index has a 'green' status.
+ // Otherwise, wait until the target index has a 'yellow' status.
return pipe(
- waitForIndexStatusYellow(client, indexName),
+ waitForIndexStatusYellow(client, indexName, DEFAULT_TIMEOUT),
TaskEither.map(() => {
- /** When the index status is 'green' we know that all shards were started */
+ /** When the index status is 'yellow' we know that all shards were started */
return 'create_index_succeeded';
})
);
diff --git a/src/core/server/saved_objects/migrationsv2/index.ts b/src/core/server/saved_objects/migrationsv2/index.ts
index 0297aefdc7abdd..6e65a2e700fd30 100644
--- a/src/core/server/saved_objects/migrationsv2/index.ts
+++ b/src/core/server/saved_objects/migrationsv2/index.ts
@@ -14,6 +14,7 @@ import { MigrationResult } from '../migrations/core';
import { next, TransformRawDocs } from './next';
import { createInitialState, model } from './model';
import { migrationStateActionMachine } from './migrations_state_action_machine';
+import { SavedObjectsMigrationConfigType } from '../saved_objects_config';
/**
* Migrates the provided indexPrefix index using a resilient algorithm that is
@@ -29,6 +30,7 @@ export async function runResilientMigrator({
transformRawDocs,
migrationVersionPerType,
indexPrefix,
+ migrationsConfig,
}: {
client: ElasticsearchClient;
kibanaVersion: string;
@@ -38,6 +40,7 @@ export async function runResilientMigrator({
transformRawDocs: TransformRawDocs;
migrationVersionPerType: SavedObjectsMigrationVersion;
indexPrefix: string;
+ migrationsConfig: SavedObjectsMigrationConfigType;
}): Promise {
const initialState = createInitialState({
kibanaVersion,
@@ -45,6 +48,7 @@ export async function runResilientMigrator({
preMigrationScript,
migrationVersionPerType,
indexPrefix,
+ migrationsConfig,
});
return migrationStateActionMachine({
initialState,
diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts
index 2c052a87d028b5..1824efa0ed8d44 100644
--- a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts
+++ b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts
@@ -33,6 +33,7 @@ import {
} from '../actions';
import * as Either from 'fp-ts/lib/Either';
import * as Option from 'fp-ts/lib/Option';
+import { ResponseError } from '@elastic/elasticsearch/lib/errors';
const { startES } = kbnTestServer.createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
@@ -162,6 +163,7 @@ describe('migration actions', () => {
Object {
"_tag": "Left",
"left": Object {
+ "index": "no_such_index",
"type": "index_not_found_exception",
},
}
@@ -291,6 +293,45 @@ describe('migration actions', () => {
}
`);
});
+ it('resolves left with a retryable_es_client_error if clone target already exists but takes longer than the specified timeout before turning yellow', async () => {
+ // Create a red index
+ await client.indices
+ .create({
+ index: 'clone_red_index',
+ timeout: '5s',
+ body: {
+ mappings: { properties: {} },
+ settings: {
+ // Allocate 1 replica so that this index stays yellow
+ number_of_replicas: '1',
+ // Disable all shard allocation so that the index status is red
+ 'index.routing.allocation.enable': 'none',
+ },
+ },
+ })
+ .catch((e) => {});
+
+ // Call clone even though the index already exists
+ const cloneIndexPromise = cloneIndex(
+ client,
+ 'existing_index_with_write_block',
+ 'clone_red_index',
+ '0s'
+ )();
+
+ await cloneIndexPromise.then((res) => {
+ expect(res).toMatchInlineSnapshot(`
+ Object {
+ "_tag": "Left",
+ "left": Object {
+ "error": [ResponseError: Response Error],
+ "message": "Response Error",
+ "type": "retryable_es_client_error",
+ },
+ }
+ `);
+ });
+ });
});
// Reindex doesn't return any errors on it's own, so we have to test
@@ -587,6 +628,28 @@ describe('migration actions', () => {
}
`);
});
+ it('resolves left wait_for_task_completion_timeout when the task does not finish within the timeout', async () => {
+ const res = (await reindex(
+ client,
+ 'existing_index_with_docs',
+ 'reindex_target',
+ Option.none,
+ false
+ )()) as Either.Right;
+
+ const task = waitForReindexTask(client, res.right.taskId, '0s');
+
+ await expect(task()).resolves.toMatchObject({
+ _tag: 'Left',
+ left: {
+ error: expect.any(ResponseError),
+ message: expect.stringMatching(
+ /\[timeout_exception\] Timed out waiting for completion of \[org.elasticsearch.index.reindex.BulkByScrollTask/
+ ),
+ type: 'wait_for_task_completion_timeout',
+ },
+ });
+ });
});
describe('verifyReindex', () => {
@@ -702,6 +765,25 @@ describe('migration actions', () => {
{"type":"index_not_found_exception","reason":"no such index [no_such_index]","resource.type":"index_or_alias","resource.id":"no_such_index","index_uuid":"_na_","index":"no_such_index"}]
`);
});
+ it('resolves left wait_for_task_completion_timeout when the task does not complete within the timeout', async () => {
+ const res = (await pickupUpdatedMappings(
+ client,
+ 'existing_index_with_docs'
+ )()) as Either.Right;
+
+ const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '0s');
+
+ await expect(task()).resolves.toMatchObject({
+ _tag: 'Left',
+ left: {
+ error: expect.any(ResponseError),
+ message: expect.stringMatching(
+ /\[timeout_exception\] Timed out waiting for completion of \[org.elasticsearch.index.reindex.BulkByScrollTask/
+ ),
+ type: 'wait_for_task_completion_timeout',
+ },
+ });
+ });
it('resolves right when successful', async () => {
const res = (await pickupUpdatedMappings(
client,
diff --git a/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts b/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts
index f7b9c4c368fa0c..99c06c0a3586ba 100644
--- a/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts
+++ b/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts
@@ -27,6 +27,14 @@ describe('migrationsStateActionMachine', () => {
targetMappings: { properties: {} },
migrationVersionPerType: {},
indexPrefix: '.my-so-index',
+ migrationsConfig: {
+ batchSize: 1000,
+ pollInterval: 0,
+ scrollDuration: '0s',
+ skip: false,
+ enableV2: true,
+ retryAttempts: 5,
+ },
});
const next = jest.fn((s: State) => {
@@ -221,6 +229,7 @@ describe('migrationsStateActionMachine', () => {
"_tag": "None",
},
"reason": "the fatal reason",
+ "retryAttempts": 5,
"retryCount": 0,
"retryDelay": 0,
"targetIndexMappings": Object {
@@ -280,6 +289,7 @@ describe('migrationsStateActionMachine', () => {
"_tag": "None",
},
"reason": "the fatal reason",
+ "retryAttempts": 5,
"retryCount": 0,
"retryDelay": 0,
"targetIndexMappings": Object {
@@ -424,6 +434,7 @@ describe('migrationsStateActionMachine', () => {
"_tag": "None",
},
"reason": "the fatal reason",
+ "retryAttempts": 5,
"retryCount": 0,
"retryDelay": 0,
"targetIndexMappings": Object {
@@ -478,6 +489,7 @@ describe('migrationsStateActionMachine', () => {
"_tag": "None",
},
"reason": "the fatal reason",
+ "retryAttempts": 5,
"retryCount": 0,
"retryDelay": 0,
"targetIndexMappings": Object {
diff --git a/src/core/server/saved_objects/migrationsv2/model.test.ts b/src/core/server/saved_objects/migrationsv2/model.test.ts
index 5531f847f8bb41..2813f01093e950 100644
--- a/src/core/server/saved_objects/migrationsv2/model.test.ts
+++ b/src/core/server/saved_objects/migrationsv2/model.test.ts
@@ -35,6 +35,7 @@ import { SavedObjectsRawDoc } from '..';
import { AliasAction, RetryableEsClientError } from './actions';
import { createInitialState, model } from './model';
import { ResponseType } from './next';
+import { SavedObjectsMigrationConfigType } from '../saved_objects_config';
describe('migrations v2 model', () => {
const baseState: BaseState = {
@@ -44,6 +45,7 @@ describe('migrations v2 model', () => {
logs: [],
retryCount: 0,
retryDelay: 0,
+ retryAttempts: 15,
indexPrefix: '.kibana',
outdatedDocumentsQuery: {},
targetIndexMappings: {
@@ -160,15 +162,15 @@ describe('migrations v2 model', () => {
expect(newState.retryDelay).toEqual(0);
});
- test('terminates to FATAL after 10 retries', () => {
+ test('terminates to FATAL after retryAttempts retries', () => {
const newState = model(
- { ...state, ...{ retryCount: 10, retryDelay: 64000 } },
+ { ...state, ...{ retryCount: 15, retryDelay: 64000 } },
Either.left(retryableError)
) as FatalState;
expect(newState.controlState).toEqual('FATAL');
expect(newState.reason).toMatchInlineSnapshot(
- `"Unable to complete the INIT step after 10 attempts, terminating."`
+ `"Unable to complete the INIT step after 15 attempts, terminating."`
);
});
});
@@ -610,6 +612,7 @@ describe('migrations v2 model', () => {
test('LEGACY_SET_WRITE_BLOCK -> LEGACY_CREATE_REINDEX_TARGET if action fails with index_not_found_exception', () => {
const res: ResponseType<'LEGACY_SET_WRITE_BLOCK'> = Either.left({
type: 'index_not_found_exception',
+ index: 'legacy_index_name',
});
const newState = model(legacySetWriteBlockState, res);
expect(newState.controlState).toEqual('LEGACY_CREATE_REINDEX_TARGET');
@@ -707,6 +710,16 @@ describe('migrations v2 model', () => {
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
+ test('LEGACY_REINDEX_WAIT_FOR_TASK -> LEGACY_REINDEX_WAIT_FOR_TASK if action fails with wait_for_task_completion_timeout', () => {
+ const res: ResponseType<'LEGACY_REINDEX_WAIT_FOR_TASK'> = Either.left({
+ message: '[timeout_exception] Timeout waiting for ...',
+ type: 'wait_for_task_completion_timeout',
+ });
+ const newState = model(legacyReindexWaitForTaskState, res);
+ expect(newState.controlState).toEqual('LEGACY_REINDEX_WAIT_FOR_TASK');
+ expect(newState.retryCount).toEqual(1);
+ expect(newState.retryDelay).toEqual(2000);
+ });
});
describe('LEGACY_DELETE', () => {
const legacyDeleteState: LegacyDeleteState = {
@@ -846,6 +859,16 @@ describe('migrations v2 model', () => {
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
+ test('REINDEX_SOURCE_TO_TEMP_WAIT_FOR_TASK -> REINDEX_SOURCE_TO_TEMP_WAIT_FOR_TASK when response is left wait_for_task_completion_timeout', () => {
+ const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_WAIT_FOR_TASK'> = Either.left({
+ message: '[timeout_exception] Timeout waiting for ...',
+ type: 'wait_for_task_completion_timeout',
+ });
+ const newState = model(state, res);
+ expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_WAIT_FOR_TASK');
+ expect(newState.retryCount).toEqual(1);
+ expect(newState.retryDelay).toEqual(2000);
+ });
});
describe('SET_TEMP_WRITE_BLOCK', () => {
const state: SetTempWriteBlock = {
@@ -1025,6 +1048,19 @@ describe('migrations v2 model', () => {
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
+ test('UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK -> UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK when response is left wait_for_task_completion_timeout', () => {
+ const res: ResponseType<'UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK'> = Either.left({
+ message: '[timeout_exception] Timeout waiting for ...',
+ type: 'wait_for_task_completion_timeout',
+ });
+ const newState = model(
+ updateTargetMappingsWaitForTaskState,
+ res
+ ) as UpdateTargetMappingsWaitForTaskState;
+ expect(newState.controlState).toEqual('UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK');
+ expect(newState.retryCount).toEqual(1);
+ expect(newState.retryDelay).toEqual(2000);
+ });
});
describe('CREATE_NEW_TARGET', () => {
const aliasActions = Option.some([Symbol('alias action')] as unknown) as Option.Some<
@@ -1144,6 +1180,9 @@ describe('migrations v2 model', () => {
});
});
describe('createInitialState', () => {
+ const migrationsConfig = ({
+ retryAttempts: 15,
+ } as unknown) as SavedObjectsMigrationConfigType;
it('creates the initial state for the model based on the passed in paramaters', () => {
expect(
createInitialState({
@@ -1154,6 +1193,7 @@ describe('migrations v2 model', () => {
},
migrationVersionPerType: {},
indexPrefix: '.kibana_task_manager',
+ migrationsConfig,
})
).toMatchInlineSnapshot(`
Object {
@@ -1171,6 +1211,7 @@ describe('migrations v2 model', () => {
"preMigrationScript": Object {
"_tag": "None",
},
+ "retryAttempts": 15,
"retryCount": 0,
"retryDelay": 0,
"targetIndexMappings": Object {
@@ -1214,6 +1255,7 @@ describe('migrations v2 model', () => {
preMigrationScript,
migrationVersionPerType: {},
indexPrefix: '.kibana_task_manager',
+ migrationsConfig,
});
expect(Option.isSome(initialState.preMigrationScript)).toEqual(true);
@@ -1233,6 +1275,7 @@ describe('migrations v2 model', () => {
preMigrationScript: undefined,
migrationVersionPerType: {},
indexPrefix: '.kibana_task_manager',
+ migrationsConfig,
}).preMigrationScript
)
).toEqual(true);
@@ -1248,6 +1291,7 @@ describe('migrations v2 model', () => {
preMigrationScript: "ctx._id = ctx._source.type + ':' + ctx._id",
migrationVersionPerType: { my_dashboard: '7.10.1', my_viz: '8.0.0' },
indexPrefix: '.kibana_task_manager',
+ migrationsConfig,
}).outdatedDocumentsQuery
).toMatchInlineSnapshot(`
Object {
diff --git a/src/core/server/saved_objects/migrationsv2/model.ts b/src/core/server/saved_objects/migrationsv2/model.ts
index 2e92f34429ea9f..5bdba980267925 100644
--- a/src/core/server/saved_objects/migrationsv2/model.ts
+++ b/src/core/server/saved_objects/migrationsv2/model.ts
@@ -10,33 +10,13 @@ import { gt, valid } from 'semver';
import * as Either from 'fp-ts/lib/Either';
import * as Option from 'fp-ts/lib/Option';
import { cloneDeep } from 'lodash';
-import { AliasAction, FetchIndexResponse, RetryableEsClientError } from './actions';
+import { AliasAction, FetchIndexResponse, isLeftTypeof, RetryableEsClientError } from './actions';
import { AllActionStates, InitState, State } from './types';
import { IndexMapping } from '../mappings';
import { ResponseType } from './next';
import { SavedObjectsMigrationVersion } from '../types';
import { disableUnknownTypeMappingFields } from '../migrations/core/migration_context';
-
-/**
- * How many times to retry a failing step.
- *
- * Waiting for a task to complete will cause a failing step every time the
- * wait_for_task action times out e.g. the following sequence has 3 retry
- * attempts:
- * LEGACY_REINDEX_WAIT_FOR_TASK (60s timeout) ->
- * LEGACY_REINDEX_WAIT_FOR_TASK (2s delay, 60s timeout) ->
- * LEGACY_REINDEX_WAIT_FOR_TASK (4s delay, 60s timeout) ->
- * LEGACY_REINDEX_WAIT_FOR_TASK (success) -> ...
- *
- * This places an upper limit to how long we will wait for a task to complete.
- * The duration of a step is the time it takes for the action to complete plus
- * the exponential retry delay:
- * max_task_runtime = 2+4+8+16+32+64*(MAX_RETRY_ATTEMPTS-5) + ACTION_DURATION*MAX_RETRY_ATTEMPTS
- *
- * For MAX_RETRY_ATTEMPTS=10, ACTION_DURATION=60
- * max_task_runtime = 16.46 minutes
- */
-const MAX_RETRY_ATTEMPTS = 10;
+import { SavedObjectsMigrationConfigType } from '../saved_objects_config';
/**
* A helper function/type for ensuring that all control state's are handled.
@@ -115,12 +95,17 @@ function getAliases(indices: FetchIndexResponse) {
}, {} as Record);
}
-const delayRetryState = (state: S, left: RetryableEsClientError): S => {
- if (state.retryCount === MAX_RETRY_ATTEMPTS) {
+const delayRetryState = (
+ state: S,
+ errorMessage: string,
+ /** How many times to retry a step that fails */
+ maxRetryAttempts: number
+): S => {
+ if (state.retryCount >= maxRetryAttempts) {
return {
...state,
controlState: 'FATAL',
- reason: `Unable to complete the ${state.controlState} step after ${MAX_RETRY_ATTEMPTS} attempts, terminating.`,
+ reason: `Unable to complete the ${state.controlState} step after ${maxRetryAttempts} attempts, terminating.`,
};
} else {
const retryCount = state.retryCount + 1;
@@ -134,9 +119,7 @@ const delayRetryState = (state: S, left: RetryableEsClientError
...state.logs,
{
level: 'error',
- message: `Action failed with '${
- left.message
- }'. Retrying attempt ${retryCount} out of ${MAX_RETRY_ATTEMPTS} in ${
+ message: `Action failed with '${errorMessage}'. Retrying attempt ${retryCount} in ${
retryDelay / 1000
} seconds.`,
},
@@ -175,9 +158,12 @@ export const model = (currentState: State, resW: ResponseType):
// Handle retryable_es_client_errors. Other left values need to be handled
// by the control state specific code below.
- if (Either.isLeft(resW) && resW.left.type === 'retryable_es_client_error') {
+ if (
+ Either.isLeft(resW) &&
+ isLeftTypeof(resW.left, 'retryable_es_client_error')
+ ) {
// Retry the same step after an exponentially increasing delay.
- return delayRetryState(stateP, resW.left);
+ return delayRetryState(stateP, resW.left.message, stateP.retryAttempts);
} else {
// If the action didn't fail with a retryable_es_client_error, reset the
// retry counter and retryDelay state
@@ -333,7 +319,7 @@ export const model = (currentState: State, resW: ResponseType):
// If the write block failed because the index doesn't exist, it means
// another instance already completed the legacy pre-migration. Proceed
// to the next step.
- if (res.left.type === 'index_not_found_exception') {
+ if (isLeftTypeof(res.left, 'index_not_found_exception')) {
return { ...stateP, controlState: 'LEGACY_CREATE_REINDEX_TARGET' };
} else {
// @ts-expect-error TS doesn't correctly narrow this type to never
@@ -376,8 +362,8 @@ export const model = (currentState: State, resW: ResponseType):
} else {
const left = res.left;
if (
- (left.type === 'index_not_found_exception' && left.index === stateP.legacyIndex) ||
- left.type === 'target_index_had_write_block'
+ (isLeftTypeof(left, 'index_not_found_exception') && left.index === stateP.legacyIndex) ||
+ isLeftTypeof(left, 'target_index_had_write_block')
) {
// index_not_found_exception for the LEGACY_REINDEX source index:
// another instance already complete the LEGACY_DELETE step.
@@ -390,12 +376,23 @@ export const model = (currentState: State, resW: ResponseType):
// step. However, by not skipping ahead we limit branches in the
// control state progression and simplify the implementation.
return { ...stateP, controlState: 'LEGACY_DELETE' };
- } else {
+ } else if (isLeftTypeof(left, 'wait_for_task_completion_timeout')) {
+ // After waiting for the specified timeout, the task has not yet
+ // completed. Retry this step to see if the task has completed after an
+ // exponential delay. We will basically keep polling forever until the
+ // Elasticsearch task succeeds or fails.
+ return delayRetryState(stateP, left.message, Number.MAX_SAFE_INTEGER);
+ } else if (
+ isLeftTypeof(left, 'index_not_found_exception') ||
+ isLeftTypeof(left, 'incompatible_mapping_exception')
+ ) {
// We don't handle the following errors as the algorithm will never
// run into these during the LEGACY_REINDEX_WAIT_FOR_TASK step:
// - index_not_found_exception for the LEGACY_REINDEX target index
- // - strict_dynamic_mapping_exception
+ // - incompatible_mapping_exception
throwBadResponse(stateP, left as never);
+ } else {
+ throwBadResponse(stateP, left);
}
}
} else if (stateP.controlState === 'LEGACY_DELETE') {
@@ -405,8 +402,8 @@ export const model = (currentState: State, resW: ResponseType):
} else if (Either.isLeft(res)) {
const left = res.left;
if (
- left.type === 'remove_index_not_a_concrete_index' ||
- (left.type === 'index_not_found_exception' && left.index === stateP.legacyIndex)
+ isLeftTypeof(left, 'remove_index_not_a_concrete_index') ||
+ (isLeftTypeof(left, 'index_not_found_exception') && left.index === stateP.legacyIndex)
) {
// index_not_found_exception, another Kibana instance already
// deleted the legacy index
@@ -419,13 +416,18 @@ export const model = (currentState: State, resW: ResponseType):
// step. However, by not skipping ahead we limit branches in the
// control state progression and simplify the implementation.
return { ...stateP, controlState: 'SET_SOURCE_WRITE_BLOCK' };
- } else {
+ } else if (
+ isLeftTypeof(left, 'index_not_found_exception') ||
+ isLeftTypeof(left, 'alias_not_found_exception')
+ ) {
// We don't handle the following errors as the migration algorithm
// will never cause them to occur:
// - alias_not_found_exception we're not using must_exist
// - index_not_found_exception for source index into which we reindex
// the legacy index
throwBadResponse(stateP, left as never);
+ } else {
+ throwBadResponse(stateP, left);
}
} else {
throwBadResponse(stateP, res);
@@ -438,11 +440,13 @@ export const model = (currentState: State, resW: ResponseType):
...stateP,
controlState: 'CREATE_REINDEX_TEMP',
};
- } else {
+ } else if (isLeftTypeof(res.left, 'index_not_found_exception')) {
// We don't handle the following errors as the migration algorithm
// will never cause them to occur:
// - index_not_found_exception
- return throwBadResponse(stateP, res as never);
+ return throwBadResponse(stateP, res.left as never);
+ } else {
+ return throwBadResponse(stateP, res.left);
}
} else if (stateP.controlState === 'CREATE_REINDEX_TEMP') {
const res = resW as ExcludeRetryableEsError>;
@@ -477,8 +481,8 @@ export const model = (currentState: State, resW: ResponseType):
} else {
const left = res.left;
if (
- left.type === 'target_index_had_write_block' ||
- (left.type === 'index_not_found_exception' && left.index === stateP.tempIndex)
+ isLeftTypeof(left, 'target_index_had_write_block') ||
+ (isLeftTypeof(left, 'index_not_found_exception') && left.index === stateP.tempIndex)
) {
// index_not_found_exception:
// another instance completed the MARK_VERSION_INDEX_READY and
@@ -493,10 +497,25 @@ export const model = (currentState: State, resW: ResponseType):
...stateP,
controlState: 'SET_TEMP_WRITE_BLOCK',
};
- } else {
- // Don't handle incompatible_mapping_exception as we will never add a write
- // block to the temp index or change the mappings.
+ } else if (isLeftTypeof(left, 'wait_for_task_completion_timeout')) {
+ // After waiting for the specified timeout, the task has not yet
+ // completed. Retry this step to see if the task has completed after an
+ // exponential delay. We will basically keep polling forever until the
+ // Elasticsearch task succeeds or fails.
+ return delayRetryState(stateP, left.message, Number.MAX_SAFE_INTEGER);
+ } else if (
+ isLeftTypeof(left, 'index_not_found_exception') ||
+ isLeftTypeof(left, 'incompatible_mapping_exception')
+ ) {
+ // Don't handle the following errors as the migration algorithm should
+ // never cause them to occur:
+ // - incompatible_mapping_exception the temp index has `dynamic: false`
+ // mappings
+ // - index_not_found_exception for the source index, we will never
+ // delete the source index
throwBadResponse(stateP, left as never);
+ } else {
+ throwBadResponse(stateP, left);
}
}
} else if (stateP.controlState === 'SET_TEMP_WRITE_BLOCK') {
@@ -508,7 +527,7 @@ export const model = (currentState: State, resW: ResponseType):
};
} else {
const left = res.left;
- if (left.type === 'index_not_found_exception') {
+ if (isLeftTypeof(left, 'index_not_found_exception')) {
// index_not_found_exception:
// another instance completed the MARK_VERSION_INDEX_READY and
// removed the temp index.
@@ -520,7 +539,6 @@ export const model = (currentState: State, resW: ResponseType):
controlState: 'CLONE_TEMP_TO_TARGET',
};
} else {
- // @ts-expect-error TS doesn't correctly narrow this to never
throwBadResponse(stateP, left);
}
}
@@ -533,7 +551,7 @@ export const model = (currentState: State, resW: ResponseType):
};
} else {
const left = res.left;
- if (left.type === 'index_not_found_exception') {
+ if (isLeftTypeof(left, 'index_not_found_exception')) {
// index_not_found_exception means another instance already completed
// the MARK_VERSION_INDEX_READY step and removed the temp index
// We still perform the OUTDATED_DOCUMENTS_* and
@@ -543,8 +561,9 @@ export const model = (currentState: State, resW: ResponseType):
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH',
};
+ } else {
+ throwBadResponse(stateP, left);
}
- throwBadResponse(stateP, res as never);
}
} else if (stateP.controlState === 'OUTDATED_DOCUMENTS_SEARCH') {
const res = resW as ExcludeRetryableEsError>;
@@ -611,7 +630,16 @@ export const model = (currentState: State, resW: ResponseType):
};
}
} else {
- throwBadResponse(stateP, res);
+ const left = res.left;
+ if (isLeftTypeof(left, 'wait_for_task_completion_timeout')) {
+ // After waiting for the specified timeout, the task has not yet
+ // completed. Retry this step to see if the task has completed after an
+ // exponential delay. We will basically keep polling forever until the
+ // Elasticsearch task succeeds or fails.
+ return delayRetryState(stateP, res.left.message, Number.MAX_SAFE_INTEGER);
+ } else {
+ throwBadResponse(stateP, left);
+ }
}
} else if (stateP.controlState === 'CREATE_NEW_TARGET') {
const res = resW as ExcludeRetryableEsError>;
@@ -632,13 +660,13 @@ export const model = (currentState: State, resW: ResponseType):
return { ...stateP, controlState: 'DONE' };
} else {
const left = res.left;
- if (left.type === 'alias_not_found_exception') {
+ if (isLeftTypeof(left, 'alias_not_found_exception')) {
// the versionIndexReadyActions checks that the currentAlias is still
// pointing to the source index. If this fails with an
// alias_not_found_exception another instance has completed a
// migration from the same source.
return { ...stateP, controlState: 'MARK_VERSION_INDEX_READY_CONFLICT' };
- } else if (left.type === 'index_not_found_exception') {
+ } else if (isLeftTypeof(left, 'index_not_found_exception')) {
if (left.index === stateP.tempIndex) {
// another instance has already completed the migration and deleted
// the temporary index
@@ -649,7 +677,7 @@ export const model = (currentState: State, resW: ResponseType):
// index handled above.
throwBadResponse(stateP, left as never);
}
- } else if (left.type === 'remove_index_not_a_concrete_index') {
+ } else if (isLeftTypeof(left, 'remove_index_not_a_concrete_index')) {
// We don't handle this error as the migration algorithm will never
// cause it to occur (this error is only relevant to the LEGACY_DELETE
// step).
@@ -708,12 +736,14 @@ export const createInitialState = ({
preMigrationScript,
migrationVersionPerType,
indexPrefix,
+ migrationsConfig,
}: {
kibanaVersion: string;
targetMappings: IndexMapping;
preMigrationScript?: string;
migrationVersionPerType: SavedObjectsMigrationVersion;
indexPrefix: string;
+ migrationsConfig: SavedObjectsMigrationConfigType;
}): InitState => {
const outdatedDocumentsQuery = {
bool: {
@@ -753,6 +783,7 @@ export const createInitialState = ({
outdatedDocumentsQuery,
retryCount: 0,
retryDelay: 0,
+ retryAttempts: migrationsConfig.retryAttempts,
logs: [],
};
return initialState;
diff --git a/src/core/server/saved_objects/migrationsv2/state_action_machine.test.ts b/src/core/server/saved_objects/migrationsv2/state_action_machine.test.ts
index 6625c446e22825..ebbb540c9b4fdf 100644
--- a/src/core/server/saved_objects/migrationsv2/state_action_machine.test.ts
+++ b/src/core/server/saved_objects/migrationsv2/state_action_machine.test.ts
@@ -89,12 +89,4 @@ describe('state action machine', () => {
}
`);
});
-
- test("rejects if control state doesn't change after 50 steps", async () => {
- await expect(
- stateActionMachine(state, next, countUntilModel(51))
- ).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Control state didn't change after 50 steps aborting."`
- );
- });
});
diff --git a/src/core/server/saved_objects/migrationsv2/state_action_machine.ts b/src/core/server/saved_objects/migrationsv2/state_action_machine.ts
index c5aa4bf7c42c6c..b011ab694e1454 100644
--- a/src/core/server/saved_objects/migrationsv2/state_action_machine.ts
+++ b/src/core/server/saved_objects/migrationsv2/state_action_machine.ts
@@ -10,8 +10,6 @@ export interface ControlState {
controlState: string;
}
-const MAX_STEPS_WITHOUT_CONTROL_STATE_CHANGE = 50;
-
/**
* A state-action machine next function that returns the next action thunk
* based on the passed in state.
@@ -65,7 +63,6 @@ export async function stateActionMachine(
model: Model
) {
let state = initialState;
- let controlStateStepCounter = 0;
let nextAction = next(state);
while (nextAction != null) {
@@ -73,15 +70,6 @@ export async function stateActionMachine(
const actionResponse = await nextAction();
const newState = model(state, actionResponse);
- controlStateStepCounter =
- newState.controlState === state.controlState ? controlStateStepCounter + 1 : 0;
- if (controlStateStepCounter >= MAX_STEPS_WITHOUT_CONTROL_STATE_CHANGE) {
- // This is just a fail-safe to ensure we don't get stuck in an infinite loop
- throw new Error(
- `Control state didn't change after ${MAX_STEPS_WITHOUT_CONTROL_STATE_CHANGE} steps aborting.`
- );
- }
-
// Get ready for the next step
state = newState;
nextAction = next(state);
diff --git a/src/core/server/saved_objects/migrationsv2/types.ts b/src/core/server/saved_objects/migrationsv2/types.ts
index b8d67d04b33345..dbdd5774dfa62d 100644
--- a/src/core/server/saved_objects/migrationsv2/types.ts
+++ b/src/core/server/saved_objects/migrationsv2/types.ts
@@ -37,6 +37,23 @@ export interface BaseState extends ControlState {
readonly outdatedDocumentsQuery: Record;
readonly retryCount: number;
readonly retryDelay: number;
+ /**
+ * How many times to retry a step that fails with retryable_es_client_error
+ * such as a statusCode: 503 or a snapshot_in_progress_exception.
+ *
+ * We don't want to immediately crash Kibana and cause a reboot for these
+ * intermittent errors. However, if we're still receiving e.g. a 503 after 10 minutes
+ * this is probably not just a temporary problem so we stop trying and exit
+ * with a fatal error.
+ *
+ * Because of the exponential backoff the total time we will retry such errors
+ * is:
+ * max_retry_time = 2+4+8+16+32+64*(RETRY_ATTEMPTS-5) + ACTION_DURATION*RETRY_ATTEMPTS
+ *
+ * For RETRY_ATTEMPTS=15 (default), ACTION_DURATION=0
+ * max_retry_time = 11.7 minutes
+ */
+ readonly retryAttempts: number;
readonly logs: Array<{ level: 'error' | 'info'; message: string }>;
/**
* The current alias e.g. `.kibana` which always points to the latest
diff --git a/src/core/server/saved_objects/saved_objects_config.ts b/src/core/server/saved_objects/saved_objects_config.ts
index 1806bb6e0c8954..7228cb126d286b 100644
--- a/src/core/server/saved_objects/saved_objects_config.ts
+++ b/src/core/server/saved_objects/saved_objects_config.ts
@@ -13,12 +13,14 @@ export type SavedObjectsMigrationConfigType = TypeOf;
logQueries: Type;
ssl: import("@kbn/config-schema").ObjectType<{
- verificationMode: Type<"certificate" | "none" | "full">;
+ verificationMode: Type<"none" | "certificate" | "full">;
certificateAuthorities: Type;
certificate: Type;
key: Type;
@@ -1305,10 +1304,10 @@ export type KibanaResponseFactory = typeof kibanaResponseFactory;
// @public
export const kibanaResponseFactory: {
- custom: | Error | Buffer | {
+ custom: | Error | Buffer | Stream | {
message: string | Error;
attributes?: Record | undefined;
- } | Stream | undefined>(options: CustomHttpResponseOptions) => KibanaResponse;
+ } | undefined>(options: CustomHttpResponseOptions) => KibanaResponse;
badRequest: (options?: ErrorHttpResponseOptions) => KibanaResponse;
unauthorized: (options?: ErrorHttpResponseOptions) => KibanaResponse;
forbidden: (options?: ErrorHttpResponseOptions) => KibanaResponse;
@@ -1585,20 +1584,6 @@ export class LegacyClusterClient implements ILegacyClusterClient {
close(): void;
}
-// @internal @deprecated
-export interface LegacyConfig {
- // (undocumented)
- get(key?: string): T;
- // (undocumented)
- has(key: string): boolean;
- // (undocumented)
- set(key: string, value: any): void;
- // Warning: (ae-forgotten-export) The symbol "LegacyVars" needs to be exported by the entry point index.d.ts
- //
- // (undocumented)
- set(config: LegacyVars): void;
-}
-
// @public @deprecated (undocumented)
export type LegacyElasticsearchClientConfig = Pick & Pick & {
pingTimeout?: ElasticsearchConfig['pingTimeout'] | ConfigOptions['pingTimeout'];
@@ -1634,30 +1619,6 @@ export class LegacyScopedClusterClient implements ILegacyScopedClusterClient {
callAsInternalUser(endpoint: string, clientParams?: Record, options?: LegacyCallAPIOptions): Promise;
}
-// @public @deprecated (undocumented)
-export interface LegacyServiceSetupDeps {
- // Warning: (ae-forgotten-export) The symbol "LegacyCoreSetup" needs to be exported by the entry point index.d.ts
- //
- // (undocumented)
- core: LegacyCoreSetup;
- // (undocumented)
- plugins: Record;
- // Warning: (ae-forgotten-export) The symbol "UiPlugins" needs to be exported by the entry point index.d.ts
- //
- // (undocumented)
- uiPlugins: UiPlugins;
-}
-
-// @public @deprecated (undocumented)
-export interface LegacyServiceStartDeps {
- // Warning: (ae-forgotten-export) The symbol "LegacyCoreStart" needs to be exported by the entry point index.d.ts
- //
- // (undocumented)
- core: LegacyCoreStart;
- // (undocumented)
- plugins: Record;
-}
-
// Warning: (ae-forgotten-export) The symbol "lifecycleResponseFactory" needs to be exported by the entry point index.d.ts
//
// @public
@@ -3259,9 +3220,9 @@ export const validBodyOutput: readonly ["data", "stream"];
//
// src/core/server/elasticsearch/client/types.ts:94:7 - (ae-forgotten-export) The symbol "Explanation" needs to be exported by the entry point index.d.ts
// src/core/server/http/router/response.ts:297:3 - (ae-forgotten-export) The symbol "KibanaResponse" needs to be exported by the entry point index.d.ts
-// src/core/server/plugins/types.ts:286:3 - (ae-forgotten-export) The symbol "KibanaConfigType" needs to be exported by the entry point index.d.ts
-// src/core/server/plugins/types.ts:286:3 - (ae-forgotten-export) The symbol "SharedGlobalConfigKeys" needs to be exported by the entry point index.d.ts
-// src/core/server/plugins/types.ts:289:3 - (ae-forgotten-export) The symbol "SavedObjectsConfigType" needs to be exported by the entry point index.d.ts
-// src/core/server/plugins/types.ts:394:5 - (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an export "create"
+// src/core/server/plugins/types.ts:293:3 - (ae-forgotten-export) The symbol "KibanaConfigType" needs to be exported by the entry point index.d.ts
+// src/core/server/plugins/types.ts:293:3 - (ae-forgotten-export) The symbol "SharedGlobalConfigKeys" needs to be exported by the entry point index.d.ts
+// src/core/server/plugins/types.ts:296:3 - (ae-forgotten-export) The symbol "SavedObjectsConfigType" needs to be exported by the entry point index.d.ts
+// src/core/server/plugins/types.ts:401:5 - (ae-unresolved-link) The @link reference could not be resolved: The package "kibana" does not have an export "create"
```
diff --git a/src/core/server/server.test.mocks.ts b/src/core/server/server.test.mocks.ts
index 96047dc6921ec5..2bd3028b2f1b65 100644
--- a/src/core/server/server.test.mocks.ts
+++ b/src/core/server/server.test.mocks.ts
@@ -58,7 +58,7 @@ jest.doMock('./ui_settings/ui_settings_service', () => ({
}));
export const mockEnsureValidConfiguration = jest.fn();
-jest.doMock('./legacy/config/ensure_valid_configuration', () => ({
+jest.doMock('./config/ensure_valid_configuration', () => ({
ensureValidConfiguration: mockEnsureValidConfiguration,
}));
diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts
index fcf09b0295bcbd..534d7df9d94666 100644
--- a/src/core/server/server.test.ts
+++ b/src/core/server/server.test.ts
@@ -99,7 +99,6 @@ test('injects legacy dependency to context#setup()', async () => {
pluginDependencies: new Map([
[pluginA, []],
[pluginB, [pluginA]],
- [mockLegacyService.legacyId, [pluginA, pluginB]],
]),
});
});
@@ -108,12 +107,10 @@ test('runs services on "start"', async () => {
const server = new Server(rawConfigService, env, logger);
expect(mockHttpService.setup).not.toHaveBeenCalled();
- expect(mockLegacyService.start).not.toHaveBeenCalled();
await server.setup();
expect(mockHttpService.start).not.toHaveBeenCalled();
- expect(mockLegacyService.start).not.toHaveBeenCalled();
expect(mockSavedObjectsService.start).not.toHaveBeenCalled();
expect(mockUiSettingsService.start).not.toHaveBeenCalled();
expect(mockMetricsService.start).not.toHaveBeenCalled();
@@ -121,7 +118,6 @@ test('runs services on "start"', async () => {
await server.start();
expect(mockHttpService.start).toHaveBeenCalledTimes(1);
- expect(mockLegacyService.start).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.start).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.start).toHaveBeenCalledTimes(1);
expect(mockMetricsService.start).toHaveBeenCalledTimes(1);
@@ -164,26 +160,6 @@ test('stops services on "stop"', async () => {
});
test(`doesn't setup core services if config validation fails`, async () => {
- mockConfigService.validate.mockImplementationOnce(() => {
- return Promise.reject(new Error('invalid config'));
- });
- const server = new Server(rawConfigService, env, logger);
- await expect(server.setup()).rejects.toThrowErrorMatchingInlineSnapshot(`"invalid config"`);
-
- expect(mockHttpService.setup).not.toHaveBeenCalled();
- expect(mockElasticsearchService.setup).not.toHaveBeenCalled();
- expect(mockPluginsService.setup).not.toHaveBeenCalled();
- expect(mockLegacyService.setup).not.toHaveBeenCalled();
- expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
- expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
- expect(mockRenderingService.setup).not.toHaveBeenCalled();
- expect(mockMetricsService.setup).not.toHaveBeenCalled();
- expect(mockStatusService.setup).not.toHaveBeenCalled();
- expect(mockLoggingService.setup).not.toHaveBeenCalled();
- expect(mockI18nService.setup).not.toHaveBeenCalled();
-});
-
-test(`doesn't setup core services if legacy config validation fails`, async () => {
mockEnsureValidConfiguration.mockImplementation(() => {
throw new Error('Unknown configuration keys');
});
diff --git a/src/core/server/server.ts b/src/core/server/server.ts
index b575b2779082cf..b34d7fec3dcbf4 100644
--- a/src/core/server/server.ts
+++ b/src/core/server/server.ts
@@ -8,15 +8,20 @@
import apm from 'elastic-apm-node';
import { config as pathConfig } from '@kbn/utils';
-import { mapToObject } from '@kbn/std';
-import { ConfigService, Env, RawConfigurationProvider, coreDeprecationProvider } from './config';
+import {
+ ConfigService,
+ Env,
+ RawConfigurationProvider,
+ coreDeprecationProvider,
+ ensureValidConfiguration,
+} from './config';
import { CoreApp } from './core_app';
import { I18nService } from './i18n';
import { ElasticsearchService } from './elasticsearch';
import { HttpService } from './http';
import { HttpResourcesService } from './http_resources';
import { RenderingService } from './rendering';
-import { LegacyService, ensureValidConfiguration } from './legacy';
+import { LegacyService } from './legacy';
import { Logger, LoggerFactory, LoggingService, ILoggingSystem } from './logging';
import { UiSettingsService } from './ui_settings';
import { PluginsService, config as pluginsConfig } from './plugins';
@@ -121,22 +126,13 @@ export class Server {
const { pluginTree, pluginPaths, uiPlugins } = await this.plugins.discover({
environment: environmentSetup,
});
- const legacyConfigSetup = await this.legacy.setupLegacyConfig();
// Immediately terminate in case of invalid configuration
// This needs to be done after plugin discovery
- await this.configService.validate();
- await ensureValidConfiguration(this.configService, legacyConfigSetup);
+ await ensureValidConfiguration(this.configService);
const contextServiceSetup = this.context.setup({
- // We inject a fake "legacy plugin" with dependencies on every plugin so that legacy plugins:
- // 1) Can access context from any KP plugin
- // 2) Can register context providers that will only be available to other legacy plugins and will not leak into
- // New Platform plugins.
- pluginDependencies: new Map([
- ...pluginTree.asOpaqueIds,
- [this.legacy.legacyId, [...pluginTree.asOpaqueIds.keys()]],
- ]),
+ pluginDependencies: new Map([...pluginTree.asOpaqueIds]),
});
const httpSetup = await this.http.setup({
@@ -222,9 +218,7 @@ export class Server {
this.#pluginsInitialized = pluginsSetup.initialized;
await this.legacy.setup({
- core: { ...coreSetup, plugins: pluginsSetup, rendering: renderingSetup },
- plugins: mapToObject(pluginsSetup.contracts),
- uiPlugins,
+ http: httpSetup,
});
this.registerCoreContext(coreSetup);
@@ -266,15 +260,7 @@ export class Server {
coreUsageData: coreUsageDataStart,
};
- const pluginsStart = await this.plugins.start(this.coreStart);
-
- await this.legacy.start({
- core: {
- ...this.coreStart,
- plugins: pluginsStart,
- },
- plugins: mapToObject(pluginsStart.contracts),
- });
+ await this.plugins.start(this.coreStart);
await this.http.start();
diff --git a/src/core/server/types.ts b/src/core/server/types.ts
index ab1d6c6d95d0a9..be07a3cfb1fd32 100644
--- a/src/core/server/types.ts
+++ b/src/core/server/types.ts
@@ -39,6 +39,5 @@ export type {
} from './saved_objects/types';
export type { DomainDeprecationDetails, DeprecationsGetResponse } from './deprecations/types';
export * from './ui_settings/types';
-export * from './legacy/types';
export type { EnvironmentMode, PackageInfo } from '@kbn/config';
export type { ExternalUrlConfig, IExternalUrlPolicy } from './external_url';
diff --git a/src/core/server/ui_settings/integration_tests/doc_exists.ts b/src/core/server/ui_settings/integration_tests/doc_exists.ts
index 86a9a24fab6de6..59c27cc136174e 100644
--- a/src/core/server/ui_settings/integration_tests/doc_exists.ts
+++ b/src/core/server/ui_settings/integration_tests/doc_exists.ts
@@ -9,10 +9,10 @@
import { getServices, chance } from './lib';
export const docExistsSuite = (savedObjectsIndex: string) => () => {
- async function setup(options: any = {}) {
+ async function setup(options: { initialSettings?: Record } = {}) {
const { initialSettings } = options;
- const { kbnServer, uiSettings, callCluster } = getServices();
+ const { uiSettings, callCluster, supertest } = getServices();
// delete the kibana index to ensure we start fresh
await callCluster('deleteByQuery', {
@@ -21,31 +21,30 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
conflicts: 'proceed',
query: { match_all: {} },
},
+ refresh: true,
+ wait_for_completion: true,
});
if (initialSettings) {
await uiSettings.setMany(initialSettings);
}
- return { kbnServer, uiSettings };
+ return { uiSettings, supertest };
}
describe('get route', () => {
it('returns a 200 and includes userValues', async () => {
const defaultIndex = chance.word({ length: 10 });
- const { kbnServer } = await setup({
+
+ const { supertest } = await setup({
initialSettings: {
defaultIndex,
},
});
- const { statusCode, result } = await kbnServer.inject({
- method: 'GET',
- url: '/api/kibana/settings',
- });
+ const { body } = await supertest('get', '/api/kibana/settings').expect(200);
- expect(statusCode).toBe(200);
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -64,20 +63,17 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
describe('set route', () => {
it('returns a 200 and all values including update', async () => {
- const { kbnServer } = await setup();
+ const { supertest } = await setup();
const defaultIndex = chance.word();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings/defaultIndex',
- payload: {
- value: defaultIndex,
- },
- });
- expect(statusCode).toBe(200);
+ const { body } = await supertest('post', '/api/kibana/settings/defaultIndex')
+ .send({
+ value: defaultIndex,
+ })
+ .expect(200);
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -94,18 +90,15 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
});
it('returns a 400 if trying to set overridden value', async () => {
- const { kbnServer } = await setup();
+ const { supertest } = await setup();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings/foo',
- payload: {
+ const { body } = await supertest('delete', '/api/kibana/settings/foo')
+ .send({
value: 'baz',
- },
- });
+ })
+ .expect(400);
- expect(statusCode).toBe(400);
- expect(result).toEqual({
+ expect(body).toEqual({
error: 'Bad Request',
message: 'Unable to update "foo" because it is overridden',
statusCode: 400,
@@ -115,22 +108,18 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
describe('setMany route', () => {
it('returns a 200 and all values including updates', async () => {
- const { kbnServer } = await setup();
+ const { supertest } = await setup();
const defaultIndex = chance.word();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings',
- payload: {
+ const { body } = await supertest('post', '/api/kibana/settings')
+ .send({
changes: {
defaultIndex,
},
- },
- });
+ })
+ .expect(200);
- expect(statusCode).toBe(200);
-
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -147,20 +136,17 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
});
it('returns a 400 if trying to set overridden value', async () => {
- const { kbnServer } = await setup();
+ const { supertest } = await setup();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings',
- payload: {
+ const { body } = await supertest('post', '/api/kibana/settings')
+ .send({
changes: {
foo: 'baz',
},
- },
- });
+ })
+ .expect(400);
- expect(statusCode).toBe(400);
- expect(result).toEqual({
+ expect(body).toEqual({
error: 'Bad Request',
message: 'Unable to update "foo" because it is overridden',
statusCode: 400,
@@ -172,19 +158,15 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
it('returns a 200 and deletes the setting', async () => {
const defaultIndex = chance.word({ length: 10 });
- const { kbnServer, uiSettings } = await setup({
+ const { uiSettings, supertest } = await setup({
initialSettings: { defaultIndex },
});
expect(await uiSettings.get('defaultIndex')).toBe(defaultIndex);
- const { statusCode, result } = await kbnServer.inject({
- method: 'DELETE',
- url: '/api/kibana/settings/defaultIndex',
- });
+ const { body } = await supertest('delete', '/api/kibana/settings/defaultIndex').expect(200);
- expect(statusCode).toBe(200);
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -197,15 +179,11 @@ export const docExistsSuite = (savedObjectsIndex: string) => () => {
});
});
it('returns a 400 if deleting overridden value', async () => {
- const { kbnServer } = await setup();
+ const { supertest } = await setup();
- const { statusCode, result } = await kbnServer.inject({
- method: 'DELETE',
- url: '/api/kibana/settings/foo',
- });
+ const { body } = await supertest('delete', '/api/kibana/settings/foo').expect(400);
- expect(statusCode).toBe(400);
- expect(result).toEqual({
+ expect(body).toEqual({
error: 'Bad Request',
message: 'Unable to update "foo" because it is overridden',
statusCode: 400,
diff --git a/src/core/server/ui_settings/integration_tests/doc_missing.ts b/src/core/server/ui_settings/integration_tests/doc_missing.ts
index 9fa3e4c1cfe78a..29d1daf3b20328 100644
--- a/src/core/server/ui_settings/integration_tests/doc_missing.ts
+++ b/src/core/server/ui_settings/integration_tests/doc_missing.ts
@@ -11,14 +11,7 @@ import { getServices, chance } from './lib';
export const docMissingSuite = (savedObjectsIndex: string) => () => {
// ensure the kibana index has no documents
beforeEach(async () => {
- const { kbnServer, callCluster } = getServices();
-
- // write a setting to ensure kibana index is created
- await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings/defaultIndex',
- payload: { value: 'abc' },
- });
+ const { callCluster } = getServices();
// delete all docs from kibana index to ensure savedConfig is not found
await callCluster('deleteByQuery', {
@@ -31,15 +24,11 @@ export const docMissingSuite = (savedObjectsIndex: string) => () => {
describe('get route', () => {
it('creates doc, returns a 200 with settings', async () => {
- const { kbnServer } = getServices();
+ const { supertest } = getServices();
- const { statusCode, result } = await kbnServer.inject({
- method: 'GET',
- url: '/api/kibana/settings',
- });
+ const { body } = await supertest('get', '/api/kibana/settings').expect(200);
- expect(statusCode).toBe(200);
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -55,17 +44,17 @@ export const docMissingSuite = (savedObjectsIndex: string) => () => {
describe('set route', () => {
it('creates doc, returns a 200 with value set', async () => {
- const { kbnServer } = getServices();
+ const { supertest } = getServices();
const defaultIndex = chance.word();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings/defaultIndex',
- payload: { value: defaultIndex },
- });
- expect(statusCode).toBe(200);
- expect(result).toMatchObject({
+ const { body } = await supertest('post', '/api/kibana/settings/defaultIndex')
+ .send({
+ value: defaultIndex,
+ })
+ .expect(200);
+
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -84,19 +73,17 @@ export const docMissingSuite = (savedObjectsIndex: string) => () => {
describe('setMany route', () => {
it('creates doc, returns 200 with updated values', async () => {
- const { kbnServer } = getServices();
+ const { supertest } = getServices();
const defaultIndex = chance.word();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings',
- payload: {
+
+ const { body } = await supertest('post', '/api/kibana/settings')
+ .send({
changes: { defaultIndex },
- },
- });
+ })
+ .expect(200);
- expect(statusCode).toBe(200);
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
@@ -115,15 +102,11 @@ export const docMissingSuite = (savedObjectsIndex: string) => () => {
describe('delete route', () => {
it('creates doc, returns a 200 with just buildNum', async () => {
- const { kbnServer } = getServices();
+ const { supertest } = getServices();
- const { statusCode, result } = await kbnServer.inject({
- method: 'DELETE',
- url: '/api/kibana/settings/defaultIndex',
- });
+ const { body } = await supertest('delete', '/api/kibana/settings/defaultIndex').expect(200);
- expect(statusCode).toBe(200);
- expect(result).toMatchObject({
+ expect(body).toMatchObject({
settings: {
buildNum: {
userValue: expect.any(Number),
diff --git a/src/core/server/ui_settings/integration_tests/doc_missing_and_index_read_only.ts b/src/core/server/ui_settings/integration_tests/doc_missing_and_index_read_only.ts
deleted file mode 100644
index 78fdab7eb8c5d3..00000000000000
--- a/src/core/server/ui_settings/integration_tests/doc_missing_and_index_read_only.ts
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { getServices, chance } from './lib';
-
-export const docMissingAndIndexReadOnlySuite = (savedObjectsIndex: string) => () => {
- // ensure the kibana index has no documents
- beforeEach(async () => {
- const { kbnServer, callCluster } = getServices();
-
- // write a setting to ensure kibana index is created
- await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings/defaultIndex',
- payload: { value: 'abc' },
- });
-
- // delete all docs from kibana index to ensure savedConfig is not found
- await callCluster('deleteByQuery', {
- index: savedObjectsIndex,
- body: {
- query: { match_all: {} },
- },
- });
-
- // set the index to read only
- await callCluster('indices.putSettings', {
- index: savedObjectsIndex,
- body: {
- index: {
- blocks: {
- read_only: true,
- },
- },
- },
- });
- });
-
- afterEach(async () => {
- const { callCluster } = getServices();
-
- // disable the read only block
- await callCluster('indices.putSettings', {
- index: savedObjectsIndex,
- body: {
- index: {
- blocks: {
- read_only: false,
- },
- },
- },
- });
- });
-
- describe('get route', () => {
- it('returns simulated doc with buildNum', async () => {
- const { kbnServer } = getServices();
-
- const { statusCode, result } = await kbnServer.inject({
- method: 'GET',
- url: '/api/kibana/settings',
- });
-
- expect(statusCode).toBe(200);
-
- expect(result).toMatchObject({
- settings: {
- buildNum: {
- userValue: expect.any(Number),
- },
- foo: {
- userValue: 'bar',
- isOverridden: true,
- },
- },
- });
- });
- });
-
- describe('set route', () => {
- it('fails with 403 forbidden', async () => {
- const { kbnServer } = getServices();
-
- const defaultIndex = chance.word();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings/defaultIndex',
- payload: { value: defaultIndex },
- });
-
- expect(statusCode).toBe(403);
-
- expect(result).toEqual({
- error: 'Forbidden',
- message: expect.stringContaining('index read-only'),
- statusCode: 403,
- });
- });
- });
-
- describe('setMany route', () => {
- it('fails with 403 forbidden', async () => {
- const { kbnServer } = getServices();
-
- const defaultIndex = chance.word();
- const { statusCode, result } = await kbnServer.inject({
- method: 'POST',
- url: '/api/kibana/settings',
- payload: {
- changes: { defaultIndex },
- },
- });
-
- expect(statusCode).toBe(403);
- expect(result).toEqual({
- error: 'Forbidden',
- message: expect.stringContaining('index read-only'),
- statusCode: 403,
- });
- });
- });
-
- describe('delete route', () => {
- it('fails with 403 forbidden', async () => {
- const { kbnServer } = getServices();
-
- const { statusCode, result } = await kbnServer.inject({
- method: 'DELETE',
- url: '/api/kibana/settings/defaultIndex',
- });
-
- expect(statusCode).toBe(403);
- expect(result).toEqual({
- error: 'Forbidden',
- message: expect.stringContaining('index read-only'),
- statusCode: 403,
- });
- });
- });
-};
diff --git a/src/core/server/ui_settings/integration_tests/index.test.ts b/src/core/server/ui_settings/integration_tests/index.test.ts
index 6e6c357e6cccc6..6c7cdfa43cf57f 100644
--- a/src/core/server/ui_settings/integration_tests/index.test.ts
+++ b/src/core/server/ui_settings/integration_tests/index.test.ts
@@ -12,7 +12,6 @@ import { getEnvOptions } from '@kbn/config/target/mocks';
import { startServers, stopServers } from './lib';
import { docExistsSuite } from './doc_exists';
import { docMissingSuite } from './doc_missing';
-import { docMissingAndIndexReadOnlySuite } from './doc_missing_and_index_read_only';
const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
const savedObjectIndex = `.kibana_${kibanaVersion}_001`;
@@ -23,7 +22,6 @@ describe('uiSettings/routes', function () {
beforeAll(startServers);
/* eslint-disable jest/valid-describe */
describe('doc missing', docMissingSuite(savedObjectIndex));
- describe('doc missing and index readonly', docMissingAndIndexReadOnlySuite(savedObjectIndex));
describe('doc exists', docExistsSuite(savedObjectIndex));
/* eslint-enable jest/valid-describe */
afterAll(stopServers);
diff --git a/src/core/server/ui_settings/integration_tests/lib/servers.ts b/src/core/server/ui_settings/integration_tests/lib/servers.ts
index 87176bed5de114..d019dc640f3850 100644
--- a/src/core/server/ui_settings/integration_tests/lib/servers.ts
+++ b/src/core/server/ui_settings/integration_tests/lib/servers.ts
@@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
+import type supertest from 'supertest';
import { SavedObjectsClientContract, IUiSettingsClient } from 'src/core/server';
import {
@@ -13,6 +14,8 @@ import {
TestElasticsearchUtils,
TestKibanaUtils,
TestUtils,
+ HttpMethod,
+ getSupertest,
} from '../../../../test_helpers/kbn_server';
import { LegacyAPICaller } from '../../../elasticsearch/';
import { httpServerMock } from '../../../http/http_server.mocks';
@@ -21,13 +24,11 @@ let servers: TestUtils;
let esServer: TestElasticsearchUtils;
let kbn: TestKibanaUtils;
-let kbnServer: TestKibanaUtils['kbnServer'];
-
interface AllServices {
- kbnServer: TestKibanaUtils['kbnServer'];
savedObjectsClient: SavedObjectsClientContract;
callCluster: LegacyAPICaller;
uiSettings: IUiSettingsClient;
+ supertest: (method: HttpMethod, path: string) => supertest.Test;
}
let services: AllServices;
@@ -47,7 +48,6 @@ export async function startServers() {
});
esServer = await servers.startES();
kbn = await servers.startKibana();
- kbnServer = kbn.kbnServer;
}
export function getServices() {
@@ -61,12 +61,10 @@ export function getServices() {
httpServerMock.createKibanaRequest()
);
- const uiSettings = kbnServer.newPlatform.start.core.uiSettings.asScopedToClient(
- savedObjectsClient
- );
+ const uiSettings = kbn.coreStart.uiSettings.asScopedToClient(savedObjectsClient);
services = {
- kbnServer,
+ supertest: (method: HttpMethod, path: string) => getSupertest(kbn.root, method, path),
callCluster,
savedObjectsClient,
uiSettings,
@@ -77,7 +75,6 @@ export function getServices() {
export async function stopServers() {
services = null!;
- kbnServer = null!;
if (servers) {
await esServer.stop();
await kbn.stop();
diff --git a/src/core/server/utils/from_root.ts b/src/core/server/utils/from_root.ts
deleted file mode 100644
index 377f4d0e29ca57..00000000000000
--- a/src/core/server/utils/from_root.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { resolve } from 'path';
-import { pkg } from './package_json';
-
-export function fromRoot(...args: string[]) {
- return resolve(pkg.__dirname, ...args);
-}
diff --git a/src/core/server/utils/index.ts b/src/core/server/utils/index.ts
deleted file mode 100644
index b0776c48f3bed2..00000000000000
--- a/src/core/server/utils/index.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export * from './from_root';
-export * from './package_json';
diff --git a/src/core/server/utils/package_json.ts b/src/core/server/utils/package_json.ts
deleted file mode 100644
index 57ca781d7d78ec..00000000000000
--- a/src/core/server/utils/package_json.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { dirname } from 'path';
-
-export const pkg = {
- __filename: require.resolve('../../../../package.json'),
- __dirname: dirname(require.resolve('../../../../package.json')),
- ...require('../../../../package.json'),
-};
diff --git a/src/core/test_helpers/kbn_server.ts b/src/core/test_helpers/kbn_server.ts
index 1844b5de3dc354..950ab5f4392e15 100644
--- a/src/core/test_helpers/kbn_server.ts
+++ b/src/core/test_helpers/kbn_server.ts
@@ -29,11 +29,10 @@ import { resolve } from 'path';
import { BehaviorSubject } from 'rxjs';
import supertest from 'supertest';
-import { CoreStart } from 'src/core/server';
+import { InternalCoreSetup, InternalCoreStart } from '../server/internal_types';
import { LegacyAPICaller } from '../server/elasticsearch';
import { CliArgs, Env } from '../server/config';
import { Root } from '../server/root';
-import KbnServer from '../../legacy/server/kbn_server';
export type HttpMethod = 'delete' | 'get' | 'head' | 'post' | 'put';
@@ -125,14 +124,6 @@ export function createRootWithCorePlugins(settings = {}, cliArgs: Partial ReturnType
@@ -164,8 +155,8 @@ export interface TestElasticsearchUtils {
export interface TestKibanaUtils {
root: Root;
- coreStart: CoreStart;
- kbnServer: KbnServer;
+ coreSetup: InternalCoreSetup;
+ coreStart: InternalCoreStart;
stop: () => Promise;
}
@@ -283,14 +274,12 @@ export function createTestServers({
startKibana: async () => {
const root = createRootWithCorePlugins(kbnSettings);
- await root.setup();
+ const coreSetup = await root.setup();
const coreStart = await root.start();
- const kbnServer = getKbnServer(root);
-
return {
root,
- kbnServer,
+ coreSetup,
coreStart,
stop: async () => await root.shutdown(),
};
diff --git a/src/dev/build/tasks/bin/scripts/kibana b/src/dev/build/tasks/bin/scripts/kibana
index 3c12c8bbf58d0f..a4fc5385500b58 100755
--- a/src/dev/build/tasks/bin/scripts/kibana
+++ b/src/dev/build/tasks/bin/scripts/kibana
@@ -26,4 +26,4 @@ if [ -f "${CONFIG_DIR}/node.options" ]; then
KBN_NODE_OPTS="$(grep -v ^# < ${CONFIG_DIR}/node.options | xargs)"
fi
-NODE_OPTIONS="--no-warnings --max-http-header-size=65536 --tls-min-v1.0 $KBN_NODE_OPTS $NODE_OPTIONS" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli/dist" ${@}
+NODE_OPTIONS="--no-warnings --max-http-header-size=65536 $KBN_NODE_OPTS $NODE_OPTIONS" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli/dist" ${@}
diff --git a/src/dev/build/tasks/os_packages/create_os_package_tasks.ts b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts
index e37a61582c6a85..2ae882000cae00 100644
--- a/src/dev/build/tasks/os_packages/create_os_package_tasks.ts
+++ b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts
@@ -49,6 +49,7 @@ export const CreateRpmPackage: Task = {
},
};
+const dockerBuildDate = new Date().toISOString();
export const CreateDockerCentOS: Task = {
description: 'Creating Docker CentOS image',
@@ -57,11 +58,13 @@ export const CreateDockerCentOS: Task = {
architecture: 'x64',
context: false,
image: true,
+ dockerBuildDate,
});
await runDockerGenerator(config, log, build, {
architecture: 'aarch64',
context: false,
image: true,
+ dockerBuildDate,
});
},
};
@@ -76,6 +79,7 @@ export const CreateDockerUBI: Task = {
context: false,
ubi: true,
image: true,
+ dockerBuildDate,
});
}
},
@@ -88,6 +92,7 @@ export const CreateDockerContexts: Task = {
await runDockerGenerator(config, log, build, {
context: true,
image: false,
+ dockerBuildDate,
});
if (!build.isOss()) {
@@ -95,11 +100,13 @@ export const CreateDockerContexts: Task = {
ubi: true,
context: true,
image: false,
+ dockerBuildDate,
});
await runDockerGenerator(config, log, build, {
ironbank: true,
context: true,
image: false,
+ dockerBuildDate,
});
}
},
diff --git a/src/dev/build/tasks/os_packages/docker_generator/run.ts b/src/dev/build/tasks/os_packages/docker_generator/run.ts
index 8bf876b5584319..c72112b7b6b03d 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/run.ts
+++ b/src/dev/build/tasks/os_packages/docker_generator/run.ts
@@ -33,6 +33,7 @@ export async function runDockerGenerator(
image: boolean;
ubi?: boolean;
ironbank?: boolean;
+ dockerBuildDate?: string;
}
) {
// UBI var config
@@ -53,7 +54,7 @@ export async function runDockerGenerator(
const artifactPrefix = `kibana${artifactFlavor}-${version}-linux`;
const artifactTarball = `${artifactPrefix}-${artifactArchitecture}.tar.gz`;
const artifactsDir = config.resolveFromTarget('.');
- const dockerBuildDate = new Date().toISOString();
+ const dockerBuildDate = flags.dockerBuildDate || new Date().toISOString();
// That would produce oss, default and default-ubi7
const dockerBuildDir = config.resolveFromRepo(
'build',
diff --git a/src/legacy/server/config/__snapshots__/config.test.js.snap b/src/legacy/server/config/__snapshots__/config.test.js.snap
deleted file mode 100644
index 3bf471f8aba20f..00000000000000
--- a/src/legacy/server/config/__snapshots__/config.test.js.snap
+++ /dev/null
@@ -1,5 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`lib/config/config class Config() #getDefault(key) array key should throw exception for unknown key 1`] = `"Unknown config key: foo,bar."`;
-
-exports[`lib/config/config class Config() #getDefault(key) dot notation key should throw exception for unknown key 1`] = `"Unknown config key: foo.bar."`;
diff --git a/src/legacy/server/config/config.js b/src/legacy/server/config/config.js
deleted file mode 100644
index 81cb0a36333bd8..00000000000000
--- a/src/legacy/server/config/config.js
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import Joi from 'joi';
-import { set } from '@elastic/safer-lodash-set';
-import _ from 'lodash';
-import { override } from './override';
-import createDefaultSchema from './schema';
-import { unset, deepCloneWithBuffers as clone, IS_KIBANA_DISTRIBUTABLE } from '../../utils';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { pkg } from '../../../core/server/utils';
-const schema = Symbol('Joi Schema');
-const schemaExts = Symbol('Schema Extensions');
-const vals = Symbol('config values');
-
-export class Config {
- static withDefaultSchema(settings = {}) {
- const defaultSchema = createDefaultSchema();
- return new Config(defaultSchema, settings);
- }
-
- constructor(initialSchema, initialSettings) {
- this[schemaExts] = Object.create(null);
- this[vals] = Object.create(null);
-
- this.extendSchema(initialSchema, initialSettings);
- }
-
- extendSchema(extension, settings, key) {
- if (!extension) {
- return;
- }
-
- if (!key) {
- return _.each(extension._inner.children, (child) => {
- this.extendSchema(child.schema, _.get(settings, child.key), child.key);
- });
- }
-
- if (this.has(key)) {
- throw new Error(`Config schema already has key: ${key}`);
- }
-
- set(this[schemaExts], key, extension);
- this[schema] = null;
-
- this.set(key, settings);
- }
-
- removeSchema(key) {
- if (!_.has(this[schemaExts], key)) {
- throw new TypeError(`Unknown schema key: ${key}`);
- }
-
- this[schema] = null;
- unset(this[schemaExts], key);
- unset(this[vals], key);
- }
-
- resetTo(obj) {
- this._commit(obj);
- }
-
- set(key, value) {
- // clone and modify the config
- let config = clone(this[vals]);
- if (_.isPlainObject(key)) {
- config = override(config, key);
- } else {
- set(config, key, value);
- }
-
- // attempt to validate the config value
- this._commit(config);
- }
-
- _commit(newVals) {
- // resolve the current environment
- let env = newVals.env;
- delete newVals.env;
- if (_.isObject(env)) env = env.name;
- if (!env) env = 'production';
-
- const dev = env === 'development';
- const prod = env === 'production';
-
- // pass the environment as context so that it can be refed in config
- const context = {
- env: env,
- prod: prod,
- dev: dev,
- notProd: !prod,
- notDev: !dev,
- version: _.get(pkg, 'version'),
- branch: _.get(pkg, 'branch'),
- buildNum: IS_KIBANA_DISTRIBUTABLE ? pkg.build.number : Number.MAX_SAFE_INTEGER,
- buildSha: IS_KIBANA_DISTRIBUTABLE
- ? pkg.build.sha
- : 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
- dist: IS_KIBANA_DISTRIBUTABLE,
- };
-
- if (!context.dev && !context.prod) {
- throw new TypeError(
- `Unexpected environment "${env}", expected one of "development" or "production"`
- );
- }
-
- const results = Joi.validate(newVals, this.getSchema(), {
- context,
- abortEarly: false,
- });
-
- if (results.error) {
- const error = new Error(results.error.message);
- error.name = results.error.name;
- error.stack = results.error.stack;
- throw error;
- }
-
- this[vals] = results.value;
- }
-
- get(key) {
- if (!key) {
- return clone(this[vals]);
- }
-
- const value = _.get(this[vals], key);
- if (value === undefined) {
- if (!this.has(key)) {
- throw new Error('Unknown config key: ' + key);
- }
- }
- return clone(value);
- }
-
- getDefault(key) {
- const schemaKey = Array.isArray(key) ? key.join('.') : key;
-
- const subSchema = Joi.reach(this.getSchema(), schemaKey);
- if (!subSchema) {
- throw new Error(`Unknown config key: ${key}.`);
- }
-
- return clone(_.get(Joi.describe(subSchema), 'flags.default'));
- }
-
- has(key) {
- function has(key, schema, path) {
- path = path || [];
- // Catch the partial paths
- if (path.join('.') === key) return true;
- // Only go deep on inner objects with children
- if (_.size(schema._inner.children)) {
- for (let i = 0; i < schema._inner.children.length; i++) {
- const child = schema._inner.children[i];
- // If the child is an object recurse through it's children and return
- // true if there's a match
- if (child.schema._type === 'object') {
- if (has(key, child.schema, path.concat([child.key]))) return true;
- // if the child matches, return true
- } else if (path.concat([child.key]).join('.') === key) {
- return true;
- }
- }
- }
- }
-
- if (Array.isArray(key)) {
- // TODO: add .has() support for array keys
- key = key.join('.');
- }
-
- return !!has(key, this.getSchema());
- }
-
- getSchema() {
- if (!this[schema]) {
- this[schema] = (function convertToSchema(children) {
- let schema = Joi.object().keys({}).default();
-
- for (const key of Object.keys(children)) {
- const child = children[key];
- const childSchema = _.isPlainObject(child) ? convertToSchema(child) : child;
-
- if (!childSchema || !childSchema.isJoi) {
- throw new TypeError(
- 'Unable to convert configuration definition value to Joi schema: ' + childSchema
- );
- }
-
- schema = schema.keys({ [key]: childSchema });
- }
-
- return schema;
- })(this[schemaExts]);
- }
-
- return this[schema];
- }
-}
diff --git a/src/legacy/server/config/config.test.js b/src/legacy/server/config/config.test.js
deleted file mode 100644
index b617babb8262db..00000000000000
--- a/src/legacy/server/config/config.test.js
+++ /dev/null
@@ -1,345 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { Config } from './config';
-import _ from 'lodash';
-import Joi from 'joi';
-
-/**
- * Plugins should defined a config method that takes a joi object. By default
- * it should return a way to disallow config
- *
- * Config should be newed up with a joi schema (containing defaults via joi)
- *
- * let schema = { ... }
- * new Config(schema);
- *
- */
-
-const data = {
- test: {
- hosts: ['host-01', 'host-02'],
- client: {
- type: 'datastore',
- host: 'store-01',
- port: 5050,
- },
- },
-};
-
-const schema = Joi.object({
- test: Joi.object({
- enable: Joi.boolean().default(true),
- hosts: Joi.array().items(Joi.string()),
- client: Joi.object({
- type: Joi.string().default('datastore'),
- host: Joi.string(),
- port: Joi.number(),
- }).default(),
- undefValue: Joi.string(),
- }).default(),
-}).default();
-
-describe('lib/config/config', function () {
- describe('class Config()', function () {
- describe('constructor', function () {
- it('should not allow any config if the schema is not passed', function () {
- const config = new Config();
- const run = function () {
- config.set('something.enable', true);
- };
- expect(run).toThrow();
- });
-
- it('should allow keys in the schema', function () {
- const config = new Config(schema);
- const run = function () {
- config.set('test.client.host', 'http://localhost');
- };
- expect(run).not.toThrow();
- });
-
- it('should not allow keys not in the schema', function () {
- const config = new Config(schema);
- const run = function () {
- config.set('paramNotDefinedInTheSchema', true);
- };
- expect(run).toThrow();
- });
-
- it('should not allow child keys not in the schema', function () {
- const config = new Config(schema);
- const run = function () {
- config.set('test.client.paramNotDefinedInTheSchema', true);
- };
- expect(run).toThrow();
- });
-
- it('should set defaults', function () {
- const config = new Config(schema);
- expect(config.get('test.enable')).toBe(true);
- expect(config.get('test.client.type')).toBe('datastore');
- });
- });
-
- describe('#resetTo(object)', function () {
- let config;
- beforeEach(function () {
- config = new Config(schema);
- });
-
- it('should reset the config object with new values', function () {
- config.set(data);
- const newData = config.get();
- newData.test.enable = false;
- config.resetTo(newData);
- expect(config.get()).toEqual(newData);
- });
- });
-
- describe('#has(key)', function () {
- let config;
- beforeEach(function () {
- config = new Config(schema);
- });
-
- it('should return true for fields that exist in the schema', function () {
- expect(config.has('test.undefValue')).toBe(true);
- });
-
- it('should return true for partial objects that exist in the schema', function () {
- expect(config.has('test.client')).toBe(true);
- });
-
- it('should return false for fields that do not exist in the schema', function () {
- expect(config.has('test.client.pool')).toBe(false);
- });
- });
-
- describe('#set(key, value)', function () {
- let config;
-
- beforeEach(function () {
- config = new Config(schema);
- });
-
- it('should use a key and value to set a config value', function () {
- config.set('test.enable', false);
- expect(config.get('test.enable')).toBe(false);
- });
-
- it('should use an object to set config values', function () {
- const hosts = ['host-01', 'host-02'];
- config.set({ test: { enable: false, hosts: hosts } });
- expect(config.get('test.enable')).toBe(false);
- expect(config.get('test.hosts')).toEqual(hosts);
- });
-
- it('should use a flatten object to set config values', function () {
- const hosts = ['host-01', 'host-02'];
- config.set({ 'test.enable': false, 'test.hosts': hosts });
- expect(config.get('test.enable')).toBe(false);
- expect(config.get('test.hosts')).toEqual(hosts);
- });
-
- it('should override values with just the values present', function () {
- const newData = _.cloneDeep(data);
- config.set(data);
- newData.test.enable = false;
- config.set({ test: { enable: false } });
- expect(config.get()).toEqual(newData);
- });
-
- it('should thow an exception when setting a value with the wrong type', function (done) {
- expect.assertions(4);
-
- const run = function () {
- config.set('test.enable', 'something');
- };
-
- try {
- run();
- } catch (err) {
- expect(err).toHaveProperty('name', 'ValidationError');
- expect(err).toHaveProperty(
- 'message',
- 'child "test" fails because [child "enable" fails because ["enable" must be a boolean]]'
- );
- expect(err).not.toHaveProperty('details');
- expect(err).not.toHaveProperty('_object');
- }
-
- done();
- });
- });
-
- describe('#get(key)', function () {
- let config;
-
- beforeEach(function () {
- config = new Config(schema);
- config.set(data);
- });
-
- it('should return the whole config object when called without a key', function () {
- const newData = _.cloneDeep(data);
- newData.test.enable = true;
- expect(config.get()).toEqual(newData);
- });
-
- it('should return the value using dot notation', function () {
- expect(config.get('test.enable')).toBe(true);
- });
-
- it('should return the clone of partial object using dot notation', function () {
- expect(config.get('test.client')).not.toBe(data.test.client);
- expect(config.get('test.client')).toEqual(data.test.client);
- });
-
- it('should throw exception for unknown config values', function () {
- const run = function () {
- config.get('test.does.not.exist');
- };
- expect(run).toThrowError(/Unknown config key: test.does.not.exist/);
- });
-
- it('should not throw exception for undefined known config values', function () {
- const run = function getUndefValue() {
- config.get('test.undefValue');
- };
- expect(run).not.toThrow();
- });
- });
-
- describe('#getDefault(key)', function () {
- let config;
-
- beforeEach(function () {
- config = new Config(schema);
- config.set(data);
- });
-
- describe('dot notation key', function () {
- it('should return undefined if there is no default', function () {
- const hostDefault = config.getDefault('test.client.host');
- expect(hostDefault).toBeUndefined();
- });
-
- it('should return default if specified', function () {
- const typeDefault = config.getDefault('test.client.type');
- expect(typeDefault).toBe('datastore');
- });
-
- it('should throw exception for unknown key', function () {
- expect(() => {
- config.getDefault('foo.bar');
- }).toThrowErrorMatchingSnapshot();
- });
- });
-
- describe('array key', function () {
- it('should return undefined if there is no default', function () {
- const hostDefault = config.getDefault(['test', 'client', 'host']);
- expect(hostDefault).toBeUndefined();
- });
-
- it('should return default if specified', function () {
- const typeDefault = config.getDefault(['test', 'client', 'type']);
- expect(typeDefault).toBe('datastore');
- });
-
- it('should throw exception for unknown key', function () {
- expect(() => {
- config.getDefault(['foo', 'bar']);
- }).toThrowErrorMatchingSnapshot();
- });
- });
-
- it('object schema with no default should return default value for property', function () {
- const noDefaultSchema = Joi.object()
- .keys({
- foo: Joi.array().items(Joi.string().min(1)).default(['bar']),
- })
- .required();
-
- const config = new Config(noDefaultSchema);
- config.set({
- foo: ['baz'],
- });
-
- const fooDefault = config.getDefault('foo');
- expect(fooDefault).toEqual(['bar']);
- });
-
- it('should return clone of the default', function () {
- const schemaWithArrayDefault = Joi.object()
- .keys({
- foo: Joi.array().items(Joi.string().min(1)).default(['bar']),
- })
- .default();
-
- const config = new Config(schemaWithArrayDefault);
- config.set({
- foo: ['baz'],
- });
-
- expect(config.getDefault('foo')).not.toBe(config.getDefault('foo'));
- expect(config.getDefault('foo')).toEqual(config.getDefault('foo'));
- });
- });
-
- describe('#extendSchema(key, schema)', function () {
- let config;
- beforeEach(function () {
- config = new Config(schema);
- });
-
- it('should allow you to extend the schema at the top level', function () {
- const newSchema = Joi.object({ test: Joi.boolean().default(true) }).default();
- config.extendSchema(newSchema, {}, 'myTest');
- expect(config.get('myTest.test')).toBe(true);
- });
-
- it('should allow you to extend the schema with a prefix', function () {
- const newSchema = Joi.object({ test: Joi.boolean().default(true) }).default();
- config.extendSchema(newSchema, {}, 'prefix.myTest');
- expect(config.get('prefix')).toEqual({ myTest: { test: true } });
- expect(config.get('prefix.myTest')).toEqual({ test: true });
- expect(config.get('prefix.myTest.test')).toBe(true);
- });
-
- it('should NOT allow you to extend the schema if something else is there', function () {
- const newSchema = Joi.object({ test: Joi.boolean().default(true) }).default();
- const run = function () {
- config.extendSchema('test', newSchema);
- };
- expect(run).toThrow();
- });
- });
-
- describe('#removeSchema(key)', function () {
- it('should completely remove the key', function () {
- const config = new Config(
- Joi.object().keys({
- a: Joi.number().default(1),
- })
- );
-
- expect(config.get('a')).toBe(1);
- config.removeSchema('a');
- expect(() => config.get('a')).toThrowError('Unknown config key');
- });
-
- it('only removes existing keys', function () {
- const config = new Config(Joi.object());
-
- expect(() => config.removeSchema('b')).toThrowError('Unknown schema');
- });
- });
- });
-});
diff --git a/src/legacy/server/config/index.js b/src/legacy/server/config/index.js
deleted file mode 100644
index 6fb77eb2a37770..00000000000000
--- a/src/legacy/server/config/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export { Config } from './config';
diff --git a/src/legacy/server/config/override.test.ts b/src/legacy/server/config/override.test.ts
deleted file mode 100644
index d3046eb7bc8afd..00000000000000
--- a/src/legacy/server/config/override.test.ts
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { override } from './override';
-
-describe('override(target, source)', function () {
- it('should override the values form source to target', function () {
- const target = {
- test: {
- enable: true,
- host: ['something else'],
- client: {
- type: 'sql',
- },
- },
- };
-
- const source = {
- test: {
- host: ['host-01', 'host-02'],
- client: {
- type: 'nosql',
- },
- foo: {
- bar: {
- baz: 1,
- },
- },
- },
- };
-
- expect(override(target, source)).toMatchInlineSnapshot(`
- Object {
- "test": Object {
- "client": Object {
- "type": "nosql",
- },
- "enable": true,
- "foo": Object {
- "bar": Object {
- "baz": 1,
- },
- },
- "host": Array [
- "host-01",
- "host-02",
- ],
- },
- }
- `);
- });
-
- it('does not mutate arguments', () => {
- const target = {
- foo: {
- bar: 1,
- baz: 1,
- },
- };
-
- const source = {
- foo: {
- bar: 2,
- },
- box: 2,
- };
-
- expect(override(target, source)).toMatchInlineSnapshot(`
- Object {
- "box": 2,
- "foo": Object {
- "bar": 2,
- "baz": 1,
- },
- }
- `);
- expect(target).not.toHaveProperty('box');
- expect(source.foo).not.toHaveProperty('baz');
- });
-
- it('explodes keys with dots in them', () => {
- const target = {
- foo: {
- bar: 1,
- },
- 'baz.box.boot.bar.bar': 20,
- };
-
- const source = {
- 'foo.bar': 2,
- 'baz.box.boot': {
- 'bar.foo': 10,
- },
- };
-
- expect(override(target, source)).toMatchInlineSnapshot(`
- Object {
- "baz": Object {
- "box": Object {
- "boot": Object {
- "bar": Object {
- "bar": 20,
- "foo": 10,
- },
- },
- },
- },
- "foo": Object {
- "bar": 2,
- },
- }
- `);
- });
-});
diff --git a/src/legacy/server/config/override.ts b/src/legacy/server/config/override.ts
deleted file mode 100644
index 55147c955539ef..00000000000000
--- a/src/legacy/server/config/override.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-const isObject = (v: any): v is Record =>
- typeof v === 'object' && v !== null && !Array.isArray(v);
-
-const assignDeep = (target: Record, source: Record) => {
- for (let [key, value] of Object.entries(source)) {
- // unwrap dot-separated keys
- if (key.includes('.')) {
- const [first, ...others] = key.split('.');
- key = first;
- value = { [others.join('.')]: value };
- }
-
- if (isObject(value)) {
- if (!target.hasOwnProperty(key)) {
- target[key] = {};
- }
-
- assignDeep(target[key], value);
- } else {
- target[key] = value;
- }
- }
-};
-
-export const override = (...sources: Array>): Record => {
- const result = {};
-
- for (const object of sources) {
- assignDeep(result, object);
- }
-
- return result;
-};
diff --git a/src/legacy/server/config/schema.js b/src/legacy/server/config/schema.js
deleted file mode 100644
index 81fdfe04290d57..00000000000000
--- a/src/legacy/server/config/schema.js
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import Joi from 'joi';
-import os from 'os';
-import { legacyLoggingConfigSchema } from '@kbn/legacy-logging';
-
-const HANDLED_IN_NEW_PLATFORM = Joi.any().description(
- 'This key is handled in the new platform ONLY'
-);
-export default () =>
- Joi.object({
- elastic: Joi.object({
- apm: HANDLED_IN_NEW_PLATFORM,
- }).default(),
-
- pkg: Joi.object({
- version: Joi.string().default(Joi.ref('$version')),
- branch: Joi.string().default(Joi.ref('$branch')),
- buildNum: Joi.number().default(Joi.ref('$buildNum')),
- buildSha: Joi.string().default(Joi.ref('$buildSha')),
- }).default(),
-
- env: Joi.object({
- name: Joi.string().default(Joi.ref('$env')),
- dev: Joi.boolean().default(Joi.ref('$dev')),
- prod: Joi.boolean().default(Joi.ref('$prod')),
- }).default(),
-
- dev: HANDLED_IN_NEW_PLATFORM,
- pid: HANDLED_IN_NEW_PLATFORM,
- csp: HANDLED_IN_NEW_PLATFORM,
-
- server: Joi.object({
- name: Joi.string().default(os.hostname()),
- // keep them for BWC, remove when not used in Legacy.
- // validation should be in sync with one in New platform.
- // https://github.com/elastic/kibana/blob/master/src/core/server/http/http_config.ts
- basePath: Joi.string()
- .default('')
- .allow('')
- .regex(/(^$|^\/.*[^\/]$)/, `start with a slash, don't end with one`),
- host: Joi.string().hostname().default('localhost'),
- port: Joi.number().default(5601),
- rewriteBasePath: Joi.boolean().when('basePath', {
- is: '',
- then: Joi.default(false).valid(false),
- otherwise: Joi.default(false),
- }),
-
- autoListen: HANDLED_IN_NEW_PLATFORM,
- cors: HANDLED_IN_NEW_PLATFORM,
- customResponseHeaders: HANDLED_IN_NEW_PLATFORM,
- keepaliveTimeout: HANDLED_IN_NEW_PLATFORM,
- maxPayloadBytes: HANDLED_IN_NEW_PLATFORM,
- publicBaseUrl: HANDLED_IN_NEW_PLATFORM,
- socketTimeout: HANDLED_IN_NEW_PLATFORM,
- ssl: HANDLED_IN_NEW_PLATFORM,
- compression: HANDLED_IN_NEW_PLATFORM,
- uuid: HANDLED_IN_NEW_PLATFORM,
- xsrf: HANDLED_IN_NEW_PLATFORM,
- }).default(),
-
- uiSettings: HANDLED_IN_NEW_PLATFORM,
-
- logging: legacyLoggingConfigSchema,
-
- ops: Joi.object({
- interval: Joi.number().default(5000),
- cGroupOverrides: HANDLED_IN_NEW_PLATFORM,
- }).default(),
-
- plugins: HANDLED_IN_NEW_PLATFORM,
- path: HANDLED_IN_NEW_PLATFORM,
- stats: HANDLED_IN_NEW_PLATFORM,
- status: HANDLED_IN_NEW_PLATFORM,
- map: HANDLED_IN_NEW_PLATFORM,
- i18n: HANDLED_IN_NEW_PLATFORM,
-
- // temporarily moved here from the (now deleted) kibana legacy plugin
- kibana: Joi.object({
- enabled: Joi.boolean().default(true),
- index: Joi.string().default('.kibana'),
- autocompleteTerminateAfter: Joi.number().integer().min(1).default(100000),
- // TODO Also allow units here like in elasticsearch config once this is moved to the new platform
- autocompleteTimeout: Joi.number().integer().min(1).default(1000),
- }).default(),
-
- savedObjects: HANDLED_IN_NEW_PLATFORM,
- }).default();
diff --git a/src/legacy/server/config/schema.test.js b/src/legacy/server/config/schema.test.js
deleted file mode 100644
index c57e6cf9a933a8..00000000000000
--- a/src/legacy/server/config/schema.test.js
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import schemaProvider from './schema';
-import Joi from 'joi';
-
-describe('Config schema', function () {
- let schema;
- beforeEach(async () => (schema = await schemaProvider()));
-
- function validate(data, options) {
- return Joi.validate(data, schema, options);
- }
-
- describe('server', function () {
- it('everything is optional', function () {
- const { error } = validate({});
- expect(error).toBe(null);
- });
-
- describe('basePath', function () {
- it('accepts empty strings', function () {
- const { error, value } = validate({ server: { basePath: '' } });
- expect(error).toBe(null);
- expect(value.server.basePath).toBe('');
- });
-
- it('accepts strings with leading slashes', function () {
- const { error, value } = validate({ server: { basePath: '/path' } });
- expect(error).toBe(null);
- expect(value.server.basePath).toBe('/path');
- });
-
- it('rejects strings with trailing slashes', function () {
- const { error } = validate({ server: { basePath: '/path/' } });
- expect(error).toHaveProperty('details');
- expect(error.details[0]).toHaveProperty('path', ['server', 'basePath']);
- });
-
- it('rejects strings without leading slashes', function () {
- const { error } = validate({ server: { basePath: 'path' } });
- expect(error).toHaveProperty('details');
- expect(error.details[0]).toHaveProperty('path', ['server', 'basePath']);
- });
-
- it('rejects things that are not strings', function () {
- for (const value of [1, true, {}, [], /foo/]) {
- const { error } = validate({ server: { basePath: value } });
- expect(error).toHaveProperty('details');
- expect(error.details[0]).toHaveProperty('path', ['server', 'basePath']);
- }
- });
- });
-
- describe('rewriteBasePath', function () {
- it('defaults to false', () => {
- const { error, value } = validate({});
- expect(error).toBe(null);
- expect(value.server.rewriteBasePath).toBe(false);
- });
-
- it('accepts false', function () {
- const { error, value } = validate({ server: { rewriteBasePath: false } });
- expect(error).toBe(null);
- expect(value.server.rewriteBasePath).toBe(false);
- });
-
- it('accepts true if basePath set', function () {
- const { error, value } = validate({ server: { basePath: '/foo', rewriteBasePath: true } });
- expect(error).toBe(null);
- expect(value.server.rewriteBasePath).toBe(true);
- });
-
- it('rejects true if basePath not set', function () {
- const { error } = validate({ server: { rewriteBasePath: true } });
- expect(error).toHaveProperty('details');
- expect(error.details[0]).toHaveProperty('path', ['server', 'rewriteBasePath']);
- });
-
- it('rejects strings', function () {
- const { error } = validate({ server: { rewriteBasePath: 'foo' } });
- expect(error).toHaveProperty('details');
- expect(error.details[0]).toHaveProperty('path', ['server', 'rewriteBasePath']);
- });
- });
- });
-});
diff --git a/src/legacy/server/core/index.ts b/src/legacy/server/core/index.ts
deleted file mode 100644
index 2bdd9f26b2c228..00000000000000
--- a/src/legacy/server/core/index.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { Server } from '@hapi/hapi';
-import KbnServer from '../kbn_server';
-
-/**
- * Exposes `kbnServer.newPlatform` through Hapi API.
- * @param kbnServer KbnServer singleton instance.
- * @param server Hapi server instance to expose `core` on.
- */
-export function coreMixin(kbnServer: KbnServer, server: Server) {
- // we suppress type error because hapi expect a function here not an object
- server.decorate('server', 'newPlatform', kbnServer.newPlatform as any);
-}
diff --git a/src/legacy/server/http/index.js b/src/legacy/server/http/index.js
deleted file mode 100644
index 0fb51b341c3dde..00000000000000
--- a/src/legacy/server/http/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { format } from 'url';
-import Boom from '@hapi/boom';
-
-export default async function (kbnServer, server) {
- server = kbnServer.server;
-
- const getBasePath = (request) => kbnServer.newPlatform.setup.core.http.basePath.get(request);
-
- server.route({
- method: 'GET',
- path: '/{p*}',
- handler: function (req, h) {
- const path = req.path;
- if (path === '/' || path.charAt(path.length - 1) !== '/') {
- throw Boom.notFound();
- }
- const basePath = getBasePath(req);
- const pathPrefix = basePath ? `${basePath}/` : '';
- return h
- .redirect(
- format({
- search: req.url.search,
- pathname: pathPrefix + path.slice(0, -1),
- })
- )
- .permanent(true);
- },
- });
-}
diff --git a/src/legacy/server/jest.config.js b/src/legacy/server/jest.config.js
deleted file mode 100644
index 0a7322d2985fae..00000000000000
--- a/src/legacy/server/jest.config.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-module.exports = {
- preset: '@kbn/test',
- rootDir: '../../..',
- roots: ['