diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index fd48ca104acb6..72ce61bb738dc 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -656,7 +656,6 @@ - diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 9cbc1b6563242..b2a90792f2945 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -43,6 +43,7 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; /** * Create snapshot request @@ -366,14 +367,14 @@ public CreateSnapshotRequest source(Map source) { throw new IllegalArgumentException("malformed indices section, should be an array of strings"); } } else if (name.equals("partial")) { - partial(lenientNodeBooleanValue(entry.getValue())); + partial(lenientNodeBooleanValue(entry.getValue(), name)); } else if (name.equals("settings")) { if (!(entry.getValue() instanceof Map)) { throw new IllegalArgumentException("malformed settings section, should indices an inner object"); } settings((Map) entry.getValue()); } else if (name.equals("include_global_state")) { - includeGlobalState = lenientNodeBooleanValue(entry.getValue()); + includeGlobalState = lenientNodeBooleanValue(entry.getValue(), name); } } indicesOptions(IndicesOptions.fromMap((Map) source, IndicesOptions.lenientExpandOpen())); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 641525f00e8bd..c5afd5b896a33 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -481,16 +481,16 @@ public RestoreSnapshotRequest source(Map source) { throw new IllegalArgumentException("malformed indices section, should be an array of strings"); } } else if (name.equals("partial")) { - partial(lenientNodeBooleanValue(entry.getValue())); + partial(lenientNodeBooleanValue(entry.getValue(), name)); } else if (name.equals("settings")) { if (!(entry.getValue() instanceof Map)) { throw new IllegalArgumentException("malformed settings section"); } settings((Map) entry.getValue()); } else if (name.equals("include_global_state")) { - includeGlobalState = lenientNodeBooleanValue(entry.getValue()); + includeGlobalState = lenientNodeBooleanValue(entry.getValue(), name); } else if (name.equals("include_aliases")) { - includeAliases = lenientNodeBooleanValue(entry.getValue()); + includeAliases = lenientNodeBooleanValue(entry.getValue(), name); } else if (name.equals("rename_pattern")) { if (entry.getValue() instanceof String) { renamePattern((String) entry.getValue()); diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index 344387cc7cfed..d79668ea73ed9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; -import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -68,7 +68,7 @@ protected ReplicaResult shardOperationOnReplica(ShardFlushRequest request, Index } @Override - protected boolean shouldExecuteReplication(Settings settings) { + protected boolean shouldExecuteReplication(IndexMetaData indexMetaData) { return true; } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index d8e9d8c0b9e72..d1d8b4078b647 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; -import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -68,7 +68,7 @@ protected ReplicaResult shardOperationOnReplica(BasicReplicationRequest request, } @Override - protected boolean shouldExecuteReplication(Settings settings) { + protected boolean shouldExecuteReplication(IndexMetaData indexMetaData) { return true; } } diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index ff576a75671a8..76ffdab54dfe1 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -24,6 +24,8 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -40,6 +42,7 @@ * a write operation is about to happen in a non existing index. 
*/ public final class AutoCreateIndex { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(AutoCreateIndex.class)); public static final Setting AUTO_CREATE_INDEX_SETTING = new Setting<>("action.auto_create_index", "true", AutoCreate::new, Property.NodeScope, Setting.Property.Dynamic); @@ -116,6 +119,10 @@ private AutoCreate(String value) { List> expressions = new ArrayList<>(); try { autoCreateIndex = Booleans.parseBooleanExact(value); + if (Booleans.isStrictlyBoolean(value) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for setting [{}] but got [{}]", + AUTO_CREATE_INDEX_SETTING.getKey(), value); + } } catch (IllegalArgumentException ex) { try { String[] patterns = Strings.commaDelimitedListToStringArray(value); diff --git a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 2bc49f7e9f869..a79044f2d6923 100644 --- a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -195,8 +195,8 @@ public static IndicesOptions fromParameters(Object wildcardsString, Object ignor //note that allowAliasesToMultipleIndices is not exposed, always true (only for internal use) return fromOptions( - lenientNodeBooleanValue(ignoreUnavailableString, defaultSettings.ignoreUnavailable()), - lenientNodeBooleanValue(allowNoIndicesString, defaultSettings.allowNoIndices()), + lenientNodeBooleanValue(ignoreUnavailableString, "ignore_unavailable", defaultSettings.ignoreUnavailable()), + lenientNodeBooleanValue(allowNoIndicesString, "allow_no_indices", defaultSettings.allowNoIndices()), expandWildcardsOpen, expandWildcardsClosed, defaultSettings.allowAliasesToMultipleIndices(), diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 3d231df708e21..8b5882c4c76b7 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -312,7 +312,7 @@ public void handleException(TransportException exp) { } else { setPhase(replicationTask, "primary"); final IndexMetaData indexMetaData = clusterService.state().getMetaData().index(request.shardId().getIndex()); - final boolean executeOnReplicas = (indexMetaData == null) || shouldExecuteReplication(indexMetaData.getSettings()); + final boolean executeOnReplicas = (indexMetaData == null) || shouldExecuteReplication(indexMetaData); final ActionListener listener = createResponseListener(primaryShardReference); createReplicatedOperation(request, new ActionListener() { @Override @@ -876,8 +876,8 @@ public void onFailure(Exception e) { * Indicated whether this operation should be replicated to shadow replicas or not. If this method returns true the replication phase * will be skipped. For example writes such as index and delete don't need to be replicated on shadow replicas but refresh and flush do. 
*/ - protected boolean shouldExecuteReplication(Settings settings) { - return IndexMetaData.isIndexUsingShadowReplicas(settings) == false; + protected boolean shouldExecuteReplication(IndexMetaData indexMetaData) { + return IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings()) == false; } class PrimaryShardReference implements ReplicationOperation.Primary, Releasable { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java index 4b4a8e54d7c6c..0031aaf19de02 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -19,6 +19,8 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -28,12 +30,17 @@ * based on the number of datanodes in the cluster. This class handles all the parsing and streamlines the access to these values. */ final class AutoExpandReplicas { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(AutoExpandReplicas.class)); + // the value we recognize in the "max" position to mean all the nodes private static final String ALL_NODES_VALUE = "all"; public static final Setting SETTING = new Setting<>(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "false", (value) -> { final int min; final int max; - if (Booleans.parseBoolean(value, true) == false) { + if (Booleans.isExplicitFalse(value)) { + if (Booleans.isStrictlyBoolean(value) == false) { + DEPRECATION_LOGGER.deprecated("Expected [false] for setting [{}] but got [{}]", IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, value); + } return new AutoExpandReplicas(0, 0, false); } final int dash = value.indexOf('-'); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 43a31f6637899..7c9f1c4569356 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -215,7 +215,7 @@ private void initMappers(Map withoutType) { String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("required")) { - required = lenientNodeBooleanValue(fieldNode); + required = lenientNodeBooleanValue(fieldNode, fieldName); } } this.routing = new Routing(required); @@ -232,13 +232,13 @@ private void initMappers(Map withoutType) { String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - enabled = lenientNodeBooleanValue(fieldNode); + enabled = lenientNodeBooleanValue(fieldNode, fieldName); } else if (fieldName.equals("format")) { format = fieldNode.toString(); } else if (fieldName.equals("default") && fieldNode != null) { defaultTimestamp = fieldNode.toString(); } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = lenientNodeBooleanValue(fieldNode); + ignoreMissing = lenientNodeBooleanValue(fieldNode, fieldName); } } this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); diff --git a/core/src/main/java/org/elasticsearch/common/Booleans.java 
b/core/src/main/java/org/elasticsearch/common/Booleans.java index 9c5f574663363..090c82acc0489 100644 --- a/core/src/main/java/org/elasticsearch/common/Booleans.java +++ b/core/src/main/java/org/elasticsearch/common/Booleans.java @@ -24,30 +24,6 @@ */ public class Booleans { - /** - * Returns false if text is in false, 0, off, no; else, true - */ - public static boolean parseBoolean(char[] text, int offset, int length, boolean defaultValue) { - // TODO: the leniency here is very dangerous: a simple typo will be misinterpreted and the user won't know. - // We should remove it and cutover to https://github.com/rmuir/booleanparser - if (text == null || length == 0) { - return defaultValue; - } - if (length == 1) { - return text[offset] != '0'; - } - if (length == 2) { - return !(text[offset] == 'n' && text[offset + 1] == 'o'); - } - if (length == 3) { - return !(text[offset] == 'o' && text[offset + 1] == 'f' && text[offset + 2] == 'f'); - } - if (length == 5) { - return !(text[offset] == 'f' && text[offset + 1] == 'a' && text[offset + 2] == 'l' && text[offset + 3] == 's' && text[offset + 4] == 'e'); - } - return true; - } - /** * returns true if the a sequence of chars is one of "true","false","on","off","yes","no","0","1" * @@ -78,6 +54,13 @@ public static boolean isBoolean(char[] text, int offset, int length) { return false; } + public static Boolean parseBooleanExact(String value, Boolean defaultValue) { + if (Strings.hasText(value)) { + return parseBooleanExact(value); + } + return defaultValue; + } + /*** * * @return true/false @@ -96,6 +79,13 @@ public static Boolean parseBooleanExact(String value) { throw new IllegalArgumentException("Failed to parse value [" + value + "] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]"); } + /** + * @return true iff the provided value is either "true" or "false". + */ + public static boolean isStrictlyBoolean(String value) { + return "false".equals(value) || "true".equals(value); + } + public static Boolean parseBoolean(String value, Boolean defaultValue) { if (value == null) { // only for the null case we do that here! return defaultValue; diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 45ebe1b061c62..f4548db3456a9 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -42,7 +42,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; -import java.util.HashMap; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; @@ -668,15 +667,25 @@ public static Setting intSetting(String key, int defaultValue, Property } public static Setting boolSetting(String key, boolean defaultValue, Property... properties) { - return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, properties); + return new Setting<>(key, (s) -> Boolean.toString(defaultValue), (value) -> parseBoolean(key, value), properties); } public static Setting boolSetting(String key, Setting fallbackSetting, Property... properties) { - return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties); + return new Setting<>(key, fallbackSetting, (value) -> parseBoolean(key, value), properties); } public static Setting boolSetting(String key, Function defaultValueFn, Property... 
properties) { - return new Setting<>(key, defaultValueFn, Booleans::parseBooleanExact, properties); + return new Setting<>(key, defaultValueFn, (value) -> parseBoolean(key, value), properties); + } + + private static Boolean parseBoolean(String key, String value) { + // let the parser handle all cases for non-proper booleans without a deprecation warning by throwing IAE + boolean booleanValue = Booleans.parseBooleanExact(value); + if (Booleans.isStrictlyBoolean(value) == false) { + DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(Setting.class)); + deprecationLogger.deprecated("Expected a boolean for setting [{}] but got [{}]", key, value); + } + return booleanValue; } public static Setting byteSizeSetting(String key, ByteSizeValue value, Property... properties) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index ef9ff00a1f029..c9f291f8348a5 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.settings.loader.SettingsLoaderFactory; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -74,7 +76,6 @@ * An immutable settings implementation. */ public final class Settings implements ToXContent { - public static final Settings EMPTY = new Builder().build(); private static final Pattern ARRAY_PATTERN = Pattern.compile("(.*)\\.\\d+$"); @@ -310,7 +311,13 @@ public Long getAsLong(String setting, Long defaultValue) { * returns the default value provided. */ public Boolean getAsBoolean(String setting, Boolean defaultValue) { - return Booleans.parseBoolean(get(setting), defaultValue); + String rawValue = get(setting); + Boolean booleanValue = Booleans.parseBooleanExact(rawValue, defaultValue); + if (rawValue != null && Booleans.isStrictlyBoolean(rawValue) == false) { + DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(Settings.class)); + deprecationLogger.deprecated("Expected a boolean for setting [{}] but got [{}]", setting, rawValue); + } + return booleanValue; } /** diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java b/core/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java index 3006363a4ddd4..da923aecb6dcb 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java @@ -20,6 +20,8 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.util.Map; @@ -30,7 +32,6 @@ * but those that don't may or may not require emitting a startObject and an endObject. 
*/ public interface ToXContent { - interface Params { String param(String key); @@ -65,6 +66,7 @@ public Boolean paramAsBoolean(String key, Boolean defaultValue) { }; class MapParams implements Params { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(MapParams.class)); private final Map params; @@ -88,12 +90,16 @@ public String param(String key, String defaultValue) { @Override public boolean paramAsBoolean(String key, boolean defaultValue) { - return Booleans.parseBoolean(param(key), defaultValue); + return paramAsBoolean(key, (Boolean) defaultValue); } @Override public Boolean paramAsBoolean(String key, Boolean defaultValue) { - return Booleans.parseBoolean(param(key), defaultValue); + String rawParam = param(key); + if (rawParam != null && Booleans.isStrictlyBoolean(rawParam) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for [{}] but got [{}]", key, rawParam); + } + return Booleans.parseBoolean(rawParam, defaultValue); } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java index 5563d8a7582a6..41a11acb5247d 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ -22,6 +22,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -36,7 +38,6 @@ * */ public abstract class AbstractXContentParser implements XContentParser { - // Currently this is not a setting that can be changed and is a policy // that relates to how parsing of things like "boost" are done across // the whole of Elasticsearch (eg if String "1.0" is a valid float). 
@@ -53,6 +54,8 @@ private static void checkCoerceString(boolean coerce, Class cl } } + private final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(getClass())); + private final NamedXContentRegistry xContentRegistry; public AbstractXContentParser(NamedXContentRegistry xContentRegistry) { @@ -92,13 +95,27 @@ public boolean isBooleanValue() throws IOException { @Override public boolean booleanValue() throws IOException { + boolean interpretedAsLenient = false; + boolean booleanValue; + String rawValue = null; + Token token = currentToken(); if (token == Token.VALUE_NUMBER) { - return intValue() != 0; + interpretedAsLenient = true; + booleanValue = intValue() != 0; + rawValue = String.valueOf(intValue()); } else if (token == Token.VALUE_STRING) { - return Booleans.parseBoolean(textCharacters(), textOffset(), textLength(), false /* irrelevant */); + rawValue = new String(textCharacters(), textOffset(), textLength()); + interpretedAsLenient = Booleans.isStrictlyBoolean(rawValue) == false; + booleanValue = Booleans.parseBoolean(rawValue, false /* irrelevant */); + } else { + booleanValue = doBooleanValue(); + } + if (interpretedAsLenient) { + deprecationLogger.deprecated("Expected a boolean for property [{}] but got [{}]", currentName(), rawValue); } - return doBooleanValue(); + return booleanValue; + } protected abstract boolean doBooleanValue() throws IOException; diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index fabbdd0114001..b4dd285c798b3 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -24,8 +24,11 @@ import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.TimeValue; @@ -40,6 +43,7 @@ * */ public class XContentMapValues { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(XContentMapValues.class)); /** * Extracts raw values (string, int, and so on) based on the path provided returning all of them @@ -418,25 +422,35 @@ public static long nodeLongValue(Object node) { /** * This method is very lenient, use {@link #nodeBooleanValue} instead. */ - public static boolean lenientNodeBooleanValue(Object node, boolean defaultValue) { + public static boolean lenientNodeBooleanValue(Object node, String name, boolean defaultValue) { if (node == null) { return defaultValue; } - return lenientNodeBooleanValue(node); + return lenientNodeBooleanValue(node, name); } /** * This method is very lenient, use {@link #nodeBooleanValue} instead. 
*/ - public static boolean lenientNodeBooleanValue(Object node) { + public static boolean lenientNodeBooleanValue(Object node, String name) { + boolean interpretedAsLenient = false; + boolean booleanValue; + if (node instanceof Boolean) { - return (Boolean) node; + booleanValue = (Boolean) node; + } else if (node instanceof Number) { + interpretedAsLenient = true; + booleanValue = ((Number) node).intValue() != 0; + } else { + String value = node.toString(); + booleanValue = !(value.equals("false") || value.equals("0") || value.equals("off")); + interpretedAsLenient = Booleans.isStrictlyBoolean(value) == false; } - if (node instanceof Number) { - return ((Number) node).intValue() != 0; + + if (interpretedAsLenient) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for property [{}] but got [{}]", name, node.toString()); } - String value = node.toString(); - return !(value.equals("false") || value.equals("0") || value.equals("off")); + return booleanValue; } public static boolean nodeBooleanValue(Object node) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index b850ccb83e513..cc680b2053b84 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -337,8 +337,8 @@ public synchronized IndexShard createShard(ShardRouting routing) throws IOExcept logger.debug("creating shard_id {}", shardId); // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary. - final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false || - (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); + final boolean canDeleteShardContent = this.indexSettings.isOnSharedFilesystem() == false || + (primary && this.indexSettings.isOnSharedFilesystem()); final Engine.Warmer engineWarmer = (searcher) -> { IndexShard shard = getShardOrNull(shardId.getId()); if (shard != null) { @@ -347,7 +347,7 @@ public synchronized IndexShard createShard(ShardRouting routing) throws IOExcept }; store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> eventListener.onStoreClosed(shardId))); - if (useShadowEngine(primary, indexSettings)) { + if (useShadowEngine(primary, this.indexSettings)) { indexShard = new ShadowIndexShard(routing, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, engineWarmer, searchOperationListeners); @@ -374,8 +374,8 @@ public synchronized IndexShard createShard(ShardRouting routing) throws IOExcept } } - static boolean useShadowEngine(boolean primary, Settings indexSettings) { - return primary == false && IndexMetaData.isIndexUsingShadowReplicas(indexSettings); + static boolean useShadowEngine(boolean primary, IndexSettings indexSettings) { + return primary == false && indexSettings.isShadowReplicaIndex(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 300b2c37c85fd..63af0a90a123b 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -331,15 +331,6 @@ public boolean isOnSharedFilesystem() { return IndexMetaData.isOnSharedFilesystem(getSettings()); } - 
/** - * Returns true iff the given settings indicate that the index associated - * with these settings uses shadow replicas. Otherwise false. The default - * setting for this is false. - */ - public boolean isIndexUsingShadowReplicas() { - return IndexMetaData.isOnSharedFilesystem(getSettings()); - } - /** * Returns the version the index was created on. * @see Version#indexCreated(Settings) diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index d00017da57d71..3733b4741eec2 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -22,6 +22,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -36,6 +37,8 @@ import java.util.concurrent.TimeUnit; public final class IndexingSlowLog implements IndexingOperationListener { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(IndexingSlowLog.class)); + private final Index index; private boolean reformat; private long indexWarnThreshold; @@ -82,7 +85,12 @@ public final class IndexingSlowLog implements IndexingOperationListener { try { return Integer.parseInt(value, 10); } catch (NumberFormatException e) { - return Booleans.parseBoolean(value, true) ? Integer.MAX_VALUE : 0; + boolean booleanValue = Booleans.parseBoolean(value, true); + if (value != null && Booleans.isStrictlyBoolean(value) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for setting [{}] but got [{}]", + INDEX_INDEXING_SLOWLOG_PREFIX + ".source", value); + } + return booleanValue ? Integer.MAX_VALUE : 0; } }, Property.Dynamic, Property.IndexScope); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java index 2c8e7b4619549..c5e4d6be45ccf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java @@ -39,7 +39,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue; import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; @@ -115,7 +114,7 @@ public MetadataFieldMapper.Builder parse(String name, Map n // the AllFieldMapper ctor in the builder since it is not valid. Here we validate // the doc values settings (old and new) are rejected Object docValues = node.get("doc_values"); - if (docValues != null && lenientNodeBooleanValue(docValues)) { + if (docValues != null && TypeParsers.nodeBooleanValue(name, "doc_values", docValues)) { throw new MapperParsingException("Field [" + name + "] is always tokenized and cannot have doc values"); } @@ -136,8 +135,8 @@ public MetadataFieldMapper.Builder parse(String name, Map n String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : - EnabledAttributeMapper.DISABLED); + boolean enabled = TypeParsers.nodeBooleanValue(name, "enabled", fieldNode); + builder.enabled(enabled ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java index 8d46fac47f1e8..fd8857de9444b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java @@ -221,7 +221,7 @@ public abstract static class TypeParser implements Mapper.TypeParser { if (propName.equals("lat_lon")) { deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed " + "in the next major release"); - builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); + builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode, propName)); iterator.remove(); } else if (propName.equals("precision_step")) { deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed " @@ -231,13 +231,13 @@ public abstract static class TypeParser implements Mapper.TypeParser { } else if (propName.equals("geohash")) { deprecationLogger.deprecated(CONTENT_TYPE + " geohash parameter is deprecated and will be removed " + "in the next major release"); - builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode)); + builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode, propName)); iterator.remove(); } else if (propName.equals("geohash_prefix")) { deprecationLogger.deprecated(CONTENT_TYPE + " geohash_prefix parameter is deprecated and will be removed " + "in the next major release"); - builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode)); - if (XContentMapValues.lenientNodeBooleanValue(propNode)) { + builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode, propName)); + if (XContentMapValues.lenientNodeBooleanValue(propNode, propName)) { builder.enableGeoHash(true); } iterator.remove(); @@ -254,7 +254,7 @@ public abstract static class TypeParser implements Mapper.TypeParser { } if (propName.equals(Names.IGNORE_MALFORMED)) { - builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode)); + builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name, Names.IGNORE_MALFORMED, propNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 148762c2c2705..03d1dc7d57187 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -46,7 +46,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.TypeParsers.parseField; /** @@ -112,7 +111,7 @@ public Mapper.Builder parse(String name, Map node, ParserContext if (propNode == null) { throw new MapperParsingException("Property [null_value] cannot be null."); } - builder.nullValue(lenientNodeBooleanValue(propNode)); + builder.nullValue(TypeParsers.nodeBooleanValue(name, "null_value", propNode)); iterator.remove(); } } diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index ec3fd90f8b17c..0ded2e05bfec3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -152,7 +152,7 @@ public Mapper.Builder parse(String name, Map node, ParserCo builder.nullValue(propNode.toString()); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); + builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name, "ignore_malformed", propNode)); iterator.remove(); } else if (propName.equals("locale")) { builder.locale(LocaleUtils.parse(propNode.toString())); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index 764586562d29c..6235544b6be99 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -36,8 +36,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; - /** * A mapper that indexes the field names of a document under _field_names. This mapper is typically useful in order * to have fast exists and missing queries/filters. @@ -107,7 +105,7 @@ public MetadataFieldMapper.Builder parse(String name, Map n String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(lenientNodeBooleanValue(fieldNode)); + builder.enabled(TypeParsers.nodeBooleanValue(name, "enabled", fieldNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index f1a73308692f5..355c515bdd0e2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.locationtech.spatial4j.shape.Point; @@ -54,9 +53,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; - - /** * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. *

@@ -185,11 +181,12 @@ public Mapper.Builder parse(String name, Map node, ParserContext builder.fieldType().setStrategyName(fieldNode.toString()); iterator.remove(); } else if (Names.COERCE.equals(fieldName)) { - builder.coerce(lenientNodeBooleanValue(fieldNode)); + builder.coerce(TypeParsers.nodeBooleanValue(fieldName, Names.COERCE, fieldNode)); iterator.remove(); } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName) && builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) == false) { - builder.fieldType().setPointsOnly(XContentMapValues.lenientNodeBooleanValue(fieldNode)); + boolean pointsOnly = TypeParsers.nodeBooleanValue(fieldName, Names.STRATEGY_POINTS_ONLY, fieldNode); + builder.fieldType().setPointsOnly(pointsOnly); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index ee2f707fb3a35..ca739d8e03702 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -115,7 +115,7 @@ public Mapper.Builder parse(String name, Map node, ParserCo builder.nullValue(InetAddresses.forString(propNode.toString())); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); + builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name, "ignore_malformed", propNode)); iterator.remove(); } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java index fc46a08ce1ae8..619a0a824ef18 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java @@ -123,7 +123,7 @@ public static Builder parse(Builder builder, Map node, Mapper.Ty String propName = entry.getKey(); Object propNode = entry.getValue(); if (propName.equals(Names.COERCE)) { - builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode); + builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode, propName); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 4cb31739232fc..99fd53a4f7000 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -165,10 +165,10 @@ public Mapper.Builder parse(String name, Map node, builder.nullValue(type.parse(propNode, false)); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); + builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name,"ignore_malformed", propNode)); iterator.remove(); } else if (propName.equals("coerce")) { - builder.coerce(TypeParsers.nodeBooleanValue("coerce", propNode, parserContext)); + builder.coerce(TypeParsers.nodeBooleanValue(name, "coerce", propNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index e843e7170d8a3..b1ae69992c7ee 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -42,11 +42,6 @@ import java.util.Locale; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; - -/** - * - */ public class ObjectMapper extends Mapper implements Cloneable { public static final String CONTENT_TYPE = "object"; @@ -188,11 +183,12 @@ protected static boolean parseObjectOrDocumentTypeProperties(String fieldName, O if (value.equalsIgnoreCase("strict")) { builder.dynamic(Dynamic.STRICT); } else { - builder.dynamic(lenientNodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE); + boolean dynamic = TypeParsers.nodeBooleanValue(fieldName, "dynamic", fieldNode); + builder.dynamic(dynamic ? Dynamic.TRUE : Dynamic.FALSE); } return true; } else if (fieldName.equals("enabled")) { - builder.enabled(lenientNodeBooleanValue(fieldNode)); + builder.enabled(TypeParsers.nodeBooleanValue(fieldName, "enabled", fieldNode)); return true; } else if (fieldName.equals("properties")) { if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) { @@ -204,7 +200,7 @@ protected static boolean parseObjectOrDocumentTypeProperties(String fieldName, O } return true; } else if (fieldName.equals("include_in_all")) { - builder.includeInAll(lenientNodeBooleanValue(fieldNode)); + builder.includeInAll(TypeParsers.nodeBooleanValue(fieldName, "include_in_all", fieldNode)); return true; } return false; @@ -227,12 +223,12 @@ protected static void parseNested(String name, Map node, ObjectM } fieldNode = node.get("include_in_parent"); if (fieldNode != null) { - nestedIncludeInParent = lenientNodeBooleanValue(fieldNode); + nestedIncludeInParent = TypeParsers.nodeBooleanValue(name, "include_in_parent", fieldNode); node.remove("include_in_parent"); } fieldNode = node.get("include_in_root"); if (fieldNode != null) { - nestedIncludeInRoot = lenientNodeBooleanValue(fieldNode); + nestedIncludeInRoot = TypeParsers.nodeBooleanValue(name, "include_in_root", fieldNode); node.remove("include_in_root"); } if (nested) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index b0a809c12dfce..f7425f377558e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -166,7 +166,7 @@ public Mapper.Builder parse(String name, Map node, throw new MapperParsingException("Property [null_value] is not supported for [" + this.type.name + "] field types."); } else if (propName.equals("coerce")) { - builder.coerce(TypeParsers.nodeBooleanValue("coerce", propNode, parserContext)); + builder.coerce(TypeParsers.nodeBooleanValue(name, "coerce", propNode)); iterator.remove(); } else if (propName.equals("locale")) { builder.locale(LocaleUtils.parse(propNode.toString())); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java index a69f141ecad8b..759781d55754d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java @@ -32,11 +32,6 @@ import java.util.List; import java.util.Map; 
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; - -/** - * - */ public class RoutingFieldMapper extends MetadataFieldMapper { public static final String NAME = "_routing"; @@ -89,7 +84,7 @@ public MetadataFieldMapper.Builder parse(String name, Map n String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("required")) { - builder.required(lenientNodeBooleanValue(fieldNode)); + builder.required(TypeParsers.nodeBooleanValue(name, "required", fieldNode)); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 0f7a89058343a..400f16bed1597 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -147,10 +147,10 @@ public Mapper.Builder parse(String name, Map node, builder.nullValue(NumberFieldMapper.NumberType.DOUBLE.parse(propNode, false)); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); + builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name, "ignore_malformed", propNode)); iterator.remove(); } else if (propName.equals("coerce")) { - builder.coerce(TypeParsers.nodeBooleanValue("coerce", propNode, parserContext)); + builder.coerce(TypeParsers.nodeBooleanValue(name, "coerce", propNode)); iterator.remove(); } else if (propName.equals("scaling_factor")) { builder.scalingFactor(NumberFieldMapper.NumberType.DOUBLE.parse(propNode, false).doubleValue()); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index be6416c89b767..ae839b19d799f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -47,11 +47,6 @@ import java.util.Map; import java.util.function.Function; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; - -/** - * - */ public class SourceFieldMapper extends MetadataFieldMapper { public static final String NAME = "_source"; @@ -119,7 +114,7 @@ public MetadataFieldMapper.Builder parse(String name, Map n String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(lenientNodeBooleanValue(fieldNode)); + builder.enabled(TypeParsers.nodeBooleanValue(name, "enabled", fieldNode)); iterator.remove(); } else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { // ignore on old indices, reject on and after 5.0 diff --git a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java index 69dd337fddd7b..acb3a5d9805ed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java @@ -229,11 +229,11 @@ public Mapper.Builder parse(String fieldName, Map node, ParserCo norms = ((Map) norms).get("enabled"); } if (norms != null) { - node.put("norms", TypeParsers.nodeBooleanValue("norms", norms, parserContext)); + node.put("norms", 
TypeParsers.nodeBooleanValue(fieldName,"norms", norms)); } Object omitNorms = node.remove("omit_norms"); if (omitNorms != null) { - node.put("norms", TypeParsers.nodeBooleanValue("omit_norms", omitNorms, parserContext) == false); + node.put("norms", TypeParsers.nodeBooleanValue(fieldName, "omit_norms", omitNorms) == false); } } { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java index 7fb30c9239ac8..8c3ccf3c765d1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java @@ -102,7 +102,7 @@ public MetadataFieldMapper.Builder parse(String name, Map node, String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; + EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode, fieldName) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; builder.enabled(enabledState); iterator.remove(); } else if (fieldName.equals("default")) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java index 00040d90d4b56..be315b4af8c87 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java @@ -130,7 +130,7 @@ public MetadataFieldMapper.Builder parse(String name, Map node, String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; + EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode, fieldName) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; builder.enabled(enabledState); iterator.remove(); } else if (fieldName.equals("format")) { @@ -145,7 +145,7 @@ public MetadataFieldMapper.Builder parse(String name, Map node, } iterator.remove(); } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = lenientNodeBooleanValue(fieldNode); + ignoreMissing = lenientNodeBooleanValue(fieldNode, fieldName); builder.ignoreMissing(ignoreMissing); iterator.remove(); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index 22b9730a92a41..3bf9584673816 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -31,17 +31,13 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.similarity.SimilarityProvider; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue; @@ -59,14 +55,9 @@ public class TypeParsers { public static final String INDEX_OPTIONS_OFFSETS = "offsets"; private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TypeParsers.class)); - private static final Set BOOLEAN_STRINGS = new HashSet<>(Arrays.asList("true", "false")); - public static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) { - // TODO: remove this leniency in 6.0 - if (BOOLEAN_STRINGS.contains(node.toString()) == false) { - DEPRECATION_LOGGER.deprecated("Expected a boolean for property [{}] but got [{}]", name, node); - } - return XContentMapValues.lenientNodeBooleanValue(node); + public static boolean nodeBooleanValue(String fieldName, String propertyName, Object node) { + return XContentMapValues.lenientNodeBooleanValue(node, fieldName + "." 
+ propertyName); } @Deprecated // for legacy ints only @@ -80,10 +71,10 @@ public static void parseNumberField(LegacyNumberFieldMapper.Builder builder, Str builder.precisionStep(nodeIntegerValue(propNode)); iterator.remove(); } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(nodeBooleanValue("ignore_malformed", propNode, parserContext)); + builder.ignoreMalformed(nodeBooleanValue(name, "ignore_malformed", propNode)); iterator.remove(); } else if (propName.equals("coerce")) { - builder.coerce(nodeBooleanValue("coerce", propNode, parserContext)); + builder.coerce(nodeBooleanValue(name, "coerce", propNode)); iterator.remove(); } else if (propName.equals("similarity")) { SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); @@ -95,7 +86,8 @@ public static void parseNumberField(LegacyNumberFieldMapper.Builder builder, Str } } - private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { + private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map fieldNode, + Mapper.TypeParser.ParserContext parserContext) { NamedAnalyzer indexAnalyzer = null; NamedAnalyzer searchAnalyzer = null; NamedAnalyzer searchQuoteAnalyzer = null; @@ -108,16 +100,17 @@ private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, St parseTermVector(name, propNode.toString(), builder); iterator.remove(); } else if (propName.equals("store_term_vectors")) { - builder.storeTermVectors(nodeBooleanValue("store_term_vectors", propNode, parserContext)); + builder.storeTermVectors(nodeBooleanValue(name, "store_term_vectors", propNode)); iterator.remove(); } else if (propName.equals("store_term_vector_offsets")) { - builder.storeTermVectorOffsets(nodeBooleanValue("store_term_vector_offsets", propNode, parserContext)); + builder.storeTermVectorOffsets(nodeBooleanValue(name, "store_term_vector_offsets", propNode)); iterator.remove(); } else if (propName.equals("store_term_vector_positions")) { - builder.storeTermVectorPositions(nodeBooleanValue("store_term_vector_positions", propNode, parserContext)); + builder.storeTermVectorPositions( + nodeBooleanValue(name, "store_term_vector_positions", propNode)); iterator.remove(); } else if (propName.equals("store_term_vector_payloads")) { - builder.storeTermVectorPayloads(nodeBooleanValue("store_term_vector_payloads", propNode, parserContext)); + builder.storeTermVectorPayloads(nodeBooleanValue(name,"store_term_vector_payloads", propNode)); iterator.remove(); } else if (propName.equals("analyzer")) { NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString()); @@ -148,7 +141,8 @@ private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, St } if (searchAnalyzer == null && searchQuoteAnalyzer != null) { - throw new MapperParsingException("analyzer and search_analyzer on field [" + name + "] must be set when search_quote_analyzer is set"); + throw new MapperParsingException("analyzer and search_analyzer on field [" + name + + "] must be set when search_quote_analyzer is set"); } if (searchAnalyzer == null) { @@ -170,16 +164,17 @@ private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, St } } - public static boolean parseNorms(FieldMapper.Builder builder, String propName, Object propNode, Mapper.TypeParser.ParserContext parserContext) { + public static boolean parseNorms(FieldMapper.Builder builder, String 
fieldName, String propName, Object propNode, + Mapper.TypeParser.ParserContext parserContext) { if (propName.equals("norms")) { if (propNode instanceof Map) { final Map properties = nodeMapValue(propNode, "norms"); - for (Iterator> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) { + for (Iterator> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext(); ) { Entry entry2 = propsIterator.next(); final String propName2 = entry2.getKey(); final Object propNode2 = entry2.getValue(); if (propName2.equals("enabled")) { - builder.omitNorms(!lenientNodeBooleanValue(propNode2)); + builder.omitNorms(nodeBooleanValue(fieldName, "enabled", propNode2) == false); propsIterator.remove(); } else if (propName2.equals("loading")) { // ignore for bw compat @@ -187,13 +182,14 @@ public static boolean parseNorms(FieldMapper.Builder builder, String propName, O } } DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated()); - DEPRECATION_LOGGER.deprecated("The [norms{enabled:true/false}] way of specifying norms is deprecated, please use [norms:true/false] instead"); + DEPRECATION_LOGGER.deprecated("The [norms{enabled:true/false}] way of specifying norms is deprecated, please use " + + "[norms:true/false] instead"); } else { - builder.omitNorms(nodeBooleanValue("norms", propNode, parserContext) == false); + builder.omitNorms(nodeBooleanValue(fieldName,"norms", propNode) == false); } return true; } else if (propName.equals("omit_norms")) { - builder.omitNorms(nodeBooleanValue("norms", propNode, parserContext)); + builder.omitNorms(nodeBooleanValue(fieldName,"norms", propNode)); DEPRECATION_LOGGER.deprecated("[omit_norms] is deprecated, please use [norms] instead with the opposite boolean value"); return true; } else { @@ -205,14 +201,15 @@ public static boolean parseNorms(FieldMapper.Builder builder, String propName, O * Parse text field attributes. In addition to {@link #parseField common attributes} * this will parse analysis and term-vectors related settings. 
*/ - public static void parseTextField(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { + public static void parseTextField(FieldMapper.Builder builder, String name, Map fieldNode, + Mapper.TypeParser.ParserContext parserContext) { parseField(builder, name, fieldNode, parserContext); parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext); - for (Iterator> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) { + for (Iterator> iterator = fieldNode.entrySet().iterator(); iterator.hasNext(); ) { Map.Entry entry = iterator.next(); final String propName = entry.getKey(); final Object propNode = entry.getValue(); - if (parseNorms(builder, propName, propNode, parserContext)) { + if (parseNorms(builder, name, propName, propNode, parserContext)) { iterator.remove(); } } @@ -235,28 +232,29 @@ public static void parseField(FieldMapper.Builder builder, String name, Map) propNode; } else { throw new MapperParsingException("expected map for property [fields] on field [" + propNode + "] or " + - "[" + propName + "] but got a " + propNode.getClass()); + "[" + propName + "] but got a " + propNode.getClass()); } for (Map.Entry multiFieldEntry : multiFieldsPropNodes.entrySet()) { String multiFieldName = multiFieldEntry.getKey(); if (multiFieldName.contains(".")) { - throw new MapperParsingException("Field name [" + multiFieldName + "] which is a multi field of [" + name + "] cannot contain '.'"); + throw new MapperParsingException("Field name [" + multiFieldName + "] which is a multi field of [" + name + "] cannot" + + " contain '.'"); } if (!(multiFieldEntry.getValue() instanceof Map)) { throw new MapperParsingException("illegal field [" + multiFieldName + "], only fields can be specified inside fields"); @@ -400,24 +400,11 @@ private static boolean parseIndex(String fieldName, String index) throws MapperP } } - private static boolean parseStore(String store) throws MapperParsingException { - if (BOOLEAN_STRINGS.contains(store) == false) { - DEPRECATION_LOGGER.deprecated("Expected a boolean for property [store] but got [{}]", store); - } - if ("no".equals(store)) { - return false; - } else if ("yes".equals(store)) { - return true; - } else { - return lenientNodeBooleanValue(store); - } - } - @SuppressWarnings("unchecked") public static void parseCopyFields(Object propNode, FieldMapper.Builder builder) { FieldMapper.CopyTo.Builder copyToBuilder = new FieldMapper.CopyTo.Builder(); if (isArray(propNode)) { - for(Object node : (List) propNode) { + for (Object node : (List) propNode) { copyToBuilder.add(nodeStringValue(node, null)); } } else { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 57f0d30b9e66c..d6a4f02a981af 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -994,6 +994,9 @@ private void internalPerformTranslogRecovery(boolean skipTranslogRecovery, boole recoveryState.setStage(RecoveryState.Stage.VERIFY_INDEX); // also check here, before we apply the translog if (Booleans.parseBoolean(checkIndexOnStartup, false)) { + if (Booleans.isStrictlyBoolean(checkIndexOnStartup) == false) { + deprecationLogger.deprecated("Expected a boolean for setting [{}] but got [{}]", IndexSettings.INDEX_CHECK_ON_STARTUP, checkIndexOnStartup); + } try { checkIndex(); } catch (IOException ex) { diff --git 
a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index 24b528ccd3fb9..c758d43778352 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -91,7 +91,7 @@ public Directory newDirectory() throws IOException { Set preLoadExtensions = new HashSet<>( indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING)); wrapped = setPreload(wrapped, location, lockFactory, preLoadExtensions); - if (IndexMetaData.isOnSharedFilesystem(indexSettings.getSettings())) { + if (indexSettings.isOnSharedFilesystem()) { wrapped = new SleepingLockWrapper(wrapped, 5000); } return new RateLimitedFSDirectory(wrapped, this, this) ; diff --git a/core/src/main/java/org/elasticsearch/rest/RestRequest.java b/core/src/main/java/org/elasticsearch/rest/RestRequest.java index 8c05a2b3ae71d..be0606f66cdb6 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/core/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -46,6 +48,7 @@ import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; public abstract class RestRequest implements ToXContent.Params { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(RestRequest.class)); private final NamedXContentRegistry xContentRegistry; private final Map params; @@ -200,12 +203,21 @@ public long paramAsLong(String key, long defaultValue) { @Override public boolean paramAsBoolean(String key, boolean defaultValue) { - return Booleans.parseBoolean(param(key), defaultValue); + return paramAsBoolean(key, (Boolean) defaultValue); } @Override public Boolean paramAsBoolean(String key, Boolean defaultValue) { - return Booleans.parseBoolean(param(key), defaultValue); + String rawParam = param(key); + // Treat empty string as true because that allows the presence of the url parameter to mean "turn this on" + if (rawParam != null && rawParam.length() == 0) { + return true; + } else { + if (rawParam != null && Booleans.isStrictlyBoolean(rawParam) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for request parameter [{}] but got [{}]", key, rawParam); + } + return Booleans.parseBoolean(rawParam, defaultValue); + } } public TimeValue paramAsTime(String key, TimeValue defaultValue) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java index 0ab2c86453ea6..b6c20f87b7562 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java @@ -24,6 +24,8 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -47,6 +49,7 @@ import java.util.Set; public class RestTable { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(RestRequest.class)); public static RestResponse buildResponse(Table table, RestChannel channel) throws Exception { RestRequest request = channel.request(); @@ -190,7 +193,12 @@ static List buildDisplayHeaders(Table table, RestRequest request) } else { for (Table.Cell cell : table.getHeaders()) { String d = cell.attr.get("default"); - if (Booleans.parseBoolean(d, true) && checkOutputTimestamp(cell.value.toString(), request)) { + boolean defaultValue = Booleans.parseBoolean(d, true); + if (d != null && Booleans.isStrictlyBoolean(d) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for attribute [default] of table header [{}] but got [{}]", + cell.value.toString(), d); + } + if (defaultValue && checkOutputTimestamp(cell.value.toString(), request)) { display.add(new DisplayHeader(cell.value.toString(), cell.value.toString())); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index f7323411d1701..4a33afc6e64f4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -160,7 +160,7 @@ public static void parseMultiLineRequest(RestRequest request, IndicesOptions ind } else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) { searchRequest.searchType(nodeStringValue(value, null)); } else if ("request_cache".equals(entry.getKey()) || "requestCache".equals(entry.getKey())) { - searchRequest.requestCache(lenientNodeBooleanValue(value)); + searchRequest.requestCache(lenientNodeBooleanValue(value, entry.getKey())); } else if ("preference".equals(entry.getKey())) { searchRequest.preference(nodeStringValue(value, null)); } else if ("routing".equals(entry.getKey())) { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java index f3e8bab93fa70..0923bc1d10d75 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -43,6 +45,7 @@ * Context used to fetch the {@code _source}. 
*/ public class FetchSourceContext implements Writeable, ToXContent { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(FetchSourceContext.class)); public static final ParseField INCLUDES_FIELD = new ParseField("includes", "include"); public static final ParseField EXCLUDES_FIELD = new ParseField("excludes", "exclude"); @@ -103,6 +106,10 @@ public static FetchSourceContext parseFromRestRequest(RestRequest request) { } else { source_includes = Strings.splitStringByCommaToArray(source); } + if (fetchSource != null && Booleans.isStrictlyBoolean(source) == false) { + DEPRECATION_LOGGER.deprecated("Expected a boolean for request parameter [_source] but got [{}]", source); + } + } String sIncludes = request.param("_source_includes"); sIncludes = request.param("_source_include", sIncludes); diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java b/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java new file mode 100644 index 0000000000000..c324eebe0e4cc --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.get; + +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.test.ESTestCase; + +public class MultiGetRequestTests extends ESTestCase { + + public void testAddWithInvalidSourceValueIsRejected() throws Exception { + String sourceValue = randomFrom("on", "off", "0", "1"); + XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .startArray("docs") + .startObject() + .field("_source", sourceValue) + .endObject() + .endArray() + .endObject() + ); + + MultiGetRequest multiGetRequest = new MultiGetRequest(); + multiGetRequest.add( + randomAsciiOfLength(5), randomAsciiOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); + + assertEquals(1, multiGetRequest.getItems().size()); + assertWarnings("Expected a boolean for property [_source] but got ["+ sourceValue + "]"); + } + + public void testAddWithValidSourceValueIsAccepted() throws Exception { + XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .startArray("docs") + .startObject() + .field("_source", randomFrom("false", "true")) + .endObject() + .startObject() + .field("_source", randomBoolean()) + .endObject() + .endArray() + .endObject() + ); + + MultiGetRequest multiGetRequest = new MultiGetRequest(); + multiGetRequest.add( + randomAsciiOfLength(5), randomAsciiOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); + + assertEquals(2, multiGetRequest.getItems().size()); + } +} diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 47d7128420161..299150e1780e7 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -54,6 +54,7 @@ import java.util.TreeSet; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -255,7 +256,15 @@ private void testOldSnapshot(String version, String repo, String snapshot) throw assertThat(template.template(), equalTo("te*")); assertThat(template.settings().getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, -1), equalTo(1)); assertThat(template.mappings().size(), equalTo(1)); - assertThat(template.mappings().get("type1").string(), equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}")); + assertThat(template.mappings().get("type1").string(), + anyOf( + equalTo("{\"type1\":{\"_source\":{\"enabled\":false}}}"), + equalTo("{\"type1\":{\"_source\":{\"enabled\":\"false\"}}}"), + equalTo("{\"type1\":{\"_source\":{\"enabled\":\"0\"}}}"), + equalTo("{\"type1\":{\"_source\":{\"enabled\":0}}}"), + equalTo("{\"type1\":{\"_source\":{\"enabled\":\"off\"}}}"), + equalTo("{\"type1\":{\"_source\":{\"enabled\":\"no\"}}}") + )); assertThat(template.aliases().size(), equalTo(3)); assertThat(template.aliases().get("alias1"), notNullValue()); assertThat(template.aliases().get("alias2").filter().string(), containsString(version)); diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java index 
c396350f9de05..ecdb476dcc33a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java @@ -94,18 +94,37 @@ public void testIndexTemplates() throws Exception { client().admin().indices().preparePutTemplate("foo_template") .setTemplate("te*") .setOrder(0) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "text").field("store", true).endObject() - .startObject("field2").field("type", "keyword").field("store", true).endObject() - .endObject().endObject().endObject()) + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "keyword") + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject()) .get(); client().admin().indices().preparePutTemplate("fuu_template") .setTemplate("test*") .setOrder(1) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "text").field("store", "no").endObject() - .endObject().endObject().endObject()) + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .endObject() + .endObject() + .endObject()) .get(); ClusterStateResponse clusterStateResponseUnfiltered = client().admin().cluster().prepareState().get(); diff --git a/core/src/test/java/org/elasticsearch/common/BooleansTests.java b/core/src/test/java/org/elasticsearch/common/BooleansTests.java index 176c4c75dc7a6..3e6b4d74b77c4 100644 --- a/core/src/test/java/org/elasticsearch/common/BooleansTests.java +++ b/core/src/test/java/org/elasticsearch/common/BooleansTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.common; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.util.Locale; @@ -28,59 +27,84 @@ import static org.hamcrest.Matchers.nullValue; public class BooleansTests extends ESTestCase { + private static final String[] NON_BOOLEANS = new String[]{"11", "00", "sdfsdfsf", "F", "T", "True", "False"}; + private static final String[] BOOLEANS = new String[]{"true", "false", "on", "off", "yes", "no", "0", "1"}; + private static final String[] TRUTHY = new String[]{"true", "on", "yes", "1"}; + private static final String[] FALSY = new String[]{"false", "off", "no", "0"}; + + public void testIsNonBoolean() { + assertThat(Booleans.isBoolean(null, 0, 1), is(false)); + + for (String nb : NON_BOOLEANS) { + String t = "prefix" + nb + "suffix"; + assertFalse("recognized [" + nb + "] as boolean", Booleans.isBoolean(t.toCharArray(), "prefix".length(), nb.length())); + assertFalse("recognized [" + nb + "] as boolean", Booleans.isStrictlyBoolean(t)); + } + } + + public void testParseBooleanWithFallback() { + assertFalse(Booleans.parseBoolean(null, false)); + assertTrue(Booleans.parseBoolean(null, true)); + assertNull(Booleans.parseBoolean(null, null)); + assertFalse(Booleans.parseBoolean(null, Boolean.FALSE)); + assertTrue(Booleans.parseBoolean(null, Boolean.TRUE)); + + assertTrue(Booleans.parseBoolean("true", randomFrom(Boolean.TRUE, Boolean.FALSE, null))); + assertFalse(Booleans.parseBoolean("false", 
randomFrom(Boolean.TRUE, Boolean.FALSE, null))); + } + + public void testParseBooleanExact() { + assertTrue(Booleans.parseBooleanExact(randomFrom(TRUTHY))); + assertFalse(Booleans.parseBooleanExact(randomFrom(FALSY))); + } + + public void testParseNonBooleanExact() { + expectThrows(IllegalArgumentException.class, () -> Booleans.parseBooleanExact(null)); + for (String nonBoolean : NON_BOOLEANS) { + expectThrows(IllegalArgumentException.class, () -> Booleans.parseBooleanExact(nonBoolean)); + } + } + + public void testParseNonBooleanExactWithFallback() { + for (String nonBoolean : NON_BOOLEANS) { + boolean defaultValue = randomFrom(Boolean.TRUE, Boolean.FALSE); + + expectThrows(IllegalArgumentException.class, + () -> Booleans.parseBooleanExact(nonBoolean, defaultValue)); + } + } + public void testIsBoolean() { - String[] booleans = new String[]{"true", "false", "on", "off", "yes", "no", "0", "1"}; - String[] notBooleans = new String[]{"11", "00", "sdfsdfsf", "F", "T"}; assertThat(Booleans.isBoolean(null, 0, 1), is(false)); - for (String b : booleans) { + for (String b : BOOLEANS) { String t = "prefix" + b + "suffix"; - assertThat("failed to recognize [" + b + "] as boolean", Booleans.isBoolean(t.toCharArray(), "prefix".length(), b.length()), Matchers.equalTo(true)); + assertTrue("failed to recognize [" + b + "] as boolean", + Booleans.isBoolean(t.toCharArray(), "prefix".length(), b.length())); } - for (String nb : notBooleans) { + for (String nb : NON_BOOLEANS) { String t = "prefix" + nb + "suffix"; - assertThat("recognized [" + nb + "] as boolean", Booleans.isBoolean(t.toCharArray(), "prefix".length(), nb.length()), Matchers.equalTo(false)); + assertFalse("recognized [" + nb + "] as boolean", + Booleans.isBoolean(t.toCharArray(), "prefix".length(), nb.length())); } } public void testParseBoolean() { - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomBoolean()), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomBoolean()), is(false)); + assertThat(Booleans.parseBoolean(randomFrom(TRUTHY), randomBoolean()), is(true)); + assertThat(Booleans.parseBoolean(randomFrom(FALSY), randomBoolean()), is(false)); assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT), randomBoolean()), is(true)); assertThat(Booleans.parseBoolean(null, false), is(false)); assertThat(Booleans.parseBoolean(null, true), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(false)); - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); + assertThat(Booleans.parseBoolean( + randomFrom("true", "on", "yes", "1"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); + assertThat(Booleans.parseBoolean( + randomFrom("false", "off", "no", "0"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(false)); + assertThat(Booleans.parseBoolean( + randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); assertThat(Booleans.parseBoolean(null, Boolean.FALSE), is(false)); assertThat(Booleans.parseBoolean(null, Boolean.TRUE), is(true)); assertThat(Booleans.parseBoolean(null, null), nullValue()); - - char[] chars = randomFrom("true", "on", "yes", "1").toCharArray(); - 
assertThat(Booleans.parseBoolean(chars, 0, chars.length, randomBoolean()), is(true)); - chars = randomFrom("false", "off", "no", "0").toCharArray(); - assertThat(Booleans.parseBoolean(chars,0, chars.length, randomBoolean()), is(false)); - chars = randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT).toCharArray(); - assertThat(Booleans.parseBoolean(chars,0, chars.length, randomBoolean()), is(true)); - } - - public void testParseBooleanExact() { - assertThat(Booleans.parseBooleanExact(randomFrom("true", "on", "yes", "1")), is(true)); - assertThat(Booleans.parseBooleanExact(randomFrom("false", "off", "no", "0")), is(false)); - try { - Booleans.parseBooleanExact(randomFrom("fred", "foo", "barney", null)); - fail("Expected exception while parsing invalid boolean value "); - } catch (Exception ex) { - assertTrue(ex instanceof IllegalArgumentException); - } - } - - public void testIsExplicit() { - assertThat(Booleans.isExplicitFalse(randomFrom("true", "on", "yes", "1", "foo", null)), is(false)); - assertThat(Booleans.isExplicitFalse(randomFrom("false", "off", "no", "0")), is(true)); - assertThat(Booleans.isExplicitTrue(randomFrom("true", "on", "yes", "1")), is(true)); - assertThat(Booleans.isExplicitTrue(randomFrom("false", "off", "no", "0", "foo", null)), is(false)); } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index a7257fbca8b45..d82c69017c3fd 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -19,11 +19,13 @@ package org.elasticsearch.common.xcontent; +import com.fasterxml.jackson.core.JsonParseException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -32,6 +34,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.nullValue; public class XContentParserTests extends ESTestCase { @@ -102,4 +105,29 @@ private Map readMapStrings(String source) throws IOException { return randomBoolean() ? 
parser.mapStringsOrdered() : parser.mapStrings(); } } + + public void testReadBooleans() throws IOException { + String falsy = randomFrom("\"false\"", "false"); + String truthy = randomFrom("\"true\"", "true"); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"foo\": " + falsy + ", \"bar\": " + truthy + "}")) { + XContentParser.Token token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.START_OBJECT)); + token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("foo")); + token = parser.nextToken(); + assertThat(token, isIn(Arrays.asList(XContentParser.Token.VALUE_STRING, XContentParser.Token.VALUE_BOOLEAN))); + assertTrue(parser.isBooleanValue()); + assertFalse(parser.booleanValue()); + + token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("bar")); + token = parser.nextToken(); + assertThat(token, isIn(Arrays.asList(XContentParser.Token.VALUE_STRING, XContentParser.Token.VALUE_BOOLEAN))); + assertTrue(parser.isBooleanValue()); + assertTrue(parser.booleanValue()); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 7ba78afb8c633..6f417a48f1ff5 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -21,9 +21,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TopDocs; -import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -34,7 +33,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -45,24 +43,32 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; /** Unit test(s) for IndexService */ public class IndexServiceTests extends ESSingleNodeTestCase { public void testDetermineShadowEngineShouldBeUsed() { - Settings regularSettings = Settings.builder() - .put(SETTING_NUMBER_OF_SHARDS, 2) - .put(SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - - Settings shadowSettings = Settings.builder() - .put(SETTING_NUMBER_OF_SHARDS, 2) - .put(SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) - .build(); + IndexSettings regularSettings = new IndexSettings( + IndexMetaData + .builder("regular") + .settings(Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, 2) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()) + .build(), + Settings.EMPTY); + + IndexSettings shadowSettings = new 
IndexSettings( + IndexMetaData + .builder("shadow") + .settings(Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, 2) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()) + .build(), + Settings.EMPTY); assertFalse("no shadow replicas for normal settings", IndexService.useShadowEngine(true, regularSettings)); assertFalse("no shadow replicas for normal settings", IndexService.useShadowEngine(false, regularSettings)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index 0d249c113885a..ac98d4105daae 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -46,15 +47,16 @@ import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.Collection; +import java.util.List; import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; public class BooleanFieldMapperTests extends ESSingleNodeTestCase { - - IndexService indexService; - DocumentMapperParser parser; + private IndexService indexService; + private DocumentMapperParser parser; @Before public void setup() { @@ -125,6 +127,48 @@ public void testSerialization() throws IOException { assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", builder.string()); } + public void testParsesBooleansLenient() throws IOException { + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "boolean") + .endObject() + .startObject("field2") + .field("type", "boolean") + .endObject() + .endObject() + .endObject() + .endObject().string(); + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); + + String falsy = randomFrom("false", "off", "no", "0"); + String truthy = randomFrom("true", "on", "yes", "1"); + + ParsedDocument parsedDoc = defaultMapper.parse("legacy", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field1", falsy) + .field("field2", truthy) + .endObject() + .bytes()); + Document doc = parsedDoc.rootDoc(); + assertEquals("F", doc.getField("field1").stringValue()); + assertEquals("T", doc.getField("field2").stringValue()); + + List expectedDeprecationWarnings = new ArrayList<>(); + if (Booleans.isStrictlyBoolean(falsy) == false) { + expectedDeprecationWarnings.add("Expected a boolean for property [field1] but got ["+ falsy + "]"); + } + if (Booleans.isStrictlyBoolean(truthy) == false) { + expectedDeprecationWarnings.add("Expected a boolean for property [field2] but got [" + truthy + "]"); + } + + if (expectedDeprecationWarnings.isEmpty() == false) { + assertWarnings(expectedDeprecationWarnings.toArray(new String[1])); + } + } + public void testMultiFields() throws IOException { String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") @@ -137,7 +181,8 @@ public void testMultiFields() throws IOException { .endObject() .endObject().endObject() .endObject().endObject().string(); - DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper mapper = indexService.mapperService() + .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); BytesReference source = XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java index b4d248c9c375a..5716e27ad1afc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java @@ -295,7 +295,7 @@ private List doTestUpgradeRandomMapping(int iter) throws IOException { Object store; if (randomBoolean()) { store = randomFrom("yes", "no"); - warnings.add("Expected a boolean for property [store] but got [" + store + "]"); + warnings.add("Expected a boolean for property [field.store] but got [" + store + "]"); } else { store = randomFrom(true, false); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java index 3856c3a2aba81..055670166745c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java @@ -87,7 +87,7 @@ public void testSimpleDisabled() throws Exception { public void testEnabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", "yes").endObject() + .startObject("_ttl").field("enabled", "true").endObject() .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() @@ -117,7 +117,7 @@ public void testThatEnablingTTLFieldOnMergeWorks() throws Exception { String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl") - .field("enabled", "yes") + .field("enabled", "true") .endObject() .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() .endObject().endObject().string(); @@ -133,7 +133,7 @@ public void testThatEnablingTTLFieldOnMergeWorks() throws Exception { public void testThatChangingTTLKeepsMapperEnabled() throws Exception { String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl") - .field("enabled", "yes") + .field("enabled", "true") .endObject() .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() .endObject().endObject().string(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java index f304cb03776eb..ddde16973d2a7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java @@ -96,7 +96,7 @@ public void testSimpleDisabled() throws Exception { public void testEnabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", "yes").endObject() + .startObject("_timestamp").field("enabled", "true").endObject() .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); BytesReference source = XContentFactory.jsonBuilder() @@ -146,7 +146,7 @@ public void testThatDisablingDuringMergeIsWorking() throws Exception { public void testTimestampMissingDefaultToEpochValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") - .field("enabled", "yes") + .field("enabled", "true") .field("default", "1970-01-01") .field("format", "YYYY-MM-dd") .endObject() @@ -171,7 +171,7 @@ public void testTimestampMissingDefaultToEpochValue() throws Exception { public void testTimestampMissingNowDefaultValue() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") - .field("enabled", "yes") + .field("enabled", "true") .field("default", "now") .field("format", "YYYY-MM-dd") .endObject() @@ -199,7 +199,7 @@ public void testTimestampMissingNowDefaultValue() throws Exception { public void testPathMissingWithForcedNullDefaultShouldFail() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") - .field("enabled", "yes") + .field("enabled", "true") .field("path", "timestamp") .field("default", (String) null) .endObject() @@ -213,7 +213,7 @@ public void testPathMissingWithForcedNullDefaultShouldFail() throws Exception { public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") - .field("enabled", "yes") + .field("enabled", "true") .field("default", (String) null) .endObject() .endObject().endObject(); @@ -227,7 +227,7 @@ public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Excepti public void testTimestampDefaultAndIgnore() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") - .field("enabled", "yes") + .field("enabled", "true") .field("default", "1971-12-26") .field("ignore_missing", false) .endObject() @@ -242,7 +242,7 @@ public void testTimestampDefaultAndIgnore() throws Exception { public void testTimestampMissingShouldNotFail() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp") - .field("enabled", "yes") + .field("enabled", "true") .endObject() .endObject().endObject(); XContentBuilder doc = XContentFactory.jsonBuilder() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java index d9b11d9773313..9d46e30e4cb5b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java @@ -87,13 +87,29 @@ private void 
testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping public void testConflictFieldsMapping(String fieldName) throws Exception { //test store, ... all the parameters that are not to be changed just like in other fields - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject(fieldName).field("enabled", true).field("store", "no").endObject() - .endObject().endObject(); - XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject(fieldName).field("enabled", true).field("store", "yes").endObject() - .startObject("properties").startObject("text").field("type", "text").endObject().endObject() - .endObject().endObject(); + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject(fieldName) + .field("enabled", true) + .field("store", false) + .endObject() + .endObject() + .endObject(); + XContentBuilder mappingUpdate = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject(fieldName) + .field("enabled", true) + .field("store", true) + .endObject() + .startObject("properties") + .startObject("text") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject(); testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate); } diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index fe3b020b6f23c..d35ca9d46ec9a 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -155,18 +155,37 @@ public void testDeleteIndexTemplate() throws Exception { client().admin().indices().preparePutTemplate("template_1") .setTemplate("te*") .setOrder(0) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field1").field("type", "text").field("store", true).endObject() - .startObject("field2").field("type", "text").field("store", true).endObject() - .endObject().endObject().endObject()) + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "text") + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject()) .execute().actionGet(); client().admin().indices().preparePutTemplate("template_2") .setTemplate("test*") .setOrder(1) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "text").field("store", "no").endObject() - .endObject().endObject().endObject()) + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .endObject() + .endObject() + .endObject()) .execute().actionGet(); logger.info("--> explicitly delete template_1"); diff --git a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 3e3d310b33ce4..80ee4dd6d0de8 100644 --- a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ 
b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -160,8 +160,8 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli final HashMap params = new HashMap<>(); params.put("format", randomAsciiOfLength(8)); params.put("filter_path", randomAsciiOfLength(8)); - params.put("pretty", randomAsciiOfLength(8)); - params.put("human", randomAsciiOfLength(8)); + params.put("pretty", randomFrom("true", "false", "", null)); + params.put("human", null); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mock(NodeClient.class)); diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java index cf42f2b1b3c0a..38ed76cae5249 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java @@ -139,7 +139,7 @@ public void testIgnoreContentType() throws Exception { public void testThatDisplayHeadersWithoutTimestamp() throws Exception { restRequest.params().put("h", "timestamp,epoch,bulk*"); - restRequest.params().put("ts", "0"); + restRequest.params().put("ts", "false"); List headers = buildDisplayHeaders(table, restRequest); List headerNames = getHeaderNames(headers); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 9578e0c628155..d961fd677aa0a 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -37,7 +37,7 @@ ScriptService makeScriptService() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // no file watching, so we don't need a ResourceWatcherService - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), "off") + .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), "false") .put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", "false") .put("script.engine." + MockScriptEngine.NAME + ".inline." 
+ PLUGIN_NAME + "_custom_exp_disabled_op", "false") .build(); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 3bcbe69856c93..8dea8822b6934 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -447,8 +447,8 @@ public void testRestoreTemplates() throws Exception { logger.info("--> creating test template"); assertThat(client.admin().indices().preparePutTemplate("test-template").setTemplate("te*").addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", "true").endObject() + .startObject("field2").field("type", "string").field("store", "true").field("index", "not_analyzed").endObject() .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true)); logger.info("--> snapshot"); @@ -488,8 +488,8 @@ public void testIncludeGlobalState() throws Exception { if(testTemplate) { logger.info("--> creating test template"); assertThat(client.admin().indices().preparePutTemplate("test-template").setTemplate("te*").addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "string").field("store", "true").endObject() + .startObject("field2").field("type", "string").field("store", "true").field("index", "not_analyzed").endObject() .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true)); } diff --git a/core/src/test/resources/indices/bwc/index-5.1.1.zip b/core/src/test/resources/indices/bwc/index-5.1.1.zip index e809431b7e575..9df43e70f4d55 100644 Binary files a/core/src/test/resources/indices/bwc/index-5.1.1.zip and b/core/src/test/resources/indices/bwc/index-5.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-5.1.2.zip b/core/src/test/resources/indices/bwc/index-5.1.2.zip index 8f85605fdd92d..021735879d104 100644 Binary files a/core/src/test/resources/indices/bwc/index-5.1.2.zip and b/core/src/test/resources/indices/bwc/index-5.1.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-5.1.1.zip b/core/src/test/resources/indices/bwc/repo-5.1.1.zip index f4fd3437ef80f..8300b4d0dea64 100644 Binary files a/core/src/test/resources/indices/bwc/repo-5.1.1.zip and b/core/src/test/resources/indices/bwc/repo-5.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-5.1.2.zip b/core/src/test/resources/indices/bwc/repo-5.1.2.zip index 859e4ae959e61..e371b79e4c360 100644 Binary files a/core/src/test/resources/indices/bwc/repo-5.1.2.zip and b/core/src/test/resources/indices/bwc/repo-5.1.2.zip differ diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index a6814e52f8fe9..5e506dde234e6 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -48,6 +48,28 @@ def rarely(): def frequently(): return not rarely() 
+def capabilities_of(version): + current_version = parse_version(version) + + return { + 'warmers': current_version < parse_version('2.0.0-alpha1'), + 'dots_in_field_names': current_version >= parse_version('2.4.0'), + 'lenient_booleans': current_version < parse_version('6.0.0-alpha1') + } + + +def falsy(lenient): + return random.choice(['off', 'no', '0', 0, 'false', False]) if lenient else False + + +def truthy(lenient): + return random.choice(['on', 'yes', '1', 1, 'true', True]) if lenient else True + + +def random_bool(lenient): + return random.choice([falsy, truthy])(lenient) + + # asserts the correctness of the given hits given they are sorted asc def assert_sort(hits): values = [hit['sort'] for hit in hits['hits']['hits']] @@ -59,19 +81,23 @@ def assert_sort(hits): # Indexes the given number of document into the given index # and randomly runs refresh, optimize and flush commands -def index_documents(es, index_name, type, num_docs, supports_dots_in_field_names): +def index_documents(es, index_name, type, num_docs, capabilities): logging.info('Indexing %s docs' % num_docs) - index(es, index_name, type, num_docs, supports_dots_in_field_names, True) + index(es, index_name, type, num_docs, capabilities, flush=True) logging.info('Flushing index') es.indices.flush(index=index_name) -def index(es, index_name, type, num_docs, supports_dots_in_field_names, flush=False): +def index(es, index_name, type, num_docs, capabilities, flush=False): for id in range(0, num_docs): - body = {'string': str(random.randint(0, 100)), - 'long_sort': random.randint(0, 100), - 'double_sort' : float(random.randint(0, 100)), - 'bool' : random.choice([True, False])} - if supports_dots_in_field_names: + lenient_bool = capabilities['lenient_booleans'] + body = { + 'string': str(random.randint(0, 100)), + 'long_sort': random.randint(0, 100), + 'double_sort': float(random.randint(0, 100)), + # be sure to create a "proper" boolean (True, False) for the first document so that automapping is correct + 'bool': random_bool(lenient_bool) if id > 0 else random.choice([True, False]) + } + if capabilities['dots_in_field_names']: body['field.with.dots'] = str(random.randint(0, 100)) body['binary'] = base64.b64encode(bytearray(random.getrandbits(8) for _ in range(16))).decode('ascii') @@ -83,10 +109,11 @@ def index(es, index_name, type, num_docs, supports_dots_in_field_names, flush=Fa if rarely() and flush: es.indices.flush(index=index_name, force=frequently()) -def reindex_docs(es, index_name, type, num_docs, supports_dots_in_field_names): +def reindex_docs(es, index_name, type, num_docs, capabilities): logging.info('Re-indexing %s docs' % num_docs) + capabilities['lenient_booleans'] = False # reindex some docs after the flush such that we have something in the translog - index(es, index_name, type, num_docs, supports_dots_in_field_names) + index(es, index_name, type, num_docs, capabilities) def delete_by_query(es, version, index_name, doc_type): @@ -200,9 +227,12 @@ def generate_index(client, version, index_name): client.indices.delete(index=index_name, ignore=404) logging.info('Create single shard test index') + capabilities = capabilities_of(version) + lenient_booleans = capabilities['lenient_booleans'] + mappings = {} warmers = {} - if parse_version(version) < parse_version('2.0.0-alpha1'): + if capabilities['warmers']: warmers['warmer1'] = { 'source': { 'query': { @@ -249,7 +279,7 @@ def generate_index(client, version, index_name): } mappings['meta_fields'] = { '_routing': { - 'required': 'false' + 'required': 
falsy(lenient_booleans) }, } mappings['custom_formats'] = { @@ -266,13 +296,12 @@ def generate_index(client, version, index_name): } mappings['auto_boost'] = { '_all': { - 'auto_boost': True + 'auto_boost': truthy(lenient_booleans) } } mappings['doc'] = {'properties' : {}} - supports_dots_in_field_names = parse_version(version) >= parse_version("2.4.0") - if supports_dots_in_field_names: + if capabilities['dots_in_field_names']: if parse_version(version) < parse_version("5.0.0-alpha1"): mappings["doc"]['properties'].update({ 'field.with.dots': { @@ -320,7 +349,7 @@ def generate_index(client, version, index_name): 'properties': { 'string_with_norms_disabled': { 'type': 'text', - 'norms' : False + 'norms': False }, 'string_with_norms_enabled': { 'type': 'keyword', @@ -340,7 +369,7 @@ def generate_index(client, version, index_name): # test back-compat of stored binary fields mappings['doc']['properties']['binary'] = { 'type': 'binary', - 'store': True, + 'store': truthy(lenient_booleans), } settings = { @@ -363,7 +392,7 @@ def generate_index(client, version, index_name): if parse_version(version) < parse_version("5.0.0-alpha1"): health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) else: - health = client.cluster.health(wait_for_status='green', wait_for_no_relocating_shards=True) + health = client.cluster.health(wait_for_status='green', params={"wait_for_no_relocating_shards": "true"}) assert health['timed_out'] == False, 'cluster health timed out %s' % health num_docs = random.randint(2000, 3000) @@ -372,13 +401,13 @@ def generate_index(client, version, index_name): # lighter index for it to keep bw tests reasonable # see https://github.com/elastic/elasticsearch/issues/5817 num_docs = int(num_docs / 10) - index_documents(client, index_name, 'doc', num_docs, supports_dots_in_field_names) + index_documents(client, index_name, 'doc', num_docs, capabilities) if parse_version(version) < parse_version('5.1.0'): logging.info("Adding a alias that can't be created in 5.1+ so we can assert that we can still use it") client.indices.put_alias(index=index_name, name='#' + index_name) logging.info('Running basic asserts on the data added') run_basic_asserts(client, version, index_name, 'doc', num_docs) - return num_docs, supports_dots_in_field_names + return num_docs, capabilities def snapshot_index(client, version, repo_dir): persistent = { @@ -400,7 +429,9 @@ def snapshot_index(client, version, repo_dir): }, "mappings": { "type1": { - "_source": { "enabled" : False } + "_source": { + "enabled": falsy(capabilities_of(version)['lenient_booleans']) + } } }, "aliases": { @@ -488,7 +519,7 @@ def create_bwc_index(cfg, version): node = start_node(version, release_dir, data_dir, repo_dir, cfg.tcp_port, cfg.http_port) client = create_client(cfg.http_port) index_name = 'index-%s' % version.lower() - num_docs, supports_dots_in_field_names = generate_index(client, version, index_name) + num_docs, capabilities = generate_index(client, version, index_name) if snapshot_supported: snapshot_index(client, version, repo_dir) @@ -497,7 +528,7 @@ def create_bwc_index(cfg, version): # will already have the deletions applied on upgrade. 
if version.startswith('0.') or version.startswith('1.'): delete_by_query(client, version, index_name, 'doc') - reindex_docs(client, index_name, 'doc', min(100, num_docs), supports_dots_in_field_names) + reindex_docs(client, index_name, 'doc', min(100, num_docs), capabilities) shutdown_node(node) node = None diff --git a/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc index ccde46a3fd2ad..b233081d54049 100644 --- a/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc @@ -53,7 +53,7 @@ curl -XPUT localhost:9200/test/ -d ' "filter" : { "code" : { "type" : "pattern_capture", - "preserve_original" : 1, + "preserve_original" : true, "patterns" : [ "(\\p{Ll}+|\\p{Lu}\\p{Ll}+|\\p{Lu}+)", "(\\d+)" @@ -94,7 +94,7 @@ curl -XPUT localhost:9200/test/ -d ' "filter" : { "email" : { "type" : "pattern_capture", - "preserve_original" : 1, + "preserve_original" : true, "patterns" : [ "([^@]+)", "(\\p{L}+)", diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index efec2efe1a4a0..107849f986f19 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -477,8 +477,9 @@ convention of using underscore casing. All REST APIs parameters (both request parameters and JSON body) support providing boolean "false" as the values: `false`, `0`, `no` and `off`. -All other values are considered "true". Note, this is not related to -fields within a document indexed treated as boolean fields. +All other values are considered "true". + +deprecated[5.3.0,Usage of any value other than "false" and "true" is deprecated.] [float] === Number Values diff --git a/docs/reference/cat/health.asciidoc b/docs/reference/cat/health.asciidoc index cca24c66a3612..a87fe4e5e4a6a 100644 --- a/docs/reference/cat/health.asciidoc +++ b/docs/reference/cat/health.asciidoc @@ -22,7 +22,7 @@ It has one option `ts` to disable the timestamping: [source,js] -------------------------------------------------- -GET /_cat/health?v&ts=0 +GET /_cat/health?v&ts=false -------------------------------------------------- // CONSOLE // TEST[s/^/PUT twitter\n{"settings":{"number_of_replicas": 0}}\n/] diff --git a/docs/reference/mapping/types/boolean.asciidoc b/docs/reference/mapping/types/boolean.asciidoc index 260715d5cc3b8..9bd775fa4e3b4 100644 --- a/docs/reference/mapping/types/boolean.asciidoc +++ b/docs/reference/mapping/types/boolean.asciidoc @@ -15,6 +15,8 @@ True values:: deprecated[5.1.0,While Elasticsearch will currently accept the above values during index time. Searching a boolean field using these pseudo-boolean values is deprecated. Please use "true" or "false" instead.] +deprecated[5.3.0,Usage of any value other than `false`, `"false"`, `true` and `"true"` is deprecated.] + For example: [source,js] @@ -34,7 +36,7 @@ PUT my_index POST my_index/my_type/1 { - "is_published": 1 <1> + "is_published": "true" <1> } GET my_index/_search @@ -47,7 +49,7 @@ GET my_index/_search } -------------------------------------------------- // CONSOLE -<1> Indexing a document with `1`, which is interpreted as `true`. +<1> Indexing a document with `"true"`, which is interpreted as `true`. <2> Searching for documents with a JSON `true`. Aggregations like the <