
Commit 1ae02b4

Merge branch 'trunk' into HADOOP-18547
2 parents 2959a9c + 17c8cdf commit 1ae02b4


474 files changed: +14851, -3697 lines changed


LICENSE-binary

Lines changed: 4 additions & 5 deletions
@@ -251,7 +251,7 @@ commons-collections:commons-collections:3.2.2
 commons-daemon:commons-daemon:1.0.13
 commons-io:commons-io:2.8.0
 commons-logging:commons-logging:1.1.3
-commons-net:commons-net:3.8.0
+commons-net:commons-net:3.9.0
 de.ruedigermoeller:fst:2.50
 io.grpc:grpc-api:1.26.0
 io.grpc:grpc-context:1.26.0
@@ -260,7 +260,6 @@ io.grpc:grpc-netty:1.26.0
 io.grpc:grpc-protobuf:1.26.0
 io.grpc:grpc-protobuf-lite:1.26.0
 io.grpc:grpc-stub:1.26.0
-io.netty:netty:3.10.6.Final
 io.netty:netty-all:4.1.77.Final
 io.netty:netty-buffer:4.1.77.Final
 io.netty:netty-codec:4.1.77.Final
@@ -324,7 +323,7 @@ org.apache.htrace:htrace-core:3.1.0-incubating
 org.apache.htrace:htrace-core4:4.1.0-incubating
 org.apache.httpcomponents:httpclient:4.5.6
 org.apache.httpcomponents:httpcore:4.4.10
-org.apache.kafka:kafka-clients:2.8.1
+org.apache.kafka:kafka-clients:2.8.2
 org.apache.kerby:kerb-admin:2.0.2
 org.apache.kerby:kerb-client:2.0.2
 org.apache.kerby:kerb-common:2.0.2
@@ -343,7 +342,7 @@ org.apache.kerby:token-provider:2.0.2
 org.apache.solr:solr-solrj:8.8.2
 org.apache.yetus:audience-annotations:0.5.0
 org.apache.zookeeper:zookeeper:3.6.3
-org.codehaus.jettison:jettison:1.5.1
+org.codehaus.jettison:jettison:1.5.3
 org.eclipse.jetty:jetty-annotations:9.4.48.v20220622
 org.eclipse.jetty:jetty-http:9.4.48.v20220622
 org.eclipse.jetty:jetty-io:9.4.48.v20220622
@@ -363,7 +362,7 @@ org.lz4:lz4-java:1.7.1
 org.objenesis:objenesis:2.6
 org.xerial.snappy:snappy-java:1.0.5
 org.yaml:snakeyaml:1.33
-org.wildfly.openssl:wildfly-openssl:1.0.7.Final
+org.wildfly.openssl:wildfly-openssl:1.1.3.Final
 
 
 --------------------------------------------------------------------------------

hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java

Lines changed: 60 additions & 6 deletions
@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.util;
 
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.Arrays;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -33,21 +37,71 @@ public class PlatformName {
    * per the java-vm.
    */
   public static final String PLATFORM_NAME =
-      (System.getProperty("os.name").startsWith("Windows")
-      ? System.getenv("os") : System.getProperty("os.name"))
-      + "-" + System.getProperty("os.arch")
-      + "-" + System.getProperty("sun.arch.data.model");
+      (System.getProperty("os.name").startsWith("Windows") ?
+      System.getenv("os") : System.getProperty("os.name"))
+      + "-" + System.getProperty("os.arch") + "-"
+      + System.getProperty("sun.arch.data.model");
 
   /**
    * The java vendor name used in this platform.
   */
   public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
 
+  /**
+   * Define a system class accessor that is open to changes in underlying implementations
+   * of the system class loader modules.
+   */
+  private static final class SystemClassAccessor extends ClassLoader {
+    public Class<?> getSystemClass(String className) throws ClassNotFoundException {
+      return findSystemClass(className);
+    }
+  }
+
   /**
    * A public static variable to indicate the current java vendor is
-   * IBM java or not.
+   * IBM and the type is Java Technology Edition which provides its
+   * own implementations of many security packages and Cipher suites.
+   * Note that these are not provided in Semeru runtimes:
+   * See https://developer.ibm.com/languages/java/semeru-runtimes for details.
   */
-  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
+  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM") &&
+      hasIbmTechnologyEditionModules();
+
+  private static boolean hasIbmTechnologyEditionModules() {
+    return Arrays.asList(
+        "com.ibm.security.auth.module.JAASLoginModule",
+        "com.ibm.security.auth.module.Win64LoginModule",
+        "com.ibm.security.auth.module.NTLoginModule",
+        "com.ibm.security.auth.module.AIX64LoginModule",
+        "com.ibm.security.auth.module.LinuxLoginModule",
+        "com.ibm.security.auth.module.Krb5LoginModule"
+    ).stream().anyMatch((module) -> isSystemClassAvailable(module));
+  }
+
+  /**
+   * In rare cases where different behaviour is performed based on the JVM vendor
+   * this method should be used to test for a unique JVM class provided by the
+   * vendor rather than using the vendor method. For example if on JVM provides a
+   * different Kerberos login module testing for that login module being loadable
+   * before configuring to use it is preferable to using the vendor data.
+   *
+   * @param className the name of a class in the JVM to test for
+   * @return true if the class is available, false otherwise.
+   */
+  private static boolean isSystemClassAvailable(String className) {
+    return AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> {
+      try {
+        // Using ClassLoader.findSystemClass() instead of
+        // Class.forName(className, false, null) because Class.forName with a null
+        // ClassLoader only looks at the boot ClassLoader with Java 9 and above
+        // which doesn't look at all the modules available to the findSystemClass.
+        new SystemClassAccessor().getSystemClass(className);
+        return true;
+      } catch (Exception ignored) {
+        return false;
+      }
+    });
+  }
 
   public static void main(String[] args) {
     System.out.println(PLATFORM_NAME);
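
The new javadoc above recommends probing for a vendor-specific class rather than branching on the vendor string alone. A minimal caller sketch under that advice, not part of this commit: PlatformName.IBM_JAVA and the IBM Krb5LoginModule class name come from the diff, com.sun.security.auth.module.Krb5LoginModule is the standard OpenJDK login module, and the KerberosModuleSelector class is hypothetical.

import org.apache.hadoop.util.PlatformName;

final class KerberosModuleSelector {
  // IBM_JAVA is now true only when the IBM Technology Edition security
  // modules are actually loadable, so this selection no longer misfires
  // on Semeru runtimes that report an IBM vendor string.
  static String krb5LoginModule() {
    return PlatformName.IBM_JAVA
        ? "com.ibm.security.auth.module.Krb5LoginModule"
        : "com.sun.security.auth.module.Krb5LoginModule";
  }
}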

hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ This filter must be configured in front of all the web application resources tha
 
 The Hadoop Auth and dependent JAR files must be in the web application classpath (commonly the `WEB-INF/lib` directory).
 
-Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be addded explicitly to the web application. For example, if the web applicationan uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application classpath as well as the Log4j configuration file.
+Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be addded explicitly to the web application. For example, if the web applicationan uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part of the web application classpath as well as the Log4j configuration file.
 
 ### Common Configuration parameters
 

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 0 additions & 5 deletions
@@ -200,11 +200,6 @@
       <artifactId>assertj-core</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.glassfish.grizzly</groupId>
-      <artifactId>grizzly-http-servlet</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>commons-beanutils</groupId>
       <artifactId>commons-beanutils</artifactId>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

Lines changed: 8 additions & 5 deletions
@@ -241,12 +241,15 @@ public synchronized void close() throws IOException {
       return;
     }
     try {
-      flush();
-      if (closeOutputStream) {
-        super.close();
-        codec.close();
+      try {
+        flush();
+      } finally {
+        if (closeOutputStream) {
+          super.close();
+          codec.close();
+        }
+        freeBuffers();
       }
-      freeBuffers();
     } finally {
       closed = true;
     }
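
Reassembled from the hunk above (indentation approximate, the closed guard at the top inferred from the surrounding context), the close() method now reads as follows: the inner finally guarantees that the wrapped stream is closed and the buffers are freed even when flush() throws, while the outer finally keeps the method idempotent by always marking the stream closed.

  @Override
  public synchronized void close() throws IOException {
    if (closed) {
      return;
    }
    try {
      try {
        flush();
      } finally {
        // Runs even if flush() threw: release the underlying stream,
        // the codec, and the direct buffers.
        if (closeOutputStream) {
          super.close();
          codec.close();
        }
        freeBuffers();
      }
    } finally {
      // Always mark closed so repeated close() calls are no-ops.
      closed = true;
    }
  }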

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java

Lines changed: 0 additions & 1 deletion
@@ -60,7 +60,6 @@ public AvroFSInput(final FileContext fc, final Path p) throws IOException {
             FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
         .withFileStatus(status)
         .build());
-    fc.open(p);
   }
 
   @Override

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java

Lines changed: 4 additions & 2 deletions
@@ -402,7 +402,8 @@ public void setSymlink(final Path p) {
   }
 
   /**
-   * Compare this FileStatus to another FileStatus
+   * Compare this FileStatus to another FileStatus based on lexicographical
+   * order of path.
    * @param o the FileStatus to be compared.
    * @return a negative integer, zero, or a positive integer as this object
    * is less than, equal to, or greater than the specified object.
@@ -412,7 +413,8 @@ public int compareTo(FileStatus o) {
   }
 
   /**
-   * Compare this FileStatus to another FileStatus.
+   * Compare this FileStatus to another FileStatus based on lexicographical
+   * order of path.
    * This method was added back by HADOOP-14683 to keep binary compatibility.
   *
   * @param o the FileStatus to be compared.
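
With the clarified contract, the natural ordering of FileStatus sorts entries by the lexicographical order of their paths. A hedged sketch under that contract: FileSystem.listStatus() and Arrays.sort() are existing APIs, the SortedListing helper is hypothetical and not part of this commit.

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class SortedListing {
  // Relies on FileStatus.compareTo(), which orders entries by path.
  static FileStatus[] listSortedByPath(FileSystem fs, Path dir) throws IOException {
    FileStatus[] statuses = fs.listStatus(dir);
    Arrays.sort(statuses);
    return statuses;
  }
}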

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ public Trash(FileSystem fs, Configuration conf) throws IOException {
    * Hence we get the file system of the fully-qualified resolved-path and
    * then move the path p to the trashbin in that volume,
    * @param fs - the filesystem of path p
-   * @param p - the path being deleted - to be moved to trasg
+   * @param p - the path being deleted - to be moved to trash
    * @param conf - configuration
    * @return false if the item is already in the trash or trash is disabled
    * @throws IOException on error

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/audit/AuditConstants.java

Lines changed: 5 additions & 0 deletions
@@ -90,6 +90,11 @@ private AuditConstants() {
    */
   public static final String PARAM_PROCESS = "ps";
 
+  /**
+   * Header: Range for GET request data: {@value}.
+   */
+  public static final String PARAM_RANGE = "rg";
+
   /**
    * Task Attempt ID query header: {@value}.
   */

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/WeakRefMetricsSource.java

Lines changed: 97 additions & 0 deletions
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.impl;
+
+import java.lang.ref.WeakReference;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsSource;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+ * A weak referenced metrics source which avoids hanging on to large objects
+ * if somehow they don't get fully closed/cleaned up.
+ * The JVM may clean up all objects which are only weakly referenced whenever
+ * it does a GC, <i>even if there is no memory pressure</i>.
+ * To avoid these refs being removed, always keep a strong reference around
+ * somewhere.
+ */
+@InterfaceAudience.Private
+public class WeakRefMetricsSource implements MetricsSource {
+
+  /**
+   * Name to know when unregistering.
+   */
+  private final String name;
+
+  /**
+   * Underlying metrics source.
+   */
+  private final WeakReference<MetricsSource> sourceWeakReference;
+
+  /**
+   * Constructor.
+   * @param name Name to know when unregistering.
+   * @param source metrics source
+   */
+  public WeakRefMetricsSource(final String name, final MetricsSource source) {
+    this.name = name;
+    this.sourceWeakReference = new WeakReference<>(requireNonNull(source));
+  }
+
+  /**
+   * If the weak reference is non null, update the metrics.
+   * @param collector to contain the resulting metrics snapshot
+   * @param all if true, return all metrics even if unchanged.
+   */
+  @Override
+  public void getMetrics(final MetricsCollector collector, final boolean all) {
+    MetricsSource metricsSource = sourceWeakReference.get();
+    if (metricsSource != null) {
+      metricsSource.getMetrics(collector, all);
+    }
+  }
+
+  /**
+   * Name to know when unregistering.
+   * @return the name passed in during construction.
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Get the source, will be null if the reference has been GC'd
+   * @return the source reference
+   */
+  public MetricsSource getSource() {
+    return sourceWeakReference.get();
+  }
+
+  @Override
+  public String toString() {
+    return "WeakRefMetricsSource{" +
+        "name='" + name + '\'' +
+        ", sourceWeakReference is " +
+        (sourceWeakReference.get() == null ? "unset" : "set") +
+        '}';
+  }
+}
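
A hedged usage sketch for the new class, not taken from this commit: the owning object keeps the only strong reference to the real MetricsSource and registers just the weak wrapper, so an abandoned owner is not pinned in memory by the metrics registry. DefaultMetricsSystem.instance(), MetricsSystem.register() and unregisterSource() are existing Hadoop metrics2 APIs; the StatisticsOwner class and the registered names are hypothetical.

import org.apache.hadoop.fs.impl.WeakRefMetricsSource;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

final class StatisticsOwner {
  private MetricsSource stats;        // strong reference kept by the owner
  private String registeredName;

  void startMetrics(String name, MetricsSource source) {
    this.stats = source;              // keeps the real source strongly reachable
    this.registeredName = name;
    // Only the weak wrapper is handed to the metrics system.
    DefaultMetricsSystem.instance().register(name,
        "weakly referenced source", new WeakRefMetricsSource(name, source));
  }

  void stopMetrics() {
    DefaultMetricsSystem.instance().unregisterSource(registeredName);
    stats = null;
  }
}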
