Store file digests as byte[] in WorkerKey.
Do not wrap worker file digests in `HashCode` objects, and use the "digest" nomenclature, which matches the terminology used in action execution.

This avoids unnecessarily wrapping the `byte[]` and copying it twice: once on creation of the `HashCode` and once in `HashCode::asBytes`.

PiperOrigin-RevId: 426514694
alexjski authored and copybara-github committed Feb 5, 2022
1 parent c673bc5 commit 56df8e9
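
The commit message's point about double copying comes from Guava's defensive copies: `HashCode.fromBytes` copies the array it is given, and `HashCode::asBytes` copies again on the way out. A minimal sketch of the round trip this change eliminates (class and method names here are illustrative, not from this commit):

```java
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

// Illustrative sketch of the copying described in the commit message.
class DigestCopySketch {
  static void demo(byte[] digest) {
    Hasher hasher = Hashing.sha256().newHasher();

    // Before: the digest was wrapped for storage in the map...
    HashCode wrapped = HashCode.fromBytes(digest); // defensive copy #1
    // ...and unwrapped again when fed into the combined hash.
    hasher.putBytes(wrapped.asBytes()); // defensive copy #2

    // After: the raw byte[] is stored and hashed directly, with no copies.
    hasher.putBytes(digest);
  }
}
```
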
Showing 7 changed files with 42 additions and 43 deletions.
@@ -62,8 +62,8 @@ HashCode getWorkerFilesCombinedHash() {
     return workerKey.getWorkerFilesCombinedHash();
   }
 
-  SortedMap<PathFragment, HashCode> getWorkerFilesWithHashes() {
-    return workerKey.getWorkerFilesWithHashes();
+  SortedMap<PathFragment, byte[]> getWorkerFilesWithDigests() {
+    return workerKey.getWorkerFilesWithDigests();
   }
 
   /** Returns true if this worker is sandboxed. */

@@ -13,16 +13,19 @@
 // limitations under the License.
 package com.google.devtools.build.lib.worker;
 
-import com.google.common.hash.HashCode;
+import com.google.common.io.BaseEncoding;
 import com.google.devtools.build.lib.events.Event;
 import com.google.devtools.build.lib.events.Reporter;
 import com.google.devtools.build.lib.vfs.Path;
 import com.google.devtools.build.lib.vfs.PathFragment;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Locale;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicInteger;
+import javax.annotation.Nullable;
 import org.apache.commons.pool2.BaseKeyedPooledObjectFactory;
 import org.apache.commons.pool2.PooledObject;
 import org.apache.commons.pool2.impl.DefaultPooledObject;
@@ -164,18 +167,18 @@ public boolean validateObject(WorkerKey key, PooledObject<Worker> p) {
               "%s %s (id %d) can no longer be used, because its files have changed on disk:",
               key.getMnemonic(), key.getWorkerTypeName(), worker.getWorkerId()));
       TreeSet<PathFragment> files = new TreeSet<>();
-      files.addAll(key.getWorkerFilesWithHashes().keySet());
-      files.addAll(worker.getWorkerFilesWithHashes().keySet());
+      files.addAll(key.getWorkerFilesWithDigests().keySet());
+      files.addAll(worker.getWorkerFilesWithDigests().keySet());
       for (PathFragment file : files) {
-        HashCode oldHash = worker.getWorkerFilesWithHashes().get(file);
-        HashCode newHash = key.getWorkerFilesWithHashes().get(file);
-        if (!oldHash.equals(newHash)) {
+        byte[] oldDigest = worker.getWorkerFilesWithDigests().get(file);
+        byte[] newDigest = key.getWorkerFilesWithDigests().get(file);
+        if (!Arrays.equals(oldDigest, newDigest)) {
           msg.append("\n")
               .append(file.getPathString())
               .append(": ")
-              .append(oldHash != null ? oldHash : "<none>")
+              .append(hexStringForDebugging(oldDigest))
               .append(" -> ")
-              .append(newHash != null ? newHash : "<none>");
+              .append(hexStringForDebugging(newDigest));
         }
       }
 
@@ -185,6 +188,10 @@ public boolean validateObject(WorkerKey key, PooledObject<Worker> p) {
     return !filesChanged;
   }
 
+  private static String hexStringForDebugging(@Nullable byte[] bytes) {
+    return bytes != null ? BaseEncoding.base16().encode(bytes).toLowerCase(Locale.ROOT) : "<none>";
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) {
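
Two standard-library behaviors the new validation code relies on, sketched standalone (illustrative, not part of the commit): arrays inherit identity `equals` from `Object`, so digest contents must be compared with `Arrays.equals`, which is also null-tolerant — the old `oldHash.equals(newHash)` would have thrown a `NullPointerException` when a file existed only in the new key. And `BaseEncoding.base16().encode` produces uppercase hex, hence the `toLowerCase` in `hexStringForDebugging`.

```java
import com.google.common.io.BaseEncoding;
import java.util.Arrays;
import java.util.Locale;

// Illustrative sketch, not code from this commit.
class DigestComparisonSketch {
  public static void main(String[] unused) {
    byte[] a = {0x12, (byte) 0xab};
    byte[] b = {0x12, (byte) 0xab};

    System.out.println(a.equals(b));            // false: identity comparison
    System.out.println(Arrays.equals(a, b));    // true: content comparison
    System.out.println(Arrays.equals(a, null)); // false, and no exception

    // The same lowercase-hex rendering hexStringForDebugging produces:
    System.out.println(BaseEncoding.base16().encode(a).toLowerCase(Locale.ROOT)); // 12ab
  }
}
```
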
@@ -39,30 +39,29 @@
  */
 class WorkerFilesHash {
 
-  static HashCode getCombinedHash(SortedMap<PathFragment, HashCode> workerFilesMap) {
+  static HashCode getCombinedHash(SortedMap<PathFragment, byte[]> workerFilesMap) {
     Hasher hasher = Hashing.sha256().newHasher();
-    for (Map.Entry<PathFragment, HashCode> workerFile : workerFilesMap.entrySet()) {
-      hasher.putString(workerFile.getKey().getPathString(), Charset.defaultCharset());
-      hasher.putBytes(workerFile.getValue().asBytes());
-    }
+    workerFilesMap.forEach(
+        (execPath, digest) -> {
+          hasher.putString(execPath.getPathString(), Charset.defaultCharset());
+          hasher.putBytes(digest);
+        });
     return hasher.hash();
   }
 
   /**
    * Return a map that contains the execroot relative path and hash of each tool and runfiles
    * artifact of the given spawn.
    */
-  static SortedMap<PathFragment, HashCode> getWorkerFilesWithHashes(
+  static SortedMap<PathFragment, byte[]> getWorkerFilesWithDigests(
       Spawn spawn, ArtifactExpander artifactExpander, MetadataProvider actionInputFileCache)
       throws IOException {
-    TreeMap<PathFragment, HashCode> workerFilesMap = new TreeMap<>();
+    TreeMap<PathFragment, byte[]> workerFilesMap = new TreeMap<>();
 
     List<ActionInput> tools =
         ActionInputHelper.expandArtifacts(spawn.getToolFiles(), artifactExpander);
     for (ActionInput tool : tools) {
-      workerFilesMap.put(
-          tool.getExecPath(),
-          HashCode.fromBytes(actionInputFileCache.getMetadata(tool).getDigest()));
+      workerFilesMap.put(tool.getExecPath(), actionInputFileCache.getMetadata(tool).getDigest());
     }
 
     for (Map.Entry<PathFragment, Map<PathFragment, Artifact>> rootAndMappings :
@@ -74,9 +73,7 @@ static SortedMap<PathFragment, HashCode> getWorkerFilesWithHashes(
         if (localArtifact != null) {
           FileArtifactValue metadata = actionInputFileCache.getMetadata(localArtifact);
           if (metadata.getType().isFile()) {
-            workerFilesMap.put(
-                root.getRelative(mapping.getKey()),
-                HashCode.fromBytes(metadata.getDigest()));
+            workerFilesMap.put(root.getRelative(mapping.getKey()), metadata.getDigest());
           }
         }
       }
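
For context, `getCombinedHash` folds every (path, digest) pair into a single SHA-256 in the map's sorted order, so the result is deterministic. A self-contained sketch of the same shape, with `String` keys standing in for `PathFragment` (an illustrative simplification, not the project's code):

```java
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.nio.charset.Charset;
import java.util.SortedMap;
import java.util.TreeMap;

// Illustrative sketch, not code from this commit.
class CombinedHashSketch {
  static HashCode combine(SortedMap<String, byte[]> files) {
    Hasher hasher = Hashing.sha256().newHasher();
    files.forEach(
        (path, digest) -> {
          hasher.putString(path, Charset.defaultCharset());
          hasher.putBytes(digest); // raw digest bytes, no HashCode round trip
        });
    return hasher.hash();
  }

  public static void main(String[] unused) {
    SortedMap<String, byte[]> files = new TreeMap<>();
    files.put("bin/tool", new byte[] {1, 2, 3});
    files.put("bin/tool.data", new byte[] {4, 5, 6});
    // TreeMap iterates in sorted key order, so the combined hash is stable.
    System.out.println(combine(files));
  }
}
```
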
24 changes: 9 additions & 15 deletions src/main/java/com/google/devtools/build/lib/worker/WorkerKey.java
@@ -48,8 +48,8 @@ final class WorkerKey {
    * methods.
    */
   private final HashCode workerFilesCombinedHash;
-  /** Worker files with the corresponding hash code. */
-  private final SortedMap<PathFragment, HashCode> workerFilesWithHashes;
+  /** Worker files with the corresponding digest. */
+  private final SortedMap<PathFragment, byte[]> workerFilesWithDigests;
   /** If true, the workers run inside a sandbox. */
   private final boolean sandboxed;
   /** A WorkerProxy will be instantiated if true, instantiate a regular Worker if false. */
@@ -70,7 +70,7 @@ final class WorkerKey {
       Path execRoot,
       String mnemonic,
       HashCode workerFilesCombinedHash,
-      SortedMap<PathFragment, HashCode> workerFilesWithHashes,
+      SortedMap<PathFragment, byte[]> workerFilesWithDigests,
       boolean sandboxed,
       boolean multiplex,
       boolean cancellable,
@@ -80,42 +80,36 @@ final class WorkerKey {
     this.execRoot = Preconditions.checkNotNull(execRoot);
     this.mnemonic = Preconditions.checkNotNull(mnemonic);
     this.workerFilesCombinedHash = Preconditions.checkNotNull(workerFilesCombinedHash);
-    this.workerFilesWithHashes = Preconditions.checkNotNull(workerFilesWithHashes);
+    this.workerFilesWithDigests = Preconditions.checkNotNull(workerFilesWithDigests);
     this.sandboxed = sandboxed;
     this.multiplex = multiplex;
     this.cancellable = cancellable;
     this.protocolFormat = protocolFormat;
     hash = calculateHashCode();
   }
 
-  /** Getter function for variable args. */
   public ImmutableList<String> getArgs() {
     return args;
   }
 
-  /** Getter function for variable env. */
   public ImmutableMap<String, String> getEnv() {
     return env;
   }
 
-  /** Getter function for variable execRoot. */
   public Path getExecRoot() {
     return execRoot;
   }
 
-  /** Getter function for variable mnemonic. */
   public String getMnemonic() {
     return mnemonic;
   }
 
-  /** Getter function for variable workerFilesCombinedHash. */
   public HashCode getWorkerFilesCombinedHash() {
     return workerFilesCombinedHash;
   }
 
-  /** Getter function for variable workerFilesWithHashes. */
-  public SortedMap<PathFragment, HashCode> getWorkerFilesWithHashes() {
-    return workerFilesWithHashes;
+  public SortedMap<PathFragment, byte[]> getWorkerFilesWithDigests() {
+    return workerFilesWithDigests;
   }
 
   /** Returns true if workers are sandboxed. */
@@ -217,11 +211,11 @@ public String toString() {
     // debugging.
     return CommandFailureUtils.describeCommand(
         CommandDescriptionForm.COMPLETE,
-        /* prettyPrintArgs= */ false,
+        /*prettyPrintArgs=*/ false,
         args,
         env,
         execRoot.getPathString(),
-        /* configurationChecksum=*/ null,
-        /* executionPlatform= */ null);
+        /*configurationChecksum=*/ null,
+        /*executionPlatformAsLabelString=*/ null);
   }
 }
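
Note that `WorkerKey` keeps `workerFilesCombinedHash` as a `HashCode`, and the unchanged `equals`/`hashCode` around this hunk suggest key identity still flows through that combined hash rather than the digest map. That matters because a map with `byte[]` values compares those values by identity, as this illustrative sketch shows:

```java
import java.util.Map;
import java.util.TreeMap;

// Illustrative sketch, not code from this commit.
class MapOfArraysCaveat {
  public static void main(String[] unused) {
    Map<String, byte[]> m1 = new TreeMap<>();
    Map<String, byte[]> m2 = new TreeMap<>();
    m1.put("file", new byte[] {1});
    m2.put("file", new byte[] {1});

    // Map.equals compares values with Object.equals; arrays use identity,
    // so structurally identical maps still compare unequal.
    System.out.println(m1.equals(m2)); // false
  }
}
```
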
@@ -84,8 +84,8 @@ public WorkerConfig compute(Spawn spawn, SpawnExecutionContext context)
     ImmutableMap<String, String> env =
         localEnvProvider.rewriteLocalEnv(spawn.getEnvironment(), binTools, "/tmp");
 
-    SortedMap<PathFragment, HashCode> workerFiles =
-        WorkerFilesHash.getWorkerFilesWithHashes(
+    SortedMap<PathFragment, byte[]> workerFiles =
+        WorkerFilesHash.getWorkerFilesWithDigests(
             spawn, context.getArtifactExpander(), context.getMetadataProvider());
 
     HashCode workerFilesCombinedHash = WorkerFilesHash.getCombinedHash(workerFiles);
@@ -123,7 +123,7 @@ static WorkerKey createWorkerKey(
       ImmutableMap<String, String> env,
       Path execRoot,
       HashCode workerFilesCombinedHash,
-      SortedMap<PathFragment, HashCode> workerFiles,
+      SortedMap<PathFragment, byte[]> workerFiles,
       WorkerOptions options,
       boolean dynamic,
       WorkerProtocolFormat protocolFormat) {

@@ -411,7 +411,7 @@ WorkResponse execInWorker(
             String.format("Worker #%d preparing execution", worker.getWorkerId()))) {
       // We consider `prepareExecution` to be also part of setup.
       Stopwatch prepareExecutionStopwatch = Stopwatch.createStarted();
-      worker.prepareExecution(inputFiles, outputs, key.getWorkerFilesWithHashes().keySet());
+      worker.prepareExecution(inputFiles, outputs, key.getWorkerFilesWithDigests().keySet());
       initializeMetricsSet(key, worker);
       spawnMetrics.setSetupTime(setupInputsTime.plus(prepareExecutionStopwatch.elapsed()));
     } catch (IOException e) {

@@ -81,7 +81,8 @@ private TestWorker createTestWorker(byte[] outputStreamBytes, WorkerProtocolForm
 
     SandboxInputs sandboxInputs = null;
     SandboxOutputs sandboxOutputs = null;
-    worker.prepareExecution(sandboxInputs, sandboxOutputs, key.getWorkerFilesWithHashes().keySet());
+    worker.prepareExecution(
+        sandboxInputs, sandboxOutputs, key.getWorkerFilesWithDigests().keySet());
 
     workerForCleanup = worker;
 
