From 93725daca7820634d216875a997a5d56612ec0ce Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 7 Sep 2022 03:31:45 -0700 Subject: [PATCH] Allow `download`, `download_and_extract`, and `file` in `module_ctx` This involves some changes in logging and reporting, since a "download event" is not necessarily tied to a repository any more, but could be from a module extension impl function. So we generally convert any logging of repo names to a "context", which could either be a repo or a module extension. Fixes https://github.com/bazelbuild/bazel/issues/16144 PiperOrigin-RevId: 472682924 Change-Id: I8c5b7477e6993a6fb77e07d82fc97260c19674b4 --- .../bazel/bzlmod/ModuleExtensionContext.java | 6 +- .../bzlmod/SingleExtensionEvalFunction.java | 4 +- .../lib/bazel/debug/WorkspaceRuleEvent.java | 74 +- .../build/lib/bazel/debug/workspace_log.proto | 5 +- .../repository/CacheHitReportingModule.java | 26 +- .../cache/RepositoryCacheHitEvent.java | 11 +- .../downloader/DownloadManager.java | 13 +- .../starlark/StarlarkBaseExternalContext.java | 763 ++++++++++++++++- .../starlark/StarlarkRepositoryContext.java | 785 +----------------- .../bazel/debug/WorkspaceRuleEventTest.java | 2 +- src/test/py/bazel/bzlmod/bazel_module_test.py | 26 + src/test/shell/bazel/bazel_workspaces_test.sh | 72 +- .../shell/bazel/starlark_repository_test.sh | 2 +- .../workspacelog/WorkspaceLogParser.java | 2 +- .../workspacelog/WorkspaceLogParserTest.java | 28 +- 15 files changed, 923 insertions(+), 896 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java index 0ef2b8e00ac546..6dbf5e3e4a6748 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java @@ -40,6 +40,7 @@ + " argument to the implementation function when you create a module" + " extension.") public class ModuleExtensionContext extends StarlarkBaseExternalContext { + private final ModuleExtensionId extensionId; private final StarlarkList modules; protected ModuleExtensionContext( @@ -51,6 +52,7 @@ protected ModuleExtensionContext( @Nullable ProcessWrapper processWrapper, StarlarkSemantics starlarkSemantics, @Nullable RepositoryRemoteExecutor remoteExecutor, + ModuleExtensionId extensionId, StarlarkList modules) { super( workingDirectory, @@ -61,6 +63,7 @@ protected ModuleExtensionContext( processWrapper, starlarkSemantics, remoteExecutor); + this.extensionId = extensionId; this.modules = modules; } @@ -70,7 +73,8 @@ public Path getWorkingDirectory() { @Override protected String getIdentifyingStringForLogging() { - return "TODO"; + return String.format( + "module extension %s in %s", extensionId.getExtensionName(), extensionId.getBzlFileLabel()); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java index f7fc23dbd61755..23cbd57d10021e 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java @@ -171,7 +171,7 @@ public SkyValue compute(SkyKey skyKey, Environment env) StarlarkThread thread = new StarlarkThread(mu, starlarkSemantics); thread.setPrintHandler(Event.makeDebugPrintHandler(env.getListener())); 
ModuleExtensionContext moduleContext = - createContext(env, usagesValue, starlarkSemantics, extension); + createContext(env, usagesValue, starlarkSemantics, extensionId, extension); threadContext.storeInThread(thread); try { Starlark.fastcall( @@ -232,6 +232,7 @@ private ModuleExtensionContext createContext( Environment env, SingleExtensionUsagesValue usagesValue, StarlarkSemantics starlarkSemantics, + ModuleExtensionId extensionId, ModuleExtension extension) throws SingleExtensionEvalFunctionException { Path workingDirectory = @@ -262,6 +263,7 @@ private ModuleExtensionContext createContext( processWrapper, starlarkSemantics, repositoryRemoteExecutor, + extensionId, StarlarkList.immutableCopyOf(modules)); } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEvent.java b/src/main/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEvent.java index d1a33ba472d387..1e0164ea7fc69d 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEvent.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEvent.java @@ -46,7 +46,7 @@ public static WorkspaceRuleEvent newExecuteEvent( Map customEnvironment, String outputDirectory, boolean quiet, - String ruleLabel, + String context, Location location) { WorkspaceLogProtos.ExecuteEvent.Builder e = @@ -71,8 +71,8 @@ public static WorkspaceRuleEvent newExecuteEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } @@ -84,7 +84,7 @@ public static WorkspaceRuleEvent newDownloadEvent( String sha256, String integrity, Boolean executable, - String ruleLabel, + String context, Location location) { WorkspaceLogProtos.DownloadEvent.Builder e = WorkspaceLogProtos.DownloadEvent.newBuilder() @@ -102,8 +102,8 @@ public static WorkspaceRuleEvent newDownloadEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } @@ -114,9 +114,8 @@ public static WorkspaceRuleEvent newExtractEvent( String output, String stripPrefix, Map renameFiles, - String ruleLabel, + String context, Location location) { - ExtractEvent e = WorkspaceLogProtos.ExtractEvent.newBuilder() .setArchive(archive) @@ -131,8 +130,8 @@ public static WorkspaceRuleEvent newExtractEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } @@ -146,7 +145,7 @@ public static WorkspaceRuleEvent newDownloadAndExtractEvent( String type, String stripPrefix, Map renameFiles, - String ruleLabel, + String context, Location location) { WorkspaceLogProtos.DownloadAndExtractEvent.Builder e = WorkspaceLogProtos.DownloadAndExtractEvent.newBuilder() @@ -166,15 +165,15 @@ public static WorkspaceRuleEvent newDownloadAndExtractEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for a file event. 
*/ public static WorkspaceRuleEvent newFileEvent( - String path, String content, boolean executable, String ruleLabel, Location location) { + String path, String content, boolean executable, String context, Location location) { FileEvent e = WorkspaceLogProtos.FileEvent.newBuilder() .setPath(path) @@ -188,14 +187,14 @@ public static WorkspaceRuleEvent newFileEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for a file read event. */ - public static WorkspaceRuleEvent newReadEvent(String path, String ruleLabel, Location location) { + public static WorkspaceRuleEvent newReadEvent(String path, String context, Location location) { WorkspaceLogProtos.ReadEvent e = WorkspaceLogProtos.ReadEvent.newBuilder().setPath(path).build(); @@ -205,15 +204,14 @@ public static WorkspaceRuleEvent newReadEvent(String path, String ruleLabel, Loc if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for a file read event. */ - public static WorkspaceRuleEvent newDeleteEvent( - String path, String ruleLabel, Location location) { + public static WorkspaceRuleEvent newDeleteEvent(String path, String context, Location location) { WorkspaceLogProtos.DeleteEvent e = WorkspaceLogProtos.DeleteEvent.newBuilder().setPath(path).build(); @@ -223,15 +221,15 @@ public static WorkspaceRuleEvent newDeleteEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for a patch event. */ public static WorkspaceRuleEvent newPatchEvent( - String patchFile, int strip, String ruleLabel, Location location) { + String patchFile, int strip, String context, Location location) { WorkspaceLogProtos.PatchEvent e = WorkspaceLogProtos.PatchEvent.newBuilder().setPatchFile(patchFile).setStrip(strip).build(); @@ -241,14 +239,14 @@ public static WorkspaceRuleEvent newPatchEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for an os event. */ - public static WorkspaceRuleEvent newOsEvent(String ruleLabel, Location location) { + public static WorkspaceRuleEvent newOsEvent(String context, Location location) { OsEvent e = WorkspaceLogProtos.OsEvent.getDefaultInstance(); WorkspaceLogProtos.WorkspaceEvent.Builder result = @@ -257,15 +255,15 @@ public static WorkspaceRuleEvent newOsEvent(String ruleLabel, Location location) if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for a symlink event. 
*/ public static WorkspaceRuleEvent newSymlinkEvent( - String from, String to, String ruleLabel, Location location) { + String from, String to, String context, Location location) { SymlinkEvent e = WorkspaceLogProtos.SymlinkEvent.newBuilder().setTarget(from).setPath(to).build(); @@ -275,8 +273,8 @@ public static WorkspaceRuleEvent newSymlinkEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } @@ -287,7 +285,7 @@ public static WorkspaceRuleEvent newTemplateEvent( String template, Map substitutions, boolean executable, - String ruleLabel, + String context, Location location) { TemplateEvent e = WorkspaceLogProtos.TemplateEvent.newBuilder() @@ -303,15 +301,15 @@ public static WorkspaceRuleEvent newTemplateEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } /** Creates a new WorkspaceRuleEvent for a which event. */ public static WorkspaceRuleEvent newWhichEvent( - String program, String ruleLabel, Location location) { + String program, String context, Location location) { WhichEvent e = WorkspaceLogProtos.WhichEvent.newBuilder().setProgram(program).build(); WorkspaceLogProtos.WorkspaceEvent.Builder result = @@ -320,8 +318,8 @@ public static WorkspaceRuleEvent newWhichEvent( if (location != null) { result = result.setLocation(location.toString()); } - if (ruleLabel != null) { - result = result.setRule(ruleLabel); + if (context != null) { + result = result.setContext(context); } return new WorkspaceRuleEvent(result.build()); } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/debug/workspace_log.proto b/src/main/java/com/google/devtools/build/lib/bazel/debug/workspace_log.proto index f975e8ef0fd003..a16955d64b2182 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/debug/workspace_log.proto +++ b/src/main/java/com/google/devtools/build/lib/bazel/debug/workspace_log.proto @@ -145,8 +145,9 @@ message WorkspaceEvent { // Location in the code (.bzl file) where the event originates. string location = 1; - // Label of the rule whose evaluation caused this event. - string rule = 2; + // The context in which this event happened. Can be "repository @foo", or + // "module extension foo in @bar//:quux.bzl". 
+ string context = 2; oneof event { ExecuteEvent execute_event = 3; diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java index 90d551c49ef99b..d5bf0ab37cd01d 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/CacheHitReportingModule.java @@ -30,39 +30,39 @@ /** Module reporting about cache hits in external repositories in case of failures */ public final class CacheHitReportingModule extends BlazeModule { private Reporter reporter; - private Map>> cacheHitsByRepo; + private Map>> cacheHitsByContext; @Override public void beforeCommand(CommandEnvironment env) { env.getEventBus().register(this); this.reporter = env.getReporter(); - this.cacheHitsByRepo = new HashMap>>(); + this.cacheHitsByContext = new HashMap<>(); } @Override public void afterCommand() { this.reporter = null; - this.cacheHitsByRepo = null; + this.cacheHitsByContext = null; } @Subscribe public synchronized void cacheHit(RepositoryCacheHitEvent event) { - String repo = event.getRepo().getName(); - if (cacheHitsByRepo.get(repo) == null) { - cacheHitsByRepo.put(repo, new HashSet>()); - } - cacheHitsByRepo.get(repo).add(Pair.of(event.getFileHash(), event.getUrl())); + cacheHitsByContext + .computeIfAbsent(event.getContext(), k -> new HashSet<>()) + .add(Pair.of(event.getFileHash(), event.getUrl())); } @Subscribe public void failed(RepositoryFailedEvent event) { - String repo = event.getRepo().getName(); - Set> cacheHits = cacheHitsByRepo.get(repo); + // TODO(wyv): figure out where to put this context generation logic (right now it needs to be + // kept in sync with StarlarkRepositoryContext.getIdentifyingStringForLogging), and add an + // event for the failure of a module extension too + String context = "repository " + event.getRepo().getNameWithAt(); + Set> cacheHits = cacheHitsByContext.get(context); if (cacheHits != null && !cacheHits.isEmpty()) { StringBuilder info = new StringBuilder(); - info.append("Repository '") - .append(repo) + info.append(context) .append( "' used the following cache hits instead of downloading the corresponding file.\n"); for (Pair hit : cacheHits) { @@ -73,7 +73,7 @@ public void failed(RepositoryFailedEvent event) { .append("\n"); } info.append("If the definition of '") - .append(repo) + .append(context) .append("' was updated, verify that the hashes were also updated."); reporter.handle(Event.info(info.toString())); } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/cache/RepositoryCacheHitEvent.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/cache/RepositoryCacheHitEvent.java index aeb3b1d0ccd680..0c3545531f07c5 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/cache/RepositoryCacheHitEvent.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/cache/RepositoryCacheHitEvent.java @@ -14,24 +14,23 @@ package com.google.devtools.build.lib.bazel.repository.cache; -import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.events.ExtendedEventHandler.Postable; import java.net.URL; /** Event reporting about cache hits for download requests. 
*/ public final class RepositoryCacheHitEvent implements Postable { - private final RepositoryName repo; + private final String context; private final String hash; private final URL url; - public RepositoryCacheHitEvent(RepositoryName repo, String hash, URL url) { - this.repo = repo; + public RepositoryCacheHitEvent(String context, String hash, URL url) { + this.context = context; this.hash = hash; this.url = url; } - public RepositoryName getRepo() { - return repo; + public String getContext() { + return context; } public URL getUrl() { diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java index b75d703c2ca11f..3d168a285dc36e 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java @@ -28,7 +28,6 @@ import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache.KeyType; import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCacheHitEvent; import com.google.devtools.build.lib.bazel.repository.downloader.UrlRewriter.RewrittenURL; -import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.vfs.FileSystemUtils; @@ -105,8 +104,7 @@ public void setNetrcCreds(Credentials netrcCreds) { * @param output destination filename if {@code type} is absent, otherwise output directory * @param eventHandler CLI progress reporter * @param clientEnv environment variables in shell issuing this command - * @param repo the name of the external repository for which the file was fetched; used only for - * reporting + * @param context the context in which the file was fetched; used only for reporting * @throws IllegalArgumentException on parameter badness, which should be checked beforehand * @throws IOException if download was attempted and ended up failing * @throws InterruptedException if this thread is being cast into oblivion @@ -120,7 +118,7 @@ public Path download( Path output, ExtendedEventHandler eventHandler, Map clientEnv, - String repo) + String context) throws IOException, InterruptedException { if (Thread.interrupted()) { throw new InterruptedException(); @@ -187,9 +185,7 @@ public Path download( repositoryCache.get(cacheKey, destination, cacheKeyType, canonicalId); if (cachedDestination != null) { // Cache hit! 
- eventHandler.post( - new RepositoryCacheHitEvent( - RepositoryName.createUnvalidated(repo), cacheKey, mainUrl)); + eventHandler.post(new RepositoryCacheHitEvent(context, cacheKey, mainUrl)); return cachedDestination; } } catch (IOException e) { @@ -249,8 +245,7 @@ public Path download( } if (disableDownload) { - throw new IOException( - String.format("Failed to download repo %s: download is disabled.", repo)); + throw new IOException(String.format("Failed to download %s: download is disabled.", context)); } if (rewrittenUrls.isEmpty() && !originalUrls.isEmpty()) { diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java index 4351a0cb09754a..7b9c12d178cfbb 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java @@ -14,15 +14,31 @@ package com.google.devtools.build.lib.bazel.repository.starlark; +import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.common.base.Ascii; +import com.google.common.base.Optional; import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Maps; import com.google.devtools.build.lib.actions.FileValue; import com.google.devtools.build.lib.bazel.debug.WorkspaceRuleEvent; +import com.google.devtools.build.lib.bazel.repository.DecompressorDescriptor; +import com.google.devtools.build.lib.bazel.repository.DecompressorValue; +import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache; +import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache.KeyType; +import com.google.devtools.build.lib.bazel.repository.downloader.Checksum; import com.google.devtools.build.lib.bazel.repository.downloader.DownloadManager; +import com.google.devtools.build.lib.bazel.repository.downloader.HttpUtils; import com.google.devtools.build.lib.cmdline.Label; +import com.google.devtools.build.lib.events.ExtendedEventHandler.FetchProgress; +import com.google.devtools.build.lib.packages.StarlarkInfo; +import com.google.devtools.build.lib.packages.StructImpl; +import com.google.devtools.build.lib.packages.StructProvider; import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions; import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; @@ -45,10 +61,19 @@ import com.google.devtools.build.skyframe.SkyKey; import java.io.File; import java.io.IOException; +import java.io.OutputStream; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.nio.file.InvalidPathException; +import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -110,6 +135,711 @@ public ImmutableMap getAccumulatedFileDigests() { return ImmutableMap.copyOf(accumulatedFileDigests); } + protected void 
checkInOutputDirectory(String operation, StarlarkPath path) + throws RepositoryFunctionException { + if (!path.getPath().getPathString().startsWith(workingDirectory.getPathString())) { + throw new RepositoryFunctionException( + Starlark.errorf( + "Cannot %s outside of the repository directory for path %s", operation, path), + Transience.PERSISTENT); + } + } + + /** + * From an authentication dict extract a map of headers. + * + *
Given a dict as provided as "auth" argument, compute a map specifying for each URI provided + * which additional headers (as usual, represented as a map from Strings to Strings) should + * additionally be added to the request. For some form of authentication, in particular basic + * authentication, adding those headers is enough; for other forms of authentication other + * measures might be necessary. + */ + private static ImmutableMap> getAuthHeaders(Map> auth) + throws RepositoryFunctionException, EvalException { + ImmutableMap.Builder> headers = new ImmutableMap.Builder<>(); + for (Map.Entry> entry : auth.entrySet()) { + try { + URL url = new URL(entry.getKey()); + Dict authMap = entry.getValue(); + if (authMap.containsKey("type")) { + if ("basic".equals(authMap.get("type"))) { + if (!authMap.containsKey("login") || !authMap.containsKey("password")) { + throw Starlark.errorf( + "Found request to do basic auth for %s without 'login' and 'password' being" + + " provided.", + entry.getKey()); + } + String credentials = authMap.get("login") + ":" + authMap.get("password"); + headers.put( + url.toURI(), + ImmutableMap.of( + "Authorization", + "Basic " + Base64.getEncoder().encodeToString(credentials.getBytes(UTF_8)))); + } else if ("pattern".equals(authMap.get("type"))) { + if (!authMap.containsKey("pattern")) { + throw Starlark.errorf( + "Found request to do pattern auth for %s without a pattern being provided", + entry.getKey()); + } + + String result = (String) authMap.get("pattern"); + + for (String component : Arrays.asList("password", "login")) { + String demarcatedComponent = "<" + component + ">"; + + if (result.contains(demarcatedComponent)) { + if (!authMap.containsKey(component)) { + throw Starlark.errorf( + "Auth pattern contains %s but it was not provided in auth dict.", + demarcatedComponent); + } + } else { + // component isn't in the pattern, ignore it + continue; + } + + result = result.replaceAll(demarcatedComponent, (String) authMap.get(component)); + } + + headers.put(url.toURI(), ImmutableMap.of("Authorization", result)); + } + } + } catch (MalformedURLException e) { + throw new RepositoryFunctionException(e, Transience.PERSISTENT); + } catch (URISyntaxException e) { + throw new EvalException(e); + } + } + return headers.buildOrThrow(); + } + + private static Map> getAuthContents(Dict x, String what) + throws EvalException { + // Dict.cast returns Dict. 
+ @SuppressWarnings({"unchecked", "rawtypes"}) + Map> res = (Map) Dict.cast(x, String.class, Dict.class, what); + return res; + } + + private static ImmutableList checkAllUrls(Iterable urlList) throws EvalException { + ImmutableList.Builder result = ImmutableList.builder(); + + for (Object o : urlList) { + if (!(o instanceof String)) { + throw Starlark.errorf( + "Expected a string or sequence of strings for 'url' argument, but got '%s' item in the" + + " sequence", + Starlark.type(o)); + } + result.add((String) o); + } + + return result.build(); + } + + private static ImmutableList getUrls( + Object urlOrList, boolean ensureNonEmpty, boolean checksumGiven) + throws RepositoryFunctionException, EvalException { + ImmutableList urlStrings; + if (urlOrList instanceof String) { + urlStrings = ImmutableList.of((String) urlOrList); + } else { + urlStrings = checkAllUrls((Iterable) urlOrList); + } + if (ensureNonEmpty && urlStrings.isEmpty()) { + throw new RepositoryFunctionException(new IOException("urls not set"), Transience.PERSISTENT); + } + ImmutableList.Builder urls = ImmutableList.builder(); + for (String urlString : urlStrings) { + URL url; + try { + url = new URL(urlString); + } catch (MalformedURLException e) { + throw new RepositoryFunctionException( + new IOException("Bad URL: " + urlString, e), Transience.PERSISTENT); + } + if (!HttpUtils.isUrlSupportedByDownloader(url)) { + throw new RepositoryFunctionException( + new IOException("Unsupported protocol: " + url.getProtocol()), Transience.PERSISTENT); + } + if (!checksumGiven) { + if (!Ascii.equalsIgnoreCase("http", url.getProtocol())) { + urls.add(url); + } + } else { + urls.add(url); + } + } + ImmutableList urlsResult = urls.build(); + if (ensureNonEmpty && urlsResult.isEmpty()) { + throw new RepositoryFunctionException( + new IOException( + "No URLs left after removing plain http URLs due to missing checksum." + + " Please provide either a checksum or an https download location."), + Transience.PERSISTENT); + } + return urlsResult; + } + + private void warnAboutChecksumError(List urls, String errorMessage) { + // Inform the user immediately, even though the file will still be downloaded. + // This cannot be done by a regular error event, as all regular events are recorded + // and only shown once the execution of the repository rule is finished. + // So we have to provide the information as update on the progress + String url = urls.isEmpty() ? "(unknown)" : urls.get(0).toString(); + reportProgress("Will fail after download of " + url + ". 
" + errorMessage); + } + + private Optional validateChecksum(String sha256, String integrity, List urls) + throws RepositoryFunctionException, EvalException { + if (!sha256.isEmpty()) { + if (!integrity.isEmpty()) { + throw Starlark.errorf("Expected either 'sha256' or 'integrity', but not both"); + } + try { + return Optional.of(Checksum.fromString(KeyType.SHA256, sha256)); + } catch (Checksum.InvalidChecksumException e) { + warnAboutChecksumError(urls, e.getMessage()); + throw new RepositoryFunctionException( + Starlark.errorf( + "Checksum error in %s: %s", getIdentifyingStringForLogging(), e.getMessage()), + Transience.PERSISTENT); + } + } + + if (integrity.isEmpty()) { + return Optional.absent(); + } + + try { + return Optional.of(Checksum.fromSubresourceIntegrity(integrity)); + } catch (Checksum.InvalidChecksumException e) { + warnAboutChecksumError(urls, e.getMessage()); + throw new RepositoryFunctionException( + Starlark.errorf( + "Checksum error in %s: %s", getIdentifyingStringForLogging(), e.getMessage()), + Transience.PERSISTENT); + } + } + + private Checksum calculateChecksum(Optional originalChecksum, Path path) + throws IOException, InterruptedException { + if (originalChecksum.isPresent()) { + // The checksum is checked on download, so if we got here, the user provided checksum is good + return originalChecksum.get(); + } + try { + return Checksum.fromString(KeyType.SHA256, RepositoryCache.getChecksum(KeyType.SHA256, path)); + } catch (Checksum.InvalidChecksumException e) { + throw new IllegalStateException( + "Unexpected invalid checksum from internal computation of SHA-256 checksum on " + + path.getPathString(), + e); + } + } + + private StructImpl calculateDownloadResult(Optional checksum, Path downloadedPath) + throws InterruptedException, RepositoryFunctionException { + Checksum finalChecksum; + try { + finalChecksum = calculateChecksum(checksum, downloadedPath); + } catch (IOException e) { + throw new RepositoryFunctionException( + new IOException( + "Couldn't hash downloaded file (" + downloadedPath.getPathString() + ")", e), + Transience.PERSISTENT); + } + + ImmutableMap.Builder out = ImmutableMap.builder(); + out.put("success", true); + out.put("integrity", finalChecksum.toSubresourceIntegrity()); + + // For compatibility with older Bazel versions that don't support non-SHA256 checksums. + if (finalChecksum.getKeyType() == KeyType.SHA256) { + out.put("sha256", finalChecksum.toString()); + } + return StarlarkInfo.create(StructProvider.STRUCT, out.buildOrThrow(), Location.BUILTIN); + } + + @StarlarkMethod( + name = "download", + doc = + "Downloads a file to the output path for the provided url and returns a struct" + + " containing success, a flag which is true if the" + + " download completed successfully, and if successful, a hash of the file" + + " with the fields sha256 and integrity.", + useStarlarkThread = true, + parameters = { + @Param( + name = "url", + allowedTypes = { + @ParamType(type = String.class), + @ParamType(type = Iterable.class, generic1 = String.class), + }, + named = true, + doc = "List of mirror URLs referencing the same file."), + @Param( + name = "output", + allowedTypes = { + @ParamType(type = String.class), + @ParamType(type = Label.class), + @ParamType(type = StarlarkPath.class) + }, + defaultValue = "''", + named = true, + doc = "path to the output file, relative to the repository directory."), + @Param( + name = "sha256", + defaultValue = "''", + named = true, + doc = + "the expected SHA-256 hash of the file downloaded." 
+ + " This must match the SHA-256 hash of the file downloaded. It is a security" + + " risk to omit the SHA-256 as remote files can change. At best omitting this" + + " field will make your build non-hermetic. It is optional to make development" + + " easier but should be set before shipping."), + @Param( + name = "executable", + defaultValue = "False", + named = true, + doc = "set the executable flag on the created file, false by default."), + @Param( + name = "allow_fail", + defaultValue = "False", + named = true, + doc = + "If set, indicate the error in the return value" + + " instead of raising an error for failed downloads"), + @Param( + name = "canonical_id", + defaultValue = "''", + named = true, + doc = + "If set, restrict cache hits to those cases where the file was added to the cache" + + " with the same canonical id"), + @Param( + name = "auth", + defaultValue = "{}", + named = true, + doc = "An optional dict specifying authentication information for some of the URLs."), + @Param( + name = "integrity", + defaultValue = "''", + named = true, + positional = false, + doc = + "Expected checksum of the file downloaded, in Subresource Integrity format." + + " This must match the checksum of the file downloaded. It is a security" + + " risk to omit the checksum as remote files can change. At best omitting this" + + " field will make your build non-hermetic. It is optional to make development" + + " easier but should be set before shipping."), + }) + public StructImpl download( + Object url, + Object output, + String sha256, + Boolean executable, + Boolean allowFail, + String canonicalId, + Dict authUnchecked, // expected + String integrity, + StarlarkThread thread) + throws RepositoryFunctionException, EvalException, InterruptedException { + ImmutableMap> authHeaders = + getAuthHeaders(getAuthContents(authUnchecked, "auth")); + + ImmutableList urls = + getUrls( + url, + /*ensureNonEmpty=*/ !allowFail, + /*checksumGiven=*/ !Strings.isNullOrEmpty(sha256) || !Strings.isNullOrEmpty(integrity)); + Optional checksum; + RepositoryFunctionException checksumValidation = null; + try { + checksum = validateChecksum(sha256, integrity, urls); + } catch (RepositoryFunctionException e) { + checksum = Optional.absent(); + checksumValidation = e; + } + + StarlarkPath outputPath = getPath("download()", output); + WorkspaceRuleEvent w = + WorkspaceRuleEvent.newDownloadEvent( + urls, + output.toString(), + sha256, + integrity, + executable, + getIdentifyingStringForLogging(), + thread.getCallerLocation()); + env.getListener().post(w); + Path downloadedPath; + try (SilentCloseable c = + Profiler.instance().profile("fetching: " + getIdentifyingStringForLogging())) { + checkInOutputDirectory("write", outputPath); + makeDirectories(outputPath.getPath()); + downloadedPath = + downloadManager.download( + urls, + authHeaders, + checksum, + canonicalId, + Optional.absent(), + outputPath.getPath(), + env.getListener(), + envVariables, + getIdentifyingStringForLogging()); + if (executable) { + outputPath.getPath().setExecutable(true); + } + } catch (InterruptedException e) { + throw new RepositoryFunctionException( + new IOException("thread interrupted"), Transience.TRANSIENT); + } catch (IOException e) { + if (allowFail) { + return StarlarkInfo.create( + StructProvider.STRUCT, ImmutableMap.of("success", false), Location.BUILTIN); + } else { + throw new RepositoryFunctionException(e, Transience.TRANSIENT); + } + } catch (InvalidPathException e) { + throw new RepositoryFunctionException( + Starlark.errorf("Could not 
create output path %s: %s", outputPath, e.getMessage()), + Transience.PERSISTENT); + } + if (checksumValidation != null) { + throw checksumValidation; + } + + return calculateDownloadResult(checksum, downloadedPath); + } + + @StarlarkMethod( + name = "download_and_extract", + doc = + "Downloads a file to the output path for the provided url, extracts it, and returns a" + + " struct containing success, a flag which is true if the" + + " download completed successfully, and if successful, a hash of the file with the" + + " fields sha256 and integrity.", + useStarlarkThread = true, + parameters = { + @Param( + name = "url", + allowedTypes = { + @ParamType(type = String.class), + @ParamType(type = Iterable.class, generic1 = String.class), + }, + named = true, + doc = "List of mirror URLs referencing the same file."), + @Param( + name = "output", + allowedTypes = { + @ParamType(type = String.class), + @ParamType(type = Label.class), + @ParamType(type = StarlarkPath.class) + }, + defaultValue = "''", + named = true, + doc = + "path to the directory where the archive will be unpacked," + + " relative to the repository directory."), + @Param( + name = "sha256", + defaultValue = "''", + named = true, + doc = + "the expected SHA-256 hash of the file downloaded." + + " This must match the SHA-256 hash of the file downloaded. It is a security" + + " risk to omit the SHA-256 as remote files can change. At best omitting this" + + " field will make your build non-hermetic. It is optional to make development" + + " easier but should be set before shipping." + + " If provided, the repository cache will first be checked for a file with the" + + " given hash; a download will only be attempted if the file was not found in" + + " the cache. After a successful download, the file will be added to the" + + " cache."), + @Param( + name = "type", + defaultValue = "''", + named = true, + doc = + "the archive type of the downloaded file." + + " By default, the archive type is determined from the file extension of" + + " the URL." + + " If the file has no extension, you can explicitly specify either \"zip\"," + + " \"jar\", \"war\", \"aar\", \"tar\", \"tar.gz\", \"tgz\", \"tar.xz\"," + + " \"txz\", \".tar.zst\", \".tzst\", \"tar.bz2\", \".ar\", or \".deb\"" + + " here."), + @Param( + name = "stripPrefix", + defaultValue = "''", + named = true, + doc = + "a directory prefix to strip from the extracted files." + + "\nMany archives contain a top-level directory that contains all files in the" + + " archive. Instead of needing to specify this prefix over and over in the" + + " build_file, this field can be used to strip it from extracted" + + " files."), + @Param( + name = "allow_fail", + defaultValue = "False", + named = true, + doc = + "If set, indicate the error in the return value" + + " instead of raising an error for failed downloads"), + @Param( + name = "canonical_id", + defaultValue = "''", + named = true, + doc = + "If set, restrict cache hits to those cases where the file was added to the cache" + + " with the same canonical id"), + @Param( + name = "auth", + defaultValue = "{}", + named = true, + doc = "An optional dict specifying authentication information for some of the URLs."), + @Param( + name = "integrity", + defaultValue = "''", + named = true, + positional = false, + doc = + "Expected checksum of the file downloaded, in Subresource Integrity format." + + " This must match the checksum of the file downloaded. It is a security" + + " risk to omit the checksum as remote files can change. 
At best omitting this" + + " field will make your build non-hermetic. It is optional to make development" + + " easier but should be set before shipping."), + @Param( + name = "rename_files", + defaultValue = "{}", + named = true, + positional = false, + doc = + "An optional dict specifying files to rename during the extraction. Archive entries" + + " with names exactly matching a key will be renamed to the value, prior to" + + " any directory prefix adjustment. This can be used to extract archives that" + + " contain non-Unicode filenames, or which have files that would extract to" + + " the same path on case-insensitive filesystems."), + }) + public StructImpl downloadAndExtract( + Object url, + Object output, + String sha256, + String type, + String stripPrefix, + Boolean allowFail, + String canonicalId, + Dict auth, // expected + String integrity, + Dict renameFiles, // expected + StarlarkThread thread) + throws RepositoryFunctionException, InterruptedException, EvalException { + ImmutableMap> authHeaders = + getAuthHeaders(getAuthContents(auth, "auth")); + + ImmutableList urls = + getUrls( + url, + /*ensureNonEmpty=*/ !allowFail, + /*checksumGiven=*/ !Strings.isNullOrEmpty(sha256) || !Strings.isNullOrEmpty(integrity)); + Optional checksum; + RepositoryFunctionException checksumValidation = null; + try { + checksum = validateChecksum(sha256, integrity, urls); + } catch (RepositoryFunctionException e) { + checksum = Optional.absent(); + checksumValidation = e; + } + + Map renameFilesMap = + Dict.cast(renameFiles, String.class, String.class, "rename_files"); + + WorkspaceRuleEvent w = + WorkspaceRuleEvent.newDownloadAndExtractEvent( + urls, + output.toString(), + sha256, + integrity, + type, + stripPrefix, + renameFilesMap, + getIdentifyingStringForLogging(), + thread.getCallerLocation()); + + StarlarkPath outputPath = getPath("download_and_extract()", output); + checkInOutputDirectory("write", outputPath); + createDirectory(outputPath.getPath()); + + Path downloadedPath; + Path downloadDirectory; + try (SilentCloseable c = + Profiler.instance().profile("fetching: " + getIdentifyingStringForLogging())) { + + // Download to temp directory inside the outputDirectory and delete it after extraction + java.nio.file.Path tempDirectory = + Files.createTempDirectory(Paths.get(outputPath.toString()), "temp"); + downloadDirectory = + workingDirectory.getFileSystem().getPath(tempDirectory.toFile().getAbsolutePath()); + + downloadedPath = + downloadManager.download( + urls, + authHeaders, + checksum, + canonicalId, + Optional.of(type), + downloadDirectory, + env.getListener(), + envVariables, + getIdentifyingStringForLogging()); + } catch (InterruptedException e) { + env.getListener().post(w); + throw new RepositoryFunctionException( + new IOException("thread interrupted"), Transience.TRANSIENT); + } catch (IOException e) { + env.getListener().post(w); + if (allowFail) { + return StarlarkInfo.create( + StructProvider.STRUCT, ImmutableMap.of("success", false), Location.BUILTIN); + } else { + throw new RepositoryFunctionException(e, Transience.TRANSIENT); + } + } + if (checksumValidation != null) { + throw checksumValidation; + } + env.getListener().post(w); + try (SilentCloseable c = + Profiler.instance().profile("extracting: " + getIdentifyingStringForLogging())) { + env.getListener() + .post( + new ExtractProgress(outputPath.getPath().toString(), "Extracting " + downloadedPath)); + DecompressorValue.decompress( + DecompressorDescriptor.builder() + .setContext(getIdentifyingStringForLogging()) + 
.setArchivePath(downloadedPath) + .setDestinationPath(outputPath.getPath()) + .setPrefix(stripPrefix) + .setRenameFiles(renameFilesMap) + .build()); + env.getListener().post(new ExtractProgress(outputPath.getPath().toString())); + } + + StructImpl downloadResult = calculateDownloadResult(checksum, downloadedPath); + try { + if (downloadDirectory.exists()) { + downloadDirectory.deleteTree(); + } + } catch (IOException e) { + throw new RepositoryFunctionException( + new IOException( + "Couldn't delete temporary directory (" + downloadDirectory.getPathString() + ")", e), + Transience.TRANSIENT); + } + return downloadResult; + } + + /** A progress event that reports about archive extraction. */ + protected static class ExtractProgress implements FetchProgress { + private final String repositoryPath; + private final String progress; + private final boolean isFinished; + + ExtractProgress(String repositoryPath, String progress) { + this.repositoryPath = repositoryPath; + this.progress = progress; + this.isFinished = false; + } + + ExtractProgress(String repositoryPath) { + this.repositoryPath = repositoryPath; + this.progress = ""; + this.isFinished = true; + } + + @Override + public String getResourceIdentifier() { + return repositoryPath; + } + + @Override + public String getProgress() { + return progress; + } + + @Override + public boolean isFinished() { + return isFinished; + } + } + + @StarlarkMethod( + name = "file", + doc = "Generates a file in the repository directory with the provided content.", + useStarlarkThread = true, + parameters = { + @Param( + name = "path", + allowedTypes = { + @ParamType(type = String.class), + @ParamType(type = Label.class), + @ParamType(type = StarlarkPath.class) + }, + doc = "path of the file to create, relative to the repository directory."), + @Param( + name = "content", + named = true, + defaultValue = "''", + doc = "the content of the file to create, empty by default."), + @Param( + name = "executable", + named = true, + defaultValue = "True", + doc = "set the executable flag on the created file, true by default."), + @Param( + name = "legacy_utf8", + named = true, + defaultValue = "True", + doc = + "encode file content to UTF-8, true by default. 
Future versions will change" + + " the default and remove this parameter."), + }) + public void createFile( + Object path, String content, Boolean executable, Boolean legacyUtf8, StarlarkThread thread) + throws RepositoryFunctionException, EvalException, InterruptedException { + StarlarkPath p = getPath("file()", path); + byte[] contentBytes; + if (legacyUtf8) { + contentBytes = content.getBytes(UTF_8); + } else { + contentBytes = content.getBytes(ISO_8859_1); + } + WorkspaceRuleEvent w = + WorkspaceRuleEvent.newFileEvent( + p.toString(), + content, + executable, + getIdentifyingStringForLogging(), + thread.getCallerLocation()); + env.getListener().post(w); + try { + checkInOutputDirectory("write", p); + makeDirectories(p.getPath()); + p.getPath().delete(); + try (OutputStream stream = p.getPath().getOutputStream()) { + stream.write(contentBytes); + } + if (executable) { + p.getPath().setExecutable(true); + } + } catch (IOException e) { + throw new RepositoryFunctionException(e, Transience.TRANSIENT); + } catch (InvalidPathException e) { + throw new RepositoryFunctionException( + Starlark.errorf("Could not create %s: %s", p, e.getMessage()), Transience.PERSISTENT); + } + } + @StarlarkMethod( name = "path", doc = @@ -172,7 +902,7 @@ public String readFile(Object path, StarlarkThread thread) p.toString(), getIdentifyingStringForLogging(), thread.getCallerLocation()); env.getListener().post(w); try { - return FileSystemUtils.readContent(p.getPath(), StandardCharsets.ISO_8859_1); + return FileSystemUtils.readContent(p.getPath(), ISO_8859_1); } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } @@ -186,6 +916,37 @@ protected static void makeDirectories(Path path) throws IOException { } } + @StarlarkMethod( + name = "report_progress", + doc = "Updates the progress status for the fetching of this repository or module extension", + parameters = { + @Param( + name = "status", + defaultValue = "''", + allowedTypes = {@ParamType(type = String.class)}, + doc = "string describing the current status of the fetch progress") + }) + public void reportProgress(String status) { + env.getListener() + .post( + new FetchProgress() { + @Override + public String getResourceIdentifier() { + return getIdentifyingStringForLogging(); + } + + @Override + public String getProgress() { + return status; + } + + @Override + public boolean isFinished() { + return false; + } + }); + } + @StarlarkMethod( name = "os", structField = true, diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java index c78f240614b898..c360521ef5e866 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java @@ -15,10 +15,6 @@ package com.google.devtools.build.lib.bazel.repository.starlark; import com.github.difflib.patch.PatchFailedException; -import com.google.common.base.Ascii; -import com.google.common.base.Optional; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.docgen.annot.DocCategory; @@ -26,21 +22,14 @@ import com.google.devtools.build.lib.bazel.repository.DecompressorDescriptor; import 
com.google.devtools.build.lib.bazel.repository.DecompressorValue; import com.google.devtools.build.lib.bazel.repository.PatchUtil; -import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache; -import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache.KeyType; -import com.google.devtools.build.lib.bazel.repository.downloader.Checksum; import com.google.devtools.build.lib.bazel.repository.downloader.DownloadManager; -import com.google.devtools.build.lib.bazel.repository.downloader.HttpUtils; import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.events.ExtendedEventHandler.FetchProgress; +import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Rule; -import com.google.devtools.build.lib.packages.StarlarkInfo; import com.google.devtools.build.lib.packages.StructImpl; import com.google.devtools.build.lib.packages.StructProvider; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; -import com.google.devtools.build.lib.profiler.Profiler; -import com.google.devtools.build.lib.profiler.SilentCloseable; import com.google.devtools.build.lib.rules.repository.RepositoryFunction.RepositoryFunctionException; import com.google.devtools.build.lib.rules.repository.WorkspaceAttributeMapper; import com.google.devtools.build.lib.runtime.ProcessWrapper; @@ -54,18 +43,8 @@ import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import java.io.IOException; import java.io.OutputStream; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.nio.file.InvalidPathException; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Base64; -import java.util.List; import java.util.Map; import javax.annotation.Nullable; import net.starlark.java.annot.Param; @@ -79,7 +58,6 @@ import net.starlark.java.eval.StarlarkInt; import net.starlark.java.eval.StarlarkSemantics; import net.starlark.java.eval.StarlarkThread; -import net.starlark.java.syntax.Location; /** Starlark API for the repository_rule's context. 
*/ @StarlarkBuiltin( @@ -92,6 +70,7 @@ + " repository rule.") public class StarlarkRepositoryContext extends StarlarkBaseExternalContext { private final Rule rule; + private final RepositoryName repoName; private final PathPackageLocator packageLocator; private final Path workspaceRoot; private final StructImpl attrObject; @@ -127,6 +106,7 @@ public class StarlarkRepositoryContext extends StarlarkBaseExternalContext { starlarkSemantics, remoteExecutor); this.rule = rule; + this.repoName = RepositoryName.createUnvalidated(rule.getName()); this.packageLocator = packageLocator; this.ignoredPatterns = ignoredPatterns; this.syscallCache = syscallCache; @@ -145,7 +125,7 @@ public class StarlarkRepositoryContext extends StarlarkBaseExternalContext { @Override protected String getIdentifyingStringForLogging() { - return rule.getLabel().toString(); + return "repository " + repoName.getNameWithAt(); } @StarlarkMethod( @@ -195,40 +175,6 @@ private StarlarkPath externalPath(String method, Object pathObject) method); } - @StarlarkMethod( - name = "report_progress", - doc = "Updates the progress status for the fetching of this repository", - parameters = { - @Param( - name = "status", - allowedTypes = {@ParamType(type = String.class)}, - doc = "string describing the current status of the fetch progress") - }) - // TODO(wyv): migrate this to the base context. - public void reportProgress(String status) { - final String message = status == null ? "" : status; - final String id = "@" + getName(); - - env.getListener() - .post( - new FetchProgress() { - @Override - public String getResourceIdentifier() { - return id; - } - - @Override - public String getProgress() { - return message; - } - - @Override - public boolean isFinished() { - return false; - } - }); - } - @StarlarkMethod( name = "symlink", doc = "Creates a symlink on the filesystem.", @@ -259,7 +205,7 @@ public void symlink(Object from, Object to, StarlarkThread thread) WorkspaceRuleEvent.newSymlinkEvent( fromPath.toString(), toPath.toString(), - rule.getLabel().toString(), + getIdentifyingStringForLogging(), thread.getCallerLocation()); env.getListener().post(w); try { @@ -279,83 +225,6 @@ public void symlink(Object from, Object to, StarlarkThread thread) } } - private void checkInOutputDirectory(String operation, StarlarkPath path) - throws RepositoryFunctionException { - if (!path.getPath().getPathString().startsWith(workingDirectory.getPathString())) { - throw new RepositoryFunctionException( - Starlark.errorf( - "Cannot %s outside of the repository directory for path %s", operation, path), - Transience.PERSISTENT); - } - } - - @StarlarkMethod( - name = "file", - doc = "Generates a file in the repository directory with the provided content.", - useStarlarkThread = true, - parameters = { - @Param( - name = "path", - allowedTypes = { - @ParamType(type = String.class), - @ParamType(type = Label.class), - @ParamType(type = StarlarkPath.class) - }, - doc = "path of the file to create, relative to the repository directory."), - @Param( - name = "content", - named = true, - defaultValue = "''", - doc = "the content of the file to create, empty by default."), - @Param( - name = "executable", - named = true, - defaultValue = "True", - doc = "set the executable flag on the created file, true by default."), - @Param( - name = "legacy_utf8", - named = true, - defaultValue = "True", - doc = - "encode file content to UTF-8, true by default. 
Future versions will change" - + " the default and remove this parameter."), - }) - public void createFile( - Object path, String content, Boolean executable, Boolean legacyUtf8, StarlarkThread thread) - throws RepositoryFunctionException, EvalException, InterruptedException { - StarlarkPath p = getPath("file()", path); - byte[] contentBytes; - if (legacyUtf8) { - contentBytes = content.getBytes(StandardCharsets.UTF_8); - } else { - contentBytes = content.getBytes(StandardCharsets.ISO_8859_1); - } - WorkspaceRuleEvent w = - WorkspaceRuleEvent.newFileEvent( - p.toString(), - content, - executable, - rule.getLabel().toString(), - thread.getCallerLocation()); - env.getListener().post(w); - try { - checkInOutputDirectory("write", p); - makeDirectories(p.getPath()); - p.getPath().delete(); - try (OutputStream stream = p.getPath().getOutputStream()) { - stream.write(contentBytes); - } - if (executable) { - p.getPath().setExecutable(true); - } - } catch (IOException e) { - throw new RepositoryFunctionException(e, Transience.TRANSIENT); - } catch (InvalidPathException e) { - throw new RepositoryFunctionException( - Starlark.errorf("Could not create %s: %s", p, e.getMessage()), Transience.PERSISTENT); - } - } - @StarlarkMethod( name = "template", doc = @@ -410,7 +279,7 @@ public void createFileFromTemplate( t.toString(), substitutionMap, executable, - rule.getLabel().toString(), + getIdentifyingStringForLogging(), thread.getCallerLocation()); env.getListener().post(w); try { @@ -471,7 +340,7 @@ public boolean delete(Object pathObject, StarlarkThread thread) StarlarkPath starlarkPath = externalPath("delete()", pathObject); WorkspaceRuleEvent w = WorkspaceRuleEvent.newDeleteEvent( - starlarkPath.toString(), rule.getLabel().toString(), thread.getCallerLocation()); + starlarkPath.toString(), getIdentifyingStringForLogging(), thread.getCallerLocation()); env.getListener().post(w); try { Path path = starlarkPath.getPath(); @@ -515,7 +384,10 @@ public void patch(Object patchFile, StarlarkInt stripI, StarlarkThread thread) StarlarkPath starlarkPath = getPath("patch()", patchFile); WorkspaceRuleEvent w = WorkspaceRuleEvent.newPatchEvent( - starlarkPath.toString(), strip, rule.getLabel().toString(), thread.getCallerLocation()); + starlarkPath.toString(), + strip, + getIdentifyingStringForLogging(), + thread.getCallerLocation()); env.getListener().post(w); try { PatchUtil.apply(starlarkPath.getPath(), strip, workingDirectory); @@ -528,180 +400,6 @@ public void patch(Object patchFile, StarlarkInt stripI, StarlarkThread thread) } } - private void warnAboutChecksumError(List urls, String errorMessage) { - // Inform the user immediately, even though the file will still be downloaded. - // This cannot be done by a regular error event, as all regular events are recorded - // and only shown once the execution of the repository rule is finished. - // So we have to provide the information as update on the progress - String url = "(unknown)"; - if (urls.size() > 0) { - url = urls.get(0).toString(); - } - reportProgress("Will fail after download of " + url + ". " + errorMessage); - } - - private static Map> getAuthContents(Dict x, String what) - throws EvalException { - // Dict.cast returns Dict. 
- @SuppressWarnings({"unchecked", "rawtypes"}) - Map> res = (Map) Dict.cast(x, String.class, Dict.class, what); - return res; - } - - @StarlarkMethod( - name = "download", - doc = - "Downloads a file to the output path for the provided url and returns a struct" - + " containing success, a flag which is true if the" - + " download completed successfully, and if successful, a hash of the file" - + " with the fields sha256 and integrity.", - useStarlarkThread = true, - parameters = { - @Param( - name = "url", - allowedTypes = { - @ParamType(type = String.class), - @ParamType(type = Iterable.class, generic1 = String.class), - }, - named = true, - doc = "List of mirror URLs referencing the same file."), - @Param( - name = "output", - allowedTypes = { - @ParamType(type = String.class), - @ParamType(type = Label.class), - @ParamType(type = StarlarkPath.class) - }, - defaultValue = "''", - named = true, - doc = "path to the output file, relative to the repository directory."), - @Param( - name = "sha256", - defaultValue = "''", - named = true, - doc = - "the expected SHA-256 hash of the file downloaded." - + " This must match the SHA-256 hash of the file downloaded. It is a security" - + " risk to omit the SHA-256 as remote files can change. At best omitting this" - + " field will make your build non-hermetic. It is optional to make development" - + " easier but should be set before shipping."), - @Param( - name = "executable", - defaultValue = "False", - named = true, - doc = "set the executable flag on the created file, false by default."), - @Param( - name = "allow_fail", - defaultValue = "False", - named = true, - doc = - "If set, indicate the error in the return value" - + " instead of raising an error for failed downloads"), - @Param( - name = "canonical_id", - defaultValue = "''", - named = true, - doc = - "If set, restrict cache hits to those cases where the file was added to the cache" - + " with the same canonical id"), - @Param( - name = "auth", - defaultValue = "{}", - named = true, - doc = "An optional dict specifying authentication information for some of the URLs."), - @Param( - name = "integrity", - defaultValue = "''", - named = true, - positional = false, - doc = - "Expected checksum of the file downloaded, in Subresource Integrity format." - + " This must match the checksum of the file downloaded. It is a security" - + " risk to omit the checksum as remote files can change. At best omitting this" - + " field will make your build non-hermetic. 
It is optional to make development" - + " easier but should be set before shipping."), - }) - public StructImpl download( - Object url, - Object output, - String sha256, - Boolean executable, - Boolean allowFail, - String canonicalId, - Dict authUnchecked, // expected - String integrity, - StarlarkThread thread) - throws RepositoryFunctionException, EvalException, InterruptedException { - Map> authHeaders = - getAuthHeaders(getAuthContents(authUnchecked, "auth")); - - List urls = - getUrls( - url, - /* ensureNonEmpty= */ !allowFail, - /* checksumGiven= */ !Strings.isNullOrEmpty(sha256) - || !Strings.isNullOrEmpty(integrity)); - Optional checksum; - RepositoryFunctionException checksumValidation = null; - try { - checksum = validateChecksum(sha256, integrity, urls); - } catch (RepositoryFunctionException e) { - checksum = Optional.absent(); - checksumValidation = e; - } - - StarlarkPath outputPath = getPath("download()", output); - WorkspaceRuleEvent w = - WorkspaceRuleEvent.newDownloadEvent( - urls, - output.toString(), - sha256, - integrity, - executable, - rule.getLabel().toString(), - thread.getCallerLocation()); - env.getListener().post(w); - Path downloadedPath; - try (SilentCloseable c = - Profiler.instance().profile("fetching: " + rule.getLabel().toString())) { - checkInOutputDirectory("write", outputPath); - makeDirectories(outputPath.getPath()); - downloadedPath = - downloadManager.download( - urls, - authHeaders, - checksum, - canonicalId, - Optional.absent(), - outputPath.getPath(), - env.getListener(), - envVariables, - getName()); - if (executable) { - outputPath.getPath().setExecutable(true); - } - } catch (InterruptedException e) { - throw new RepositoryFunctionException( - new IOException("thread interrupted"), Transience.TRANSIENT); - } catch (IOException e) { - if (allowFail) { - return StarlarkInfo.create( - StructProvider.STRUCT, ImmutableMap.of("success", false), Location.BUILTIN); - } else { - throw new RepositoryFunctionException(e, Transience.TRANSIENT); - } - } catch (InvalidPathException e) { - throw new RepositoryFunctionException( - Starlark.errorf("Could not create output path %s: %s", outputPath, e.getMessage()), - Transience.PERSISTENT); - } - if (checksumValidation != null) { - throw checksumValidation; - } - - return calculateDownloadResult(checksum, downloadedPath); - } - @StarlarkMethod( name = "extract", doc = "Extract an archive to the repository directory.", @@ -778,7 +476,7 @@ public void extract( output.toString(), stripPrefix, renameFilesMap, - rule.getLabel().toString(), + getIdentifyingStringForLogging(), thread.getCallerLocation()); env.getListener().post(w); @@ -797,362 +495,6 @@ public void extract( env.getListener().post(new ExtractProgress(outputPath.getPath().toString())); } - @StarlarkMethod( - name = "download_and_extract", - doc = - "Downloads a file to the output path for the provided url, extracts it, and returns a" - + " struct containing success, a flag which is true if the" - + " download completed successfully, and if successful, a hash of the file with the" - + " fields sha256 and integrity.", - useStarlarkThread = true, - parameters = { - @Param( - name = "url", - allowedTypes = { - @ParamType(type = String.class), - @ParamType(type = Iterable.class, generic1 = String.class), - }, - named = true, - doc = "List of mirror URLs referencing the same file."), - @Param( - name = "output", - allowedTypes = { - @ParamType(type = String.class), - @ParamType(type = Label.class), - @ParamType(type = StarlarkPath.class) - }, - defaultValue = 
"''", - named = true, - doc = - "path to the directory where the archive will be unpacked," - + " relative to the repository directory."), - @Param( - name = "sha256", - defaultValue = "''", - named = true, - doc = - "the expected SHA-256 hash of the file downloaded." - + " This must match the SHA-256 hash of the file downloaded. It is a security" - + " risk to omit the SHA-256 as remote files can change. At best omitting this" - + " field will make your build non-hermetic. It is optional to make development" - + " easier but should be set before shipping." - + " If provided, the repository cache will first be checked for a file with the" - + " given hash; a download will only be attempted if the file was not found in" - + " the cache. After a successful download, the file will be added to the" - + " cache."), - @Param( - name = "type", - defaultValue = "''", - named = true, - doc = - "the archive type of the downloaded file." - + " By default, the archive type is determined from the file extension of" - + " the URL." - + " If the file has no extension, you can explicitly specify either \"zip\"," - + " \"jar\", \"war\", \"aar\", \"tar\", \"tar.gz\", \"tgz\", \"tar.xz\"," - + " \"txz\", \".tar.zst\", \".tzst\", \"tar.bz2\", \".ar\", or \".deb\"" - + " here."), - @Param( - name = "stripPrefix", - defaultValue = "''", - named = true, - doc = - "a directory prefix to strip from the extracted files." - + "\nMany archives contain a top-level directory that contains all files in the" - + " archive. Instead of needing to specify this prefix over and over in the" - + " build_file, this field can be used to strip it from extracted" - + " files."), - @Param( - name = "allow_fail", - defaultValue = "False", - named = true, - doc = - "If set, indicate the error in the return value" - + " instead of raising an error for failed downloads"), - @Param( - name = "canonical_id", - defaultValue = "''", - named = true, - doc = - "If set, restrict cache hits to those cases where the file was added to the cache" - + " with the same canonical id"), - @Param( - name = "auth", - defaultValue = "{}", - named = true, - doc = "An optional dict specifying authentication information for some of the URLs."), - @Param( - name = "integrity", - defaultValue = "''", - named = true, - positional = false, - doc = - "Expected checksum of the file downloaded, in Subresource Integrity format." - + " This must match the checksum of the file downloaded. It is a security" - + " risk to omit the checksum as remote files can change. At best omitting this" - + " field will make your build non-hermetic. It is optional to make development" - + " easier but should be set before shipping."), - @Param( - name = "rename_files", - defaultValue = "{}", - named = true, - positional = false, - doc = - "An optional dict specifying files to rename during the extraction. Archive entries" - + " with names exactly matching a key will be renamed to the value, prior to" - + " any directory prefix adjustment. 
This can be used to extract archives that" - + " contain non-Unicode filenames, or which have files that would extract to" - + " the same path on case-insensitive filesystems."), - }) - public StructImpl downloadAndExtract( - Object url, - Object output, - String sha256, - String type, - String stripPrefix, - Boolean allowFail, - String canonicalId, - Dict auth, // expected - String integrity, - Dict renameFiles, // expected - StarlarkThread thread) - throws RepositoryFunctionException, InterruptedException, EvalException { - Map> authHeaders = getAuthHeaders(getAuthContents(auth, "auth")); - - List urls = - getUrls( - url, - /* ensureNonEmpty= */ !allowFail, - /* checksumGiven= */ !Strings.isNullOrEmpty(sha256) - || !Strings.isNullOrEmpty(integrity)); - Optional checksum; - RepositoryFunctionException checksumValidation = null; - try { - checksum = validateChecksum(sha256, integrity, urls); - } catch (RepositoryFunctionException e) { - checksum = Optional.absent(); - checksumValidation = e; - } - - Map renameFilesMap = - Dict.cast(renameFiles, String.class, String.class, "rename_files"); - - WorkspaceRuleEvent w = - WorkspaceRuleEvent.newDownloadAndExtractEvent( - urls, - output.toString(), - sha256, - integrity, - type, - stripPrefix, - renameFilesMap, - rule.getLabel().toString(), - thread.getCallerLocation()); - - StarlarkPath outputPath = getPath("download_and_extract()", output); - checkInOutputDirectory("write", outputPath); - createDirectory(outputPath.getPath()); - - Path downloadedPath; - Path downloadDirectory; - try (SilentCloseable c = - Profiler.instance().profile("fetching: " + rule.getLabel().toString())) { - - // Download to temp directory inside the outputDirectory and delete it after extraction - java.nio.file.Path tempDirectory = - Files.createTempDirectory(Paths.get(outputPath.toString()), "temp"); - downloadDirectory = - workingDirectory.getFileSystem().getPath(tempDirectory.toFile().getAbsolutePath()); - - downloadedPath = - downloadManager.download( - urls, - authHeaders, - checksum, - canonicalId, - Optional.of(type), - downloadDirectory, - env.getListener(), - envVariables, - getName()); - } catch (InterruptedException e) { - env.getListener().post(w); - throw new RepositoryFunctionException( - new IOException("thread interrupted"), Transience.TRANSIENT); - } catch (IOException e) { - env.getListener().post(w); - if (allowFail) { - return StarlarkInfo.create( - StructProvider.STRUCT, ImmutableMap.of("success", false), Location.BUILTIN); - } else { - throw new RepositoryFunctionException(e, Transience.TRANSIENT); - } - } - if (checksumValidation != null) { - throw checksumValidation; - } - env.getListener().post(w); - try (SilentCloseable c = - Profiler.instance().profile("extracting: " + rule.getLabel().toString())) { - env.getListener() - .post( - new ExtractProgress(outputPath.getPath().toString(), "Extracting " + downloadedPath)); - DecompressorValue.decompress( - DecompressorDescriptor.builder() - .setContext(getIdentifyingStringForLogging()) - .setArchivePath(downloadedPath) - .setDestinationPath(outputPath.getPath()) - .setPrefix(stripPrefix) - .setRenameFiles(renameFilesMap) - .build()); - env.getListener().post(new ExtractProgress(outputPath.getPath().toString())); - } - - StructImpl downloadResult = calculateDownloadResult(checksum, downloadedPath); - try { - if (downloadDirectory.exists()) { - downloadDirectory.deleteTree(); - } - } catch (IOException e) { - throw new RepositoryFunctionException( - new IOException( - "Couldn't delete temporary directory 
(" + downloadDirectory.getPathString() + ")", e), - Transience.TRANSIENT); - } - return downloadResult; - } - - private Checksum calculateChecksum(Optional originalChecksum, Path path) - throws IOException, InterruptedException { - if (originalChecksum.isPresent()) { - // The checksum is checked on download, so if we got here, the user provided checksum is good - return originalChecksum.get(); - } - try { - return Checksum.fromString(KeyType.SHA256, RepositoryCache.getChecksum(KeyType.SHA256, path)); - } catch (Checksum.InvalidChecksumException e) { - throw new IllegalStateException( - "Unexpected invalid checksum from internal computation of SHA-256 checksum on " - + path.getPathString(), - e); - } - } - - private Optional validateChecksum(String sha256, String integrity, List urls) - throws RepositoryFunctionException, EvalException { - if (!sha256.isEmpty()) { - if (!integrity.isEmpty()) { - throw Starlark.errorf("Expected either 'sha256' or 'integrity', but not both"); - } - try { - return Optional.of(Checksum.fromString(KeyType.SHA256, sha256)); - } catch (Checksum.InvalidChecksumException e) { - warnAboutChecksumError(urls, e.getMessage()); - throw new RepositoryFunctionException( - Starlark.errorf( - "Definition of repository %s: %s at %s", - rule.getName(), e.getMessage(), rule.getLocation()), - Transience.PERSISTENT); - } - } - - if (integrity.isEmpty()) { - return Optional.absent(); - } - - try { - return Optional.of(Checksum.fromSubresourceIntegrity(integrity)); - } catch (Checksum.InvalidChecksumException e) { - warnAboutChecksumError(urls, e.getMessage()); - throw new RepositoryFunctionException( - Starlark.errorf( - "Definition of repository %s: %s at %s", - rule.getName(), e.getMessage(), rule.getLocation()), - Transience.PERSISTENT); - } - } - - private StructImpl calculateDownloadResult(Optional checksum, Path downloadedPath) - throws EvalException, InterruptedException, RepositoryFunctionException { - Checksum finalChecksum; - try { - finalChecksum = calculateChecksum(checksum, downloadedPath); - } catch (IOException e) { - throw new RepositoryFunctionException( - new IOException( - "Couldn't hash downloaded file (" + downloadedPath.getPathString() + ")", e), - Transience.PERSISTENT); - } - - ImmutableMap.Builder out = ImmutableMap.builder(); - out.put("success", true); - out.put("integrity", finalChecksum.toSubresourceIntegrity()); - - // For compatibility with older Bazel versions that don't support non-SHA256 checksums. 
- if (finalChecksum.getKeyType() == KeyType.SHA256) { - out.put("sha256", finalChecksum.toString()); - } - return StarlarkInfo.create(StructProvider.STRUCT, out.buildOrThrow(), Location.BUILTIN); - } - - private static ImmutableList checkAllUrls(Iterable urlList) throws EvalException { - ImmutableList.Builder result = ImmutableList.builder(); - - for (Object o : urlList) { - if (!(o instanceof String)) { - throw Starlark.errorf( - "Expected a string or sequence of strings for 'url' argument, but got '%s' item in the" - + " sequence", - Starlark.type(o)); - } - result.add((String) o); - } - - return result.build(); - } - - private static List getUrls(Object urlOrList, boolean ensureNonEmpty, boolean checksumGiven) - throws RepositoryFunctionException, EvalException, InterruptedException { - List urlStrings; - if (urlOrList instanceof String) { - urlStrings = ImmutableList.of((String) urlOrList); - } else { - urlStrings = checkAllUrls((Iterable) urlOrList); - } - if (ensureNonEmpty && urlStrings.isEmpty()) { - throw new RepositoryFunctionException(new IOException("urls not set"), Transience.PERSISTENT); - } - List urls = new ArrayList<>(); - for (String urlString : urlStrings) { - URL url; - try { - url = new URL(urlString); - } catch (MalformedURLException e) { - throw new RepositoryFunctionException( - new IOException("Bad URL: " + urlString), Transience.PERSISTENT); - } - if (!HttpUtils.isUrlSupportedByDownloader(url)) { - throw new RepositoryFunctionException( - new IOException("Unsupported protocol: " + url.getProtocol()), Transience.PERSISTENT); - } - if (!checksumGiven) { - if (!Ascii.equalsIgnoreCase("http", url.getProtocol())) { - urls.add(url); - } - } else { - urls.add(url); - } - } - if (ensureNonEmpty && urls.isEmpty()) { - throw new RepositoryFunctionException( - new IOException( - "No URLs left after removing plain http URLs due to missing checksum." - + " Please provide either a checksum or an https download location."), - Transience.PERSISTENT); - } - return urls; - } - @Override public String toString() { return "repository_ctx[" + rule.getLabel() + "]"; @@ -1190,107 +532,4 @@ public void enforceLabelAttributes() throws EvalException, InterruptedException } } } - - /** - * From an authentication dict extract a map of headers. - * - *

Given a dict as provided as "auth" argument, compute a map specifying for each URI provided - * which additional headers (as usual, represented as a map from Strings to Strings) should - * additionally be added to the request. For some form of authentication, in particular basic - * authentication, adding those headers is enough; for other forms of authentication other - * measures might be necessary. - */ - private static Map> getAuthHeaders(Map> auth) - throws RepositoryFunctionException, EvalException { - ImmutableMap.Builder> headers = new ImmutableMap.Builder<>(); - for (Map.Entry> entry : auth.entrySet()) { - try { - URL url = new URL(entry.getKey()); - Dict authMap = entry.getValue(); - if (authMap.containsKey("type")) { - if ("basic".equals(authMap.get("type"))) { - if (!authMap.containsKey("login") || !authMap.containsKey("password")) { - throw Starlark.errorf( - "Found request to do basic auth for %s without 'login' and 'password' being" - + " provided.", - entry.getKey()); - } - String credentials = authMap.get("login") + ":" + authMap.get("password"); - headers.put( - url.toURI(), - ImmutableMap.of( - "Authorization", - "Basic " - + Base64.getEncoder() - .encodeToString(credentials.getBytes(StandardCharsets.UTF_8)))); - } else if ("pattern".equals(authMap.get("type"))) { - if (!authMap.containsKey("pattern")) { - throw Starlark.errorf( - "Found request to do pattern auth for %s without a pattern being provided", - entry.getKey()); - } - - String result = (String) authMap.get("pattern"); - - for (String component : Arrays.asList("password", "login")) { - String demarcatedComponent = "<" + component + ">"; - - if (result.contains(demarcatedComponent)) { - if (!authMap.containsKey(component)) { - throw Starlark.errorf( - "Auth pattern contains %s but it was not provided in auth dict.", - demarcatedComponent); - } - } else { - // component isn't in the pattern, ignore it - continue; - } - - result = result.replaceAll(demarcatedComponent, (String) authMap.get(component)); - } - - headers.put(url.toURI(), ImmutableMap.of("Authorization", result)); - } - } - } catch (MalformedURLException e) { - throw new RepositoryFunctionException(e, Transience.PERSISTENT); - } catch (URISyntaxException e) { - throw new EvalException(e); - } - } - return headers.buildOrThrow(); - } - - private static class ExtractProgress implements FetchProgress { - private final String repositoryPath; - private final String progress; - private final boolean isFinished; - - ExtractProgress(String repositoryPath, String progress) { - this.repositoryPath = repositoryPath; - this.progress = progress; - this.isFinished = false; - } - - ExtractProgress(String repositoryPath) { - this.repositoryPath = repositoryPath; - this.progress = ""; - this.isFinished = true; - } - - @Override - public String getResourceIdentifier() { - return repositoryPath; - } - - @Override - public String getProgress() { - return progress; - } - - @Override - public boolean isFinished() { - return isFinished; - } - } } diff --git a/src/test/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEventTest.java b/src/test/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEventTest.java index 1f8e7532aac80b..1166cf1420ae53 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEventTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/debug/WorkspaceRuleEventTest.java @@ -65,7 +65,7 @@ public void newExecuteEvent_expectedResult() { "key2", "val2!", "key3", "val3!"); - 
assertThat(event.getRule()).isEqualTo("my_rule"); + assertThat(event.getContext()).isEqualTo("my_rule"); assertThat(event.getLocation()).isEqualTo("foo:10:20"); WorkspaceLogProtos.ExecuteEvent executeEvent = event.getExecuteEvent(); diff --git a/src/test/py/bazel/bzlmod/bazel_module_test.py b/src/test/py/bazel/bzlmod/bazel_module_test.py index bb943ff20b66c7..321df9ba826b20 100644 --- a/src/test/py/bazel/bzlmod/bazel_module_test.py +++ b/src/test/py/bazel/bzlmod/bazel_module_test.py @@ -14,6 +14,7 @@ # limitations under the License. import os +import pathlib import tempfile import unittest @@ -400,5 +401,30 @@ def testCommandLineModuleOverride(self): self.assertIn( 'Target @ss~override//:choose_me up-to-date (nothing to build)', stderr) + def testDownload(self): + data_path = self.ScratchFile('data.txt', ['some data']) + data_url = pathlib.Path(data_path).resolve().as_uri() + self.ScratchFile('MODULE.bazel', [ + 'data_ext = use_extension("//:ext.bzl", "data_ext")', + 'use_repo(data_ext, "no_op")', + ]) + self.ScratchFile('BUILD') + self.ScratchFile('WORKSPACE') + self.ScratchFile('ext.bzl', [ + 'def _no_op_impl(ctx):', + ' ctx.file("WORKSPACE")', + ' ctx.file("BUILD", "filegroup(name=\\"no_op\\")")', + 'no_op = repository_rule(_no_op_impl)', + 'def _data_ext_impl(ctx):', + ' if not ctx.download(url="%s", output="data.txt").success:' % + data_url, + ' fail("download failed")', + ' if ctx.read("data.txt").strip() != "some data":', + ' fail("unexpected downloaded content: %s" % ctx.read("data.txt").strip())', + ' no_op(name="no_op")', + 'data_ext = module_extension(_data_ext_impl)', + ]) + self.RunBazel(['build', '@no_op//:no_op'], allow_failure=False) + if __name__ == '__main__': unittest.main() diff --git a/src/test/shell/bazel/bazel_workspaces_test.sh b/src/test/shell/bazel/bazel_workspaces_test.sh index 02660bdd123e10..a0669133572c25 100755 --- a/src/test/shell/bazel/bazel_workspaces_test.sh +++ b/src/test/shell/bazel/bazel_workspaces_test.sh @@ -144,7 +144,7 @@ EOF function test_execute2() { set_workspace_command 'repository_ctx.execute(["echo", "test_contents"], 21, {"Arg1": "Val1"}, True)' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:25' 1 ensure_contains_exactly 'arguments: "echo"' 1 @@ -154,13 +154,13 @@ function test_execute2() { ensure_contains_exactly 'key: "Arg1"' 1 ensure_contains_exactly 'value: "Val1"' 1 # Workspace contains 2 file commands - ensure_contains_atleast 'rule: "//external:repo"' 3 + ensure_contains_atleast 'context: "repository @repo"' 3 } function test_execute_quiet2() { set_workspace_command 'repository_ctx.execute(["echo", "test2"], 32, {"A1": "V1"}, False)' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:25' 1 ensure_contains_exactly 'arguments: "echo"' 1 @@ -172,7 +172,7 @@ function test_execute_quiet2() { ensure_contains_exactly 'key: "A1"' 1 ensure_contains_exactly 'value: "V1"' 1 # Workspace contains 2 file commands - ensure_contains_atleast 'rule: "//external:repo"' 3 + ensure_contains_atleast 'context: "repository @repo"' 3 } function test_download() { @@ -188,10 +188,10 @@ function test_download() { set_workspace_command "repository_ctx.download(\"http://localhost:${fileserver_port}/file.txt\", \"file.txt\", \"${file_sha256}\")" - build_and_process_log --exclude_rule 
"//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:26' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'download_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/file.txt\"" 1 ensure_contains_exactly 'output: "file.txt"' 1 @@ -216,10 +216,10 @@ function test_download_multiple() { set_workspace_command "repository_ctx.download([\"http://localhost:${fileserver_port}/file1.txt\",\"http://localhost:${fileserver_port}/file2.txt\"], \"out_for_list.txt\", sha256='${sha256}')" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:26' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'download_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/file1.txt\"" 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/file2.txt\"" 1 @@ -243,10 +243,10 @@ function test_download_integrity_sha256() { set_workspace_command "repository_ctx.download(\"http://localhost:${fileserver_port}/file.txt\", \"file.txt\", integrity=\"${file_integrity}\")" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:26' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'download_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/file.txt\"" 1 ensure_contains_exactly 'output: "file.txt"' 1 @@ -272,10 +272,10 @@ function test_download_integrity_sha512() { set_workspace_command "repository_ctx.download(\"http://localhost:${fileserver_port}/file.txt\", \"file.txt\", integrity=\"${file_integrity}\")" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:26' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'download_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/file.txt\"" 1 ensure_contains_exactly 'output: "file.txt"' 1 @@ -332,11 +332,11 @@ function test_download_then_extract() { repository_ctx.download(\"http://localhost:${fileserver_port}/download_then_extract.zip\", \"downloaded_file.zip\", \"${file_sha256}\") repository_ctx.extract(\"downloaded_file.zip\", \"out_dir\", \"server_dir/\")" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:3:26' 1 ensure_contains_exactly 'location: .*repos.bzl:4:25' 1 - ensure_contains_atleast 'rule: "//external:repo"' 2 + ensure_contains_atleast 'context: "repository @repo"' 2 ensure_contains_exactly 'download_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/download_then_extract.zip\"" 1 ensure_contains_exactly 'output: "downloaded_file.zip"' 1 @@ -369,11 +369,11 @@ function test_download_then_extract_tar() { 
repository_ctx.download(\"http://localhost:${fileserver_port}/download_then_extract.tar.gz\", \"downloaded_file.tar.gz\", \"${file_sha256}\") repository_ctx.extract(\"downloaded_file.tar.gz\", \"out_dir\", \"data_dir/\")" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:3:26' 1 ensure_contains_exactly 'location: .*repos.bzl:4:25' 1 - ensure_contains_atleast 'rule: "//external:repo"' 2 + ensure_contains_atleast 'context: "repository @repo"' 2 ensure_contains_exactly 'download_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/download_then_extract.tar.gz\"" 1 ensure_contains_exactly 'output: "downloaded_file.tar.gz"' 1 @@ -403,10 +403,10 @@ function test_download_and_extract() { set_workspace_command "repository_ctx.download_and_extract(\"http://localhost:${fileserver_port}/download_and_extract.zip\", \"out_dir\", \"${file_sha256}\", \"zip\", \"server_dir/\")" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:38' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'download_and_extract_event' 1 ensure_contains_exactly "url: \"http://localhost:${fileserver_port}/download_and_extract.zip\"" 1 ensure_contains_exactly 'output: "out_dir"' 1 @@ -436,10 +436,10 @@ function test_extract_rename_files() { 'prefix/A.txt': 'prefix/renamed-A.txt', })" - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:3:25' 1 - ensure_contains_atleast 'rule: "//external:repo"' 2 + ensure_contains_atleast 'context: "repository @repo"' 2 ensure_contains_exactly 'extract_event' 1 ensure_contains_exactly 'rename_files' 1 ensure_contains_exactly 'key: "prefix/A.txt"' 1 @@ -452,10 +452,10 @@ function test_extract_rename_files() { function test_file() { set_workspace_command 'repository_ctx.file("filefile.sh", "echo filefile", True)' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:22' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 # There are 3 file_event in external:repo as it is currently set up ensure_contains_exactly 'file_event' 3 @@ -467,10 +467,10 @@ function test_file() { function test_file_nonascii() { set_workspace_command 'repository_ctx.file("filefile.sh", "echo fïlëfïlë", True)' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:22' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 # There are 3 file_event in external:repo as it is currently set up ensure_contains_exactly 'file_event' 3 @@ -498,11 +498,11 @@ function test_read() { if read_result != content: fail("read(): expected %r, got %r" % (content, read_result))' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:4:22' 1 
ensure_contains_exactly 'location: .*repos.bzl:5:36' 1 - ensure_contains_atleast 'rule: "//external:repo"' 2 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'read_event' 1 ensure_contains_exactly 'path: ".*filefile.sh"' 2 @@ -519,7 +519,7 @@ function test_read_roundtrip_legacy_utf8() { if read_result != corrupted_content: fail("read(): expected %r, got %r" % (corrupted_content, read_result))' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" } function test_read_roundtrip_nolegacy_utf8() { @@ -530,15 +530,15 @@ function test_read_roundtrip_nolegacy_utf8() { if read_result != content: fail("read(): expected %r, got %r" % (content, read_result))' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" } function test_os() { set_workspace_command 'print(repository_ctx.os.name)' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'os_event' 1 } @@ -546,10 +546,10 @@ function test_symlink() { set_workspace_command 'repository_ctx.file("symlink.txt", "something") repository_ctx.symlink("symlink.txt", "symlink_out.txt")' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:22' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'symlink_event' 1 ensure_contains_exactly 'target: ".*symlink.txt"' 1 ensure_contains_exactly 'path: ".*symlink_out.txt"' 1 @@ -559,10 +559,10 @@ function test_template() { set_workspace_command 'repository_ctx.file("template_in.txt", "%{subKey}", False) repository_ctx.template("template_out.txt", "template_in.txt", {"subKey": "subVal"}, True)' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:22' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'template_event' 1 ensure_contains_exactly 'path: ".*template_out.txt"' 1 ensure_contains_exactly 'template: ".*template_in.txt"' 1 @@ -574,10 +574,10 @@ function test_template() { function test_which() { set_workspace_command 'print(repository_ctx.which("which_prog"))' - build_and_process_log --exclude_rule "//external:local_config_cc" + build_and_process_log --exclude_rule "repository @local_config_cc" ensure_contains_exactly 'location: .*repos.bzl:2:29' 1 - ensure_contains_atleast 'rule: "//external:repo"' 1 + ensure_contains_atleast 'context: "repository @repo"' 1 ensure_contains_exactly 'which_event' 1 ensure_contains_exactly 'program: "which_prog"' 1 } diff --git a/src/test/shell/bazel/starlark_repository_test.sh b/src/test/shell/bazel/starlark_repository_test.sh index 5417940c2273a3..afec945adb7ebc 100755 --- a/src/test/shell/bazel/starlark_repository_test.sh +++ b/src/test/shell/bazel/starlark_repository_test.sh @@ -2147,7 +2147,7 @@ EOF bazel build --experimental_repository_disable_download //:it > "${TEST_log}" 2>&1 \ && fail "Expected failure" || : - expect_log "Failed 
to download repo ext: download is disabled" + expect_log "Failed to download repository @ext: download is disabled" } function test_disable_download_should_allow_distdir() { diff --git a/src/tools/workspacelog/src/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParser.java b/src/tools/workspacelog/src/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParser.java index d5375f029537ab..1a29d3a651a392 100644 --- a/src/tools/workspacelog/src/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParser.java +++ b/src/tools/workspacelog/src/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParser.java @@ -59,7 +59,7 @@ public WorkspaceEvent getNext() throws IOException { return null; } w = WorkspaceEvent.parseDelimitedFrom(in); - } while (excludedRules.contains(w.getRule())); + } while (excludedRules.contains(w.getContext())); return w; } } diff --git a/src/tools/workspacelog/test/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParserTest.java b/src/tools/workspacelog/test/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParserTest.java index 1d53fdf2c1b5b6..49db7ed52adc60 100644 --- a/src/tools/workspacelog/test/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParserTest.java +++ b/src/tools/workspacelog/test/main/java/com/google/devtools/build/workspacelog/WorkspaceLogParserTest.java @@ -58,7 +58,8 @@ public void getNextEmptyWithExclusions() throws Exception { @Test public void getNextSingleExcluded1() throws Exception { - WorkspaceEvent a = WorkspaceEvent.newBuilder().setRule("a").setLocation("SomeLocation").build(); + WorkspaceEvent a = + WorkspaceEvent.newBuilder().setContext("a").setLocation("SomeLocation").build(); // Excluded by first exclusion ExcludingLogParser p = @@ -69,7 +70,8 @@ public void getNextSingleExcluded1() throws Exception { @Test public void getNextSingleExcluded2() throws Exception { - WorkspaceEvent a = WorkspaceEvent.newBuilder().setRule("a").setLocation("SomeLocation").build(); + WorkspaceEvent a = + WorkspaceEvent.newBuilder().setContext("a").setLocation("SomeLocation").build(); // Excluded by second exclusion ExcludingLogParser p = @@ -81,7 +83,7 @@ public void getNextSingleExcluded2() throws Exception { @Test public void getNextSingleIncluded() throws Exception { WorkspaceEvent a = - WorkspaceEvent.newBuilder().setRule("onOnList").setLocation("SomeLocation").build(); + WorkspaceEvent.newBuilder().setContext("onOnList").setLocation("SomeLocation").build(); ExcludingLogParser p = new ExcludingLogParser( @@ -92,11 +94,11 @@ public void getNextSingleIncluded() throws Exception { @Test public void getNextSingleLongerList1() throws Exception { - WorkspaceEvent a = WorkspaceEvent.newBuilder().setRule("a").setLocation("a1").build(); - WorkspaceEvent b = WorkspaceEvent.newBuilder().setRule("b").setLocation("b1").build(); - WorkspaceEvent c = WorkspaceEvent.newBuilder().setRule("a").setLocation("a2").build(); - WorkspaceEvent d = WorkspaceEvent.newBuilder().setRule("b").setLocation("b2").build(); - WorkspaceEvent e = WorkspaceEvent.newBuilder().setRule("d").build(); + WorkspaceEvent a = WorkspaceEvent.newBuilder().setContext("a").setLocation("a1").build(); + WorkspaceEvent b = WorkspaceEvent.newBuilder().setContext("b").setLocation("b1").build(); + WorkspaceEvent c = WorkspaceEvent.newBuilder().setContext("a").setLocation("a2").build(); + WorkspaceEvent d = WorkspaceEvent.newBuilder().setContext("b").setLocation("b2").build(); + WorkspaceEvent e = 
WorkspaceEvent.newBuilder().setContext("d").build(); ExcludingLogParser p = new ExcludingLogParser( @@ -107,11 +109,11 @@ public void getNextSingleLongerList1() throws Exception { @Test public void getNextSingleLongerList2() throws Exception { - WorkspaceEvent a = WorkspaceEvent.newBuilder().setRule("a").setLocation("a1").build(); - WorkspaceEvent b = WorkspaceEvent.newBuilder().setRule("b").setLocation("b1").build(); - WorkspaceEvent c = WorkspaceEvent.newBuilder().setRule("a").setLocation("a2").build(); - WorkspaceEvent d = WorkspaceEvent.newBuilder().setRule("b").setLocation("b2").build(); - WorkspaceEvent e = WorkspaceEvent.newBuilder().setRule("d").build(); + WorkspaceEvent a = WorkspaceEvent.newBuilder().setContext("a").setLocation("a1").build(); + WorkspaceEvent b = WorkspaceEvent.newBuilder().setContext("b").setLocation("b1").build(); + WorkspaceEvent c = WorkspaceEvent.newBuilder().setContext("a").setLocation("a2").build(); + WorkspaceEvent d = WorkspaceEvent.newBuilder().setContext("b").setLocation("b2").build(); + WorkspaceEvent e = WorkspaceEvent.newBuilder().setContext("d").build(); ExcludingLogParser p = new ExcludingLogParser(