From b652d6debca47589312571ad6ef00dc6a78ac256 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 02:20:12 -0700 Subject: [PATCH 01/68] Remove the non-parallel StreamedOutputHandler for aquery. --experimental_parallel_aquery_output was flipped. Now we can clean up the code. This CL would make the flag no-op and to be removed in a follow-up CL. PiperOrigin-RevId: 543680809 Change-Id: I6092a0b9981dfe69281935688b342d29f6c06fde --- .../lib/bazel/rules/BazelRulesModule.java | 9 ++ .../build/lib/buildtool/AqueryProcessor.java | 3 +- ...tionGraphProtoOutputFormatterCallback.java | 32 ++--- .../aquery/ActionGraphQueryEnvironment.java | 10 +- .../lib/query2/aquery/AqueryOptions.java | 10 -- .../actiongraph/v2/StreamedOutputHandler.java | 132 ------------------ 6 files changed, 23 insertions(+), 173 deletions(-) delete mode 100644 src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/StreamedOutputHandler.java diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java index 5a4112308ba75f..a0fd5fbb6df893 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java @@ -386,6 +386,15 @@ public static class BuildGraveyardOptions extends OptionsBase { metadataTags = {OptionMetadataTag.EXPERIMENTAL}, help = "Deprecated no-op.") public boolean enableBzlDocDump; + + // TODO(b/274595070): Remove this option. + @Option( + name = "experimental_parallel_aquery_output", + defaultValue = "true", + documentationCategory = OptionDocumentationCategory.QUERY, + effectTags = {OptionEffectTag.UNKNOWN}, + help = "No-op.") + public boolean parallelAqueryOutput; } /** This is where deprecated Bazel-specific options only used by the build command go to die. 
*/ diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/AqueryProcessor.java b/src/main/java/com/google/devtools/build/lib/buildtool/AqueryProcessor.java index d8095da4d90a98..f804dd30db86ca 100644 --- a/src/main/java/com/google/devtools/build/lib/buildtool/AqueryProcessor.java +++ b/src/main/java/com/google/devtools/build/lib/buildtool/AqueryProcessor.java @@ -80,8 +80,7 @@ public BlazeCommandResult dumpActionGraphFromSkyframe(CommandEnvironment env) { ActionGraphProtoOutputFormatterCallback.constructAqueryOutputHandler( OutputType.fromString(aqueryOptions.outputFormat), queryRuntimeHelper.getOutputStreamForQueryOutput(), - printStream, - aqueryOptions.parallelAqueryOutput)) { + printStream)) { ActionGraphDump actionGraphDump = new ActionGraphDump( aqueryOptions.includeCommandline, diff --git a/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphProtoOutputFormatterCallback.java b/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphProtoOutputFormatterCallback.java index e13d61a558058e..5cbaf4846d321f 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphProtoOutputFormatterCallback.java +++ b/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphProtoOutputFormatterCallback.java @@ -31,7 +31,6 @@ import com.google.devtools.build.lib.skyframe.actiongraph.v2.AqueryOutputHandler.OutputType; import com.google.devtools.build.lib.skyframe.actiongraph.v2.MonolithicOutputHandler; import com.google.devtools.build.lib.skyframe.actiongraph.v2.StreamedConsumingOutputHandler; -import com.google.devtools.build.lib.skyframe.actiongraph.v2.StreamedOutputHandler; import com.google.protobuf.CodedOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -45,8 +44,6 @@ /** Default output callback for aquery, prints proto output. */ public class ActionGraphProtoOutputFormatterCallback extends AqueryThreadsafeCallback { - // TODO(b/274595070): Clean this up after flag flip. 
- // Arbitrarily chosen. Large enough for good performance, small enough not to cause OOMs. private static final int BLOCKING_QUEUE_SIZE = Runtime.getRuntime().availableProcessors() * 2; private final OutputType outputType; @@ -70,8 +67,7 @@ public class ActionGraphProtoOutputFormatterCallback extends AqueryThreadsafeCal super(eventHandler, options, out, accessor); this.outputType = outputType; this.actionFilters = actionFilters; - this.aqueryOutputHandler = - constructAqueryOutputHandler(outputType, out, printStream, options.parallelAqueryOutput); + this.aqueryOutputHandler = constructAqueryOutputHandler(outputType, out, printStream); this.actionGraphDump = new ActionGraphDump( options.includeCommandline, @@ -86,27 +82,16 @@ public class ActionGraphProtoOutputFormatterCallback extends AqueryThreadsafeCal public static AqueryOutputHandler constructAqueryOutputHandler( OutputType outputType, OutputStream out, PrintStream printStream) { - return constructAqueryOutputHandler(outputType, out, printStream, /* parallelized= */ false); - } - - public static AqueryOutputHandler constructAqueryOutputHandler( - OutputType outputType, OutputStream out, PrintStream printStream, boolean parallelized) { switch (outputType) { case BINARY: case DELIMITED_BINARY: case TEXT: - return parallelized - ? 
new StreamedConsumingOutputHandler( - outputType, - out, - CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), - printStream, - new LinkedBlockingQueue<>(BLOCKING_QUEUE_SIZE)) - : new StreamedOutputHandler( - outputType, - out, - CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), - printStream); + return new StreamedConsumingOutputHandler( + outputType, + out, + CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), + printStream, + new LinkedBlockingQueue<>(BLOCKING_QUEUE_SIZE)); case JSON: return new MonolithicOutputHandler(printStream); } @@ -131,8 +116,7 @@ public void close(boolean failFast) throws IOException { @Override public void processOutput(Iterable partialResult) throws IOException, InterruptedException { - if (options.parallelAqueryOutput - && aqueryOutputHandler instanceof AqueryConsumingOutputHandler) { + if (aqueryOutputHandler instanceof AqueryConsumingOutputHandler) { processOutputInParallel(partialResult); return; } diff --git a/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphQueryEnvironment.java b/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphQueryEnvironment.java index 5e5382ad940fb1..fa716e1e4e4458 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphQueryEnvironment.java +++ b/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphQueryEnvironment.java @@ -45,7 +45,7 @@ import com.google.devtools.build.lib.query2.engine.QueryUtil.ThreadSafeMutableKeyExtractorBackedSetImpl; import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; -import com.google.devtools.build.lib.skyframe.actiongraph.v2.StreamedOutputHandler; +import com.google.devtools.build.lib.skyframe.actiongraph.v2.AqueryOutputHandler; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.WalkableGraph; import java.io.OutputStream; @@ -145,28 +145,28 @@ public 
ConfiguredTargetValueAccessor getAccessor() { aqueryOptions, out, accessor, - StreamedOutputHandler.OutputType.BINARY, + AqueryOutputHandler.OutputType.BINARY, actionFilters), new ActionGraphProtoOutputFormatterCallback( eventHandler, aqueryOptions, out, accessor, - StreamedOutputHandler.OutputType.DELIMITED_BINARY, + AqueryOutputHandler.OutputType.DELIMITED_BINARY, actionFilters), new ActionGraphProtoOutputFormatterCallback( eventHandler, aqueryOptions, out, accessor, - StreamedOutputHandler.OutputType.TEXT, + AqueryOutputHandler.OutputType.TEXT, actionFilters), new ActionGraphProtoOutputFormatterCallback( eventHandler, aqueryOptions, out, accessor, - StreamedOutputHandler.OutputType.JSON, + AqueryOutputHandler.OutputType.JSON, actionFilters), new ActionGraphTextOutputFormatterCallback( eventHandler, aqueryOptions, out, accessor, actionFilters, getMainRepoMapping()), diff --git a/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java b/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java index f678743c25e478..0fed2c2058133b 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java +++ b/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java @@ -89,14 +89,4 @@ public class AqueryOptions extends CommonQueryOptions { + " output. This does not deduplicate depsets that don't share an immediate parent." + " This does not affect the final effective list of input artifacts of the actions.") public boolean deduplicateDepsets; - - @Option( - name = "experimental_parallel_aquery_output", - defaultValue = "false", - documentationCategory = OptionDocumentationCategory.QUERY, - effectTags = {OptionEffectTag.UNKNOWN}, - help = - "Whether aquery proto/textproto output should be written in parallel. 
No-op for the " - + "other output formats.") - public boolean parallelAqueryOutput; } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/StreamedOutputHandler.java b/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/StreamedOutputHandler.java deleted file mode 100644 index 9ca936bad10c06..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/StreamedOutputHandler.java +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright 2019 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-package com.google.devtools.build.lib.skyframe.actiongraph.v2; - -import static com.google.devtools.build.lib.skyframe.actiongraph.v2.AqueryOutputHandler.OutputType.BINARY; -import static com.google.devtools.build.lib.skyframe.actiongraph.v2.AqueryOutputHandler.OutputType.DELIMITED_BINARY; -import static com.google.devtools.build.lib.skyframe.actiongraph.v2.AqueryOutputHandler.OutputType.TEXT; - -import com.google.common.base.Preconditions; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.Action; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.ActionGraphContainer; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.Artifact; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.AspectDescriptor; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.Configuration; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.DepSetOfFiles; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.PathFragment; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.RuleClass; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.Target; -import com.google.devtools.build.lib.skyframe.actiongraph.v2.PrintTask.ProtoPrintTask; -import com.google.devtools.build.lib.skyframe.actiongraph.v2.PrintTask.StreamedProtoPrintTask; -import com.google.devtools.build.lib.skyframe.actiongraph.v2.PrintTask.TextProtoPrintTask; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Message; -import java.io.IOException; -import java.io.OutputStream; -import java.io.PrintStream; - -/** - * Manages the various streamed output channels of aquery. This does not support JSON format. - * TODO(b/274595070) Remove this class after the flag flip. 
- */ -public class StreamedOutputHandler implements AqueryOutputHandler { - private final OutputType outputType; - private final OutputStream outputStream; - private final CodedOutputStream codedOutputStream; - private final PrintStream printStream; - - public StreamedOutputHandler( - OutputType outputType, - OutputStream outputStream, - CodedOutputStream codedOutputStream, - PrintStream printStream) { - this.outputType = outputType; - Preconditions.checkArgument( - outputType == BINARY || outputType == DELIMITED_BINARY || outputType == TEXT, - "Only proto, streamed_proto, textproto outputs should be streamed."); - this.outputStream = outputStream; - this.codedOutputStream = codedOutputStream; - this.printStream = printStream; - } - - @Override - public void outputArtifact(Artifact message) throws IOException { - printMessage(message, ActionGraphContainer.ARTIFACTS_FIELD_NUMBER, "artifacts"); - } - - @Override - public void outputAction(Action message) throws IOException { - printMessage(message, ActionGraphContainer.ACTIONS_FIELD_NUMBER, "actions"); - } - - @Override - public void outputTarget(Target message) throws IOException { - printMessage(message, ActionGraphContainer.TARGETS_FIELD_NUMBER, "targets"); - } - - @Override - public void outputDepSetOfFiles(DepSetOfFiles message) throws IOException { - printMessage(message, ActionGraphContainer.DEP_SET_OF_FILES_FIELD_NUMBER, "dep_set_of_files"); - } - - @Override - public void outputConfiguration(Configuration message) throws IOException { - printMessage(message, ActionGraphContainer.CONFIGURATION_FIELD_NUMBER, "configuration"); - } - - @Override - public void outputAspectDescriptor(AspectDescriptor message) throws IOException { - printMessage( - message, ActionGraphContainer.ASPECT_DESCRIPTORS_FIELD_NUMBER, "aspect_descriptors"); - } - - @Override - public void outputRuleClass(RuleClass message) throws IOException { - printMessage(message, ActionGraphContainer.RULE_CLASSES_FIELD_NUMBER, "rule_classes"); - } - - 
@Override - public void outputPathFragment(PathFragment message) throws IOException { - printMessage(message, ActionGraphContainer.PATH_FRAGMENTS_FIELD_NUMBER, "path_fragments"); - } - - /** - * Prints the Message to the appropriate output channel. - * - * @param message The message to be printed. - */ - private void printMessage(Message message, int fieldNumber, String messageLabel) - throws IOException { - switch (outputType) { - case BINARY: - ProtoPrintTask.print(codedOutputStream, message, fieldNumber); - break; - case DELIMITED_BINARY: - StreamedProtoPrintTask.print(outputStream, message, fieldNumber); - break; - case TEXT: - TextProtoPrintTask.print(printStream, message, messageLabel); - break; - default: - throw new IllegalStateException("Unknown outputType " + outputType.formatName()); - } - } - - @Override - public void close() throws IOException { - outputStream.flush(); - codedOutputStream.flush(); - printStream.flush(); - } -} From 5fa47187803c22e72a312739fd48514568e6d617 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 02:22:08 -0700 Subject: [PATCH 02/68] Remove dead code from CppCompileAction.java. VALIDATION_DEBUG_WARN is always false and no other references exist, so this should be safe? RELNOTES: None. 
PiperOrigin-RevId: 543681170 Change-Id: Ieef1262681ad8c75c63bc157d25d36f73496d38f --- .../build/lib/rules/cpp/CppCompileAction.java | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java index 655a5e3192baba..3e43c5a62d0b0d 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java @@ -30,7 +30,6 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.common.io.ByteStreams; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.ActionEnvironment; @@ -124,8 +123,6 @@ public class CppCompileAction extends AbstractAction implements IncludeScannable private static final PathFragment BUILD_PATH_FRAGMENT = PathFragment.create("BUILD"); - private static final boolean VALIDATION_DEBUG_WARN = false; - @VisibleForTesting static final String CPP_COMPILE_MNEMONIC = "CppCompile"; @VisibleForTesting static final String OBJC_COMPILE_MNEMONIC = "ObjcCompile"; @@ -1058,29 +1055,6 @@ public void validateInclusions( errors.add(input.getExecPath().toString()); } } - if (VALIDATION_DEBUG_WARN) { - synchronized (System.err) { - if (errors.hasProblems()) { - if (errors.hasProblems()) { - System.err.println("ERROR: Include(s) were not in declared srcs:"); - } else { - System.err.println( - "INFO: Include(s) were OK for '" + getSourceFile() + "', declared srcs:"); - } - for (Artifact a : ccCompilationContext.getDeclaredIncludeSrcs().toList()) { - System.err.println(" '" + a.toDetailString() + "'"); - } - System.err.println(" or under loose headers dirs:"); - for (PathFragment f : 
Sets.newTreeSet(ccCompilationContext.getLooseHdrsDirs().toList())) { - System.err.println(" '" + f + "'"); - } - System.err.println(" with prefixes:"); - for (PathFragment dirpath : ccCompilationContext.getQuoteIncludeDirs()) { - System.err.println(" '" + dirpath + "'"); - } - } - } - } errors.assertProblemFree(this, getSourceFile()); } From 88412ce8ef94dfc7b27c0331d625e14056fc0bc8 Mon Sep 17 00:00:00 2001 From: Tyler Williams Date: Tue, 27 Jun 2023 03:15:05 -0700 Subject: [PATCH 03/68] Support new-style digest functions Support new-style digest functions. This PR adds support for new-style digest functions to the remote execution library code. The remote-apis spec says: ``` // * `digest_function` is a lowercase string form of a `DigestFunction.Value` // enum, indicating which digest function was used to compute `hash`. If the // digest function used is one of MD5, MURMUR3, SHA1, SHA256, SHA384, SHA512, // or VSO, this component MUST be omitted. In that case the server SHOULD // infer the digest function using the length of the `hash` and the digest // functions announced in the server's capabilities. ``` This is a partial commit for #18658. Closes #18731. 
PiperOrigin-RevId: 543691155 Change-Id: If8c386d923db1b24dff6054c8ab3f783409b7f13 --- .../build/lib/remote/ByteStreamUploader.java | 37 +++++- .../build/lib/remote/GrpcCacheClient.java | 24 +++- .../lib/remote/ByteStreamUploaderTest.java | 120 ++++++++++++++---- ...SpawnRunnerWithGrpcRemoteExecutorTest.java | 12 +- 4 files changed, 156 insertions(+), 37 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/remote/ByteStreamUploader.java b/src/main/java/com/google/devtools/build/lib/remote/ByteStreamUploader.java index 18aaa6a807a7cf..1bceff3b6c1b9b 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/ByteStreamUploader.java +++ b/src/main/java/com/google/devtools/build/lib/remote/ByteStreamUploader.java @@ -21,6 +21,7 @@ import static java.util.concurrent.TimeUnit.SECONDS; import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.DigestFunction; import com.google.bytestream.ByteStreamGrpc; import com.google.bytestream.ByteStreamGrpc.ByteStreamFutureStub; import com.google.bytestream.ByteStreamGrpc.ByteStreamStub; @@ -69,6 +70,7 @@ final class ByteStreamUploader { private final CallCredentialsProvider callCredentialsProvider; private final long callTimeoutSecs; private final RemoteRetrier retrier; + private final DigestFunction.Value digestFunction; @Nullable private final Semaphore openedFilePermits; @@ -89,7 +91,8 @@ final class ByteStreamUploader { CallCredentialsProvider callCredentialsProvider, long callTimeoutSecs, RemoteRetrier retrier, - int maximumOpenFiles) { + int maximumOpenFiles, + DigestFunction.Value digestFunction) { checkArgument(callTimeoutSecs > 0, "callTimeoutSecs must be gt 0."); this.instanceName = instanceName; this.channel = channel; @@ -97,6 +100,7 @@ final class ByteStreamUploader { this.callTimeoutSecs = callTimeoutSecs; this.retrier = retrier; this.openedFilePermits = maximumOpenFiles != -1 ? 
new Semaphore(maximumOpenFiles) : null; + this.digestFunction = digestFunction; } @VisibleForTesting @@ -175,11 +179,34 @@ public ListenableFuture uploadBlobAsync( MoreExecutors.directExecutor()); } - private static String buildUploadResourceName( + private boolean isOldStyleDigestFunction() { + // Old-style digest functions (SHA256, etc) are distinguishable by the length + // of their hash alone and do not require extra specification, but newer + // digest functions (which may have the same length hashes as the older + // functions!) must be explicitly specified in the upload resource name. + return digestFunction.getNumber() <= 7; + } + + private String buildUploadResourceName( String instanceName, UUID uuid, Digest digest, boolean compressed) { - String template = - compressed ? "uploads/%s/compressed-blobs/zstd/%s/%d" : "uploads/%s/blobs/%s/%d"; - String resourceName = format(template, uuid, digest.getHash(), digest.getSizeBytes()); + + String resourceName; + + if (isOldStyleDigestFunction()) { + String template = + compressed ? "uploads/%s/compressed-blobs/zstd/%s/%d" : "uploads/%s/blobs/%s/%d"; + resourceName = format(template, uuid, digest.getHash(), digest.getSizeBytes()); + } else { + String template = + compressed ? 
"uploads/%s/compressed-blobs/zstd/%s/%s/%d" : "uploads/%s/blobs/%s/%s/%d"; + resourceName = + format( + template, + uuid, + Ascii.toLowerCase(digestFunction.getValueDescriptor().getName()), + digest.getHash(), + digest.getSizeBytes()); + } if (!Strings.isNullOrEmpty(instanceName)) { resourceName = instanceName + "/" + resourceName; } diff --git a/src/main/java/com/google/devtools/build/lib/remote/GrpcCacheClient.java b/src/main/java/com/google/devtools/build/lib/remote/GrpcCacheClient.java index 470d3845dc3273..8c86a31d9bafa8 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/GrpcCacheClient.java +++ b/src/main/java/com/google/devtools/build/lib/remote/GrpcCacheClient.java @@ -22,6 +22,7 @@ import build.bazel.remote.execution.v2.ContentAddressableStorageGrpc; import build.bazel.remote.execution.v2.ContentAddressableStorageGrpc.ContentAddressableStorageFutureStub; import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.DigestFunction; import build.bazel.remote.execution.v2.FindMissingBlobsRequest; import build.bazel.remote.execution.v2.FindMissingBlobsResponse; import build.bazel.remote.execution.v2.GetActionResultRequest; @@ -107,7 +108,8 @@ public GrpcCacheClient( callCredentialsProvider, options.remoteTimeout.getSeconds(), retrier, - options.maximumOpenFiles); + options.maximumOpenFiles, + digestUtil.getDigestFunction()); maxMissingBlobsDigestsPerMessage = computeMaxMissingBlobsDigestsPerMessage(); Preconditions.checkState( maxMissingBlobsDigestsPerMessage > 0, "Error: gRPC message size too small."); @@ -352,12 +354,24 @@ private ListenableFuture downloadBlob( MoreExecutors.directExecutor()); } - public static String getResourceName(String instanceName, Digest digest, boolean compressed) { + private static boolean isOldStyleDigestFunction(DigestFunction.Value digestFunction) { + // Old-style digest functions (SHA256, etc) are distinguishable by the length + // of their hash alone and do not require extra specification, 
but newer + // digest functions (which may have the same length hashes as the older + // functions!) must be explicitly specified in the upload resource name. + return digestFunction.getNumber() <= 7; + } + + public static String getResourceName( + String instanceName, Digest digest, boolean compressed, DigestFunction.Value digestFunction) { String resourceName = ""; if (!instanceName.isEmpty()) { resourceName += instanceName + "/"; } resourceName += compressed ? "compressed-blobs/zstd/" : "blobs/"; + if (!isOldStyleDigestFunction(digestFunction)) { + resourceName += Ascii.toLowerCase(digestFunction.getValueDescriptor().getName()) + "/"; + } return resourceName + DigestUtil.toString(digest); } @@ -369,7 +383,11 @@ private ListenableFuture requestRead( @Nullable Supplier digestSupplier, Channel channel) { String resourceName = - getResourceName(options.remoteInstanceName, digest, options.cacheCompression); + getResourceName( + options.remoteInstanceName, + digest, + options.cacheCompression, + digestUtil.getDigestFunction()); SettableFuture future = SettableFuture.create(); OutputStream out; try { diff --git a/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java b/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java index fd19dddc9e9114..53d433faa03e21 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java +++ b/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java @@ -21,6 +21,7 @@ import static org.mockito.ArgumentMatchers.any; import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.DigestFunction; import build.bazel.remote.execution.v2.RequestMetadata; import com.github.luben.zstd.Zstd; import com.github.luben.zstd.ZstdInputStream; @@ -165,7 +166,8 @@ public void singleBlobUploadShouldWork() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* 
maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -192,7 +194,8 @@ public void singleChunkCompressedUploadAlreadyExists() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /* maximumOpenFiles= */ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = {'A'}; @@ -232,8 +235,7 @@ public void onError(Throwable throwable) { } @Override - public void onCompleted() { - } + public void onCompleted() {} }; } }); @@ -256,7 +258,8 @@ public void progressiveUploadShouldWork() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, 3, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -372,7 +375,8 @@ public void progressiveCompressedUploadShouldWork() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, 300, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); int chunkSize = 1024; int skipSize = chunkSize + 1; @@ -491,7 +495,8 @@ public void progressiveCompressedUploadSeesAlreadyExistsAtTheEnd() throws Except CallCredentialsProvider.NO_CREDENTIALS, 300, retrier, - /* maximumOpenFiles= */ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); int chunkSize = 1024; byte[] blob = new byte[chunkSize * 2 + 1]; @@ -549,7 +554,8 @@ public void concurrentlyCompletedUploadIsNotRetried() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, 1, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -607,7 +613,8 @@ public void unimplementedQueryShouldRestartUpload() throws Exception { 
CallCredentialsProvider.NO_CREDENTIALS, 3, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -676,7 +683,8 @@ public void earlyWriteResponseShouldCompleteUpload() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, 3, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -713,7 +721,8 @@ public void incorrectCommittedSizeFailsCompletedUpload() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, 3, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -766,7 +775,8 @@ public void incorrectCommittedSizeDoesNotFailIncompleteUpload() throws Exception CallCredentialsProvider.NO_CREDENTIALS, 300, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -798,7 +808,8 @@ public void multipleBlobsUploadShouldWork() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); int numUploads = 10; Map blobsByHash = Maps.newHashMap(); @@ -830,7 +841,8 @@ public void tooManyFilesIOException_adviseMaximumOpenFilesFlag() throws Exceptio CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE]; Chunker chunker = Mockito.mock(Chunker.class); Digest digest = DIGEST_UTIL.compute(blob); @@ 
-862,7 +874,8 @@ public void availablePermitsOpenFileSemaphore_fewerPermitsThanUploads_endWithAll CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - maximumOpenFiles); + maximumOpenFiles, + /* digestFunction= */ DigestFunction.Value.SHA256); assertThat(uploader.getOpenedFilePermits().availablePermits()).isEqualTo(999); @@ -900,7 +913,8 @@ public void noMaximumOpenFilesFlags_nullSemaphore() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); assertThat(uploader.getOpenedFilePermits()).isNull(); int numUploads = 10; @@ -936,7 +950,8 @@ public void contextShouldBePreservedUponRetries() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); List toUpload = ImmutableList.of("aaaaaaaaaa", "bbbbbbbbbb", "cccccccccc"); Map chunkers = Maps.newHashMapWithExpectedSize(toUpload.size()); @@ -1066,7 +1081,8 @@ public int maxConcurrency() { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE]; Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); @@ -1127,7 +1143,8 @@ public void errorsShouldBeReported() throws IOException, InterruptedException { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE]; Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); @@ -1163,7 +1180,8 @@ public void failureInRetryExecutorShouldBeHandled() throws Exception { 
CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); serviceRegistry.addService( new ByteStreamImplBase() { @@ -1202,7 +1220,8 @@ public void resourceNameWithoutInstanceName() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); serviceRegistry.addService( new ByteStreamImplBase() { @@ -1234,6 +1253,50 @@ public void onCompleted() { uploader.uploadBlob(context, digest, chunker); } + @Test + public void resourceWithNewStyleDigestFunction() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + /* instanceName= */ null, + referenceCountedChannel, + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier, + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.BLAKE3); + + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver response) { + return new StreamObserver() { + @Override + public void onNext(WriteRequest writeRequest) { + // Test that the resource name contains the digest function. 
+ assertThat(writeRequest.getResourceName()).contains("blobs/blake3/"); + } + + @Override + public void onError(Throwable throwable) {} + + @Override + public void onCompleted() { + response.onNext(WriteResponse.newBuilder().setCommittedSize(1).build()); + response.onCompleted(); + } + }; + } + }); + + byte[] blob = new byte[1]; + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + Digest digest = DIGEST_UTIL.compute(blob); + + uploader.uploadBlob(context, digest, chunker); + } + @Test public void nonRetryableStatusShouldNotBeRetried() throws Exception { RemoteRetrier retrier = @@ -1246,7 +1309,8 @@ public void nonRetryableStatusShouldNotBeRetried() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); AtomicInteger numCalls = new AtomicInteger(); @@ -1299,7 +1363,8 @@ public void refresh() throws IOException { callCredentialsProvider, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -1355,7 +1420,8 @@ public void refresh() throws IOException { callCredentialsProvider, /* callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); @@ -1425,7 +1491,8 @@ public void failureAfterUploadCompletes() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* callTimeoutSecs= */ 60, retrier, - -1); + -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE - 1]; new Random().nextBytes(blob); @@ -1484,7 +1551,8 @@ public void testCompressedUploads() throws Exception { CallCredentialsProvider.NO_CREDENTIALS, /* 
callTimeoutSecs= */ 60, retrier, - /*maximumOpenFiles=*/ -1); + /* maximumOpenFiles= */ -1, + /* digestFunction= */ DigestFunction.Value.SHA256); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); diff --git a/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java b/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java index c03e0a6d7395f9..0663ba4aeff8a4 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java +++ b/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java @@ -31,6 +31,7 @@ import build.bazel.remote.execution.v2.Command; import build.bazel.remote.execution.v2.ContentAddressableStorageGrpc.ContentAddressableStorageImplBase; import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.DigestFunction; import build.bazel.remote.execution.v2.Directory; import build.bazel.remote.execution.v2.ExecuteRequest; import build.bazel.remote.execution.v2.ExecuteResponse; @@ -1110,7 +1111,8 @@ public void findMissingBlobs( } }); String stdOutResourceName = - getResourceName(remoteOptions.remoteInstanceName, stdOutDigest, false); + getResourceName( + remoteOptions.remoteInstanceName, stdOutDigest, false, DigestFunction.Value.SHA256); serviceRegistry.addService( new ByteStreamImplBase() { @Override @@ -1171,7 +1173,8 @@ public void findMissingBlobs( } }); String stdOutResourceName = - getResourceName(remoteOptions.remoteInstanceName, stdOutDigest, false); + getResourceName( + remoteOptions.remoteInstanceName, stdOutDigest, false, DigestFunction.Value.SHA256); serviceRegistry.addService( new ByteStreamImplBase() { @Override @@ -1297,7 +1300,10 @@ public void getActionResult( }); String dummyTreeResourceName = getResourceName( - remoteOptions.remoteInstanceName, DUMMY_OUTPUT_DIRECTORY.getTreeDigest(), false); + 
remoteOptions.remoteInstanceName, + DUMMY_OUTPUT_DIRECTORY.getTreeDigest(), + false, + DigestFunction.Value.SHA256); serviceRegistry.addService( new ByteStreamImplBase() { private boolean first = true; From b0942bbde6ccdcce53e299988b16f651308c24a1 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 04:37:31 -0700 Subject: [PATCH 04/68] Apply discussed changes to stale workflow PiperOrigin-RevId: 543705441 Change-Id: I1196a2dff58a7e5799ccf4b58fa7410de8cadb2a --- .github/workflows/stale.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 3109f7f3edaf5b..5fcb5010d3c568 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -33,11 +33,11 @@ jobs: repo-token: ${{ secrets.GITHUB_TOKEN }} days-before-issue-stale: 430 days-before-pr-stale: 430 - days-before-issue-close: 14 - days-before-pr-close: 14 + days-before-issue-close: 90 + days-before-pr-close: 90 stale-issue-message: > Thank you for contributing to the Bazel repository! - This issue has been marked as stale since it has not had any activity in the last 1+ years. It will be closed in the next 14 + This issue has been marked as stale since it has not had any activity in the last 1+ years. It will be closed in the next 90 days unless any other activity occurs or one of the following labels is added: "not stale", "awaiting-bazeler". Please reach out to the triage team (`@bazelbuild/triage`) if you think this issue is still relevant or you are interested in getting the issue resolved. @@ -47,17 +47,17 @@ jobs: stale-pr-message: > Thank you for contributing to the Bazel repository! This pull request has been marked as stale since it has not had any activity in the last 1+ years. It will be closed in the next - 14 days unless any other activity occurs or one of the following labels is added: "not stale", "awaiting-review", "awaiting-PR-merge". 
+ 90 days unless any other activity occurs or one of the following labels is added: "not stale", "awaiting-review", "awaiting-PR-merge". Please reach out to the triage team (`@bazelbuild/triage`) if you think this PR is still relevant or you are interested in getting the PR merged. close-pr-message: > This pull request has been automatically closed due to inactivity. If you're still interested in pursuing this, please reach out to the triage team (`@bazelbuild/triage`). Thanks! stale-issue-label: 'stale' - exempt-issue-labels: 'not stale,awaiting-bazeler,untriaged,P0,P1' + exempt-issue-labels: 'not stale,awaiting-bazeler,untriaged,P0,P1,P2,good first issue,help wanted' close-issue-reason: "not_planned" stale-pr-label: 'stale' - exempt-pr-labels: 'not stale,awaiting-review,awaiting-PR-merge,P0,P1' + exempt-pr-labels: 'not stale,awaiting-review,awaiting-PR-merge,P0,P1,P2' exempt-draft-pr: true operations-per-run: 500 ascending: true From 75b02216b30d3c3b4454cedb94464f10c21b768e Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 04:57:06 -0700 Subject: [PATCH 05/68] Polish doc related to Bazel's release policy and support level PiperOrigin-RevId: 543708599 Change-Id: I7d38606ba5d88208f1dccc65b244c40188fa337c --- site/en/contribute/support.md | 21 ------------------- site/en/docs/_index.yaml | 2 +- site/en/help.md | 5 +++++ .../docgen/templates/be/make-variables.vm | 4 ++-- 4 files changed, 8 insertions(+), 24 deletions(-) delete mode 100644 site/en/contribute/support.md diff --git a/site/en/contribute/support.md b/site/en/contribute/support.md deleted file mode 100644 index 32cb7e37ff3544..00000000000000 --- a/site/en/contribute/support.md +++ /dev/null @@ -1,21 +0,0 @@ -Project: /_project.yaml -Book: /_book.yaml - -# Support Policy - -{% include "_buttons.html" %} - -The Bazel team generally avoids making backwards-incompatible changes. 
However, -these changes are sometimes necessary to fix bugs, make improvements (such as -improving performance or usability) to the system, or to lock down APIs that -are known to be brittle. - -Major changes are announced in advance on the -[bazel-discuss](https://groups.google.com/forum/#!forum/bazel-discuss){: .external} mailing -list. Both undocumented features (attributes, rules, "Make" variables, and -flags) and documented features that are marked *experimental* are subject to -change at any time without prior notice. - -Report any bugs or regressions you find on -[GitHub](https://github.com/bazelbuild/bazel/issues){: .external}. The repository maintainers -make an effort to triage reported issues within 2 business days. diff --git a/site/en/docs/_index.yaml b/site/en/docs/_index.yaml index 200e6f5b6922b2..177e2b0862e945 100644 --- a/site/en/docs/_index.yaml +++ b/site/en/docs/_index.yaml @@ -49,7 +49,7 @@ landing_page: Learn about Bazel's release model, latest releases, and compatibility policies. items_across: 4 items: - - heading: Release policy + - heading: Release model path: /release/ - heading: Backward compatibility path: /release/backward-compatibility diff --git a/site/en/help.md b/site/en/help.md index 3b8671f33cd72f..f9bf54b4c5ac6f 100644 --- a/site/en/help.md +++ b/site/en/help.md @@ -44,6 +44,11 @@ If there are no existing answers, you can ask the community by: * Chatting with other Bazel contributors on [Slack](https://slack.bazel.build/) * Consulting a [Bazel community expert](/community/experts) +## Understand Bazel's support level {:#support-level} + +Please read the [release page](/release) to understand Bazel's release model and +what level of support Bazel provides. 
+ ## File a bug {:#file-bug} If you encounter a bug or want to request a feature, file a [GitHub diff --git a/src/main/java/com/google/devtools/build/docgen/templates/be/make-variables.vm b/src/main/java/com/google/devtools/build/docgen/templates/be/make-variables.vm index 26f536461b47c6..76911b1efadf92 100644 --- a/src/main/java/com/google/devtools/build/docgen/templates/be/make-variables.vm +++ b/src/main/java/com/google/devtools/build/docgen/templates/be/make-variables.vm @@ -412,7 +412,7 @@

These variables are a fallback mechanism to be used by language experts in rare cases. If you are tempted to use them, please contact the Bazel devs first. + href="https://bazel.build/help">contact the Bazel devs first.

    @@ -463,7 +463,7 @@

    These variables are a fallback mechanism to be used by language experts in rare cases. If you are tempted to use them, please contact the Bazel devs first. + href="https://bazel.build/help">contact the Bazel devs first.

      From 39758b651e6138b55dbb0e74aa42f78950614148 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 05:28:30 -0700 Subject: [PATCH 06/68] Adjust install base size in minimal_jdk_test Required by https://github.com/bazelbuild/bazel/pull/18497 which increases the size by ~10mb. PiperOrigin-RevId: 543714205 Change-Id: I3aaacfca6f2c58cf41bb88b4cd07e269ede45515 --- src/test/shell/integration/minimal_jdk_test.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/test/shell/integration/minimal_jdk_test.sh b/src/test/shell/integration/minimal_jdk_test.sh index e114b279f34169..76e9ab3de3e91f 100755 --- a/src/test/shell/integration/minimal_jdk_test.sh +++ b/src/test/shell/integration/minimal_jdk_test.sh @@ -42,13 +42,13 @@ export BAZEL_SUFFIX="_jdk_minimal" source "$(rlocation "io_bazel/src/test/shell/integration_test_setup.sh")" \ || { echo "integration_test_setup.sh not found!" >&2; exit 1; } -# Bazel's install base is < 350MB with minimal JDK and > 350MB with an all +# Bazel's install base is < 360MB with minimal JDK and > 360MB with an all # modules JDK. -function test_size_less_than_350MB() { +function test_size_less_than_360MB() { bazel info ib=$(bazel info install_base) size=$(du -s "$ib" | cut -d\ -f1) - maxsize=$((1024*350)) + maxsize=$((1024*360)) if [ $size -gt $maxsize ]; then echo "$ib was too big:" 1>&2 du -a "$ib" 1>&2 From 788a954181101b8210ae8c745f7450fcdf4fb9d0 Mon Sep 17 00:00:00 2001 From: Florian Weikert Date: Thu, 3 Nov 2022 16:50:21 +0100 Subject: [PATCH 07/68] Upgrade absl-py from 0.1.1 to 1.3.0 Partial commit for third_party/*, see #18497. 
Signed-off-by: fweikert Progress towards https://github.com/bazelbuild/bazel/issues/16975 --- third_party/py/abseil/AUTHORS | 7 + third_party/py/abseil/LICENSE | 202 +++ third_party/py/abseil/MANIFEST.in | 1 + third_party/py/abseil/PKG-INFO | 83 +- third_party/py/abseil/README.md | 64 +- third_party/py/abseil/absl/app.py | 152 +- third_party/py/abseil/absl/command_name.py | 4 - third_party/py/abseil/absl/flags/__init__.py | 113 +- .../py/abseil/absl/flags/_argument_parser.py | 196 ++- third_party/py/abseil/absl/flags/_defines.py | 680 +++++++-- .../py/abseil/absl/flags/_exceptions.py | 22 +- third_party/py/abseil/absl/flags/_flag.py | 229 ++- .../py/abseil/absl/flags/_flagvalues.py | 518 ++++--- third_party/py/abseil/absl/flags/_helpers.py | 81 +- .../py/abseil/absl/flags/_validators.py | 358 ++--- .../abseil/absl/flags/_validators_classes.py | 172 +++ .../py/abseil/absl/flags/argparse_flags.py | 388 +++++ .../py/abseil/absl/logging/__init__.py | 518 +++++-- .../py/abseil/absl/logging/converter.py | 76 +- .../abseil/absl/testing/_bazelize_command.py | 57 +- .../absl/testing/_pretty_print_reporter.py | 91 ++ .../py/abseil/absl/testing/absltest.py | 1330 ++++++++++++++--- .../py/abseil/absl/testing/flagsaver.py | 95 +- .../py/abseil/absl/testing/parameterized.py | 542 ++++--- .../py/abseil/absl/testing/xml_reporter.py | 179 ++- .../py/abseil/absl_py.egg-info/PKG-INFO | 83 +- .../py/abseil/absl_py.egg-info/SOURCES.txt | 8 +- .../py/abseil/absl_py.egg-info/requires.txt | 1 - third_party/py/abseil/setup.cfg | 1 - third_party/py/abseil/setup.py | 47 +- 30 files changed, 4822 insertions(+), 1476 deletions(-) create mode 100644 third_party/py/abseil/AUTHORS create mode 100644 third_party/py/abseil/LICENSE create mode 100644 third_party/py/abseil/MANIFEST.in create mode 100644 third_party/py/abseil/absl/flags/_validators_classes.py create mode 100644 third_party/py/abseil/absl/flags/argparse_flags.py create mode 100644 
third_party/py/abseil/absl/testing/_pretty_print_reporter.py mode change 100755 => 100644 third_party/py/abseil/absl/testing/flagsaver.py mode change 100755 => 100644 third_party/py/abseil/absl/testing/parameterized.py mode change 100755 => 100644 third_party/py/abseil/absl/testing/xml_reporter.py delete mode 100644 third_party/py/abseil/absl_py.egg-info/requires.txt diff --git a/third_party/py/abseil/AUTHORS b/third_party/py/abseil/AUTHORS new file mode 100644 index 00000000000000..23b11ada16bb8e --- /dev/null +++ b/third_party/py/abseil/AUTHORS @@ -0,0 +1,7 @@ +# This is the list of Abseil authors for copyright purposes. +# +# This does not necessarily list everyone who has contributed code, since in +# some cases, their employer may be the copyright holder. To see the full list +# of contributors, see the revision history in source control. + +Google Inc. diff --git a/third_party/py/abseil/LICENSE b/third_party/py/abseil/LICENSE new file mode 100644 index 00000000000000..d645695673349e --- /dev/null +++ b/third_party/py/abseil/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/third_party/py/abseil/MANIFEST.in b/third_party/py/abseil/MANIFEST.in new file mode 100644 index 00000000000000..1aba38f67a2211 --- /dev/null +++ b/third_party/py/abseil/MANIFEST.in @@ -0,0 +1 @@ +include LICENSE diff --git a/third_party/py/abseil/PKG-INFO b/third_party/py/abseil/PKG-INFO index c9b961612ae9ab..124b9d470d4ef5 100644 --- a/third_party/py/abseil/PKG-INFO +++ b/third_party/py/abseil/PKG-INFO @@ -1,21 +1,84 @@ -Metadata-Version: 1.1 +Metadata-Version: 2.1 Name: absl-py -Version: 0.1.1 -Summary: Abseil Python Common Libraries +Version: 1.3.0 +Summary: Abseil Python Common Libraries, see https://github.com/abseil/abseil-py. 
Home-page: https://github.com/abseil/abseil-py Author: The Abseil Authors -Author-email: UNKNOWN License: Apache 2.0 -Description: UNKNOWN -Platform: UNKNOWN Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 Classifier: Intended Audience :: Developers Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: OS Independent +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: AUTHORS + +# Abseil Python Common Libraries + +This repository is a collection of Python library code for building Python +applications. The code is collected from Google's own Python code base, and has +been extensively tested and used in production. + +## Features + +* Simple application startup +* Distributed commandline flags system +* Custom logging module with additional features +* Testing utilities + +## Getting Started + +### Installation + +To install the package, simply run: + +```bash +pip install absl-py +``` + +Or install from source: + +```bash +python setup.py install +``` + +### Running Tests + +To run Abseil tests, you can clone the git repo and run +[bazel](https://bazel.build/): + +```bash +git clone https://github.com/abseil/abseil-py.git +cd abseil-py +bazel test absl/... 
+``` + +### Example Code + +Please refer to +[smoke_tests/sample_app.py](https://github.com/abseil/abseil-py/blob/main/smoke_tests/sample_app.py) +as an example to get started. + +## Documentation + +See the [Abseil Python Developer Guide](https://abseil.io/docs/python/). + +## Future Releases + +The current repository includes an initial set of libraries for early adoption. +More components and interoperability with Abseil C++ Common Libraries +will come in future releases. + +## License + +The Abseil Python library is licensed under the terms of the Apache +license. See [LICENSE](LICENSE) for more information. diff --git a/third_party/py/abseil/README.md b/third_party/py/abseil/README.md index b4adbbb2243fda..5ab2365171f7ea 100644 --- a/third_party/py/abseil/README.md +++ b/third_party/py/abseil/README.md @@ -1,6 +1,60 @@ -[abseil-py](https://github.com/abseil/abseil-py) --------- +# Abseil Python Common Libraries -* Version: 0.1.1 -* License: Apache 2.0 -* From: [https://pypi.python.org/packages/ce/7b/a15c0c6647010bae2b06698af7039db34f4d5c723cde14dea4446e746448/absl-py-0.1.1.tar.gz](https://pypi.python.org/packages/ce/7b/a15c0c6647010bae2b06698af7039db34f4d5c723cde14dea4446e746448/absl-py-0.1.1.tar.gz) +This repository is a collection of Python library code for building Python +applications. The code is collected from Google's own Python code base, and has +been extensively tested and used in production. + +## Features + +* Simple application startup +* Distributed commandline flags system +* Custom logging module with additional features +* Testing utilities + +## Getting Started + +### Installation + +To install the package, simply run: + +```bash +pip install absl-py +``` + +Or install from source: + +```bash +python setup.py install +``` + +### Running Tests + +To run Abseil tests, you can clone the git repo and run +[bazel](https://bazel.build/): + +```bash +git clone https://github.com/abseil/abseil-py.git +cd abseil-py +bazel test absl/... 
+``` + +### Example Code + +Please refer to +[smoke_tests/sample_app.py](https://github.com/abseil/abseil-py/blob/main/smoke_tests/sample_app.py) +as an example to get started. + +## Documentation + +See the [Abseil Python Developer Guide](https://abseil.io/docs/python/). + +## Future Releases + +The current repository includes an initial set of libraries for early adoption. +More components and interoperability with Abseil C++ Common Libraries +will come in future releases. + +## License + +The Abseil Python library is licensed under the terms of the Apache +license. See [LICENSE](LICENSE) for more information. diff --git a/third_party/py/abseil/absl/app.py b/third_party/py/abseil/absl/app.py index 370cfc5fb6dc04..43d8ca3341a1e3 100644 --- a/third_party/py/abseil/absl/app.py +++ b/third_party/py/abseil/absl/app.py @@ -14,24 +14,23 @@ """Generic entry point for Abseil Python applications. -To use this module, define a 'main' function with a single 'argv' argument and -call app.run(main). For example: +To use this module, define a ``main`` function with a single ``argv`` argument +and call ``app.run(main)``. For example:: -def main(argv): - del argv # Unused. 
+ def main(argv): + if len(argv) > 1: + raise app.UsageError('Too many command-line arguments.') -if __name__ == '__main__': - app.run(main) + if __name__ == '__main__': + app.run(main) """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - +import collections import errno import os import pdb import sys +import textwrap import traceback from absl import command_name @@ -43,13 +42,13 @@ def main(argv): except ImportError: faulthandler = None - FLAGS = flags.FLAGS flags.DEFINE_boolean('run_with_pdb', False, 'Set to true for PDB debug mode') flags.DEFINE_boolean('pdb_post_mortem', False, 'Set to true to handle uncaught exceptions with PDB ' 'post mortem.') +flags.DEFINE_alias('pdb', 'pdb_post_mortem') flags.DEFINE_boolean('run_with_profiling', False, 'Set to true for profiling the script. ' 'Execution will be slower, and the output format might ' @@ -66,7 +65,6 @@ def main(argv): allow_hide_cpp=True) - # If main() exits via an abnormal exception, call into these # handlers before exiting. EXCEPTION_HANDLERS = [] @@ -102,7 +100,7 @@ def __init__(self): short_name=self.SHORT_NAME, allow_hide_cpp=True) def parse(self, arg): - if arg: + if self._parse(arg): usage(shorthelp=True, writeto_stdout=True) # Advertise --helpfull on stdout, since usage() was on stdout. 
print() @@ -117,14 +115,14 @@ class HelpshortFlag(HelpFlag): class HelpfullFlag(flags.BooleanFlag): - """Display help for flags in this module and all dependent modules.""" + """Display help for flags in the main module and all dependent modules.""" def __init__(self): super(HelpfullFlag, self).__init__( 'helpfull', False, 'show full help', allow_hide_cpp=True) def parse(self, arg): - if arg: + if self._parse(arg): usage(writeto_stdout=True) sys.exit(1) @@ -138,13 +136,13 @@ def __init__(self): allow_hide_cpp=True) def parse(self, arg): - if arg: + if self._parse(arg): flags.FLAGS.write_help_in_xml_format(sys.stdout) sys.exit(1) def parse_flags_with_usage(args): - """Tries to parse the flags, print usage, and exit if unparseable. + """Tries to parse the flags, print usage, and exit if unparsable. Args: args: [str], a non-empty list of the command line arguments including @@ -157,7 +155,13 @@ def parse_flags_with_usage(args): try: return FLAGS(args) except flags.Error as error: - sys.stderr.write('FATAL Flags parsing error: %s\n' % error) + message = str(error) + if '\n' in message: + final_message = 'FATAL Flags parsing error:\n%s\n' % textwrap.indent( + message, ' ') + else: + final_message = 'FATAL Flags parsing error: %s\n' % message + sys.stderr.write(final_message) sys.stderr.write('Pass --helpshort or --helpfull to see help on flags.\n') sys.exit(1) @@ -178,7 +182,10 @@ def define_help_flags(): _define_help_flags_called = True -def register_and_parse_flags_with_usage(argv=None): +def _register_and_parse_flags_with_usage( + argv=None, + flags_parser=parse_flags_with_usage, +): """Registers help flags, parses arguments and shows usage if appropriate. This also calls sys.exit(0) if flag --only_check_args is True. @@ -186,14 +193,30 @@ def register_and_parse_flags_with_usage(argv=None): Args: argv: [str], a non-empty list of the command line arguments including program name, sys.argv is used if None. 
+ flags_parser: Callable[[List[Text]], Any], the function used to parse flags. + The return value of this function is passed to `main` untouched. + It must guarantee FLAGS is parsed after this function is called. Returns: + The return value of `flags_parser`. When using the default `flags_parser`, + it returns the following: [str], a non-empty list of remaining command line arguments after parsing flags, including program name. + + Raises: + Error: Raised when flags_parser is called, but FLAGS is not parsed. + SystemError: Raised when it's called more than once. """ + if _register_and_parse_flags_with_usage.done: + raise SystemError('Flag registration can be done only once.') + define_help_flags() - argv = parse_flags_with_usage(sys.argv if argv is None else argv) + original_argv = sys.argv if argv is None else argv + args_to_main = flags_parser(original_argv) + if not FLAGS.is_parsed(): + raise Error('FLAGS must be parsed after flags_parser is called.') + # Exit when told so. if FLAGS.only_check_args: sys.exit(0) @@ -201,7 +224,11 @@ def register_and_parse_flags_with_usage(argv=None): # not been set. if FLAGS['verbosity'].using_default_value: FLAGS.verbosity = 0 - return argv + _register_and_parse_flags_with_usage.done = True + + return args_to_main + +_register_and_parse_flags_with_usage.done = False def _run_main(main, argv): @@ -243,43 +270,106 @@ def _call_exception_handlers(exception): pass -def run(main, argv=None): +def run( + main, + argv=None, + flags_parser=parse_flags_with_usage, +): """Begins executing the program. Args: main: The main function to execute. It takes an single argument "argv", which is a list of command line arguments with parsed flags removed. + The return value is passed to `sys.exit`, and so for example + a return value of 0 or None results in a successful termination, whereas + a return value of 1 results in abnormal termination. 
+ For more details, see https://docs.python.org/3/library/sys#sys.exit argv: A non-empty list of the command line arguments including program name, sys.argv is used if None. + flags_parser: Callable[[List[Text]], Any], the function used to parse flags. + The return value of this function is passed to `main` untouched. + It must guarantee FLAGS is parsed after this function is called. + Should be passed as a keyword-only arg which will become mandatory in a + future release. - Parses command line flags with the flag module. - If there are any errors, prints usage(). - Calls main() with the remaining arguments. - If main() raises a UsageError, prints usage and the error message. """ try: - argv = _run_init(sys.argv if argv is None else argv) + args = _run_init( + sys.argv if argv is None else argv, + flags_parser, + ) + while _init_callbacks: + callback = _init_callbacks.popleft() + callback() try: - _run_main(main, argv) + _run_main(main, args) except UsageError as error: usage(shorthelp=True, detailed_error=error, exitcode=error.exitcode) except: - if FLAGS.pdb_post_mortem: + exc = sys.exc_info()[1] + # Don't try to post-mortem debug successful SystemExits, since those + # mean there wasn't actually an error. In particular, the test framework + # raises SystemExit(False) even if all tests passed. + if isinstance(exc, SystemExit) and not exc.code: + raise + + # Check the tty so that we don't hang waiting for input in an + # non-interactive scenario. + if FLAGS.pdb_post_mortem and sys.stdout.isatty(): traceback.print_exc() + print() + print(' *** Entering post-mortem debugging ***') + print() pdb.post_mortem() raise except Exception as e: _call_exception_handlers(e) raise +# Callbacks which have been deferred until after _run_init has been called. +_init_callbacks = collections.deque() + + +def call_after_init(callback): + """Calls the given callback only once ABSL has finished initialization. 
+ + If ABSL has already finished initialization when ``call_after_init`` is + called then the callback is executed immediately, otherwise `callback` is + stored to be executed after ``app.run`` has finished initializing (aka. just + before the main function is called). + + If called after ``app.run``, this is equivalent to calling ``callback()`` in + the caller thread. If called before ``app.run``, callbacks are run + sequentially (in an undefined order) in the same thread as ``app.run``. + + Args: + callback: a callable to be called once ABSL has finished initialization. + This may be immediate if initialization has already finished. It + takes no arguments and returns nothing. + """ + if _run_init.done: + callback() + else: + _init_callbacks.append(callback) + -def _run_init(argv): +def _run_init( + argv, + flags_parser, +): """Does one-time initialization and re-parses flags on rerun.""" if _run_init.done: - return parse_flags_with_usage(argv) + return flags_parser(argv) command_name.make_process_name_useful() # Set up absl logging handler. logging.use_absl_handler() - argv = register_and_parse_flags_with_usage(argv=argv) + args = _register_and_parse_flags_with_usage( + argv=argv, + flags_parser=flags_parser, + ) if faulthandler: try: faulthandler.enable() @@ -288,7 +378,7 @@ def _run_init(argv): # Disabled faulthandler is a low-impact error. pass _run_init.done = True - return argv + return args _run_init.done = False @@ -299,7 +389,7 @@ def usage(shorthelp=False, writeto_stdout=False, detailed_error=None, """Writes __main__'s docstring to stderr with some help text. Args: - shorthelp: bool, if True, prints only flags from this module, + shorthelp: bool, if True, prints only flags from the main module, rather than all flags. writeto_stdout: bool, if True, writes help message to stdout, rather than to stderr. 
@@ -327,7 +417,7 @@ def usage(shorthelp=False, writeto_stdout=False, detailed_error=None, if shorthelp: flag_str = FLAGS.main_module_help() else: - flag_str = str(FLAGS) + flag_str = FLAGS.get_help() try: stdfile.write(doc) if flag_str: diff --git a/third_party/py/abseil/absl/command_name.py b/third_party/py/abseil/absl/command_name.py index 3bf9fad3d29183..19964937d1762f 100644 --- a/third_party/py/abseil/absl/command_name.py +++ b/third_party/py/abseil/absl/command_name.py @@ -14,10 +14,6 @@ """A tiny stand alone library to change the kernel process name on Linux.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import os import sys diff --git a/third_party/py/abseil/absl/flags/__init__.py b/third_party/py/abseil/absl/flags/__init__.py index 8f78711398494c..6d8ba033204ae2 100644 --- a/third_party/py/abseil/absl/flags/__init__.py +++ b/third_party/py/abseil/absl/flags/__init__.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """This package is used to define and parse command line flags. This package defines a *distributed* flag-definition policy: rather than @@ -26,10 +25,6 @@ and optionally type-converted, when it's seen on the command line. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import getopt import os import re @@ -44,7 +39,81 @@ from absl.flags import _flagvalues from absl.flags import _helpers from absl.flags import _validators -import six + +__all__ = ( + 'DEFINE', + 'DEFINE_flag', + 'DEFINE_string', + 'DEFINE_boolean', + 'DEFINE_bool', + 'DEFINE_float', + 'DEFINE_integer', + 'DEFINE_enum', + 'DEFINE_enum_class', + 'DEFINE_list', + 'DEFINE_spaceseplist', + 'DEFINE_multi', + 'DEFINE_multi_string', + 'DEFINE_multi_integer', + 'DEFINE_multi_float', + 'DEFINE_multi_enum', + 'DEFINE_multi_enum_class', + 'DEFINE_alias', + # Flag validators. + 'register_validator', + 'validator', + 'register_multi_flags_validator', + 'multi_flags_validator', + 'mark_flag_as_required', + 'mark_flags_as_required', + 'mark_flags_as_mutual_exclusive', + 'mark_bool_flags_as_mutual_exclusive', + # Flag modifiers. + 'set_default', + # Key flag related functions. + 'declare_key_flag', + 'adopt_module_key_flags', + 'disclaim_key_flags', + # Module exceptions. + 'Error', + 'CantOpenFlagFileError', + 'DuplicateFlagError', + 'IllegalFlagValueError', + 'UnrecognizedFlagError', + 'UnparsedFlagAccessError', + 'ValidationError', + 'FlagNameConflictsWithMethodError', + # Public classes. + 'Flag', + 'BooleanFlag', + 'EnumFlag', + 'EnumClassFlag', + 'MultiFlag', + 'MultiEnumClassFlag', + 'FlagHolder', + 'FlagValues', + 'ArgumentParser', + 'BooleanParser', + 'EnumParser', + 'EnumClassParser', + 'ArgumentSerializer', + 'FloatParser', + 'IntegerParser', + 'BaseListParser', + 'ListParser', + 'ListSerializer', + 'EnumClassListSerializer', + 'CsvListSerializer', + 'WhitespaceSeparatedListParser', + 'EnumClassSerializer', + # Helper functions. + 'get_help_width', + 'text_wrap', + 'flag_dict_to_args', + 'doc_to_help', + # The global FlagValues instance. + 'FLAGS', +) # Initialize the FLAGS_MODULE as early as possible. 
# It's only used by adopt_module_key_flags to take SPECIAL_FLAGS into account. @@ -63,6 +132,7 @@ DEFINE_float = _defines.DEFINE_float DEFINE_integer = _defines.DEFINE_integer DEFINE_enum = _defines.DEFINE_enum +DEFINE_enum_class = _defines.DEFINE_enum_class DEFINE_list = _defines.DEFINE_list DEFINE_spaceseplist = _defines.DEFINE_spaceseplist DEFINE_multi = _defines.DEFINE_multi @@ -70,10 +140,10 @@ DEFINE_multi_integer = _defines.DEFINE_multi_integer DEFINE_multi_float = _defines.DEFINE_multi_float DEFINE_multi_enum = _defines.DEFINE_multi_enum +DEFINE_multi_enum_class = _defines.DEFINE_multi_enum_class DEFINE_alias = _defines.DEFINE_alias # pylint: enable=invalid-name - # Flag validators. register_validator = _validators.register_validator validator = _validators.validator @@ -82,14 +152,16 @@ mark_flag_as_required = _validators.mark_flag_as_required mark_flags_as_required = _validators.mark_flags_as_required mark_flags_as_mutual_exclusive = _validators.mark_flags_as_mutual_exclusive +mark_bool_flags_as_mutual_exclusive = _validators.mark_bool_flags_as_mutual_exclusive +# Flag modifiers. +set_default = _defines.set_default # Key flag related functions. declare_key_flag = _defines.declare_key_flag adopt_module_key_flags = _defines.adopt_module_key_flags disclaim_key_flags = _defines.disclaim_key_flags - # Module exceptions. # pylint: disable=invalid-name Error = _exceptions.Error @@ -101,48 +173,51 @@ ValidationError = _exceptions.ValidationError FlagNameConflictsWithMethodError = _exceptions.FlagNameConflictsWithMethodError - # Public classes. 
Flag = _flag.Flag BooleanFlag = _flag.BooleanFlag EnumFlag = _flag.EnumFlag +EnumClassFlag = _flag.EnumClassFlag MultiFlag = _flag.MultiFlag +MultiEnumClassFlag = _flag.MultiEnumClassFlag +FlagHolder = _flagvalues.FlagHolder FlagValues = _flagvalues.FlagValues ArgumentParser = _argument_parser.ArgumentParser BooleanParser = _argument_parser.BooleanParser EnumParser = _argument_parser.EnumParser +EnumClassParser = _argument_parser.EnumClassParser ArgumentSerializer = _argument_parser.ArgumentSerializer FloatParser = _argument_parser.FloatParser IntegerParser = _argument_parser.IntegerParser BaseListParser = _argument_parser.BaseListParser ListParser = _argument_parser.ListParser ListSerializer = _argument_parser.ListSerializer +EnumClassListSerializer = _argument_parser.EnumClassListSerializer CsvListSerializer = _argument_parser.CsvListSerializer WhitespaceSeparatedListParser = _argument_parser.WhitespaceSeparatedListParser +EnumClassSerializer = _argument_parser.EnumClassSerializer # pylint: enable=invalid-name - # Helper functions. get_help_width = _helpers.get_help_width text_wrap = _helpers.text_wrap flag_dict_to_args = _helpers.flag_dict_to_args doc_to_help = _helpers.doc_to_help - # Special flags. _helpers.SPECIAL_FLAGS = FlagValues() DEFINE_string( 'flagfile', '', 'Insert flag definitions from the given file into the command line.', - _helpers.SPECIAL_FLAGS) + _helpers.SPECIAL_FLAGS) # pytype: disable=wrong-arg-types -DEFINE_string( - 'undefok', '', - 'comma-separated list of flag names that it is okay to specify ' - 'on the command line even if the program does not define a flag ' - 'with that name. IMPORTANT: flags in this list that have ' - 'arguments MUST use the --flag=value format.', _helpers.SPECIAL_FLAGS) +DEFINE_string('undefok', '', + 'comma-separated list of flag names that it is okay to specify ' + 'on the command line even if the program does not define a flag ' + 'with that name. 
IMPORTANT: flags in this list that have ' + 'arguments MUST use the --flag=value format.', + _helpers.SPECIAL_FLAGS) # pytype: disable=wrong-arg-types -# The global FlagValues instance. +#: The global FlagValues instance. FLAGS = _flagvalues.FLAGS diff --git a/third_party/py/abseil/absl/flags/_argument_parser.py b/third_party/py/abseil/absl/flags/_argument_parser.py index cb57b8d44bbd9e..2c4de9b191cb35 100644 --- a/third_party/py/abseil/absl/flags/_argument_parser.py +++ b/third_party/py/abseil/absl/flags/_argument_parser.py @@ -18,21 +18,17 @@ aliases defined at the package level instead. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - +import collections import csv import io import string from absl.flags import _helpers -import six def _is_integer_type(instance): """Returns True if instance is an integer, and not a bool.""" - return (isinstance(instance, six.integer_types) and + return (isinstance(instance, int) and not isinstance(instance, bool)) @@ -76,12 +72,30 @@ def __call__(cls, *args, **kwargs): return type.__call__(cls, *args) -class ArgumentParser(six.with_metaclass(_ArgumentParserCache, object)): +# NOTE about Genericity and Metaclass of ArgumentParser. +# (1) In the .py source (this file) +# - is not declared as Generic +# - has _ArgumentParserCache as a metaclass +# (2) In the .pyi source (type stub) +# - is declared as Generic +# - doesn't have a metaclass +# The reason we need this is due to Generic having a different metaclass +# (for python versions <= 3.7) and a class can have only one metaclass. +# +# * Lack of metaclass in .pyi is not a deal breaker, since the metaclass +# doesn't affect any type information. Also type checkers can check the type +# parameters. +# * However, not declaring ArgumentParser as Generic in the source affects +# runtime annotation processing. 
In particular this means, subclasses should +# inherit from `ArgumentParser` and not `ArgumentParser[SomeType]`. +# The corresponding DEFINE_someType method (the public API) can be annotated +# to return FlagHolder[SomeType]. +class ArgumentParser(metaclass=_ArgumentParserCache): """Base class used to parse and convert arguments. - The parse() method checks to make sure that the string argument is a + The :meth:`parse` method checks to make sure that the string argument is a legal value and convert it to a native type. If the value cannot be - converted, it should throw a 'ValueError' exception with a human + converted, it should throw a ``ValueError`` exception with a human readable explanation of why the value is illegal. Subclasses should also define a syntactic_help string which may be @@ -109,7 +123,7 @@ def parse(self, argument): Returns: The parsed value in native type. """ - if not isinstance(argument, six.string_types): + if not isinstance(argument, str): raise TypeError('flag value must be a string, found "{}"'.format( type(argument))) return argument @@ -133,7 +147,7 @@ class ArgumentSerializer(object): def serialize(self, value): """Returns a serialized string of the value.""" - return _helpers.str_or_unicode(value) + return str(value) class NumericParser(ArgumentParser): @@ -209,7 +223,7 @@ def __init__(self, lower_bound=None, upper_bound=None): def convert(self, argument): """Returns the float value of argument.""" if (_is_integer_type(argument) or isinstance(argument, float) or - isinstance(argument, six.string_types)): + isinstance(argument, str)): return float(argument) else: raise TypeError( @@ -255,7 +269,7 @@ def convert(self, argument): """Returns the int value of argument.""" if _is_integer_type(argument): return argument - elif isinstance(argument, six.string_types): + elif isinstance(argument, str): base = 10 if len(argument) > 2 and argument[0] == '0': if argument[1] == 'o': @@ -282,14 +296,18 @@ def parse(self, argument): return True elif 
argument.lower() in ('false', 'f', '0'): return False - elif isinstance(argument, six.integer_types): + else: + raise ValueError('Non-boolean argument to boolean flag', argument) + elif isinstance(argument, int): # Only allow bool or integer 0, 1. # Note that float 1.0 == True, 0.0 == False. bool_value = bool(argument) if argument == bool_value: return bool_value + else: + raise ValueError('Non-boolean argument to boolean flag', argument) - raise ValueError('Non-boolean argument to boolean flag', argument) + raise TypeError('Non-boolean argument to boolean flag', argument) def flag_type(self): """See base class.""" @@ -347,6 +365,88 @@ def flag_type(self): return 'string enum' +class EnumClassParser(ArgumentParser): + """Parser of an Enum class member.""" + + def __init__(self, enum_class, case_sensitive=True): + """Initializes EnumParser. + + Args: + enum_class: class, the Enum class with all possible flag values. + case_sensitive: bool, whether or not the enum is to be case-sensitive. If + False, all member names must be unique when case is ignored. + + Raises: + TypeError: When enum_class is not a subclass of Enum. + ValueError: When enum_class is empty. + """ + # Users must have an Enum class defined before using EnumClass flag. + # Therefore this dependency is guaranteed. + import enum + + if not issubclass(enum_class, enum.Enum): + raise TypeError('{} is not a subclass of Enum.'.format(enum_class)) + if not enum_class.__members__: + raise ValueError('enum_class cannot be empty, but "{}" is empty.' 
+ .format(enum_class)) + if not case_sensitive: + members = collections.Counter( + name.lower() for name in enum_class.__members__) + duplicate_keys = { + member for member, count in members.items() if count > 1 + } + if duplicate_keys: + raise ValueError( + 'Duplicate enum values for {} using case_sensitive=False'.format( + duplicate_keys)) + + super(EnumClassParser, self).__init__() + self.enum_class = enum_class + self._case_sensitive = case_sensitive + if case_sensitive: + self._member_names = tuple(enum_class.__members__) + else: + self._member_names = tuple( + name.lower() for name in enum_class.__members__) + + @property + def member_names(self): + """The accepted enum names, in lowercase if not case sensitive.""" + return self._member_names + + def parse(self, argument): + """Determines validity of argument and returns the correct element of enum. + + Args: + argument: str or Enum class member, the supplied flag value. + + Returns: + The first matching Enum class member in Enum class. + + Raises: + ValueError: Raised when argument didn't match anything in enum. 
+ """ + if isinstance(argument, self.enum_class): + return argument + elif not isinstance(argument, str): + raise ValueError( + '{} is not an enum member or a name of a member in {}'.format( + argument, self.enum_class)) + key = EnumParser( + self._member_names, case_sensitive=self._case_sensitive).parse(argument) + if self._case_sensitive: + return self.enum_class[key] + else: + # If EnumParser.parse() return a value, we're guaranteed to find it + # as a member of the class + return next(value for name, value in self.enum_class.__members__.items() + if name.lower() == key.lower()) + + def flag_type(self): + """See base class.""" + return 'enum class' + + class ListSerializer(ArgumentSerializer): def __init__(self, list_sep): @@ -354,7 +454,34 @@ def __init__(self, list_sep): def serialize(self, value): """See base class.""" - return self.list_sep.join([_helpers.str_or_unicode(x) for x in value]) + return self.list_sep.join([str(x) for x in value]) + + +class EnumClassListSerializer(ListSerializer): + """A serializer for :class:`MultiEnumClass` flags. + + This serializer simply joins the output of `EnumClassSerializer` using a + provided separator. + """ + + def __init__(self, list_sep, **kwargs): + """Initializes EnumClassListSerializer. + + Args: + list_sep: String to be used as a separator when serializing + **kwargs: Keyword arguments to the `EnumClassSerializer` used to serialize + individual values. 
+ """ + super(EnumClassListSerializer, self).__init__(list_sep) + self._element_serializer = EnumClassSerializer(**kwargs) + + def serialize(self, value): + """See base class.""" + if isinstance(value, list): + return self.list_sep.join( + self._element_serializer.serialize(x) for x in value) + else: + return self._element_serializer.serialize(value) class CsvListSerializer(ArgumentSerializer): @@ -364,28 +491,39 @@ def __init__(self, list_sep): def serialize(self, value): """Serializes a list as a CSV string or unicode.""" - if six.PY2: - # In Python2 csv.writer doesn't accept unicode, so we convert to UTF-8. - output = io.BytesIO() - csv.writer(output).writerow([unicode(x).encode('utf-8') for x in value]) - serialized_value = output.getvalue().decode('utf-8').strip() - else: - # In Python3 csv.writer expects a text stream. - output = io.StringIO() - csv.writer(output).writerow([str(x) for x in value]) - serialized_value = output.getvalue().strip() + output = io.StringIO() + writer = csv.writer(output, delimiter=self.list_sep) + writer.writerow([str(x) for x in value]) + serialized_value = output.getvalue().strip() # We need the returned value to be pure ascii or Unicodes so that # when the xml help is generated they are usefully encodable. - return _helpers.str_or_unicode(serialized_value) + return str(serialized_value) + + +class EnumClassSerializer(ArgumentSerializer): + """Class for generating string representations of an enum class flag value.""" + + def __init__(self, lowercase): + """Initializes EnumClassSerializer. + + Args: + lowercase: If True, enum member names are lowercased during serialization. + """ + self._lowercase = lowercase + + def serialize(self, value): + """Returns a serialized string of the Enum class value.""" + as_string = str(value.name) + return as_string.lower() if self._lowercase else as_string class BaseListParser(ArgumentParser): """Base class for a parser of lists of strings. 
- To extend, inherit from this class; from the subclass __init__, call + To extend, inherit from this class; from the subclass ``__init__``, call:: - BaseListParser.__init__(self, token, name) + super().__init__(token, name) where token is a character used to tokenize, and name is a description of the separator. diff --git a/third_party/py/abseil/absl/flags/_defines.py b/third_party/py/abseil/absl/flags/_defines.py index 35680db0d7672f..dce53ea2f681d1 100644 --- a/third_party/py/abseil/absl/flags/_defines.py +++ b/third_party/py/abseil/absl/flags/_defines.py @@ -11,18 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """This modules contains flags DEFINE functions. Do NOT import this module directly. Import the flags package and use the aliases defined at the package level instead. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import logging import sys import types @@ -33,6 +27,17 @@ from absl.flags import _helpers from absl.flags import _validators +# pylint: disable=unused-import +try: + from typing import Text, List, Any +except ImportError: + pass + +try: + import enum +except ImportError: + pass +# pylint: enable=unused-import _helpers.disclaim_module_ids.add(id(sys.modules[__name__])) @@ -42,7 +47,7 @@ def _register_bounds_validator_if_needed(parser, name, flag_values): Args: parser: NumericParser (either FloatParser or IntegerParser), provides lower - and upper bounds, and help text to display. + and upper bounds, and help text to display. name: str, name of the flag flag_values: FlagValues. 
""" @@ -57,49 +62,75 @@ def checker(value): _validators.register_validator(name, checker, flag_values=flag_values) -def DEFINE(parser, name, default, help, flag_values=_flagvalues.FLAGS, # pylint: disable=redefined-builtin,invalid-name - serializer=None, module_name=None, **args): +def DEFINE( # pylint: disable=invalid-name + parser, + name, + default, + help, # pylint: disable=redefined-builtin + flag_values=_flagvalues.FLAGS, + serializer=None, + module_name=None, + required=False, + **args): """Registers a generic Flag object. NOTE: in the docstrings of all DEFINE* functions, "registers" is short for "creates a new flag and registers it". - Auxiliary function: clients should use the specialized DEFINE_ + Auxiliary function: clients should use the specialized ``DEFINE_`` function instead. Args: - parser: ArgumentParser, used to parse the flag arguments. + parser: :class:`ArgumentParser`, used to parse the flag arguments. name: str, the flag name. default: The default value of the flag. help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - serializer: ArgumentSerializer, the flag serializer instance. - module_name: str, the name of the Python module declaring this flag. - If not provided, it will be computed using the stack trace of this call. - **args: dict, the extra keyword args that are passed to Flag __init__. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + serializer: :class:`ArgumentSerializer`, the flag serializer instance. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. 
+ + Returns: + a handle to defined flag. """ - DEFINE_flag(_flag.Flag(parser, serializer, name, default, help, **args), - flag_values, module_name) + return DEFINE_flag( + _flag.Flag(parser, serializer, name, default, help, **args), flag_values, + module_name, required) -def DEFINE_flag(flag, flag_values=_flagvalues.FLAGS, module_name=None): # pylint: disable=invalid-name - """Registers a 'Flag' object with a 'FlagValues' object. +def DEFINE_flag( # pylint: disable=invalid-name + flag, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False): + """Registers a :class:`Flag` object with a :class:`FlagValues` object. - By default, the global FLAGS 'FlagValue' object is used. + By default, the global :const:`FLAGS` ``FlagValue`` object is used. Typical users will use one of the more specialized DEFINE_xxx - functions, such as DEFINE_string or DEFINE_integer. But developers - who need to create Flag objects themselves should use this function - to register their flags. + functions, such as :func:`DEFINE_string` or :func:`DEFINE_integer`. But + developers who need to create :class:`Flag` objects themselves should use + this function to register their flags. Args: - flag: Flag, a flag that is key to the module. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - module_name: str, the name of the Python module declaring this flag. - If not provided, it will be computed using the stack trace of this call. + flag: :class:`Flag`, a flag that is key to the module. + flag_values: :class:`FlagValues`, the ``FlagValues`` instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. 
+ + Returns: + a handle to defined flag. """ + if required and flag.default is not None: + raise ValueError('Required flag --%s cannot have a non-None default' % + flag.name) # Copying the reference to flag_values prevents pychecker warnings. fv = flag_values fv[flag.name] = flag @@ -110,26 +141,48 @@ def DEFINE_flag(flag, flag_values=_flagvalues.FLAGS, module_name=None): # pylin module, module_name = _helpers.get_calling_module_object_and_name() flag_values.register_flag_by_module(module_name, flag) flag_values.register_flag_by_module_id(id(module), flag) + if required: + _validators.mark_flag_as_required(flag.name, fv) + ensure_non_none_value = (flag.default is not None) or required + return _flagvalues.FlagHolder( + fv, flag, ensure_non_none_value=ensure_non_none_value) + + +def set_default(flag_holder, value): + """Changes the default value of the provided flag object. + + The flag's current value is also updated if the flag is currently using + the default value, i.e. not specified in the command line, and not set + by FLAGS.name = value. + + Args: + flag_holder: FlagHolder, the flag to modify. + value: The new default value. + + Raises: + IllegalFlagValueError: Raised when value is not valid. + """ + flag_holder._flagvalues.set_default(flag_holder.name, value) # pylint: disable=protected-access -def _internal_declare_key_flags( - flag_names, flag_values=_flagvalues.FLAGS, key_flag_values=None): +def _internal_declare_key_flags(flag_names, + flag_values=_flagvalues.FLAGS, + key_flag_values=None): """Declares a flag as key for the calling module. Internal function. User code should call declare_key_flag or adopt_module_key_flags instead. Args: - flag_names: [str], a list of strings that are names of already-registered - Flag objects. - flag_values: FlagValues, the FlagValues instance with which the flags listed - in flag_names have registered (the value of the flag_values - argument from the DEFINE_* calls that defined those flags). 
- This should almost never need to be overridden. - key_flag_values: FlagValues, the FlagValues instance that (among possibly - many other things) keeps track of the key flags for each module. - Default None means "same as flag_values". This should almost - never need to be overridden. + flag_names: [str], a list of names of already-registered Flag objects. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flags listed in flag_names have registered (the value of the flag_values + argument from the ``DEFINE_*`` calls that defined those flags). This + should almost never need to be overridden. + key_flag_values: :class:`FlagValues`, the FlagValues instance that (among + possibly many other things) keeps track of the key flags for each module. + Default ``None`` means "same as flag_values". This should almost never + need to be overridden. Raises: UnrecognizedFlagError: Raised when the flag is not defined. @@ -139,8 +192,7 @@ def _internal_declare_key_flags( module = _helpers.get_calling_module() for flag_name in flag_names: - flag = flag_values[flag_name] - key_flag_values.register_key_flag_for_module(module, flag) + key_flag_values.register_key_flag_for_module(module, flag_values[flag_name]) def declare_key_flag(flag_name, flag_values=_flagvalues.FLAGS): @@ -152,24 +204,26 @@ def declare_key_flag(flag_name, flag_values=_flagvalues.FLAGS): main module are listed (instead of all flags, as in the case of --helpfull). - Sample usage: + Sample usage:: - flags.declare_key_flag('flag_1') + flags.declare_key_flag('flag_1') Args: - flag_name: str, the name of an already declared flag. - (Redeclaring flags as key, including flags implicitly key - because they were declared in this module, is a no-op.) - flag_values: FlagValues, the FlagValues instance in which the flag will - be declared as a key flag. This should almost never need to be - overridden. + flag_name: str | :class:`FlagHolder`, the name or holder of an already + declared flag. 
(Redeclaring flags as key, including flags implicitly key + because they were declared in this module, is a no-op.) + Positional-only parameter. + flag_values: :class:`FlagValues`, the FlagValues instance in which the + flag will be declared as a key flag. This should almost never need to be + overridden. Raises: ValueError: Raised if flag_name not defined as a Python flag. """ + flag_name, flag_values = _flagvalues.resolve_flag_ref(flag_name, flag_values) if flag_name in _helpers.SPECIAL_FLAGS: # Take care of the special flags, e.g., --flagfile, --undefok. - # These flags are defined in _SPECIAL_FLAGS, and are treated + # These flags are defined in SPECIAL_FLAGS, and are treated # specially during flag parsing, taking precedence over the # user-defined flags. _internal_declare_key_flags([flag_name], @@ -179,8 +233,8 @@ def declare_key_flag(flag_name, flag_values=_flagvalues.FLAGS): try: _internal_declare_key_flags([flag_name], flag_values=flag_values) except KeyError: - raise ValueError( - 'Key flag --%s needs to be defined in Python' % flag_name) + raise ValueError('Flag --%s is undefined. To set a flag as a key flag ' + 'first define it in Python.' % flag_name) def adopt_module_key_flags(module, flag_values=_flagvalues.FLAGS): @@ -188,10 +242,10 @@ def adopt_module_key_flags(module, flag_values=_flagvalues.FLAGS): Args: module: module, the module object from which all key flags will be declared - as key flags to the current module. - flag_values: FlagValues, the FlagValues instance in which the flags will - be declared as key flags. This should almost never need to be - overridden. + as key flags to the current module. + flag_values: :class:`FlagValues`, the FlagValues instance in which the + flags will be declared as key flags. This should almost never need to be + overridden. 
Raises: Error: Raised when given an argument that is a module name (a string), @@ -235,15 +289,33 @@ def disclaim_key_flags(): def DEFINE_string( # pylint: disable=invalid-name,redefined-builtin - name, default, help, flag_values=_flagvalues.FLAGS, **args): + name, + default, + help, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value can be any string.""" parser = _argument_parser.ArgumentParser() serializer = _argument_parser.ArgumentSerializer() - DEFINE(parser, name, default, help, flag_values, serializer, **args) + return DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=required, + **args) def DEFINE_boolean( # pylint: disable=invalid-name,redefined-builtin - name, default, help, flag_values=_flagvalues.FLAGS, module_name=None, + name, + default, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False, **args): """Registers a boolean flag. @@ -259,22 +331,34 @@ def DEFINE_boolean( # pylint: disable=invalid-name,redefined-builtin name: str, the flag name. default: bool|str|None, the default value of the flag. help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - module_name: str, the name of the Python module declaring this flag. - If not provided, it will be computed using the stack trace of this call. - **args: dict, the extra keyword args that are passed to Flag __init__. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. 
+ + Returns: + a handle to defined flag. """ - DEFINE_flag(_flag.BooleanFlag(name, default, help, **args), - flag_values, module_name) + return DEFINE_flag( + _flag.BooleanFlag(name, default, help, **args), flag_values, module_name, + required) def DEFINE_float( # pylint: disable=invalid-name,redefined-builtin - name, default, help, lower_bound=None, upper_bound=None, - flag_values=_flagvalues.FLAGS, **args): # pylint: disable=invalid-name + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value must be a float. - If lower_bound or upper_bound are set, then this flag must be + If ``lower_bound`` or ``upper_bound`` are set, then this flag must be within the given range. Args: @@ -283,22 +367,42 @@ def DEFINE_float( # pylint: disable=invalid-name,redefined-builtin help: str, the help message. lower_bound: float, min value of the flag. upper_bound: float, max value of the flag. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - **args: dict, the extra keyword args that are passed to DEFINE. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to :func:`DEFINE`. + + Returns: + a handle to defined flag. 
""" parser = _argument_parser.FloatParser(lower_bound, upper_bound) serializer = _argument_parser.ArgumentSerializer() - DEFINE(parser, name, default, help, flag_values, serializer, **args) + result = DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=required, + **args) _register_bounds_validator_if_needed(parser, name, flag_values=flag_values) + return result def DEFINE_integer( # pylint: disable=invalid-name,redefined-builtin - name, default, help, lower_bound=None, upper_bound=None, - flag_values=_flagvalues.FLAGS, **args): + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value must be an integer. - If lower_bound, or upper_bound are set, then this flag must be + If ``lower_bound``, or ``upper_bound`` are set, then this flag must be within the given range. Args: @@ -307,39 +411,113 @@ def DEFINE_integer( # pylint: disable=invalid-name,redefined-builtin help: str, the help message. lower_bound: int, min value of the flag. upper_bound: int, max value of the flag. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - **args: dict, the extra keyword args that are passed to DEFINE. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to :func:`DEFINE`. + + Returns: + a handle to defined flag. 
""" parser = _argument_parser.IntegerParser(lower_bound, upper_bound) serializer = _argument_parser.ArgumentSerializer() - DEFINE(parser, name, default, help, flag_values, serializer, **args) + result = DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=required, + **args) _register_bounds_validator_if_needed(parser, name, flag_values=flag_values) + return result def DEFINE_enum( # pylint: disable=invalid-name,redefined-builtin - name, default, enum_values, help, flag_values=_flagvalues.FLAGS, - module_name=None, **args): + name, + default, + enum_values, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False, + **args): """Registers a flag whose value can be any string from enum_values. + Instead of a string enum, prefer `DEFINE_enum_class`, which allows + defining enums from an `enum.Enum` class. + Args: name: str, the flag name. default: str|None, the default value of the flag. enum_values: [str], a non-empty list of strings with the possible values for - the flag. + the flag. help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - module_name: str, the name of the Python module declaring this flag. - If not provided, it will be computed using the stack trace of this call. - **args: dict, the extra keyword args that are passed to Flag __init__. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. + + Returns: + a handle to defined flag. 
""" - DEFINE_flag(_flag.EnumFlag(name, default, help, enum_values, **args), - flag_values, module_name) + return DEFINE_flag( + _flag.EnumFlag(name, default, help, enum_values, **args), flag_values, + module_name, required) + + +def DEFINE_enum_class( # pylint: disable=invalid-name,redefined-builtin + name, + default, + enum_class, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + case_sensitive=False, + required=False, + **args): + """Registers a flag whose value can be the name of enum members. + + Args: + name: str, the flag name. + default: Enum|str|None, the default value of the flag. + enum_class: class, the Enum class with all the possible values for the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + case_sensitive: bool, whether to map strings to members of the enum_class + without considering case. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + return DEFINE_flag( + _flag.EnumClassFlag( + name, + default, + help, + enum_class, + case_sensitive=case_sensitive, + **args), flag_values, module_name, required) def DEFINE_list( # pylint: disable=invalid-name,redefined-builtin - name, default, help, flag_values=_flagvalues.FLAGS, **args): + name, + default, + help, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value is a comma-separated list of strings. The flag value is parsed with a CSV parser. @@ -348,18 +526,36 @@ def DEFINE_list( # pylint: disable=invalid-name,redefined-builtin name: str, the flag name. default: list|str|None, the default value of the flag. 
help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. + ``Flag.__init__``. + + Returns: + a handle to defined flag. """ parser = _argument_parser.ListParser() serializer = _argument_parser.CsvListSerializer(',') - DEFINE(parser, name, default, help, flag_values, serializer, **args) + return DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=required, + **args) def DEFINE_spaceseplist( # pylint: disable=invalid-name,redefined-builtin - name, default, help, comma_compat=False, flag_values=_flagvalues.FLAGS, + name, + default, + help, + comma_compat=False, + flag_values=_flagvalues.FLAGS, + required=False, **args): """Registers a flag whose value is a whitespace-separated list of strings. @@ -369,23 +565,43 @@ def DEFINE_spaceseplist( # pylint: disable=invalid-name,redefined-builtin name: str, the flag name. default: list|str|None, the default value of the flag. help: str, the help message. - comma_compat: bool - Whether to support comma as an additional separator. - If false then only whitespace is supported. This is intended only for - backwards compatibility with flags that used to be comma-separated. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + comma_compat: bool - Whether to support comma as an additional separator. If + false then only whitespace is supported. This is intended only for + backwards compatibility with flags that used to be comma-separated. 
+ flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. + ``Flag.__init__``. + + Returns: + a handle to defined flag. """ parser = _argument_parser.WhitespaceSeparatedListParser( comma_compat=comma_compat) serializer = _argument_parser.ListSerializer(' ') - DEFINE(parser, name, default, help, flag_values, serializer, **args) + return DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=required, + **args) def DEFINE_multi( # pylint: disable=invalid-name,redefined-builtin - parser, serializer, name, default, help, flag_values=_flagvalues.FLAGS, - module_name=None, **args): + parser, + serializer, + name, + default, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False, + **args): """Registers a generic MultiFlag that parses its args with a given parser. Auxiliary function. Normal users should NOT use it directly. @@ -398,21 +614,36 @@ def DEFINE_multi( # pylint: disable=invalid-name,redefined-builtin parser: ArgumentParser, used to parse the flag arguments. serializer: ArgumentSerializer, the flag serializer instance. name: str, the flag name. - default: list|str|None, the default value of the flag. + default: Union[Iterable[T], Text, None], the default value of the flag. If + the value is text, it will be parsed as if it was provided from the + command line. If the value is a non-string iterable, it will be iterated + over to create a shallow copy of the values. If it is None, it is left + as-is. help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. - module_name: A string, the name of the Python module declaring this flag. 
- If not provided, it will be computed using the stack trace of this call. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: A string, the name of the Python module declaring this flag. If + not provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. + ``Flag.__init__``. + + Returns: + a handle to defined flag. """ - DEFINE_flag(_flag.MultiFlag(parser, serializer, name, default, help, **args), - flag_values, module_name) + return DEFINE_flag( + _flag.MultiFlag(parser, serializer, name, default, help, **args), + flag_values, module_name, required) def DEFINE_multi_string( # pylint: disable=invalid-name,redefined-builtin - name, default, help, flag_values=_flagvalues.FLAGS, **args): + name, + default, + help, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value can be a list of any strings. Use the flag on the command line multiple times to place multiple @@ -423,21 +654,41 @@ def DEFINE_multi_string( # pylint: disable=invalid-name,redefined-builtin Args: name: str, the flag name. - default: [str]|str|None, the default value of the flag. + default: Union[Iterable[Text], Text, None], the default value of the flag; + see :func:`DEFINE_multi`. help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. 
+ ``Flag.__init__``. + + Returns: + a handle to defined flag. """ parser = _argument_parser.ArgumentParser() serializer = _argument_parser.ArgumentSerializer() - DEFINE_multi(parser, serializer, name, default, help, flag_values, **args) + return DEFINE_multi( + parser, + serializer, + name, + default, + help, + flag_values, + required=required, + **args) def DEFINE_multi_integer( # pylint: disable=invalid-name,redefined-builtin - name, default, help, lower_bound=None, upper_bound=None, - flag_values=_flagvalues.FLAGS, **args): + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value can be a list of arbitrary integers. Use the flag on the command line multiple times to place multiple @@ -447,23 +698,43 @@ def DEFINE_multi_integer( # pylint: disable=invalid-name,redefined-builtin Args: name: str, the flag name. - default: [int]|str|None, the default value of the flag. + default: Union[Iterable[int], Text, None], the default value of the flag; + see `DEFINE_multi`. help: str, the help message. lower_bound: int, min values of the flag. upper_bound: int, max values of the flag. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. + ``Flag.__init__``. + + Returns: + a handle to defined flag. 
""" parser = _argument_parser.IntegerParser(lower_bound, upper_bound) serializer = _argument_parser.ArgumentSerializer() - DEFINE_multi(parser, serializer, name, default, help, flag_values, **args) + return DEFINE_multi( + parser, + serializer, + name, + default, + help, + flag_values, + required=required, + **args) def DEFINE_multi_float( # pylint: disable=invalid-name,redefined-builtin - name, default, help, lower_bound=None, upper_bound=None, - flag_values=_flagvalues.FLAGS, **args): + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): """Registers a flag whose value can be a list of arbitrary floats. Use the flag on the command line multiple times to place multiple @@ -473,23 +744,43 @@ def DEFINE_multi_float( # pylint: disable=invalid-name,redefined-builtin Args: name: str, the flag name. - default: [float]|str|None, the default value of the flag. + default: Union[Iterable[float], Text, None], the default value of the flag; + see `DEFINE_multi`. help: str, the help message. lower_bound: float, min values of the flag. upper_bound: float, max values of the flag. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. + ``Flag.__init__``. + + Returns: + a handle to defined flag. 
""" parser = _argument_parser.FloatParser(lower_bound, upper_bound) serializer = _argument_parser.ArgumentSerializer() - DEFINE_multi(parser, serializer, name, default, help, flag_values, **args) + return DEFINE_multi( + parser, + serializer, + name, + default, + help, + flag_values, + required=required, + **args) def DEFINE_multi_enum( # pylint: disable=invalid-name,redefined-builtin - name, default, enum_values, help, flag_values=_flagvalues.FLAGS, - case_sensitive=True, **args): + name, + default, + enum_values, + help, + flag_values=_flagvalues.FLAGS, + case_sensitive=True, + required=False, + **args): """Registers a flag whose value can be a list strings from enum_values. Use the flag on the command line multiple times to place multiple @@ -499,32 +790,99 @@ def DEFINE_multi_enum( # pylint: disable=invalid-name,redefined-builtin Args: name: str, the flag name. - default: [str]|str|None, the default value of the flag. + default: Union[Iterable[Text], Text, None], the default value of the flag; + see `DEFINE_multi`. enum_values: [str], a non-empty list of strings with the possible values for - the flag. + the flag. help: str, the help message. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. case_sensitive: Whether or not the enum is to be case-sensitive. + required: bool, is this a required flag. This must be used as a keyword + argument. **args: Dictionary with extra keyword args that are passed to the - Flag __init__. + ``Flag.__init__``. + + Returns: + a handle to defined flag. 
""" parser = _argument_parser.EnumParser(enum_values, case_sensitive) serializer = _argument_parser.ArgumentSerializer() - DEFINE_multi(parser, serializer, name, default, help, flag_values, **args) + return DEFINE_multi( + parser, + serializer, + name, + default, + '<%s>: %s' % ('|'.join(enum_values), help), + flag_values, + required=required, + **args) + + +def DEFINE_multi_enum_class( # pylint: disable=invalid-name,redefined-builtin + name, + default, + enum_class, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + case_sensitive=False, + required=False, + **args): + """Registers a flag whose value can be a list of enum members. + Use the flag on the command line multiple times to place multiple + enum values into the list. -def DEFINE_alias(name, original_name, flag_values=_flagvalues.FLAGS, # pylint: disable=invalid-name - module_name=None): + Args: + name: str, the flag name. + default: Union[Iterable[Enum], Iterable[Text], Enum, Text, None], the + default value of the flag; see `DEFINE_multi`; only differences are + documented here. If the value is a single Enum, it is treated as a + single-item list of that Enum value. If it is an iterable, text values + within the iterable will be converted to the equivalent Enum objects. + enum_class: class, the Enum class with all the possible values for the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: A string, the name of the Python module declaring this flag. If + not provided, it will be computed using the stack trace of this call. + case_sensitive: bool, whether to map strings to members of the enum_class + without considering case. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. 
+ """ + return DEFINE_flag( + _flag.MultiEnumClassFlag( + name, default, help, enum_class, case_sensitive=case_sensitive), + flag_values, + module_name, + required=required, + **args) + + +def DEFINE_alias( # pylint: disable=invalid-name + name, + original_name, + flag_values=_flagvalues.FLAGS, + module_name=None): """Defines an alias flag for an existing one. Args: name: str, the flag name. original_name: str, the original flag name. - flag_values: FlagValues, the FlagValues instance with which the flag will - be registered. This should almost never need to be overridden. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. module_name: A string, the name of the module that defines this flag. + Returns: + a handle to defined flag. + Raises: flags.FlagError: UnrecognizedFlagError: if the referenced flag doesn't exist. @@ -534,15 +892,20 @@ def DEFINE_alias(name, original_name, flag_values=_flagvalues.FLAGS, # pylint: raise _exceptions.UnrecognizedFlagError(original_name) flag = flag_values[original_name] - class _Parser(_argument_parser.ArgumentParser): - """The parser for the alias flag calls the original flag parser.""" + class _FlagAlias(_flag.Flag): + """Overrides Flag class so alias value is copy of original flag value.""" def parse(self, argument): flag.parse(argument) - return flag.value + self.present += 1 - class _FlagAlias(_flag.Flag): - """Overrides Flag class so alias value is copy of original flag value.""" + def _parse_from_default(self, value): + # The value was already parsed by the aliased flag, so there is no + # need to call the parser on it a second time. + # Additionally, because of how MultiFlag parses and merges values, + # it isn't possible to delegate to the aliased flag and still get + # the correct values. + return value @property def value(self): @@ -554,6 +917,11 @@ def value(self, value): help_msg = 'Alias for --%s.' 
% flag.name # If alias_name has been used, flags.DuplicatedFlag will be raised. - DEFINE_flag(_FlagAlias(_Parser(), flag.serializer, name, flag.default, - help_msg, boolean=flag.boolean), - flag_values, module_name) + return DEFINE_flag( + _FlagAlias( + flag.parser, + flag.serializer, + name, + flag.default, + help_msg, + boolean=flag.boolean), flag_values, module_name) diff --git a/third_party/py/abseil/absl/flags/_exceptions.py b/third_party/py/abseil/absl/flags/_exceptions.py index 254eb9b3ced5cc..b569d9460e3162 100644 --- a/third_party/py/abseil/absl/flags/_exceptions.py +++ b/third_party/py/abseil/absl/flags/_exceptions.py @@ -18,10 +18,6 @@ aliases defined at the package level instead. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import sys from absl.flags import _helpers @@ -50,13 +46,13 @@ def from_flag(cls, flagname, flag_values, other_flag_values=None): Args: flagname: str, the name of the flag being redefined. - flag_values: FlagValues, the FlagValues instance containing the first - definition of flagname. - other_flag_values: FlagValues, if it is not None, it should be the - FlagValues object where the second definition of flagname occurs. - If it is None, we assume that we're being called when attempting - to create the flag a second time, and we use the module calling - this one as the source of the second definition. + flag_values: :class:`FlagValues`, the FlagValues instance containing the + first definition of flagname. + other_flag_values: :class:`FlagValues`, if it is not None, it should be + the FlagValues object where the second definition of flagname occurs. + If it is None, we assume that we're being called when attempting to + create the flag a second time, and we use the module calling this one + as the source of the second definition. Returns: An instance of DuplicateFlagError. 
@@ -101,7 +97,7 @@ def __init__(self, flagname, flagvalue='', suggestions=None): class UnparsedFlagAccessError(Error): - """Raised when accessing the flag value from unparsed FlagValues.""" + """Raised when accessing the flag value from unparsed :class:`FlagValues`.""" class ValidationError(Error): @@ -109,4 +105,4 @@ class ValidationError(Error): class FlagNameConflictsWithMethodError(Error): - """Raised when a flag name conflicts with FlagValues methods.""" + """Raised when a flag name conflicts with :class:`FlagValues` methods.""" diff --git a/third_party/py/abseil/absl/flags/_flag.py b/third_party/py/abseil/absl/flags/_flag.py index 80845efa9be57a..124f1371662098 100644 --- a/third_party/py/abseil/absl/flags/_flag.py +++ b/third_party/py/abseil/absl/flags/_flag.py @@ -18,10 +18,8 @@ aliases defined at the package level instead. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - +from collections import abc +import copy import functools from absl.flags import _argument_parser @@ -33,43 +31,45 @@ class Flag(object): """Information about a command-line flag. - 'Flag' objects define the following fields: - .name - the name for this flag; - .default - the default value for this flag; - .default_unparsed - the unparsed default value for this flag. - .default_as_str - default value as repr'd string, e.g., "'true'" (or None); - .value - the most recent parsed value of this flag; set by parse(); - .help - a help string or None if no help is available; - .short_name - the single letter alias for this flag (or None); - .boolean - if 'true', this flag does not accept arguments; - .present - true if this flag was parsed from command line flags; - .parser - an ArgumentParser object; - .serializer - an ArgumentSerializer object; - .allow_override - the flag may be redefined without raising an error, and - newly defined flag overrides the old one. 
- .allow_override_cpp - the flag may be redefined in C++ without raising an - error, value "transferred" to C++, and the flag is - replaced by the C++ flag after init; - .allow_hide_cpp - the flag may be redefined despite hiding a C++ flag with - the same name; - .using_default_value - the flag value has not been set by user; - .allow_overwrite - the flag may be parsed more than once without raising - an error, the last set value will be used; - .allow_using_method_names - whether this flag can be defined even if it has - a name that conflicts with a FlagValues method. - - The only public method of a 'Flag' object is parse(), but it is - typically only called by a 'FlagValues' object. The parse() method is - a thin wrapper around the 'ArgumentParser' parse() method. The parsed - value is saved in .value, and the .present attribute is updated. If - this flag was already present, an Error is raised. - - parse() is also called during __init__ to parse the default value and - initialize the .value attribute. This enables other python modules to - safely use flags even if the __main__ module neglects to parse the - command line arguments. The .present attribute is cleared after - __init__ parsing. If the default value is set to None, then the - __init__ parsing step is skipped and the .value attribute is + Attributes: + name: the name for this flag + default: the default value for this flag + default_unparsed: the unparsed default value for this flag. 
+ default_as_str: default value as repr'd string, e.g., "'true'" + (or None) + value: the most recent parsed value of this flag set by :meth:`parse` + help: a help string or None if no help is available + short_name: the single letter alias for this flag (or None) + boolean: if 'true', this flag does not accept arguments + present: true if this flag was parsed from command line flags + parser: an :class:`~absl.flags.ArgumentParser` object + serializer: an ArgumentSerializer object + allow_override: the flag may be redefined without raising an error, + and newly defined flag overrides the old one. + allow_override_cpp: use the flag from C++ if available the flag + definition is replaced by the C++ flag after init + allow_hide_cpp: use the Python flag despite having a C++ flag with + the same name (ignore the C++ flag) + using_default_value: the flag value has not been set by user + allow_overwrite: the flag may be parsed more than once without + raising an error, the last set value will be used + allow_using_method_names: whether this flag can be defined even if + it has a name that conflicts with a FlagValues method. + validators: list of the flag validators. + + The only public method of a ``Flag`` object is :meth:`parse`, but it is + typically only called by a :class:`~absl.flags.FlagValues` object. The + :meth:`parse` method is a thin wrapper around the + :meth:`ArgumentParser.parse()` method. The + parsed value is saved in ``.value``, and the ``.present`` attribute is + updated. If this flag was already present, an Error is raised. + + :meth:`parse` is also called during ``__init__`` to parse the default value + and initialize the ``.value`` attribute. This enables other python modules to + safely use flags even if the ``__main__`` module neglects to parse the + command line arguments. The ``.present`` attribute is cleared after + ``__init__`` parsing. 
If the default value is set to ``None``, then the + ``__init__`` parsing step is skipped and the ``.value`` attribute is initialized to None. Note: The default value is also presented to the user in the help @@ -100,7 +100,7 @@ def __init__(self, parser, serializer, name, default, help_string, self.using_default_value = True self._value = None self.validators = [] - if allow_hide_cpp and allow_override_cpp: + if self.allow_hide_cpp and self.allow_override_cpp: raise _exceptions.Error( "Can't have both allow_hide_cpp (means use Python flag) and " 'allow_override_cpp (means use C++ flag after InitGoogle)') @@ -126,6 +126,22 @@ def __lt__(self, other): return id(self) < id(other) return NotImplemented + def __bool__(self): + raise TypeError('A Flag instance would always be True. ' + 'Did you mean to test the `.value` attribute?') + + def __getstate__(self): + raise TypeError("can't pickle Flag objects") + + def __copy__(self): + raise TypeError('%s does not support shallow copies. ' + 'Use copy.deepcopy instead.' % type(self).__name__) + + def __deepcopy__(self, memo): + result = object.__new__(type(self)) + result.__dict__ = copy.deepcopy(self.__dict__, memo) + return result + def _get_parsed_value_as_string(self, value): """Returns parsed flag value as string.""" if value is None: @@ -137,7 +153,7 @@ def _get_parsed_value_as_string(self, value): return repr('true') else: return repr('false') - return repr(_helpers.str_or_unicode(value)) + return repr(str(value)) def parse(self, argument): """Parses string and sets flag value. 
@@ -175,10 +191,15 @@ def unparse(self): self.present = 0 def serialize(self): - if self.value is None: + """Serializes the flag.""" + return self._serialize(self.value) + + def _serialize(self, value): + """Internal serialize function.""" + if value is None: return '' if self.boolean: - if self.value: + if value: return '--%s' % self.name else: return '--no%s' % self.name @@ -186,7 +207,7 @@ def serialize(self): if not self.serializer: raise _exceptions.Error( 'Serializer not present for flag %s' % self.name) - return '--%s=%s' % (self.name, self.serializer.serialize(self.value)) + return '--%s=%s' % (self.name, self.serializer.serialize(value)) def _set_default(self, value): """Changes the default value (and current value too) for this Flag.""" @@ -194,11 +215,16 @@ def _set_default(self, value): if value is None: self.default = None else: - self.default = self._parse(value) + self.default = self._parse_from_default(value) self.default_as_str = self._get_parsed_value_as_string(self.default) if self.using_default_value: self.value = self.default + # This is split out so that aliases can skip regular parsing of the default + # value. + def _parse_from_default(self, value): + return self._parse(value) + def flag_type(self): """Returns a str that describes the type of the flag. @@ -250,8 +276,9 @@ def _create_xml_dom_element(self, doc, module_name, is_key=False): default_serialized = self.default element.appendChild(_helpers.create_xml_dom_element( doc, 'default', default_serialized)) + value_serialized = self._serialize_value_for_xml(self.value) element.appendChild(_helpers.create_xml_dom_element( - doc, 'current', self.value)) + doc, 'current', value_serialized)) element.appendChild(_helpers.create_xml_dom_element( doc, 'type', self.flag_type())) # Adds extra flag features this flag may have. 
@@ -259,6 +286,10 @@ def _create_xml_dom_element(self, doc, module_name, is_key=False): element.appendChild(e) return element + def _serialize_value_for_xml(self, value): + """Returns the serialized value, for use in an XML help text.""" + return value + def _extra_xml_dom_elements(self, doc): """Returns extra info about this flag in XML. @@ -279,13 +310,13 @@ class BooleanFlag(Flag): """Basic boolean flag. Boolean flags do not take any arguments, and their value is either - True (1) or False (0). The false value is specified on the command - line by prepending the word 'no' to either the long or the short flag + ``True`` (1) or ``False`` (0). The false value is specified on the command + line by prepending the word ``'no'`` to either the long or the short flag name. For example, if a Boolean flag was created whose long name was - 'update' and whose short name was 'x', then this flag could be - explicitly unset through either --noupdate or --nox. + ``'update'`` and whose short name was ``'x'``, then this flag could be + explicitly unset through either ``--noupdate`` or ``--nox``. 
""" def __init__(self, name, default, help, short_name=None, **args): # pylint: disable=redefined-builtin @@ -313,6 +344,33 @@ def _extra_xml_dom_elements(self, doc): return elements +class EnumClassFlag(Flag): + """Basic enum flag; its value is an enum class's member.""" + + def __init__( + self, + name, + default, + help, # pylint: disable=redefined-builtin + enum_class, + short_name=None, + case_sensitive=False, + **args): + p = _argument_parser.EnumClassParser( + enum_class, case_sensitive=case_sensitive) + g = _argument_parser.EnumClassSerializer(lowercase=not case_sensitive) + super(EnumClassFlag, self).__init__( + p, g, name, default, help, short_name, **args) + self.help = '<%s>: %s' % ('|'.join(p.member_names), self.help) + + def _extra_xml_dom_elements(self, doc): + elements = [] + for enum_value in self.parser.enum_class.__members__.keys(): + elements.append(_helpers.create_xml_dom_element( + doc, 'enum_value', enum_value)) + return elements + + class MultiFlag(Flag): """A flag that can appear multiple time on the command-line. @@ -322,8 +380,8 @@ class MultiFlag(Flag): See the __doc__ for Flag for most behavior of this class. Only differences in behavior are described here: - * The default value may be either a single value or a list of values. - A single value is interpreted as the [value] singleton list. + * The default value may be either a single value or an iterable of values. + A single value is transformed into a single-item list of that value. * The value of the flag is always a list, even if the option was only supplied once, and even if the default value is a single @@ -350,6 +408,10 @@ def parse(self, arguments): self.present += len(new_values) def _parse(self, arguments): + if (isinstance(arguments, abc.Iterable) and + not isinstance(arguments, str)): + arguments = list(arguments) + if not isinstance(arguments, list): # Default value may be a list of values. 
Most other arguments # will not be, so convert them into a single-item list to make @@ -358,25 +420,19 @@ def _parse(self, arguments): return [super(MultiFlag, self)._parse(item) for item in arguments] - def serialize(self): + def _serialize(self, value): """See base class.""" if not self.serializer: raise _exceptions.Error( 'Serializer not present for flag %s' % self.name) - if self.value is None: + if value is None: return '' - s = '' - - multi_value = self.value - - for self.value in multi_value: - if s: s += ' ' - s += Flag.serialize(self) - - self.value = multi_value + serialized_items = [ + super(MultiFlag, self)._serialize(value_item) for value_item in value + ] - return s + return '\n'.join(serialized_items) def flag_type(self): """See base class.""" @@ -389,3 +445,44 @@ def _extra_xml_dom_elements(self, doc): elements.append(_helpers.create_xml_dom_element( doc, 'enum_value', enum_value)) return elements + + +class MultiEnumClassFlag(MultiFlag): + """A multi_enum_class flag. + + See the __doc__ for MultiFlag for most behaviors of this class. In addition, + this class knows how to handle enum.Enum instances as values for this flag + type. 
+ """ + + def __init__(self, + name, + default, + help_string, + enum_class, + case_sensitive=False, + **args): + p = _argument_parser.EnumClassParser( + enum_class, case_sensitive=case_sensitive) + g = _argument_parser.EnumClassListSerializer( + list_sep=',', lowercase=not case_sensitive) + super(MultiEnumClassFlag, self).__init__( + p, g, name, default, help_string, **args) + self.help = ( + '<%s>: %s;\n repeat this option to specify a list of values' % + ('|'.join(p.member_names), help_string or '(no help available)')) + + def _extra_xml_dom_elements(self, doc): + elements = [] + for enum_value in self.parser.enum_class.__members__.keys(): + elements.append(_helpers.create_xml_dom_element( + doc, 'enum_value', enum_value)) + return elements + + def _serialize_value_for_xml(self, value): + """See base class.""" + if value is not None: + value_serialized = self.serializer.serialize(value) + else: + value_serialized = '' + return value_serialized diff --git a/third_party/py/abseil/absl/flags/_flagvalues.py b/third_party/py/abseil/absl/flags/_flagvalues.py index 61a5bb45665654..937dc6c2909ac4 100644 --- a/third_party/py/abseil/absl/flags/_flagvalues.py +++ b/third_party/py/abseil/absl/flags/_flagvalues.py @@ -11,64 +11,67 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """Defines the FlagValues class - registry of 'Flag' objects. Do NOT import this module directly. Import the flags package and use the aliases defined at the package level instead. 
""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - +import copy import itertools import logging import os import sys -import warnings +from typing import Generic, TypeVar from xml.dom import minidom from absl.flags import _exceptions from absl.flags import _flag from absl.flags import _helpers -import six +from absl.flags import _validators_classes # Add flagvalues module to disclaimed module ids. _helpers.disclaim_module_ids.add(id(sys.modules[__name__])) +_T = TypeVar('_T') + -class FlagValues(object): - """Registry of 'Flag' objects. +class FlagValues: + """Registry of :class:`~absl.flags.Flag` objects. - A 'FlagValues' can then scan command line arguments, passing flag + A :class:`FlagValues` can then scan command line arguments, passing flag arguments through to the 'Flag' objects that it owns. It also provides easy access to the flag values. Typically only one - 'FlagValues' object is needed by an application: flags.FLAGS + :class:`FlagValues` object is needed by an application: + :const:`FLAGS`. This class is heavily overloaded: - 'Flag' objects are registered via __setitem__: + :class:`Flag` objects are registered via ``__setitem__``:: + FLAGS['longname'] = x # register a new flag - The .value attribute of the registered 'Flag' objects can be accessed - as attributes of this 'FlagValues' object, through __getattr__. Both - the long and short name of the original 'Flag' objects can be used to - access its value: - FLAGS.longname # parsed flag value - FLAGS.x # parsed flag value (short name) + The ``.value`` attribute of the registered :class:`~absl.flags.Flag` objects + can be accessed as attributes of this :class:`FlagValues` object, through + ``__getattr__``. 
Both the long and short name of the original + :class:`~absl.flags.Flag` objects can be used to access its value:: + + FLAGS.longname # parsed flag value + FLAGS.x # parsed flag value (short name) + + Command line arguments are scanned and passed to the registered + :class:`~absl.flags.Flag` objects through the ``__call__`` method. Unparsed + arguments, including ``argv[0]`` (e.g. the program name) are returned:: - Command line arguments are scanned and passed to the registered 'Flag' - objects through the __call__ method. Unparsed arguments, including - argv[0] (e.g. the program name) are returned. argv = FLAGS(sys.argv) # scan command line arguments - The original registered Flag objects can be retrieved through the use - of the dictionary-like operator, __getitem__: + The original registered :class:`~absl.flags.Flag` objects can be retrieved + through the use of the dictionary-like operator, ``__getitem__``:: + x = FLAGS['longname'] # access the registered Flag object - The str() operator of a 'FlagValues' object provides help for all of - the registered 'Flag' objects. + The ``str()`` operator of a :class:`absl.flags.FlagValues` object provides + help for all of the registered :class:`~absl.flags.Flag` objects. """ # A note on collections.abc.Mapping: @@ -221,8 +224,7 @@ def _flag_is_registered(self, flag_obj): return True # Check whether flag_obj is registered under its short name. 
short_name = flag_obj.short_name - if (short_name is not None and - flag_dict.get(short_name, None) == flag_obj): + if (short_name is not None and flag_dict.get(short_name, None) == flag_obj): return True return False @@ -240,13 +242,13 @@ def _cleanup_unregistered_flag_from_module_dicts(self, flag_obj): for flags_by_module_dict in (self.flags_by_module_dict(), self.flags_by_module_id_dict(), self.key_flags_by_module_dict()): - for flags_in_module in six.itervalues(flags_by_module_dict): + for flags_in_module in flags_by_module_dict.values(): # While (as opposed to if) takes care of multiple occurrences of a # flag in the list for the same module. while flag_obj in flags_in_module: flags_in_module.remove(flag_obj) - def _get_flags_defined_by_module(self, module): + def get_flags_for_module(self, module): """Returns the list of flags defined by a module. Args: @@ -259,6 +261,8 @@ def _get_flags_defined_by_module(self, module): """ if not isinstance(module, str): module = module.__name__ + if module == '__main__': + module = sys.argv[0] return list(self.flags_by_module_dict().get(module, [])) @@ -275,11 +279,13 @@ def get_key_flags_for_module(self, module): """ if not isinstance(module, str): module = module.__name__ + if module == '__main__': + module = sys.argv[0] # Any flag is a key flag for the module that defined it. NOTE: # key_flags is a fresh list: we can update it without affecting the # internals of this FlagValues object. - key_flags = self._get_flags_defined_by_module(module) + key_flags = self.get_flags_for_module(module) # Take into account flags explicitly declared as key for a module. for flag in self.key_flags_by_module_dict().get(module, []): @@ -292,8 +298,7 @@ def find_module_defining_flag(self, flagname, default=None): Args: flagname: str, name of the flag to lookup. - default: Value to return if flagname is not defined. Defaults - to None. + default: Value to return if flagname is not defined. Defaults to None. 
Returns: The name of the module which registered the flag with this name. @@ -303,7 +308,7 @@ def find_module_defining_flag(self, flagname, default=None): registered_flag = self._flags().get(flagname) if registered_flag is None: return default - for module, flags in six.iteritems(self.flags_by_module_dict()): + for module, flags in self.flags_by_module_dict().items(): for flag in flags: # It must compare the flag with the one in _flags. This is because a # flag might be overridden only for its long name (or short name), @@ -318,8 +323,7 @@ def find_module_id_defining_flag(self, flagname, default=None): Args: flagname: str, name of the flag to lookup. - default: Value to return if flagname is not defined. Defaults - to None. + default: Value to return if flagname is not defined. Defaults to None. Returns: The ID of the module which registered the flag with this name. @@ -329,7 +333,7 @@ def find_module_id_defining_flag(self, flagname, default=None): registered_flag = self._flags().get(flagname) if registered_flag is None: return default - for module_id, flags in six.iteritems(self.flags_by_module_id_dict()): + for module_id, flags in self.flags_by_module_id_dict().items(): for flag in flags: # It must compare the flag with the one in _flags. This is because a # flag might be overridden only for its long name (or short name), @@ -343,8 +347,8 @@ def _register_unknown_flag_setter(self, setter): """Allow set default values for undefined flags. Args: - setter: Method(name, value) to call to __setattr__ an unknown flag. - Must raise NameError or ValueError for invalid name/value. + setter: Method(name, value) to call to __setattr__ an unknown flag. Must + raise NameError or ValueError for invalid name/value. """ self.__dict__['__set_unknown'] = setter @@ -369,7 +373,7 @@ def _set_unknown_flag(self, name, value): return value except (TypeError, ValueError): # Flag value is not valid. 
raise _exceptions.IllegalFlagValueError( - '"{1}" is not valid for --{0}' .format(name, value)) + '"{1}" is not valid for --{0}'.format(name, value)) except NameError: # Flag name is not valid. pass raise _exceptions.UnrecognizedFlagError(name, value) @@ -380,7 +384,7 @@ def append_flag_values(self, flag_values): Args: flag_values: FlagValues, the FlagValues instance from which to copy flags. """ - for flag_name, flag in six.iteritems(flag_values._flags()): # pylint: disable=protected-access + for flag_name, flag in flag_values._flags().items(): # pylint: disable=protected-access # Each flags with short_name appears here twice (once under its # normal name, and again with its short name). To prevent # problems (DuplicateFlagError) with double flag registration, we @@ -398,7 +402,7 @@ def remove_flag_values(self, flag_values): Args: flag_values: FlagValues, the FlagValues instance containing flags to - remove. + remove. """ for flag_name in flag_values: self.__delattr__(flag_name) @@ -408,14 +412,12 @@ def __setitem__(self, name, flag): fl = self._flags() if not isinstance(flag, _flag.Flag): raise _exceptions.IllegalFlagValueError(flag) - if str is bytes and isinstance(name, unicode): - # When using Python 2 with unicode_literals, allow it but encode it - # into the bytes type we require. 
- name = name.encode('utf-8') - if not isinstance(name, type('')): + if not isinstance(name, str): raise _exceptions.Error('Flag name must be a string') if not name: raise _exceptions.Error('Flag name cannot be empty') + if ' ' in name: + raise _exceptions.Error('Flag name cannot contain a space') self._check_method_name_conflicts(name, flag) if name in fl and not flag.allow_override and not fl[name].allow_override: module, module_name = _helpers.get_calling_module_object_and_name() @@ -438,8 +440,7 @@ def __setitem__(self, name, flag): flags_to_cleanup.add(fl[short_name]) fl[short_name] = flag if (name not in fl # new flag - or fl[name].using_default_value - or not flag.using_default_value): + or fl[name].using_default_value or not flag.using_default_value): if name in fl and fl[name] != flag: flags_to_cleanup.add(fl[name]) fl[name] = flag @@ -464,12 +465,6 @@ def _hide_flag(self, name): """Marks the flag --name as hidden.""" self.__dict__['__hiddenflags'].add(name) - # This exists for legacy reasons, and will be removed in the future. - def _is_unparsed_flag_access_allowed(self, name): - """Determine whether to allow unparsed flag access or not.""" - del name - return False - def __getattr__(self, name): """Retrieves the 'value' attribute of the flag --name.""" fl = self._flags() @@ -481,50 +476,41 @@ def __getattr__(self, name): if self.__dict__['__flags_parsed'] or fl[name].present: return fl[name].value else: - error_message = ( + raise _exceptions.UnparsedFlagAccessError( 'Trying to access flag --%s before flags were parsed.' % name) - if self._is_unparsed_flag_access_allowed(name): - # Print warning to stderr. Messages in logs are often ignored/unnoticed. - warnings.warn( - error_message + ' This will raise an exception in the future.', - RuntimeWarning, - stacklevel=2) - # Force logging.exception() to behave realistically, but don't propagate - # exception up. Allow flag value to be returned (for now). 
- try: - raise _exceptions.UnparsedFlagAccessError(error_message) - except _exceptions.UnparsedFlagAccessError: - logging.exception(error_message) - return fl[name].value - else: - if six.PY2: - # In Python 2, hasattr returns False if getattr raises any exception. - # That means if someone calls hasattr(FLAGS, 'flag'), it returns False - # instead of raises UnparsedFlagAccessError even if --flag is already - # defined. To make the error more visible, the best we can do is to - # log an error message before raising the exception. - # Don't log a full stacktrace here since that makes other callers - # get too much noise. - logging.error(error_message) - raise _exceptions.UnparsedFlagAccessError(error_message) def __setattr__(self, name, value): """Sets the 'value' attribute of the flag --name.""" - fl = self._flags() - if name in self.__dict__['__hiddenflags']: - raise AttributeError(name) - if name not in fl: - return self._set_unknown_flag(name, value) - fl[name].value = value - self._assert_validators(fl[name].validators) - fl[name].using_default_value = False + self._set_attributes(**{name: value}) return value - def _assert_all_validators(self): + def _set_attributes(self, **attributes): + """Sets multiple flag values together, triggers validators afterwards.""" + fl = self._flags() + known_flags = set() + for name, value in attributes.items(): + if name in self.__dict__['__hiddenflags']: + raise AttributeError(name) + if name in fl: + fl[name].value = value + known_flags.add(name) + else: + self._set_unknown_flag(name, value) + for name in known_flags: + self._assert_validators(fl[name].validators) + fl[name].using_default_value = False + + def validate_all_flags(self): + """Verifies whether all flags pass validation. + + Raises: + AttributeError: Raised if validators work with a non-existing flag. + IllegalFlagValueError: Raised if validation fails for at least one + validator. 
+ """ all_validators = set() - for flag in six.itervalues(self._flags()): - for validator in flag.validators: - all_validators.add(validator) + for flag in self._flags().values(): + all_validators.update(flag.validators) self._assert_validators(all_validators) def _assert_validators(self, validators): @@ -533,20 +519,34 @@ def _assert_validators(self, validators): It asserts validators in the order they were created. Args: - validators: Iterable(validators.Validator), validators to be - verified. + validators: Iterable(validators.Validator), validators to be verified. + Raises: AttributeError: Raised if validators work with a non-existing flag. IllegalFlagValueError: Raised if validation fails for at least one validator. """ + messages = [] + bad_flags = set() for validator in sorted( validators, key=lambda validator: validator.insertion_index): try: + if isinstance(validator, _validators_classes.SingleFlagValidator): + if validator.flag_name in bad_flags: + continue + elif isinstance(validator, _validators_classes.MultiFlagsValidator): + if bad_flags & set(validator.flag_names): + continue validator.verify(self) except _exceptions.ValidationError as e: + if isinstance(validator, _validators_classes.SingleFlagValidator): + bad_flags.add(validator.flag_name) + elif isinstance(validator, _validators_classes.MultiFlagsValidator): + bad_flags.update(set(validator.flag_names)) message = validator.print_flags_with_values(self) - raise _exceptions.IllegalFlagValueError('%s: %s' % (message, str(e))) + messages.append('%s: %s' % (message, str(e))) + if messages: + raise _exceptions.IllegalFlagValueError('\n'.join(messages)) def __delattr__(self, flag_name): """Deletes a previously-defined flag from a flag object. @@ -610,13 +610,15 @@ def __len__(self): def __iter__(self): return iter(self._flags()) - def __call__(self, argv): + def __call__(self, argv, known_only=False): """Parses flags from argv; stores parsed flags into this FlagValues object. 
All unparsed arguments are returned. Args: argv: a tuple/list of strings. + known_only: bool, if True, parse and remove known flags; return the rest + untouched. Unknown flags specified by --undefok are not returned. Returns: The list of arguments not parsed as options, including argv[0]. @@ -626,7 +628,7 @@ def __call__(self, argv): TypeError: Raised on passing wrong type of arguments. ValueError: Raised on flag value parsing error. """ - if _helpers.is_bytes_or_string(argv): + if isinstance(argv, (str, bytes)): raise TypeError( 'argv should be a tuple/list of strings, not bytes or string.') if not argv: @@ -639,22 +641,31 @@ def __call__(self, argv): args = self.read_flags_from_files(argv[1:], force_gnu=False) # Parse the arguments. - unknown_flags, unparsed_args, undefok = self._parse_args(args) + unknown_flags, unparsed_args = self._parse_args(args, known_only) # Handle unknown flags by raising UnrecognizedFlagError. # Note some users depend on us raising this particular error. for name, value in unknown_flags: - if name in undefok: - continue - suggestions = _helpers.get_flag_suggestions(name, list(self)) raise _exceptions.UnrecognizedFlagError( name, value, suggestions=suggestions) self.mark_as_parsed() - self._assert_all_validators() + self.validate_all_flags() return [program_name] + unparsed_args + def __getstate__(self): + raise TypeError("can't pickle FlagValues") + + def __copy__(self): + raise TypeError('FlagValues does not support shallow copies. ' + 'Use absl.testing.flagsaver or copy.deepcopy instead.') + + def __deepcopy__(self, memo): + result = object.__new__(type(self)) + result.__dict__.update(copy.deepcopy(self.__dict__, memo)) + return result + def _set_is_retired_flag_func(self, is_retired_flag_func): """Sets a function for checking retired flags. 
@@ -663,11 +674,11 @@ def _set_is_retired_flag_func(self, is_retired_flag_func): Args: is_retired_flag_func: Callable(str) -> (bool, bool), a function takes flag - name as parameter, returns a tuple (is_retired, type_is_bool). + name as parameter, returns a tuple (is_retired, type_is_bool). """ self.__dict__['__is_retired_flag_func'] = is_retired_flag_func - def _parse_args(self, args): + def _parse_args(self, args, known_only): """Helper function to do the main argument parsing. This function goes through args and does the bulk of the flag parsing. @@ -676,18 +687,20 @@ def _parse_args(self, args): Args: args: [str], a list of strings with the arguments to parse. + known_only: bool, if True, parse and remove known flags; return the rest + untouched. Unknown flags specified by --undefok are not returned. Returns: A tuple with the following: unknown_flags: List of (flag name, arg) for flags we don't know about. unparsed_args: List of arguments we did not parse. - undefok: Set of flags that were given via --undefok. Raises: Error: Raised on any parsing error. ValueError: Raised on flag value parsing error. """ - unknown_flags, unparsed_args, undefok = [], [], set() + unparsed_names_and_args = [] # A list of (flag name or None, arg). + undefok = set() retired_flag_func = self.__dict__['__is_retired_flag_func'] flag_dict = self._flags() @@ -704,13 +717,15 @@ def get_value(): if not arg.startswith('-'): # A non-argument: default is break, GNU is skip. - unparsed_args.append(arg) + unparsed_names_and_args.append((None, arg)) if self.is_gnu_getopt(): continue else: break if arg == '--': + if known_only: + unparsed_names_and_args.append((None, arg)) break # At this point, arg must start with '-'. @@ -726,7 +741,7 @@ def get_value(): if not name: # The argument is all dashes (including one dash). 
- unparsed_args.append(arg) + unparsed_names_and_args.append((None, arg)) if self.is_gnu_getopt(): continue else: @@ -740,7 +755,7 @@ def get_value(): continue flag = flag_dict.get(name) - if flag: + if flag is not None: if flag.boolean and value is None: value = 'true' else: @@ -748,13 +763,13 @@ def get_value(): elif name.startswith('no') and len(name) > 2: # Boolean flags can take the form of --noflag, with no value. noflag = flag_dict.get(name[2:]) - if noflag and noflag.boolean: + if noflag is not None and noflag.boolean: if value is not None: raise ValueError(arg + ' does not take an argument') flag = noflag value = 'false' - if retired_flag_func and not flag: + if retired_flag_func and flag is None: is_retired, is_bool = retired_flag_func(name) # If we didn't recognize that flag, but it starts with @@ -769,18 +784,35 @@ def get_value(): # This happens when a non-bool retired flag is specified # in format of "--flag value". get_value() - logging.error('Flag "%s" is retired and should no longer ' - 'be specified. See go/totw/90.', name) + logging.error( + 'Flag "%s" is retired and should no longer ' + 'be specified. See go/totw/90.', name) continue - if flag: + if flag is not None: flag.parse(value) flag.using_default_value = False else: - unknown_flags.append((name, arg)) + unparsed_names_and_args.append((name, arg)) + + unknown_flags = [] + unparsed_args = [] + for name, arg in unparsed_names_and_args: + if name is None: + # Positional arguments. + unparsed_args.append(arg) + elif name in undefok: + # Remove undefok flags. + continue + else: + # This is an unknown flag. + if known_only: + unparsed_args.append(arg) + else: + unknown_flags.append((name, arg)) unparsed_args.extend(list(args)) - return unknown_flags, unparsed_args, undefok + return unknown_flags, unparsed_args def is_parsed(self): """Returns whether flags were parsed.""" @@ -790,7 +822,7 @@ def mark_as_parsed(self): """Explicitly marks flags as parsed. 
Use this when the caller knows that this FlagValues has been parsed as if - a __call__() invocation has happened. This is only a public method for + a ``__call__()`` invocation has happened. This is only a public method for use by things like appcommands which do additional command like parsing. """ self.__dict__['__flags_parsed'] = True @@ -807,7 +839,7 @@ def unparse_flags(self): def flag_values_dict(self): """Returns a dictionary that maps flag names to flag values.""" - return {name: flag.value for name, flag in six.iteritems(self._flags())} + return {name: flag.value for name, flag in self._flags().items()} def __str__(self): """Returns a help string for all known flags.""" @@ -819,40 +851,51 @@ def get_help(self, prefix='', include_special_flags=True): Args: prefix: str, per-line output prefix. include_special_flags: bool, whether to include description of - _SPECIAL_FLAGS, i.e. --flagfile and --undefok. + SPECIAL_FLAGS, i.e. --flagfile and --undefok. Returns: str, formatted help message. """ - helplist = [] - flags_by_module = self.flags_by_module_dict() if flags_by_module: modules = sorted(flags_by_module) - # Print the help for the main module first, if possible. main_module = sys.argv[0] if main_module in modules: modules.remove(main_module) modules = [main_module] + modules - - for module in modules: - self._render_our_module_flags(module, helplist, prefix) - if include_special_flags: - self._render_module_flags( - 'absl.flags', - _helpers.SPECIAL_FLAGS._flags().values(), # pylint: disable=protected-access - helplist, - prefix) + return self._get_help_for_modules(modules, prefix, include_special_flags) else: + output_lines = [] # Just print one long list of flags. 
- values = six.itervalues(self._flags()) + values = self._flags().values() if include_special_flags: values = itertools.chain( - values, six.itervalues(_helpers.SPECIAL_FLAGS._flags())) # pylint: disable=protected-access - self._render_flag_list(values, helplist, prefix) + values, _helpers.SPECIAL_FLAGS._flags().values()) # pylint: disable=protected-access + self._render_flag_list(values, output_lines, prefix) + return '\n'.join(output_lines) - return '\n'.join(helplist) + def _get_help_for_modules(self, modules, prefix, include_special_flags): + """Returns the help string for a list of modules. + + Private to absl.flags package. + + Args: + modules: List[str], a list of modules to get the help string for. + prefix: str, a string that is prepended to each generated help line. + include_special_flags: bool, whether to include description of + SPECIAL_FLAGS, i.e. --flagfile and --undefok. + """ + output_lines = [] + for module in modules: + self._render_our_module_flags(module, output_lines, prefix) + if include_special_flags: + self._render_module_flags( + 'absl.flags', + _helpers.SPECIAL_FLAGS._flags().values(), # pylint: disable=protected-access + output_lines, + prefix) + return '\n'.join(output_lines) def _render_module_flags(self, module, flags, output_lines, prefix=''): """Returns a help string for a given module.""" @@ -863,7 +906,7 @@ def _render_module_flags(self, module, flags, output_lines, prefix=''): def _render_our_module_flags(self, module, output_lines, prefix=''): """Returns a help string for a given module.""" - flags = self._get_flags_defined_by_module(module) + flags = self.get_flags_for_module(module) if flags: self._render_module_flags(module, flags, output_lines, prefix) @@ -872,8 +915,8 @@ def _render_our_module_key_flags(self, module, output_lines, prefix=''): Args: module: module|str, the module to render key flags for. - output_lines: [str], a list of strings. The generated help message - lines will be appended to this list. 
+ output_lines: [str], a list of strings. The generated help message lines + will be appended to this list. prefix: str, a string that is prepended to each generated help line. """ key_flags = self.get_key_flags_for_module(module) @@ -915,10 +958,12 @@ def _render_flag_list(self, flaglist, output_lines, prefix=' '): # a different flag is using this name now continue # only print help once - if flag in flagset: continue + if flag in flagset: + continue flagset[flag] = 1 flaghelp = '' - if flag.short_name: flaghelp += '-%s,' % flag.short_name + if flag.short_name: + flaghelp += '-%s,' % flag.short_name if flag.boolean: flaghelp += '--[no]%s:' % flag.name else: @@ -927,15 +972,15 @@ def _render_flag_list(self, flaglist, output_lines, prefix=' '): if flag.help: flaghelp += flag.help flaghelp = _helpers.text_wrap( - flaghelp, indent=prefix+' ', firstline_indent=prefix) + flaghelp, indent=prefix + ' ', firstline_indent=prefix) if flag.default_as_str: flaghelp += '\n' flaghelp += _helpers.text_wrap( - '(default: %s)' % flag.default_as_str, indent=prefix+' ') + '(default: %s)' % flag.default_as_str, indent=prefix + ' ') if flag.parser.syntactic_help: flaghelp += '\n' flaghelp += _helpers.text_wrap( - '(%s)' % flag.parser.syntactic_help, indent=prefix+' ') + '(%s)' % flag.parser.syntactic_help, indent=prefix + ' ') output_lines.append(flaghelp) def get_flag_value(self, name, default): # pylint: disable=invalid-name @@ -957,7 +1002,7 @@ def get_flag_value(self, name, default): # pylint: disable=invalid-name def _is_flag_file_directive(self, flag_string): """Checks whether flag_string contain a --flagfile= directive.""" - if isinstance(flag_string, type('')): + if isinstance(flag_string, str): if flag_string.startswith('--flagfile='): return 1 elif flag_string == '--flagfile': @@ -991,8 +1036,7 @@ def _extract_filename(self, flagfile_str): elif flagfile_str.startswith('-flagfile='): return os.path.expanduser((flagfile_str[(len('-flagfile=')):]).strip()) else: - raise 
_exceptions.Error( - 'Hit illegal --flagfile type: %s' % flagfile_str) + raise _exceptions.Error('Hit illegal --flagfile type: %s' % flagfile_str) def _get_flag_file_lines(self, filename, parsed_file_stack=None): """Returns the useful (!=comments, etc) lines from a file with flags. @@ -1013,6 +1057,9 @@ def _get_flag_file_lines(self, filename, parsed_file_stack=None): EVERYTHING except whitespace lines and comments (lines starting with '#' or '//'). """ + # For consistency with the cpp version, ignore empty values. + if not filename: + return [] if parsed_file_stack is None: parsed_file_stack = [] # We do a little safety check for reparsing a file we've already encountered @@ -1063,11 +1110,11 @@ def read_flags_from_files(self, argv, force_gnu=True): Args: argv: [str], a list of strings, usually sys.argv[1:], which may contain - one or more flagfile directives of the form --flagfile="./filename". - Note that the name of the program (sys.argv[0]) should be omitted. - force_gnu: bool, if False, --flagfile parsing obeys normal flag semantics. - If True, --flagfile parsing instead follows gnu_getopt semantics. - *** WARNING *** force_gnu=False may become the future default! + one or more flagfile directives of the form --flagfile="./filename". + Note that the name of the program (sys.argv[0]) should be omitted. + force_gnu: bool, if False, --flagfile parsing obeys the + FLAGS.is_gnu_getopt() value. If True, ignore the value and always follow + gnu_getopt semantics. Returns: A new list which has the original list combined with what we read @@ -1087,14 +1134,15 @@ def read_flags_from_files(self, argv, force_gnu=True): using absl.flags DEFINE_flag() type functions. Notes (assuming we're getting a commandline of some sort as our input): - --> For duplicate flags, the last one we hit should "win". - --> Since flags that appear later win, a flagfile's settings can be "weak" + + * For duplicate flags, the last one we hit should "win". 
+ * Since flags that appear later win, a flagfile's settings can be "weak" if the --flagfile comes at the beginning of the argument sequence, and it can be "strong" if the --flagfile comes at the end. - --> A further "--flagfile=" CAN be nested in a flagfile. + * A further "--flagfile=" CAN be nested in a flagfile. It will be expanded in exactly the spot where it is found. - --> In a flagfile, a line beginning with # or // is a comment. - --> Entirely blank lines _should_ be ignored. + * In a flagfile, a line beginning with # or // is a comment. + * Entirely blank lines _should_ be ignored. """ rest_of_args = argv new_argv = [] @@ -1124,8 +1172,8 @@ def read_flags_from_files(self, argv, force_gnu=True): if not force_gnu and not self.__dict__['__use_gnu_getopt']: break else: - if ('=' not in current_arg and - rest_of_args and not rest_of_args[0].startswith('-')): + if ('=' not in current_arg and rest_of_args and + not rest_of_args[0].startswith('-')): # If this is an occurrence of a legitimate --x y, skip the value # so that it won't be mistaken for a standalone arg. fl = self._flags() @@ -1151,11 +1199,15 @@ def flags_into_string(self): Returns: str, the string with the flags assignments from this FlagValues object. + The flags are ordered by (module_name, flag_name). 
""" + module_flags = sorted(self.flags_by_module_dict().items()) s = '' - for flag in self._flags().values(): - if flag.value is not None: - s += flag.serialize() + '\n' + for unused_module_name, flags in module_flags: + flags = sorted(flags, key=lambda f: f.name) + for flag in flags: + if flag.value is not None: + s += flag.serialize() + '\n' return s def append_flags_into_file(self, filename): @@ -1189,16 +1241,17 @@ def write_help_in_xml_format(self, outfile=None): all_flag = doc.createElement('AllFlags') doc.appendChild(all_flag) - all_flag.appendChild(_helpers.create_xml_dom_element( - doc, 'program', os.path.basename(sys.argv[0]))) + all_flag.appendChild( + _helpers.create_xml_dom_element(doc, 'program', + os.path.basename(sys.argv[0]))) usage_doc = sys.modules['__main__'].__doc__ if not usage_doc: usage_doc = '\nUSAGE: %s [flags]\n' % sys.argv[0] else: usage_doc = usage_doc.replace('%s', sys.argv[0]) - all_flag.appendChild(_helpers.create_xml_dom_element( - doc, 'usage', usage_doc)) + all_flag.appendChild( + _helpers.create_xml_dom_element(doc, 'usage', usage_doc)) # Get list of key flags for the main module. 
key_flags = self.get_key_flags_for_module(sys.argv[0]) @@ -1212,15 +1265,15 @@ def write_help_in_xml_format(self, outfile=None): flag_list.sort() for unused_flag_name, flag in flag_list: is_key = flag in key_flags - all_flag.appendChild(flag._create_xml_dom_element( # pylint: disable=protected-access - doc, module_name, is_key=is_key)) + all_flag.appendChild( + flag._create_xml_dom_element( # pylint: disable=protected-access + doc, + module_name, + is_key=is_key)) outfile = outfile or sys.stdout - if six.PY2: - outfile.write(doc.toprettyxml(indent=' ', encoding='utf-8')) - else: - outfile.write( - doc.toprettyxml(indent=' ', encoding='utf-8').decode('utf-8')) + outfile.write( + doc.toprettyxml(indent=' ', encoding='utf-8').decode('utf-8')) outfile.flush() def _check_method_name_conflicts(self, name, flag): @@ -1239,6 +1292,127 @@ def _check_method_name_conflicts(self, name, flag): name=flag_name, class_name=type(self).__name__)) +FLAGS = FlagValues() + +class FlagHolder(Generic[_T]): + """Holds a defined flag. -FLAGS = FlagValues() + This facilitates a cleaner api around global state. Instead of:: + + flags.DEFINE_integer('foo', ...) + flags.DEFINE_integer('bar', ...) + + def method(): + # prints parsed value of 'bar' flag + print(flags.FLAGS.foo) + # runtime error due to typo or possibly bad coding style. + print(flags.FLAGS.baz) + + it encourages code like:: + + _FOO_FLAG = flags.DEFINE_integer('foo', ...) + _BAR_FLAG = flags.DEFINE_integer('bar', ...) + + def method(): + print(_FOO_FLAG.value) + print(_BAR_FLAG.value) + + since the name of the flag appears only once in the source code. + """ + + def __init__(self, flag_values, flag, ensure_non_none_value=False): + """Constructs a FlagHolder instance providing typesafe access to flag. + + Args: + flag_values: The container the flag is registered to. + flag: The flag object for this flag. + ensure_non_none_value: Is the value of the flag allowed to be None. 
+ """ + self._flagvalues = flag_values + # We take the entire flag object, but only keep the name. Why? + # - We want FlagHolder[T] to be generic container + # - flag_values contains all flags, so has no reference to T. + # - typecheckers don't like to see a generic class where none of the ctor + # arguments refer to the generic type. + self._name = flag.name + # We intentionally do NOT check if the default value is None. + # This allows future use of this for "required flags with None default" + self._ensure_non_none_value = ensure_non_none_value + + def __eq__(self, other): + raise TypeError( + "unsupported operand type(s) for ==: '{0}' and '{1}' " + "(did you mean to use '{0}.value' instead?)".format( + type(self).__name__, type(other).__name__)) + + def __bool__(self): + raise TypeError( + "bool() not supported for instances of type '{0}' " + "(did you mean to use '{0}.value' instead?)".format( + type(self).__name__)) + + __nonzero__ = __bool__ + + @property + def name(self): + return self._name + + @property + def value(self): + """Returns the value of the flag. + + If ``_ensure_non_none_value`` is ``True``, then return value is not + ``None``. + + Raises: + UnparsedFlagAccessError: if flag parsing has not finished. + IllegalFlagValueError: if value is None unexpectedly. 
+ """ + val = getattr(self._flagvalues, self._name) + if self._ensure_non_none_value and val is None: + raise _exceptions.IllegalFlagValueError( + 'Unexpected None value for flag %s' % self._name) + return val + + @property + def default(self): + """Returns the default value of the flag.""" + return self._flagvalues[self._name].default + + @property + def present(self): + """Returns True if the flag was parsed from command-line flags.""" + return bool(self._flagvalues[self._name].present) + + +def resolve_flag_ref(flag_ref, flag_values): + """Helper to validate and resolve a flag reference argument.""" + if isinstance(flag_ref, FlagHolder): + new_flag_values = flag_ref._flagvalues # pylint: disable=protected-access + if flag_values != FLAGS and flag_values != new_flag_values: + raise ValueError( + 'flag_values must not be customized when operating on a FlagHolder') + return flag_ref.name, new_flag_values + return flag_ref, flag_values + + +def resolve_flag_refs(flag_refs, flag_values): + """Helper to validate and resolve flag reference list arguments.""" + fv = None + names = [] + for ref in flag_refs: + if isinstance(ref, FlagHolder): + newfv = ref._flagvalues # pylint: disable=protected-access + name = ref.name + else: + newfv = flag_values + name = ref + if fv and fv != newfv: + raise ValueError( + 'multiple FlagValues instances used in invocation. 
' + 'FlagHolders must be registered to the same FlagValues instance as ' + 'do flag names, if provided.') + fv = newfv + names.append(name) + return names, fv diff --git a/third_party/py/abseil/absl/flags/_helpers.py b/third_party/py/abseil/absl/flags/_helpers.py index a3734189bf1c8c..ea02f2d13ca9b4 100644 --- a/third_party/py/abseil/absl/flags/_helpers.py +++ b/third_party/py/abseil/absl/flags/_helpers.py @@ -14,10 +14,6 @@ """Internal helper functions for Abseil Python flags library.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import collections import os import re @@ -34,13 +30,11 @@ except ImportError: termios = None -import six -from six.moves import range # pylint: disable=redefined-builtin - _DEFAULT_HELP_WIDTH = 80 # Default width of help output. -_MIN_HELP_WIDTH = 40 # Minimal "sane" width of help output. We assume that any - # value below 40 is unreasonable. +# Minimal "sane" width of help output. We assume that any value below 40 is +# unreasonable. +_MIN_HELP_WIDTH = 40 # Define the allowed error rate in an input string to get suggestions. # @@ -132,32 +126,6 @@ def get_calling_module(): return get_calling_module_object_and_name().module_name -def str_or_unicode(value): - """Converts a value to a python string. - - Behavior of this function is intentionally different in Python2/3. - - In Python2, the given value is attempted to convert to a str (byte string). - If it contains non-ASCII characters, it is converted to a unicode instead. - - In Python3, the given value is always converted to a str (unicode string). - - This behavior reflects the (bad) practice in Python2 to try to represent - a string as str as long as it contains ASCII characters only. - - Args: - value: An object to be converted to a string. - - Returns: - A string representation of the given value. See the description above - for its type. 
- """ - try: - return str(value) - except UnicodeEncodeError: - return unicode(value) # Python3 should never come here - - def create_xml_dom_element(doc, name, value): """Returns an XML DOM element with name and text value. @@ -171,10 +139,7 @@ def create_xml_dom_element(doc, name, value): Returns: An instance of minidom.Element. """ - s = str_or_unicode(value) - if six.PY2 and not isinstance(s, unicode): - # Get a valid unicode string. - s = s.decode('utf-8', 'ignore') + s = str(value) if isinstance(value, bool): # Display boolean values as the C++ flag library does: no caps. s = s.lower() @@ -216,7 +181,8 @@ def get_flag_suggestions(attempt, longopt_list): # This also handles the case where the flag is spelled right but ambiguous. distances = [(_damerau_levenshtein(attempt, option[0:len(attempt)]), option) for option in option_names] - distances.sort(key=lambda t: t[0]) + # t[0] is distance, and sorting by t[1] allows us to have stable output. + distances.sort() least_errors, _ = distances[0] # Don't suggest excessively bad matches. @@ -317,7 +283,7 @@ def text_wrap(text, length=None, indent='', firstline_indent=None): return '\n'.join(result) -def flag_dict_to_args(flag_map): +def flag_dict_to_args(flag_map, multi_flags=None): """Convert a dict of values into process call parameters. This method is used to convert a dictionary into a sequence of parameters @@ -326,16 +292,24 @@ def flag_dict_to_args(flag_map): Args: flag_map: dict, a mapping where the keys are flag names (strings). values are treated according to their type: - * If value is None, then only the name is emitted. - * If value is True, then only the name is emitted. - * If value is False, then only the name prepended with 'no' is emitted. - * If value is a string then --name=value is emitted. - * If value is a collection, this will emit --name=value1,value2,value3. + + * If value is ``None``, then only the name is emitted. + * If value is ``True``, then only the name is emitted. 
+ * If value is ``False``, then only the name prepended with 'no' is + emitted. + * If value is a string then ``--name=value`` is emitted. + * If value is a collection, this will emit + ``--name=value1,value2,value3``, unless the flag name is in + ``multi_flags``, in which case this will emit + ``--name=value1 --name=value2 --name=value3``. * Everything else is converted to string an passed as such. + + multi_flags: set, names (strings) of flags that should be treated as + multi-flags. Yields: sequence of string suitable for a subprocess execution. """ - for key, value in six.iteritems(flag_map): + for key, value in flag_map.items(): if value is None: yield '--%s' % key elif isinstance(value, bool): @@ -349,7 +323,11 @@ def flag_dict_to_args(flag_map): else: # Now we attempt to deal with collections. try: - yield '--%s=%s' % (key, ','.join(str(item) for item in value)) + if multi_flags and key in multi_flags: + for item in value: + yield '--%s=%s' % (key, str(item)) + else: + yield '--%s=%s' % (key, ','.join(str(item) for item in value)) except TypeError: # Default case. yield '--%s=%s' % (key, value) @@ -421,10 +399,3 @@ def doc_to_help(doc): doc = re.sub(r'(?<=\S)\n(?=\S)', ' ', doc, flags=re.M) return doc - - -def is_bytes_or_string(maybe_string): - if str is bytes: - return isinstance(maybe_string, basestring) - else: - return isinstance(maybe_string, (str, bytes)) diff --git a/third_party/py/abseil/absl/flags/_validators.py b/third_party/py/abseil/absl/flags/_validators.py index 02b508e26f3651..2161284a8e284f 100644 --- a/third_party/py/abseil/absl/flags/_validators.py +++ b/third_party/py/abseil/absl/flags/_validators.py @@ -14,16 +14,19 @@ """Module to enforce different constraints on flags. 
-Flags validators can be registered using following functions / decorators: +Flags validators can be registered using following functions / decorators:: + flags.register_validator @flags.validator flags.register_multi_flags_validator @flags.multi_flags_validator -Three convenience functions are also provided for common flag constraints: +Three convenience functions are also provided for common flag constraints:: + flags.mark_flag_as_required flags.mark_flags_as_required flags.mark_flags_as_mutual_exclusive + flags.mark_bool_flags_as_mutual_exclusive See their docstring in this module for a usage manual. @@ -31,165 +34,11 @@ aliases defined at the package level instead. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import warnings from absl.flags import _exceptions from absl.flags import _flagvalues - - -class Validator(object): - """Base class for flags validators. - - Users should NOT overload these classes, and use flags.Register... - methods instead. - """ - - # Used to assign each validator an unique insertion_index - validators_count = 0 - - def __init__(self, checker, message): - """Constructor to create all validators. - - Args: - checker: function to verify the constraint. - Input of this method varies, see SingleFlagValidator and - multi_flags_validator for a detailed description. - message: str, error message to be shown to the user. - """ - self.checker = checker - self.message = message - Validator.validators_count += 1 - # Used to assert validators in the order they were registered. - self.insertion_index = Validator.validators_count - - def verify(self, flag_values): - """Verifies that constraint is satisfied. - - flags library calls this method to verify Validator's constraint. - - Args: - flag_values: flags.FlagValues, the FlagValues instance to get flags from. - Raises: - Error: Raised if constraint is not satisfied. 
- """ - param = self._get_input_to_checker_function(flag_values) - if not self.checker(param): - raise _exceptions.ValidationError(self.message) - - def get_flags_names(self): - """Returns the names of the flags checked by this validator. - - Returns: - [string], names of the flags. - """ - raise NotImplementedError('This method should be overloaded') - - def print_flags_with_values(self, flag_values): - raise NotImplementedError('This method should be overloaded') - - def _get_input_to_checker_function(self, flag_values): - """Given flag values, returns the input to be given to checker. - - Args: - flag_values: flags.FlagValues, containing all flags. - Returns: - The input to be given to checker. The return type depends on the specific - validator. - """ - raise NotImplementedError('This method should be overloaded') - - -class SingleFlagValidator(Validator): - """Validator behind register_validator() method. - - Validates that a single flag passes its checker function. The checker function - takes the flag value and returns True (if value looks fine) or, if flag value - is not valid, either returns False or raises an Exception. - """ - - def __init__(self, flag_name, checker, message): - """Constructor. - - Args: - flag_name: string, name of the flag. - checker: function to verify the validator. - input - value of the corresponding flag (string, boolean, etc). - output - bool, True if validator constraint is satisfied. - If constraint is not satisfied, it should either return False or - raise flags.ValidationError(desired_error_message). - message: str, error message to be shown to the user if validator's - condition is not satisfied. 
- """ - super(SingleFlagValidator, self).__init__(checker, message) - self.flag_name = flag_name - - def get_flags_names(self): - return [self.flag_name] - - def print_flags_with_values(self, flag_values): - return 'flag --%s=%s' % (self.flag_name, flag_values[self.flag_name].value) - - def _get_input_to_checker_function(self, flag_values): - """Given flag values, returns the input to be given to checker. - - Args: - flag_values: flags.FlagValues, the FlagValues instance to get flags from. - Returns: - object, the input to be given to checker. - """ - return flag_values[self.flag_name].value - - -class MultiFlagsValidator(Validator): - """Validator behind register_multi_flags_validator method. - - Validates that flag values pass their common checker function. The checker - function takes flag values and returns True (if values look fine) or, - if values are not valid, either returns False or raises an Exception. - """ - - def __init__(self, flag_names, checker, message): - """Constructor. - - Args: - flag_names: [str], containing names of the flags used by checker. - checker: function to verify the validator. - input - dict, with keys() being flag_names, and value for each - key being the value of the corresponding flag (string, boolean, - etc). - output - bool, True if validator constraint is satisfied. - If constraint is not satisfied, it should either return False or - raise flags.ValidationError(desired_error_message). - message: str, error message to be shown to the user if validator's - condition is not satisfied - """ - super(MultiFlagsValidator, self).__init__(checker, message) - self.flag_names = flag_names - - def _get_input_to_checker_function(self, flag_values): - """Given flag values, returns the input to be given to checker. - - Args: - flag_values: flags.FlagValues, the FlagValues instance to get flags from. - Returns: - dict, with keys() being self.lag_names, and value for each key - being the value of the corresponding flag (string, boolean, etc). 
- """ - return dict([key, flag_values[key].value] for key in self.flag_names) - - def print_flags_with_values(self, flag_values): - prefix = 'flags ' - flags_with_values = [] - for key in self.flag_names: - flags_with_values.append('%s=%s' % (key, flag_values[key].value)) - return prefix + ', '.join(flags_with_values) - - def get_flags_names(self): - return self.flag_names +from absl.flags import _validators_classes def register_validator(flag_name, @@ -200,25 +49,33 @@ def register_validator(flag_name, The constraint is validated when flags are initially parsed, and after each change of the corresponding flag's value. + Args: - flag_name: str, name of the flag to be checked. + flag_name: str | FlagHolder, name or holder of the flag to be checked. + Positional-only parameter. checker: callable, a function to validate the flag. - input - A single positional argument: The value of the corresponding - flag (string, boolean, etc. This value will be passed to checker - by the library). - output - bool, True if validator constraint is satisfied. - If constraint is not satisfied, it should either return False or - raise flags.ValidationError(desired_error_message). + + * input - A single positional argument: The value of the corresponding + flag (string, boolean, etc. This value will be passed to checker + by the library). + * output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either ``return False`` or + ``raise flags.ValidationError(desired_error_message)``. + message: str, error text to be shown to the user if checker returns False. If checker raises flags.ValidationError, message from the raised error will be shown. flag_values: flags.FlagValues, optional FlagValues instance to validate against. + Raises: AttributeError: Raised when flag_name is not registered as a valid flag name. + ValueError: Raised when flag_values is non-default and does not match the + FlagValues of the provided FlagHolder instance. 
""" - v = SingleFlagValidator(flag_name, checker, message) + flag_name, flag_values = _flagvalues.resolve_flag_ref(flag_name, flag_values) + v = _validators_classes.SingleFlagValidator(flag_name, checker, message) _add_validator(flag_values, v) @@ -226,16 +83,17 @@ def validator(flag_name, message='Flag validation failed', flag_values=_flagvalues.FLAGS): """A function decorator for defining a flag validator. - Registers the decorated function as a validator for flag_name, e.g. + Registers the decorated function as a validator for flag_name, e.g.:: - @flags.validator('foo') - def _CheckFoo(foo): - ... + @flags.validator('foo') + def _CheckFoo(foo): + ... - See register_validator() for the specification of checker function. + See :func:`register_validator` for the specification of checker function. Args: - flag_name: str, name of the flag to be checked. + flag_name: str | FlagHolder, name or holder of the flag to be checked. + Positional-only parameter. message: str, error text to be shown to the user if checker returns False. If checker raises flags.ValidationError, message from the raised error will be shown. @@ -266,13 +124,16 @@ def register_multi_flags_validator(flag_names, change of the corresponding flag's value. Args: - flag_names: [str], a list of the flag names to be checked. + flag_names: [str | FlagHolder], a list of the flag names or holders to be + checked. Positional-only parameter. multi_flags_checker: callable, a function to validate the flag. - input - dict, with keys() being flag_names, and value for each key + + * input - dict, with keys() being flag_names, and value for each key being the value of the corresponding flag (string, boolean, etc). - output - bool, True if validator constraint is satisfied. + * output - bool, True if validator constraint is satisfied. If constraint is not satisfied, it should either return False or raise flags.ValidationError. + message: str, error text to be shown to the user if checker returns False. 
If checker raises flags.ValidationError, message from the raised error will be shown. @@ -281,8 +142,14 @@ def register_multi_flags_validator(flag_names, Raises: AttributeError: Raised when a flag is not registered as a valid flag name. + ValueError: Raised when multiple FlagValues are used in the same + invocation. This can occur when FlagHolders have different `_flagvalues` + or when str-type flag_names entries are present and the `flag_values` + argument does not match that of provided FlagHolder(s). """ - v = MultiFlagsValidator( + flag_names, flag_values = _flagvalues.resolve_flag_refs( + flag_names, flag_values) + v = _validators_classes.MultiFlagsValidator( flag_names, multi_flags_checker, message) _add_validator(flag_values, v) @@ -292,17 +159,18 @@ def multi_flags_validator(flag_names, flag_values=_flagvalues.FLAGS): """A function decorator for defining a multi-flag validator. - Registers the decorated function as a validator for flag_names, e.g. + Registers the decorated function as a validator for flag_names, e.g.:: - @flags.multi_flags_validator(['foo', 'bar']) - def _CheckFooBar(flags_dict): - ... + @flags.multi_flags_validator(['foo', 'bar']) + def _CheckFooBar(flags_dict): + ... - See register_multi_flags_validator() for the specification of checker + See :func:`register_multi_flags_validator` for the specification of checker function. Args: - flag_names: [str], a list of the flag names to be checked. + flag_names: [str | FlagHolder], a list of the flag names or holders to be + checked. Positional-only parameter. message: str, error text to be shown to the user if checker returns False. If checker raises flags.ValidationError, message from the raised error will be shown. @@ -330,50 +198,53 @@ def mark_flag_as_required(flag_name, flag_values=_flagvalues.FLAGS): """Ensures that flag is not None during program execution. Registers a flag validator, which will follow usual validator rules. 
- Important note: validator will pass for any non-None value, such as False, - 0 (zero), '' (empty string) and so on. + Important note: validator will pass for any non-``None`` value, such as + ``False``, ``0`` (zero), ``''`` (empty string) and so on. - It is recommended to call this method like this: + If your module might be imported by others, and you only wish to make the flag + required when the module is directly executed, call this method like this:: - if __name__ == '__main__': - flags.mark_flag_as_required('your_flag_name') - app.run() - - Because validation happens at app.run() we want to ensure required-ness - is enforced at that time. However, you generally do not want to force - users who import your code to have additional required flags for their - own binaries or tests. + if __name__ == '__main__': + flags.mark_flag_as_required('your_flag_name') + app.run() Args: - flag_name: str, name of the flag - flag_values: flags.FlagValues, optional FlagValues instance where the flag - is defined. + flag_name: str | FlagHolder, name or holder of the flag. + Positional-only parameter. + flag_values: flags.FlagValues, optional :class:`~absl.flags.FlagValues` + instance where the flag is defined. Raises: AttributeError: Raised when flag_name is not registered as a valid flag name. + ValueError: Raised when flag_values is non-default and does not match the + FlagValues of the provided FlagHolder instance. """ + flag_name, flag_values = _flagvalues.resolve_flag_ref(flag_name, flag_values) if flag_values[flag_name].default is not None: warnings.warn( 'Flag --%s has a non-None default value; therefore, ' 'mark_flag_as_required will pass even if flag is not specified in the ' - 'command line!' % flag_name) - register_validator(flag_name, - lambda value: value is not None, - message='Flag --%s must be specified.' % flag_name, - flag_values=flag_values) + 'command line!' 
% flag_name, + stacklevel=2) + register_validator( + flag_name, + lambda value: value is not None, + message='Flag --{} must have a value other than None.'.format(flag_name), + flag_values=flag_values) def mark_flags_as_required(flag_names, flag_values=_flagvalues.FLAGS): """Ensures that flags are not None during program execution. - Recommended usage: + If your module might be imported by others, and you only wish to make the flag + required when the module is directly executed, call this method like this:: - if __name__ == '__main__': - flags.mark_flags_as_required(['flag1', 'flag2', 'flag3']) - app.run() + if __name__ == '__main__': + flags.mark_flags_as_required(['flag1', 'flag2', 'flag3']) + app.run() Args: - flag_names: Sequence[str], names of the flags. + flag_names: Sequence[str | FlagHolder], names or holders of the flags. flag_values: flags.FlagValues, optional FlagValues instance where the flags are defined. Raises: @@ -385,32 +256,89 @@ def mark_flags_as_required(flag_names, flag_values=_flagvalues.FLAGS): def mark_flags_as_mutual_exclusive(flag_names, required=False, flag_values=_flagvalues.FLAGS): - """Ensures that only one flag among flag_names is set. + """Ensures that only one flag among flag_names is not None. - Important note: validator will pass for any non-None value, such as False, - 0 (zero), '' (empty string) and so on. For multi flags, this means that the - default needs to be None not []. + Important note: This validator checks if flag values are ``None``, and it does + not distinguish between default and explicit values. Therefore, this validator + does not make sense when applied to flags with default values other than None, + including other false values (e.g. ``False``, ``0``, ``''``, ``[]``). That + includes multi flags with a default value of ``[]`` instead of None. Args: - flag_names: [str], names of the flags. - required: bool, if set, exactly one of the flags must be set. 
- Otherwise, it is also valid for none of the flags to be set. + flag_names: [str | FlagHolder], names or holders of flags. + Positional-only parameter. + required: bool. If true, exactly one of the flags must have a value other + than None. Otherwise, at most one of the flags can have a value other + than None, and it is valid for all of the flags to be None. flag_values: flags.FlagValues, optional FlagValues instance where the flags are defined. + + Raises: + ValueError: Raised when multiple FlagValues are used in the same + invocation. This can occur when FlagHolders have different `_flagvalues` + or when str-type flag_names entries are present and the `flag_values` + argument does not match that of provided FlagHolder(s). """ + flag_names, flag_values = _flagvalues.resolve_flag_refs( + flag_names, flag_values) + for flag_name in flag_names: + if flag_values[flag_name].default is not None: + warnings.warn( + 'Flag --{} has a non-None default value. That does not make sense ' + 'with mark_flags_as_mutual_exclusive, which checks whether the ' + 'listed flags have a value other than None.'.format(flag_name), + stacklevel=2) def validate_mutual_exclusion(flags_dict): flag_count = sum(1 for val in flags_dict.values() if val is not None) if flag_count == 1 or (not required and flag_count == 0): return True - message = ('%s one of (%s) must be specified.' % - ('Exactly' if required else 'At most', ', '.join(flag_names))) - raise _exceptions.ValidationError(message) + raise _exceptions.ValidationError( + '{} one of ({}) must have a value other than None.'.format( + 'Exactly' if required else 'At most', ', '.join(flag_names))) register_multi_flags_validator( flag_names, validate_mutual_exclusion, flag_values=flag_values) +def mark_bool_flags_as_mutual_exclusive(flag_names, required=False, + flag_values=_flagvalues.FLAGS): + """Ensures that only one flag among flag_names is True. + + Args: + flag_names: [str | FlagHolder], names or holders of flags. 
+ Positional-only parameter. + required: bool. If true, exactly one flag must be True. Otherwise, at most + one flag can be True, and it is valid for all flags to be False. + flag_values: flags.FlagValues, optional FlagValues instance where the flags + are defined. + + Raises: + ValueError: Raised when multiple FlagValues are used in the same + invocation. This can occur when FlagHolders have different `_flagvalues` + or when str-type flag_names entries are present and the `flag_values` + argument does not match that of provided FlagHolder(s). + """ + flag_names, flag_values = _flagvalues.resolve_flag_refs( + flag_names, flag_values) + for flag_name in flag_names: + if not flag_values[flag_name].boolean: + raise _exceptions.ValidationError( + 'Flag --{} is not Boolean, which is required for flags used in ' + 'mark_bool_flags_as_mutual_exclusive.'.format(flag_name)) + + def validate_boolean_mutual_exclusion(flags_dict): + flag_count = sum(bool(val) for val in flags_dict.values()) + if flag_count == 1 or (not required and flag_count == 0): + return True + raise _exceptions.ValidationError( + '{} one of ({}) must be True.'.format( + 'Exactly' if required else 'At most', ', '.join(flag_names))) + + register_multi_flags_validator( + flag_names, validate_boolean_mutual_exclusion, flag_values=flag_values) + + def _add_validator(fv, validator_instance): """Register new flags validator to be checked. diff --git a/third_party/py/abseil/absl/flags/_validators_classes.py b/third_party/py/abseil/absl/flags/_validators_classes.py new file mode 100644 index 00000000000000..59100c8e6dc952 --- /dev/null +++ b/third_party/py/abseil/absl/flags/_validators_classes.py @@ -0,0 +1,172 @@ +# Copyright 2021 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Defines *private* classes used for flag validators. + +Do NOT import this module. DO NOT use anything from this module. They are +private APIs. +""" + +from absl.flags import _exceptions + + +class Validator(object): + """Base class for flags validators. + + Users should NOT overload these classes, and use flags.Register... + methods instead. + """ + + # Used to assign each validator an unique insertion_index + validators_count = 0 + + def __init__(self, checker, message): + """Constructor to create all validators. + + Args: + checker: function to verify the constraint. + Input of this method varies, see SingleFlagValidator and + multi_flags_validator for a detailed description. + message: str, error message to be shown to the user. + """ + self.checker = checker + self.message = message + Validator.validators_count += 1 + # Used to assert validators in the order they were registered. + self.insertion_index = Validator.validators_count + + def verify(self, flag_values): + """Verifies that constraint is satisfied. + + flags library calls this method to verify Validator's constraint. + + Args: + flag_values: flags.FlagValues, the FlagValues instance to get flags from. + Raises: + Error: Raised if constraint is not satisfied. + """ + param = self._get_input_to_checker_function(flag_values) + if not self.checker(param): + raise _exceptions.ValidationError(self.message) + + def get_flags_names(self): + """Returns the names of the flags checked by this validator. + + Returns: + [string], names of the flags. 
+ """ + raise NotImplementedError('This method should be overloaded') + + def print_flags_with_values(self, flag_values): + raise NotImplementedError('This method should be overloaded') + + def _get_input_to_checker_function(self, flag_values): + """Given flag values, returns the input to be given to checker. + + Args: + flag_values: flags.FlagValues, containing all flags. + Returns: + The input to be given to checker. The return type depends on the specific + validator. + """ + raise NotImplementedError('This method should be overloaded') + + +class SingleFlagValidator(Validator): + """Validator behind register_validator() method. + + Validates that a single flag passes its checker function. The checker function + takes the flag value and returns True (if value looks fine) or, if flag value + is not valid, either returns False or raises an Exception. + """ + + def __init__(self, flag_name, checker, message): + """Constructor. + + Args: + flag_name: string, name of the flag. + checker: function to verify the validator. + input - value of the corresponding flag (string, boolean, etc). + output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either return False or + raise flags.ValidationError(desired_error_message). + message: str, error message to be shown to the user if validator's + condition is not satisfied. + """ + super(SingleFlagValidator, self).__init__(checker, message) + self.flag_name = flag_name + + def get_flags_names(self): + return [self.flag_name] + + def print_flags_with_values(self, flag_values): + return 'flag --%s=%s' % (self.flag_name, flag_values[self.flag_name].value) + + def _get_input_to_checker_function(self, flag_values): + """Given flag values, returns the input to be given to checker. + + Args: + flag_values: flags.FlagValues, the FlagValues instance to get flags from. + Returns: + object, the input to be given to checker. 
+ """ + return flag_values[self.flag_name].value + + +class MultiFlagsValidator(Validator): + """Validator behind register_multi_flags_validator method. + + Validates that flag values pass their common checker function. The checker + function takes flag values and returns True (if values look fine) or, + if values are not valid, either returns False or raises an Exception. + """ + + def __init__(self, flag_names, checker, message): + """Constructor. + + Args: + flag_names: [str], containing names of the flags used by checker. + checker: function to verify the validator. + input - dict, with keys() being flag_names, and value for each + key being the value of the corresponding flag (string, boolean, + etc). + output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either return False or + raise flags.ValidationError(desired_error_message). + message: str, error message to be shown to the user if validator's + condition is not satisfied + """ + super(MultiFlagsValidator, self).__init__(checker, message) + self.flag_names = flag_names + + def _get_input_to_checker_function(self, flag_values): + """Given flag values, returns the input to be given to checker. + + Args: + flag_values: flags.FlagValues, the FlagValues instance to get flags from. + Returns: + dict, with keys() being self.flag_names, and value for each key + being the value of the corresponding flag (string, boolean, etc). 
+ """ + return dict([key, flag_values[key].value] for key in self.flag_names) + + def print_flags_with_values(self, flag_values): + prefix = 'flags ' + flags_with_values = [] + for key in self.flag_names: + flags_with_values.append('%s=%s' % (key, flag_values[key].value)) + return prefix + ', '.join(flags_with_values) + + def get_flags_names(self): + return self.flag_names diff --git a/third_party/py/abseil/absl/flags/argparse_flags.py b/third_party/py/abseil/absl/flags/argparse_flags.py new file mode 100644 index 00000000000000..dd8b505f7ddd47 --- /dev/null +++ b/third_party/py/abseil/absl/flags/argparse_flags.py @@ -0,0 +1,388 @@ +# Copyright 2018 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module provides argparse integration with absl.flags. + +``argparse_flags.ArgumentParser`` is a drop-in replacement for +:class:`argparse.ArgumentParser`. It takes care of collecting and defining absl +flags in :mod:`argparse`. + +Here is a simple example:: + + # Assume the following absl.flags is defined in another module: + # + # from absl import flags + # flags.DEFINE_string('echo', None, 'The echo message.') + # + parser = argparse_flags.ArgumentParser( + description='A demo of absl.flags and argparse integration.') + parser.add_argument('--header', help='Header message to print.') + + # The parser will also accept the absl flag `--echo`. + # The `header` value is available as `args.header` just like a regular + # argparse flag. 
The absl flag `--echo` continues to be available via + # `absl.flags.FLAGS` if you want to access it. + args = parser.parse_args() + + # Example usages: + # ./program --echo='A message.' --header='A header' + # ./program --header 'A header' --echo 'A message.' + + +Here is another example demonstrates subparsers:: + + parser = argparse_flags.ArgumentParser(description='A subcommands demo.') + parser.add_argument('--header', help='The header message to print.') + + subparsers = parser.add_subparsers(help='The command to execute.') + + roll_dice_parser = subparsers.add_parser( + 'roll_dice', help='Roll a dice.', + # By default, absl flags can also be specified after the sub-command. + # To only allow them before sub-command, pass + # `inherited_absl_flags=None`. + inherited_absl_flags=None) + roll_dice_parser.add_argument('--num_faces', type=int, default=6) + roll_dice_parser.set_defaults(command=roll_dice) + + shuffle_parser = subparsers.add_parser('shuffle', help='Shuffle inputs.') + shuffle_parser.add_argument( + 'inputs', metavar='I', nargs='+', help='Inputs to shuffle.') + shuffle_parser.set_defaults(command=shuffle) + + args = parser.parse_args(argv[1:]) + args.command(args) + + # Example usages: + # ./program --echo='A message.' roll_dice --num_faces=6 + # ./program shuffle --echo='A message.' 1 2 3 4 + + +There are several differences between :mod:`absl.flags` and +:mod:`~absl.flags.argparse_flags`: + +1. Flags defined with absl.flags are parsed differently when using the + argparse parser. Notably: + + 1) absl.flags allows both single-dash and double-dash for any flag, and + doesn't distinguish them; argparse_flags only allows double-dash for + flag's regular name, and single-dash for flag's ``short_name``. + 2) Boolean flags in absl.flags can be specified with ``--bool``, + ``--nobool``, as well as ``--bool=true/false`` (though not recommended); + in argparse_flags, it only allows ``--bool``, ``--nobool``. + +2. 
Help related flag differences: + + 1) absl.flags does not define help flags, absl.app does that; argparse_flags + defines help flags unless passed with ``add_help=False``. + 2) absl.app supports ``--helpxml``; argparse_flags does not. + 3) argparse_flags supports ``-h``; absl.app does not. +""" + +import argparse +import sys + +from absl import flags + + +_BUILT_IN_FLAGS = frozenset({ + 'help', + 'helpshort', + 'helpfull', + 'helpxml', + 'flagfile', + 'undefok', +}) + + +class ArgumentParser(argparse.ArgumentParser): + """Custom ArgumentParser class to support special absl flags.""" + + def __init__(self, **kwargs): + """Initializes ArgumentParser. + + Args: + **kwargs: same as argparse.ArgumentParser, except: + 1. It also accepts `inherited_absl_flags`: the absl flags to inherit. + The default is the global absl.flags.FLAGS instance. Pass None to + ignore absl flags. + 2. The `prefix_chars` argument must be the default value '-'. + + Raises: + ValueError: Raised when prefix_chars is not '-'. + """ + prefix_chars = kwargs.get('prefix_chars', '-') + if prefix_chars != '-': + raise ValueError( + 'argparse_flags.ArgumentParser only supports "-" as the prefix ' + 'character, found "{}".'.format(prefix_chars)) + + # Remove inherited_absl_flags before calling super. + self._inherited_absl_flags = kwargs.pop('inherited_absl_flags', flags.FLAGS) + # Now call super to initialize argparse.ArgumentParser before calling + # add_argument in _define_absl_flags. + super(ArgumentParser, self).__init__(**kwargs) + + if self.add_help: + # -h and --help are defined in super. + # Also add the --helpshort and --helpfull flags. + self.add_argument( + # Action 'help' defines a similar flag to -h/--help. 
+ '--helpshort', action='help', + default=argparse.SUPPRESS, help=argparse.SUPPRESS) + self.add_argument( + '--helpfull', action=_HelpFullAction, + default=argparse.SUPPRESS, help='show full help message and exit') + + if self._inherited_absl_flags: + self.add_argument( + '--undefok', default=argparse.SUPPRESS, help=argparse.SUPPRESS) + self._define_absl_flags(self._inherited_absl_flags) + + def parse_known_args(self, args=None, namespace=None): + if args is None: + args = sys.argv[1:] + if self._inherited_absl_flags: + # Handle --flagfile. + # Explicitly specify force_gnu=True, since argparse behaves like + # gnu_getopt: flags can be specified after positional arguments. + args = self._inherited_absl_flags.read_flags_from_files( + args, force_gnu=True) + + undefok_missing = object() + undefok = getattr(namespace, 'undefok', undefok_missing) + + namespace, args = super(ArgumentParser, self).parse_known_args( + args, namespace) + + # For Python <= 2.7.8: https://bugs.python.org/issue9351, a bug where + # sub-parsers don't preserve existing namespace attributes. + # Restore the undefok attribute if a sub-parser dropped it. + if undefok is not undefok_missing: + namespace.undefok = undefok + + if self._inherited_absl_flags: + # Handle --undefok. At this point, `args` only contains unknown flags, + # so it won't strip defined flags that are also specified with --undefok. + # For Python <= 2.7.8: https://bugs.python.org/issue9351, a bug where + # sub-parsers don't preserve existing namespace attributes. The undefok + # attribute might not exist because a subparser dropped it. + if hasattr(namespace, 'undefok'): + args = _strip_undefok_args(namespace.undefok, args) + # absl flags are not exposed in the Namespace object. See Namespace: + # https://docs.python.org/3/library/argparse.html#argparse.Namespace. 
+ del namespace.undefok + self._inherited_absl_flags.mark_as_parsed() + try: + self._inherited_absl_flags.validate_all_flags() + except flags.IllegalFlagValueError as e: + self.error(str(e)) + + return namespace, args + + def _define_absl_flags(self, absl_flags): + """Defines flags from absl_flags.""" + key_flags = set(absl_flags.get_key_flags_for_module(sys.argv[0])) + for name in absl_flags: + if name in _BUILT_IN_FLAGS: + # Do not inherit built-in flags. + continue + flag_instance = absl_flags[name] + # Each flags with short_name appears in FLAGS twice, so only define + # when the dictionary key is equal to the regular name. + if name == flag_instance.name: + # Suppress the flag in the help short message if it's not a main + # module's key flag. + suppress = flag_instance not in key_flags + self._define_absl_flag(flag_instance, suppress) + + def _define_absl_flag(self, flag_instance, suppress): + """Defines a flag from the flag_instance.""" + flag_name = flag_instance.name + short_name = flag_instance.short_name + argument_names = ['--' + flag_name] + if short_name: + argument_names.insert(0, '-' + short_name) + if suppress: + helptext = argparse.SUPPRESS + else: + # argparse help string uses %-formatting. Escape the literal %'s. + helptext = flag_instance.help.replace('%', '%%') + if flag_instance.boolean: + # Only add the `no` form to the long name. + argument_names.append('--no' + flag_name) + self.add_argument( + *argument_names, action=_BooleanFlagAction, help=helptext, + metavar=flag_instance.name.upper(), + flag_instance=flag_instance) + else: + self.add_argument( + *argument_names, action=_FlagAction, help=helptext, + metavar=flag_instance.name.upper(), + flag_instance=flag_instance) + + +class _FlagAction(argparse.Action): + """Action class for Abseil non-boolean flags.""" + + def __init__( + self, + option_strings, + dest, + help, # pylint: disable=redefined-builtin + metavar, + flag_instance, + default=argparse.SUPPRESS): + """Initializes _FlagAction. 
+ + Args: + option_strings: See argparse.Action. + dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS. + help: See argparse.Action. + metavar: See argparse.Action. + flag_instance: absl.flags.Flag, the absl flag instance. + default: Ignored. The flag always uses dest=argparse.SUPPRESS so it + doesn't affect the parsing result. + """ + del dest + self._flag_instance = flag_instance + super(_FlagAction, self).__init__( + option_strings=option_strings, + dest=argparse.SUPPRESS, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + """See https://docs.python.org/3/library/argparse.html#action-classes.""" + self._flag_instance.parse(values) + self._flag_instance.using_default_value = False + + +class _BooleanFlagAction(argparse.Action): + """Action class for Abseil boolean flags.""" + + def __init__( + self, + option_strings, + dest, + help, # pylint: disable=redefined-builtin + metavar, + flag_instance, + default=argparse.SUPPRESS): + """Initializes _BooleanFlagAction. + + Args: + option_strings: See argparse.Action. + dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS. + help: See argparse.Action. + metavar: See argparse.Action. + flag_instance: absl.flags.Flag, the absl flag instance. + default: Ignored. The flag always uses dest=argparse.SUPPRESS so it + doesn't affect the parsing result. + """ + del dest, default + self._flag_instance = flag_instance + flag_names = [self._flag_instance.name] + if self._flag_instance.short_name: + flag_names.append(self._flag_instance.short_name) + self._flag_names = frozenset(flag_names) + super(_BooleanFlagAction, self).__init__( + option_strings=option_strings, + dest=argparse.SUPPRESS, + nargs=0, # Does not accept values, only `--bool` or `--nobool`. 
+ help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + """See https://docs.python.org/3/library/argparse.html#action-classes.""" + if not isinstance(values, list) or values: + raise ValueError('values must be an empty list.') + if option_string.startswith('--'): + option = option_string[2:] + else: + option = option_string[1:] + if option in self._flag_names: + self._flag_instance.parse('true') + else: + if not option.startswith('no') or option[2:] not in self._flag_names: + raise ValueError('invalid option_string: ' + option_string) + self._flag_instance.parse('false') + self._flag_instance.using_default_value = False + + +class _HelpFullAction(argparse.Action): + """Action class for --helpfull flag.""" + + def __init__(self, option_strings, dest, default, help): # pylint: disable=redefined-builtin + """Initializes _HelpFullAction. + + Args: + option_strings: See argparse.Action. + dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS. + default: Ignored. + help: See argparse.Action. + """ + del dest, default + super(_HelpFullAction, self).__init__( + option_strings=option_strings, + dest=argparse.SUPPRESS, + default=argparse.SUPPRESS, + nargs=0, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + """See https://docs.python.org/3/library/argparse.html#action-classes.""" + # This only prints flags when help is not argparse.SUPPRESS. + # It includes user defined argparse flags, as well as main module's + # key absl flags. Other absl flags use argparse.SUPPRESS, so they aren't + # printed here. + parser.print_help() + + absl_flags = parser._inherited_absl_flags # pylint: disable=protected-access + if absl_flags: + modules = sorted(absl_flags.flags_by_module_dict()) + main_module = sys.argv[0] + if main_module in modules: + # The main module flags are already printed in parser.print_help(). 
+ modules.remove(main_module) + print(absl_flags._get_help_for_modules( # pylint: disable=protected-access + modules, prefix='', include_special_flags=True)) + parser.exit() + + +def _strip_undefok_args(undefok, args): + """Returns a new list of args after removing flags in --undefok.""" + if undefok: + undefok_names = set(name.strip() for name in undefok.split(',')) + undefok_names |= set('no' + name for name in undefok_names) + # Remove undefok flags. + args = [arg for arg in args if not _is_undefok(arg, undefok_names)] + return args + + +def _is_undefok(arg, undefok_names): + """Returns whether we can ignore arg based on a set of undefok flag names.""" + if not arg.startswith('-'): + return False + if arg.startswith('--'): + arg_without_dash = arg[2:] + else: + arg_without_dash = arg[1:] + if '=' in arg_without_dash: + name, _ = arg_without_dash.split('=', 1) + else: + name = arg_without_dash + if name in undefok_names: + return True + return False diff --git a/third_party/py/abseil/absl/logging/__init__.py b/third_party/py/abseil/absl/logging/__init__.py index 551cf43aa3b7c5..c0ba4b0fa245fe 100644 --- a/third_party/py/abseil/absl/logging/__init__.py +++ b/third_party/py/abseil/absl/logging/__init__.py @@ -14,7 +14,7 @@ """Abseil Python logging module implemented on top of standard logging. -Simple usage: +Simple usage:: from absl import logging @@ -26,7 +26,7 @@ logging.set_verbosity(logging.DEBUG) logging.log(logging.DEBUG, 'This will be printed') - logging.warn('Worrying Stuff') + logging.warning('Worrying Stuff') logging.error('Alarming Stuff') logging.fatal('AAAAHHHHH!!!!') # Process exits. @@ -34,23 +34,34 @@ Instead, let the logging module perform argument interpolation. This saves cycles because strings that don't need to be printed are never formatted. Note that this module does not attempt to -interpolate arguments when no arguments are given. In other words +interpolate arguments when no arguments are given. 
In other words:: logging.info('Interesting Stuff: %s') does not raise an exception because logging.info() has only one argument, the message string. -"Lazy" evaluation for debugging: +"Lazy" evaluation for debugging +------------------------------- + +If you do something like this:: -If you do something like this: logging.debug('Thing: %s', thing.ExpensiveOp()) + then the ExpensiveOp will be evaluated even if nothing -is printed to the log. To avoid this, use the level_debug() function: +is printed to the log. To avoid this, use the level_debug() function:: + if logging.level_debug(): logging.debug('Thing: %s', thing.ExpensiveOp()) -Notes on Unicode: +Per file level logging is supported by logging.vlog() and +logging.vlog_is_on(). For example:: + + if logging.vlog_is_on(2): + logging.vlog(2, very_expensive_debug_message()) + +Notes on Unicode +---------------- The log output is encoded as UTF-8. Don't pass data in other encodings in bytes() instances -- instead pass unicode string instances when you need to @@ -65,10 +76,8 @@ The differences in behavior are historical and unfortunate. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - +import collections +from collections import abc import getpass import io import itertools @@ -77,17 +86,20 @@ import socket import struct import sys +import threading import time +import timeit import traceback +import types +import warnings from absl import flags from absl.logging import converter -import six -if six.PY2: - import thread as _thread_lib # For .get_ident(). -else: - import threading as _thread_lib # For .get_ident(). +try: + from typing import NoReturn +except ImportError: + pass FLAGS = flags.FLAGS @@ -96,8 +108,8 @@ # Logging levels. FATAL = converter.ABSL_FATAL ERROR = converter.ABSL_ERROR -WARN = converter.ABSL_WARN -WARNING = converter.ABSL_WARN +WARNING = converter.ABSL_WARNING +WARN = converter.ABSL_WARNING # Deprecated name. 
INFO = converter.ABSL_INFO DEBUG = converter.ABSL_DEBUG @@ -130,7 +142,6 @@ _absl_handler = None - _CPP_NAME_TO_LEVELS = { 'debug': '0', # Abseil C++ has no DEBUG level, mapping it to INFO here. 'info': '0', @@ -142,7 +153,7 @@ _CPP_LEVEL_TO_NAMES = { '0': 'info', - '1': 'warn', + '1': 'warning', '2': 'error', '3': 'fatal', } @@ -167,7 +178,10 @@ def value(self, v): self._update_logging_levels() def _update_logging_levels(self): - """Updates absl logging levels to the current verbosity.""" + """Updates absl logging levels to the current verbosity. + + Visibility: module-private + """ if not _absl_logger: return @@ -179,7 +193,70 @@ def _update_logging_levels(self): # Also update root level when absl_handler is used. if _absl_handler in logging.root.handlers: + # Make absl logger inherit from the root logger. absl logger might have + # a non-NOTSET value if logging.set_verbosity() is called at import time. + _absl_logger.setLevel(logging.NOTSET) logging.root.setLevel(standard_verbosity) + else: + _absl_logger.setLevel(standard_verbosity) + + +class _LoggerLevelsFlag(flags.Flag): + """Flag class for --logger_levels.""" + + def __init__(self, *args, **kwargs): + super(_LoggerLevelsFlag, self).__init__( + _LoggerLevelsParser(), + _LoggerLevelsSerializer(), + *args, **kwargs) + + @property + def value(self): + # For lack of an immutable type, be defensive and return a copy. + # Modifications to the dict aren't supported and won't have any affect. + # While Py3 could use MappingProxyType, that isn't deepcopy friendly, so + # just return a copy. + return self._value.copy() + + @value.setter + def value(self, v): + self._value = {} if v is None else v + self._update_logger_levels() + + def _update_logger_levels(self): + # Visibility: module-private. + # This is called by absl.app.run() during initialization. 
+ for name, level in self._value.items(): + logging.getLogger(name).setLevel(level) + + +class _LoggerLevelsParser(flags.ArgumentParser): + """Parser for --logger_levels flag.""" + + def parse(self, value): + if isinstance(value, abc.Mapping): + return value + + pairs = [pair.strip() for pair in value.split(',') if pair.strip()] + + # Preserve the order so that serialization is deterministic. + levels = collections.OrderedDict() + for name_level in pairs: + name, level = name_level.split(':', 1) + name = name.strip() + level = level.strip() + levels[name] = level + return levels + + +class _LoggerLevelsSerializer(object): + """Serializer for --logger_levels flag.""" + + def serialize(self, value): + if isinstance(value, str): + return value + return ','.join( + '{}:{}'.format(name, level) for name, level in value.items()) class _StderrthresholdFlag(flags.Flag): @@ -198,24 +275,24 @@ def value(self): @value.setter def value(self, v): if v in _CPP_LEVEL_TO_NAMES: - # --stderrthreshold also accepts numberic strings whose values are + # --stderrthreshold also accepts numeric strings whose values are # Abseil C++ log levels. cpp_value = int(v) v = _CPP_LEVEL_TO_NAMES[v] # Normalize to strings. elif v.lower() in _CPP_NAME_TO_LEVELS: v = v.lower() + if v == 'warn': + v = 'warning' # Use 'warning' as the canonical name. 
cpp_value = int(_CPP_NAME_TO_LEVELS[v]) else: raise ValueError( '--stderrthreshold must be one of (case-insensitive) ' - "'debug', 'info', 'warn', 'warning', 'error', 'fatal', " + "'debug', 'info', 'warning', 'error', 'fatal', " "or '0', '1', '2', '3', not '%s'" % v) self._value = v - - flags.DEFINE_boolean('logtostderr', False, 'Should only log to stderr?', allow_override_cpp=True) @@ -233,15 +310,22 @@ def value(self, v): 'supplied, the value will be changed from the default of -1 (warning) to ' '0 (info) after flags are parsed.', short_name='v', allow_hide_cpp=True)) +flags.DEFINE_flag( + _LoggerLevelsFlag( + 'logger_levels', {}, + 'Specify log level of loggers. The format is a CSV list of ' + '`name:level`. Where `name` is the logger name used with ' + '`logging.getLogger()`, and `level` is a level name (INFO, DEBUG, ' + 'etc). e.g. `myapp.foo:INFO,other.logger:DEBUG`')) flags.DEFINE_flag(_StderrthresholdFlag( 'stderrthreshold', 'fatal', - 'log messages at this level, or more severe, to stderr in' - ' addition to the logfile. Possible values are ' - "'debug', 'info', 'warn', 'error', and 'fatal'. " + 'log messages at this level, or more severe, to stderr in ' + 'addition to the logfile. Possible values are ' + "'debug', 'info', 'warning', 'error', and 'fatal'. " 'Obsoletes --alsologtostderr. Using --alsologtostderr ' 'cancels the effect of this flag. Please also note that ' - 'this flag is subject to --verbosity and requires logfile' - ' not be stderr.', allow_hide_cpp=True)) + 'this flag is subject to --verbosity and requires logfile ' + 'not be stderr.', allow_hide_cpp=True)) flags.DEFINE_boolean('showprefixforinfo', True, 'If False, do not prepend prefix to info messages ' 'when it\'s logged to stderr, ' @@ -263,7 +347,7 @@ def set_verbosity(v): Args: v: int|str, the verbosity level as an integer or string. Legal string values are those that can be coerced to an integer as well as case-insensitive - 'debug', 'info', 'warn', 'error', and 'fatal'. 
+ 'debug', 'info', 'warning', 'error', and 'fatal'. """ try: new_level = int(v) @@ -277,8 +361,8 @@ def set_stderrthreshold(s): Args: s: str|int, valid strings values are case-insensitive 'debug', - 'info', 'warn', 'error', and 'fatal'; valid integer values are - logging.DEBUG|INFO|WARN|ERROR|FATAL. + 'info', 'warning', 'error', and 'fatal'; valid integer values are + logging.DEBUG|INFO|WARNING|ERROR|FATAL. Raises: ValueError: Raised when s is an invalid value. @@ -291,11 +375,12 @@ def set_stderrthreshold(s): raise ValueError( 'set_stderrthreshold only accepts integer absl logging level ' 'from -3 to 1, or case-insensitive string values ' - "'debug', 'info', 'warn', 'error', and 'fatal'. " + "'debug', 'info', 'warning', 'error', and 'fatal'. " 'But found "{}" ({}).'.format(s, type(s))) def fatal(msg, *args, **kwargs): + # type: (Any, Any, Any) -> NoReturn """Logs a fatal message.""" log(FATAL, msg, *args, **kwargs) @@ -307,10 +392,14 @@ def error(msg, *args, **kwargs): def warning(msg, *args, **kwargs): """Logs a warning message.""" - log(WARN, msg, *args, **kwargs) + log(WARNING, msg, *args, **kwargs) -warn = warning +def warn(msg, *args, **kwargs): + """Deprecated, use 'warning' instead.""" + warnings.warn("The 'warn' function is deprecated, use 'warning' instead", + DeprecationWarning, 2) + log(WARNING, msg, *args, **kwargs) def info(msg, *args, **kwargs): @@ -323,9 +412,9 @@ def debug(msg, *args, **kwargs): log(DEBUG, msg, *args, **kwargs) -def exception(msg, *args): +def exception(msg, *args, **kwargs): """Logs an exception, with traceback and message.""" - error(msg, *args, exc_info=True) + error(msg, *args, **kwargs, exc_info=True) # Counter to keep track of number of log entries per token. @@ -348,7 +437,7 @@ def _get_next_log_count_per_token(token): def log_every_n(level, msg, n, *args): - """Logs 'msg % args' at level 'level' once per 'n' times. + """Logs ``msg % args`` at level 'level' once per 'n' times. 
Logs the 1st call, (N+1)st call, (2N+1)st call, etc. Not threadsafe. @@ -357,14 +446,61 @@ def log_every_n(level, msg, n, *args): level: int, the absl logging level at which to log. msg: str, the message to be logged. n: int, the number of times this should be called before it is logged. - *args: The args to be substitued into the msg. + *args: The args to be substituted into the msg. """ count = _get_next_log_count_per_token(get_absl_logger().findCaller()) log_if(level, msg, not (count % n), *args) +# Keeps track of the last log time of the given token. +# Note: must be a dict since set/get is atomic in CPython. +# Note: entries are never released as their number is expected to be low. +_log_timer_per_token = {} + + +def _seconds_have_elapsed(token, num_seconds): + """Tests if 'num_seconds' have passed since 'token' was requested. + + Not strictly thread-safe - may log with the wrong frequency if called + concurrently from multiple threads. Accuracy depends on resolution of + 'timeit.default_timer()'. + + Always returns True on the first call for a given 'token'. + + Args: + token: The token for which to look up the count. + num_seconds: The number of seconds to test for. + + Returns: + Whether it has been >= 'num_seconds' since 'token' was last requested. + """ + now = timeit.default_timer() + then = _log_timer_per_token.get(token, None) + if then is None or (now - then) >= num_seconds: + _log_timer_per_token[token] = now + return True + else: + return False + + +def log_every_n_seconds(level, msg, n_seconds, *args): + """Logs ``msg % args`` at level ``level`` iff ``n_seconds`` elapsed since last call. + + Logs the first call, logs subsequent calls if 'n' seconds have elapsed since + the last logging call from the same call site (file + line). Not thread-safe. + + Args: + level: int, the absl logging level at which to log. + msg: str, the message to be logged. + n_seconds: float or int, seconds which should elapse before logging again. 
+ *args: The args to be substituted into the msg. + """ + should_log = _seconds_have_elapsed(get_absl_logger().findCaller(), n_seconds) + log_if(level, msg, should_log, *args) + + def log_first_n(level, msg, n, *args): - """Logs 'msg % args' at level 'level' only first 'n' times. + """Logs ``msg % args`` at level ``level`` only first ``n`` times. Not threadsafe. @@ -372,31 +508,31 @@ def log_first_n(level, msg, n, *args): level: int, the absl logging level at which to log. msg: str, the message to be logged. n: int, the maximal number of times the message is logged. - *args: The args to be substitued into the msg. + *args: The args to be substituted into the msg. """ count = _get_next_log_count_per_token(get_absl_logger().findCaller()) log_if(level, msg, count < n, *args) def log_if(level, msg, condition, *args): - """Logs 'msg % args' at level 'level' only if condition is fulfilled.""" + """Logs ``msg % args`` at level ``level`` only if condition is fulfilled.""" if condition: log(level, msg, *args) def log(level, msg, *args, **kwargs): - """Logs 'msg % args' at absl logging level 'level'. + """Logs ``msg % args`` at absl logging level ``level``. If no args are given just print msg, ignoring any interpolation specifiers. Args: level: int, the absl logging level at which to log the message - (logging.DEBUG|INFO|WARN|ERROR|FATAL). While some C++ verbose logging + (logging.DEBUG|INFO|WARNING|ERROR|FATAL). While some C++ verbose logging level constants are also supported, callers should prefer explicit logging.vlog() calls for such purpose. msg: str, the message to be logged. - *args: The args to be substitued into the msg. + *args: The args to be substituted into the msg. **kwargs: May contain exc_info to add exception traceback to message. """ if level > converter.ABSL_DEBUG: @@ -409,23 +545,54 @@ def log(level, msg, *args, **kwargs): level = converter.ABSL_FATAL standard_level = converter.absl_to_standard(level) + # Match standard logging's behavior. 
Before use_absl_handler() and + # logging is configured, there is no handler attached on _absl_logger nor + # logging.root. So logs go no where. + if not logging.root.handlers: + logging.basicConfig() + _absl_logger.log(standard_level, msg, *args, **kwargs) def vlog(level, msg, *args, **kwargs): - """Log 'msg % args' at C++ vlog level 'level'. + """Log ``msg % args`` at C++ vlog level ``level``. Args: level: int, the C++ verbose logging level at which to log the message, e.g. 1, 2, 3, 4... While absl level constants are also supported, callers should prefer logging.log|debug|info|... calls for such purpose. msg: str, the message to be logged. - *args: The args to be substitued into the msg. + *args: The args to be substituted into the msg. **kwargs: May contain exc_info to add exception traceback to message. """ log(level, msg, *args, **kwargs) +def vlog_is_on(level): + """Checks if vlog is enabled for the given level in caller's source file. + + Args: + level: int, the C++ verbose logging level at which to log the message, + e.g. 1, 2, 3, 4... While absl level constants are also supported, + callers should prefer level_debug|level_info|... calls for + checking those. + + Returns: + True if logging is turned on for that level. + """ + + if level > converter.ABSL_DEBUG: + # Even though this function supports level that is greater than 1, users + # should use logging.vlog instead for such cases. + # Treat this as vlog, 1 is equivalent to DEBUG. 
+ standard_level = converter.STANDARD_DEBUG - (level - 1) + else: + if level < converter.ABSL_FATAL: + level = converter.ABSL_FATAL + standard_level = converter.absl_to_standard(level) + return _absl_logger.isEnabledFor(standard_level) + + def flush(): """Flushes all log files.""" get_absl_handler().flush() @@ -441,9 +608,12 @@ def level_info(): return get_verbosity() >= INFO -def level_warn(): +def level_warning(): """Returns True if warning logging is turned on.""" - return get_verbosity() >= WARN + return get_verbosity() >= WARNING + + +level_warn = level_warning # Deprecated function. def level_error(): @@ -451,23 +621,50 @@ def level_error(): return get_verbosity() >= ERROR +def get_log_file_name(level=INFO): + """Returns the name of the log file. + + For Python logging, only one file is used and level is ignored. And it returns + empty string if it logs to stderr/stdout or the log stream has no `name` + attribute. + + Args: + level: int, the absl.logging level. + + Raises: + ValueError: Raised when `level` has an invalid value. + """ + if level not in converter.ABSL_LEVELS: + raise ValueError('Invalid absl.logging level {}'.format(level)) + stream = get_absl_handler().python_handler.stream + if (stream == sys.stderr or stream == sys.stdout or + not hasattr(stream, 'name')): + return '' + else: + return stream.name + + def find_log_dir_and_names(program_name=None, log_dir=None): """Computes the directory and filename prefix for log file. Args: program_name: str|None, the filename part of the path to the program that is running without its extension. e.g: if your program is called - 'usr/bin/foobar.py' this method should probably be called with - program_name='foobar' However, this is just a convention, you can + ``usr/bin/foobar.py`` this method should probably be called with + ``program_name='foobar`` However, this is just a convention, you can pass in any string you want, and it will be used as part of the log filename. 
If you don't pass in anything, the default behavior is as described in the example. In python standard logging mode, - the program_name will be prepended with py_ if it is the program_name - argument is omitted. + the program_name will be prepended with ``py_`` if it is the + ``program_name`` argument is omitted. log_dir: str|None, the desired log directory. Returns: (log_dir, file_prefix, symlink_prefix) + + Raises: + FileNotFoundError: raised in Python 3 when it cannot find a log directory. + OSError: raised in Python 2 when it cannot find a log directory. """ if not program_name: # Strip the extension (foobar.par becomes foobar, and @@ -481,7 +678,15 @@ def find_log_dir_and_names(program_name=None, log_dir=None): actual_log_dir = find_log_dir(log_dir=log_dir) - username = getpass.getuser() + try: + username = getpass.getuser() + except KeyError: + # This can happen, e.g. when running under docker w/o passwd file. + if hasattr(os, 'getuid'): + # Windows doesn't have os.getuid + username = str(os.getuid()) + else: + username = 'unknown' hostname = socket.gethostname() file_prefix = '%s.%s.%s.log' % (program_name, hostname, username) @@ -496,6 +701,10 @@ def find_log_dir(log_dir=None): directory. Otherwise if the --log_dir command-line flag is provided, the logfile will be created in that directory. Otherwise the logfile will be created in a standard location. + + Raises: + FileNotFoundError: raised in Python 3 when it cannot find a log directory. + OSError: raised in Python 2 when it cannot find a log directory. """ # Get a list of possible log dirs (will try to use them in order). 
if log_dir: @@ -512,7 +721,8 @@ def find_log_dir(log_dir=None): for d in dirs: if os.path.isdir(d) and os.access(d, os.W_OK): return d - _absl_logger.fatal("Can't find a writable directory for logs, tried %s", dirs) + raise FileNotFoundError( + "Can't find a writable directory for logs, tried %s" % dirs) def get_absl_log_prefix(record): @@ -547,6 +757,45 @@ def get_absl_log_prefix(record): critical_prefix) +def skip_log_prefix(func): + """Skips reporting the prefix of a given function or name by :class:`~absl.logging.ABSLLogger`. + + This is a convenience wrapper function / decorator for + :meth:`~absl.logging.ABSLLogger.register_frame_to_skip`. + + If a callable function is provided, only that function will be skipped. + If a function name is provided, all functions with the same name in the + file that this is called in will be skipped. + + This can be used as a decorator of the intended function to be skipped. + + Args: + func: Callable function or its name as a string. + + Returns: + func (the input, unchanged). + + Raises: + ValueError: The input is callable but does not have a function code object. + TypeError: The input is neither callable nor a string. 
+ """ + if callable(func): + func_code = getattr(func, '__code__', None) + if func_code is None: + raise ValueError('Input callable does not have a function code object.') + file_name = func_code.co_filename + func_name = func_code.co_name + func_lineno = func_code.co_firstlineno + elif isinstance(func, str): + file_name = get_absl_logger().findCaller()[0] + func_name = func + func_lineno = None + else: + raise TypeError('Input is neither callable nor a string.') + ABSLLogger.register_frame_to_skip(file_name, func_name, func_lineno) + return func + + def _is_non_absl_fatal_record(log_record): return (log_record.levelno >= logging.FATAL and not log_record.__dict__.get(_ABSL_LOG_FATAL, False)) @@ -570,7 +819,7 @@ def __init__(self, stream=None, formatter=None): def start_logging_to_file(self, program_name=None, log_dir=None): """Starts logging messages to files instead of standard error.""" - FLAGS.logtostderr = 0 + FLAGS.logtostderr = False actual_log_dir, file_prefix, symlink_prefix = find_log_dir_and_names( program_name=program_name, log_dir=log_dir) @@ -581,10 +830,7 @@ def start_logging_to_file(self, program_name=None, log_dir=None): os.getpid()) filename = os.path.join(actual_log_dir, basename) - if six.PY2: - self.stream = open(filename, 'a') - else: - self.stream = open(filename, 'a', encoding='utf-8') + self.stream = open(filename, 'a', encoding='utf-8') # os.symlink is not available on Windows Python 2. if getattr(os, 'symlink', None): @@ -639,13 +885,13 @@ def _log_to_stderr(self, record): def emit(self, record): """Prints a record out to some streams. - If FLAGS.logtostderr is set, it will print to sys.stderr ONLY. - If FLAGS.alsologtostderr is set, it will print to sys.stderr. - If FLAGS.logtostderr is not set, it will log to the stream - associated with the current thread. + 1. If ``FLAGS.logtostderr`` is set, it will print to ``sys.stderr`` ONLY. + 2. If ``FLAGS.alsologtostderr`` is set, it will print to ``sys.stderr``. + 3. 
If ``FLAGS.logtostderr`` is not set, it will log to the stream + associated with the current thread. Args: - record: logging.LogRecord, the record to emit. + record: :class:`logging.LogRecord`, the record to emit. """ # People occasionally call logging functions at import time before # our flags may have even been defined yet, let alone even parsed, as we @@ -687,7 +933,8 @@ def close(self): # Do not close the stream if it's sys.stderr|stdout. They may be # redirected or overridden to files, which should be managed by users # explicitly. - if self.stream not in (sys.stderr, sys.stdout) and ( + user_managed = sys.stderr, sys.stdout, sys.__stderr__, sys.__stdout__ + if self.stream not in user_managed and ( not hasattr(self.stream, 'isatty') or not self.stream.isatty()): self.stream.close() except ValueError: @@ -723,6 +970,12 @@ def close(self): super(ABSLHandler, self).close() self._current_handler.close() + def handle(self, record): + rv = self.filter(record) + if rv: + return self._current_handler.handle(record) + return rv + @property def python_handler(self): return self._python_handler @@ -739,7 +992,7 @@ def start_logging_to_file(self, program_name=None, log_dir=None): class PythonFormatter(logging.Formatter): - """Formatter class used by PythonHandler.""" + """Formatter class used by :class:`~absl.logging.PythonHandler`.""" def format(self, record): """Appends the message from the record to the results of the prefix. @@ -764,7 +1017,7 @@ class ABSLLogger(logging.getLoggerClass()): """A logger that will create LogRecords while skipping some stack frames. This class maintains an internal list of filenames and method names - for use when determining who called the currently execuing stack + for use when determining who called the currently executing stack frame. Any method names from specific source files are skipped when walking backwards through the stack. 
@@ -774,68 +1027,76 @@ class ABSLLogger(logging.getLoggerClass()): """ _frames_to_skip = set() - def findCaller(self, stack_info=False): + def findCaller(self, stack_info=False, stacklevel=1): """Finds the frame of the calling method on the stack. This method skips any frames registered with the ABSLLogger and any methods from this file, and whatever method is currently being used to generate the prefix for the log - line. Then it returns the file name, line nubmer, and method name - of the calling method. + line. Then it returns the file name, line number, and method name + of the calling method. An optional fourth item may be returned, + callers who only need things from the first three are advised to + always slice or index the result rather than using direct unpacking + assignment. Args: - stack_info: bool, when using Python 3 and True, include the stack trace as - the fourth item returned instead of None. + stack_info: bool, when True, include the stack trace as a fourth item + returned. On Python 3 there are always four items returned - the + fourth will be None when this is False. On Python 2 the stdlib + base class API only returns three items. We do the same when this + new parameter is unspecified or False for compatibility. Returns: (filename, lineno, methodname[, sinfo]) of the calling method. """ f_to_skip = ABSLLogger._frames_to_skip - frame = logging.currentframe() + # Use sys._getframe(2) instead of logging.currentframe(), it's slightly + # faster because there is one less frame to traverse. 
+ frame = sys._getframe(2) # pylint: disable=protected-access while frame: code = frame.f_code if (_LOGGING_FILE_PREFIX not in code.co_filename and + (code.co_filename, code.co_name, + code.co_firstlineno) not in f_to_skip and (code.co_filename, code.co_name) not in f_to_skip): - if six.PY2: - return (code.co_filename, frame.f_lineno, code.co_name) - else: - sinfo = None - if stack_info: - out = io.StringIO() - out.write('Stack (most recent call last):\n') - traceback.print_stack(frame, file=out) - sinfo = out.getvalue().rstrip('\n') - out.close() - return (code.co_filename, frame.f_lineno, code.co_name, sinfo) + sinfo = None + if stack_info: + out = io.StringIO() + out.write(u'Stack (most recent call last):\n') + traceback.print_stack(frame, file=out) + sinfo = out.getvalue().rstrip(u'\n') + return (code.co_filename, frame.f_lineno, code.co_name, sinfo) frame = frame.f_back def critical(self, msg, *args, **kwargs): - """Logs 'msg % args' with severity 'CRITICAL'.""" + """Logs ``msg % args`` with severity ``CRITICAL``.""" self.log(logging.CRITICAL, msg, *args, **kwargs) def fatal(self, msg, *args, **kwargs): - """Logs 'msg % args' with severity 'FATAL'.""" + """Logs ``msg % args`` with severity ``FATAL``.""" self.log(logging.FATAL, msg, *args, **kwargs) def error(self, msg, *args, **kwargs): - """Logs 'msg % args' with severity 'ERROR'.""" + """Logs ``msg % args`` with severity ``ERROR``.""" self.log(logging.ERROR, msg, *args, **kwargs) def warn(self, msg, *args, **kwargs): - """Logs 'msg % args' with severity 'WARN'.""" + """Logs ``msg % args`` with severity ``WARN``.""" + warnings.warn("The 'warn' method is deprecated, use 'warning' instead", + DeprecationWarning, 2) self.log(logging.WARN, msg, *args, **kwargs) def warning(self, msg, *args, **kwargs): - """Logs 'msg % args' with severity 'WARNING'.""" + """Logs ``msg % args`` with severity ``WARNING``.""" self.log(logging.WARNING, msg, *args, **kwargs) def info(self, msg, *args, **kwargs): - """Logs 'msg % args' 
with severity 'INFO'.""" + """Logs ``msg % args`` with severity ``INFO``.""" self.log(logging.INFO, msg, *args, **kwargs) def debug(self, msg, *args, **kwargs): - """Logs 'msg % args' with severity 'DEBUG'.""" + """Logs ``msg % args`` with severity ``DEBUG``.""" self.log(logging.DEBUG, msg, *args, **kwargs) def log(self, level, msg, *args, **kwargs): @@ -858,12 +1119,12 @@ def log(self, level, msg, *args, **kwargs): super(ABSLLogger, self).log(level, msg, *args, **kwargs) def handle(self, record): - """Calls handlers without checking Logger.disabled. + """Calls handlers without checking ``Logger.disabled``. - Non-root loggers are set to disabled after setup with logging.config if - it's not explicitly specified. Historically, absl logging will not be + Non-root loggers are set to disabled after setup with :func:`logging.config` + if it's not explicitly specified. Historically, absl logging will not be disabled by that. To maintaining this behavior, this function skips - checking the Logger.disabled bit. + checking the ``Logger.disabled`` bit. This logger can still be disabled by adding a filter that filters out everything. @@ -875,19 +1136,25 @@ def handle(self, record): self.callHandlers(record) @classmethod - def register_frame_to_skip(cls, file_name, function_name): + def register_frame_to_skip(cls, file_name, function_name, line_number=None): """Registers a function name to skip when walking the stack. - The ABSLLogger sometimes skips method calls on the stack - to make the log messages meaningful in their appropriate context. - This method registers a function from a particluar file as one + The :class:`~absl.logging.ABSLLogger` sometimes skips method calls on the + stack to make the log messages meaningful in their appropriate context. + This method registers a function from a particular file as one which should be skipped. Args: file_name: str, the name of the file that contains the function. function_name: str, the name of the function to skip. 
+ line_number: int, if provided, only the function with this starting line + number will be skipped. Otherwise, all functions with the same name + in the file will be skipped. """ - cls._frames_to_skip.add((file_name, function_name)) + if line_number is not None: + cls._frames_to_skip.add((file_name, function_name, line_number)) + else: + cls._frames_to_skip.add((file_name, function_name)) def _get_thread_id(): @@ -900,7 +1167,7 @@ def _get_thread_id(): Returns: Thread ID unique to this process (unsigned) """ - thread_id = _thread_lib.get_ident() + thread_id = threading.get_ident() return thread_id & _THREAD_ID_MASK @@ -925,18 +1192,33 @@ def use_python_logging(quiet=False): info('Restoring pure python logging') -def use_absl_handler(): - """Uses the ABSL logging handler for logging if not yet configured. +_attempted_to_remove_stderr_stream_handlers = False - The absl handler is already attached to root if there are no other handlers - attached when importing this module. - Otherwise, this method is called in app.run() so absl handler is used. +def use_absl_handler(): + """Uses the ABSL logging handler for logging. + + This method is called in :func:`app.run()` so the absl handler + is used in absl apps. """ + global _attempted_to_remove_stderr_stream_handlers + if not _attempted_to_remove_stderr_stream_handlers: + # The absl handler logs to stderr by default. To prevent double logging to + # stderr, the following code tries its best to remove other handlers that + # emit to stderr. Those handlers are most commonly added when + # logging.info/debug is called before calling use_absl_handler(). 
+ handlers = [ + h for h in logging.root.handlers + if isinstance(h, logging.StreamHandler) and h.stream == sys.stderr] + for h in handlers: + logging.root.removeHandler(h) + _attempted_to_remove_stderr_stream_handlers = True + absl_handler = get_absl_handler() if absl_handler not in logging.root.handlers: logging.root.addHandler(absl_handler) FLAGS['verbosity']._update_logging_levels() # pylint: disable=protected-access + FLAGS['logger_levels']._update_logger_levels() # pylint: disable=protected-access def _initialize(): @@ -954,25 +1236,5 @@ def _initialize(): python_logging_formatter = PythonFormatter() _absl_handler = ABSLHandler(python_logging_formatter) - # The absl handler logs to stderr by default. To prevent double logging to - # stderr, the following code tries its best to remove other handlers that emit - # to stderr. Those handlers are most commonly added when logging.info/debug is - # called before importing this module. - handlers = [ - h for h in logging.root.handlers - if isinstance(h, logging.StreamHandler) and h.stream == sys.stderr] - for h in handlers: - logging.root.removeHandler(h) - - # The absl handler will always be attached to root, not the absl logger. - if not logging.root.handlers: - # Attach the absl handler at import time when there are no other handlers. - # Otherwise it means users have explicitly configured logging, and the absl - # handler will only be attached later in app.run(). For App Engine apps, - # the absl handler is not used. - logging.root.addHandler(_absl_handler) - -# Initialize absl logger. -# Must be called after logging flags in this module are defined. 
_initialize() diff --git a/third_party/py/abseil/absl/logging/converter.py b/third_party/py/abseil/absl/logging/converter.py index 6292547ee51813..0239ab4556458b 100644 --- a/third_party/py/abseil/absl/logging/converter.py +++ b/third_party/py/abseil/absl/logging/converter.py @@ -16,25 +16,26 @@ This converter has to convert (best effort) between three different logging level schemes: - cpp = The C++ logging level scheme used in Abseil C++. - absl = The absl.logging level scheme used in Abseil Python. - standard = The python standard library logging level scheme. - -Here is a handy ascii chart for easy mental mapping. - - LEVEL | cpp | absl | standard | - ----------+-----+--------+----------+ - DEBUG | 0 | 1 | 10 | - INFO | 0 | 0 | 20 | - WARN(ING) | 1 | -1 | 30 | - ERROR | 2 | -2 | 40 | - CRITICAL | 3 | -3 | 50 | - FATAL | 3 | -3 | 50 | - -Note: standard logging CRITICAL is mapped to absl/cpp FATAL. -However, only CRITICAL logs from the absl logger (or absl.logging.fatal) will -terminate the program. CRITICAL logs from non-absl loggers are treated as -error logs with a message prefix "CRITICAL - ". + + * **cpp**: The C++ logging level scheme used in Abseil C++. + * **absl**: The absl.logging level scheme used in Abseil Python. + * **standard**: The python standard library logging level scheme. + +Here is a handy ascii chart for easy mental mapping:: + + LEVEL | cpp | absl | standard | + ---------+-----+--------+----------+ + DEBUG | 0 | 1 | 10 | + INFO | 0 | 0 | 20 | + WARNING | 1 | -1 | 30 | + ERROR | 2 | -2 | 40 | + CRITICAL | 3 | -3 | 50 | + FATAL | 3 | -3 | 50 | + +Note: standard logging ``CRITICAL`` is mapped to absl/cpp ``FATAL``. +However, only ``CRITICAL`` logs from the absl logger (or absl.logging.fatal) +will terminate the program. ``CRITICAL`` logs from non-absl loggers are treated +as error logs with a message prefix ``"CRITICAL - "``. Converting from standard to absl or cpp is a lossy conversion. Converting back to standard will lose granularity. 
For this reason, @@ -43,10 +44,6 @@ or absl if those level schemes are absolutely necessary. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import logging STANDARD_CRITICAL = logging.CRITICAL @@ -56,26 +53,32 @@ STANDARD_DEBUG = logging.DEBUG # These levels are also used to define the constants -# FATAL, ERROR, WARN, WARNING, INFO, and DEBUG in the +# FATAL, ERROR, WARNING, INFO, and DEBUG in the # absl.logging module. ABSL_FATAL = -3 ABSL_ERROR = -2 -ABSL_WARN = -1 +ABSL_WARNING = -1 +ABSL_WARN = -1 # Deprecated name. ABSL_INFO = 0 ABSL_DEBUG = 1 ABSL_LEVELS = {ABSL_FATAL: 'FATAL', ABSL_ERROR: 'ERROR', - ABSL_WARN: 'WARN', + ABSL_WARNING: 'WARNING', ABSL_INFO: 'INFO', ABSL_DEBUG: 'DEBUG'} # Inverts the ABSL_LEVELS dictionary -ABSL_NAMES = dict((v, k) for (k, v) in ABSL_LEVELS.items()) +ABSL_NAMES = {'FATAL': ABSL_FATAL, + 'ERROR': ABSL_ERROR, + 'WARNING': ABSL_WARNING, + 'WARN': ABSL_WARNING, # Deprecated name. + 'INFO': ABSL_INFO, + 'DEBUG': ABSL_DEBUG} ABSL_TO_STANDARD = {ABSL_FATAL: STANDARD_CRITICAL, ABSL_ERROR: STANDARD_ERROR, - ABSL_WARN: STANDARD_WARNING, + ABSL_WARNING: STANDARD_WARNING, ABSL_INFO: STANDARD_INFO, ABSL_DEBUG: STANDARD_DEBUG} @@ -87,10 +90,11 @@ def get_initial_for_level(level): """Gets the initial that should start the log line for the given level. It returns: - - 'I' when: level < STANDARD_WARNING. - - 'W' when: STANDARD_WARNING <= level < STANDARD_ERROR. - - 'E' when: STANDARD_ERROR <= level < STANDARD_CRITICAL. - - 'F' when: level >= STANDARD_CRITICAL. + + * ``'I'`` when: ``level < STANDARD_WARNING``. + * ``'W'`` when: ``STANDARD_WARNING <= level < STANDARD_ERROR``. + * ``'E'`` when: ``STANDARD_ERROR <= level < STANDARD_CRITICAL``. + * ``'F'`` when: ``level >= STANDARD_CRITICAL``. Args: level: int, a Python standard logging level. @@ -155,14 +159,12 @@ def string_to_standard(level): """Converts a string level to standard logging level value. 
Args: - level: str, case-insensitive 'debug', 'info', 'warn', 'error', 'fatal'. + level: str, case-insensitive ``'debug'``, ``'info'``, ``'warning'``, + ``'error'``, ``'fatal'``. Returns: The corresponding integer level for use in standard logging. """ - # Also support warning as an alias to warn. - if level.upper() == 'WARNING': - level = 'WARN' return absl_to_standard(ABSL_NAMES.get(level.upper())) @@ -190,7 +192,7 @@ def standard_to_absl(level): elif level < STANDARD_WARNING: return ABSL_INFO elif level < STANDARD_ERROR: - return ABSL_WARN + return ABSL_WARNING elif level < STANDARD_CRITICAL: return ABSL_ERROR else: diff --git a/third_party/py/abseil/absl/testing/_bazelize_command.py b/third_party/py/abseil/absl/testing/_bazelize_command.py index 93c135caa6da12..9380d274278593 100644 --- a/third_party/py/abseil/absl/testing/_bazelize_command.py +++ b/third_party/py/abseil/absl/testing/_bazelize_command.py @@ -14,36 +14,55 @@ """Internal helper for running tests on Windows Bazel.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import os +from absl import flags + +FLAGS = flags.FLAGS -def get_executable_path(py_binary_path): + +def get_executable_path(py_binary_name): """Returns the executable path of a py_binary. This returns the executable path of a py_binary that is in another Bazel target's data dependencies. - On Linux/macOS, it's the same as the py_binary_path. - On Windows, the py_binary_path points to a zip file, and Bazel 0.5.3+ - generates a .cmd file that can be used to execute the py_binary. + On Linux/macOS, the path and __file__ has the same root directory. + On Windows, bazel builds an .exe file and we need to use the MANIFEST file + the location the actual binary. Args: - py_binary_path: string, the path of a py_binary that is in another Bazel + py_binary_name: string, the name of a py_binary that is in another Bazel target's data dependencies. 
+ + Raises: + RuntimeError: Raised when it cannot locate the executable path. """ + if os.name == 'nt': - executable_path = py_binary_path + '.cmd' - if executable_path.startswith('\\\\?\\'): - # In Bazel 0.5.3 and Python 3, the paths starts with "\\?\". - # However, Python subprocess doesn't support those paths well. - # Strip them as we don't need the prefix. - # See this page for more informaton about "\\?\": - # https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247. - executable_path = executable_path[4:] - return executable_path + py_binary_name += '.exe' + manifest_file = os.path.join(FLAGS.test_srcdir, 'MANIFEST') + workspace_name = os.environ['TEST_WORKSPACE'] + manifest_entry = '{}/{}'.format(workspace_name, py_binary_name) + with open(manifest_file, 'r') as manifest_fd: + for line in manifest_fd: + tokens = line.strip().split(' ') + if len(tokens) != 2: + continue + if manifest_entry == tokens[0]: + return tokens[1] + raise RuntimeError( + 'Cannot locate executable path for {}, MANIFEST file: {}.'.format( + py_binary_name, manifest_file)) else: - return py_binary_path + # NOTE: __file__ may be .py or .pyc, depending on how the module was + # loaded and executed. + path = __file__ + + # Use the package name to find the root directory: every dot is + # a directory, plus one for ourselves. + for _ in range(__name__.count('.') + 1): + path = os.path.dirname(path) + + root_directory = path + return os.path.join(root_directory, py_binary_name) diff --git a/third_party/py/abseil/absl/testing/_pretty_print_reporter.py b/third_party/py/abseil/absl/testing/_pretty_print_reporter.py new file mode 100644 index 00000000000000..b0dde07e4f5a98 --- /dev/null +++ b/third_party/py/abseil/absl/testing/_pretty_print_reporter.py @@ -0,0 +1,91 @@ +# Copyright 2018 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""TestResult implementing default output for test execution status.""" + +import unittest + + +class TextTestResult(unittest.TextTestResult): + """TestResult class that provides the default text result formatting.""" + + def __init__(self, stream, descriptions, verbosity): + # Disable the verbose per-test output from the superclass, since it would + # conflict with our customized output. + super(TextTestResult, self).__init__(stream, descriptions, 0) + self._per_test_output = verbosity > 0 + + def _print_status(self, tag, test): + if self._per_test_output: + test_id = test.id() + if test_id.startswith('__main__.'): + test_id = test_id[len('__main__.'):] + print('[%s] %s' % (tag, test_id), file=self.stream) + self.stream.flush() + + def startTest(self, test): + super(TextTestResult, self).startTest(test) + self._print_status(' RUN ', test) + + def addSuccess(self, test): + super(TextTestResult, self).addSuccess(test) + self._print_status(' OK ', test) + + def addError(self, test, err): + super(TextTestResult, self).addError(test, err) + self._print_status(' FAILED ', test) + + def addFailure(self, test, err): + super(TextTestResult, self).addFailure(test, err) + self._print_status(' FAILED ', test) + + def addSkip(self, test, reason): + super(TextTestResult, self).addSkip(test, reason) + self._print_status(' SKIPPED ', test) + + def addExpectedFailure(self, test, err): + super(TextTestResult, self).addExpectedFailure(test, err) + self._print_status(' OK ', test) + + def addUnexpectedSuccess(self, test): + super(TextTestResult, 
self).addUnexpectedSuccess(test) + self._print_status(' FAILED ', test) + + +class TextTestRunner(unittest.TextTestRunner): + """A test runner that produces formatted text results.""" + + _TEST_RESULT_CLASS = TextTestResult + + # Set this to true at the class or instance level to run tests using a + # debug-friendly method (e.g, one that doesn't catch exceptions and interacts + # better with debuggers). + # Usually this is set using --pdb_post_mortem. + run_for_debugging = False + + def run(self, test): + # type: (TestCase) -> TestResult + if self.run_for_debugging: + return self._run_debug(test) + else: + return super(TextTestRunner, self).run(test) + + def _run_debug(self, test): + # type: (TestCase) -> TestResult + test.debug() + # Return an empty result to indicate success. + return self._makeResult() + + def _makeResult(self): + return TextTestResult(self.stream, self.descriptions, self.verbosity) diff --git a/third_party/py/abseil/absl/testing/absltest.py b/third_party/py/abseil/absl/testing/absltest.py index 8702bfd9d7e2c2..9071f8f65bda67 100644 --- a/third_party/py/abseil/absl/testing/absltest.py +++ b/third_party/py/abseil/absl/testing/absltest.py @@ -18,30 +18,37 @@ tests. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import collections +from collections import abc +import contextlib import difflib +import enum import errno import getpass import inspect +import io import itertools import json import os import random import re import shlex +import shutil import signal +import stat import subprocess import sys import tempfile import textwrap import unittest +from unittest import mock # pylint: disable=unused-import Allow absltest.mock. +from urllib import parse try: + # The faulthandler module isn't always available, and pytype doesn't + # understand that we're catching ImportError, so suppress the error. 
+ # pytype: disable=import-error import faulthandler + # pytype: enable=import-error except ImportError: # We use faulthandler if it is available. faulthandler = None @@ -49,15 +56,81 @@ from absl import app from absl import flags from absl import logging +from absl.testing import _pretty_print_reporter from absl.testing import xml_reporter -import six -from six.moves import urllib -from six.moves import xrange # pylint: disable=redefined-builtin +# Make typing an optional import to avoid it being a required dependency +# in Python 2. Type checkers will still understand the imports. +try: + # pylint: disable=unused-import + import typing + from typing import Any, AnyStr, BinaryIO, Callable, ContextManager, IO, Iterator, List, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Text, TextIO, Tuple, Type, Union + # pylint: enable=unused-import +except ImportError: + pass +else: + # Use an if-type-checking block to prevent leakage of type-checking only + # symbols. We don't want people relying on these at runtime. + if typing.TYPE_CHECKING: + # Unbounded TypeVar for general usage + _T = typing.TypeVar('_T') + + import unittest.case + _OutcomeType = unittest.case._Outcome # pytype: disable=module-attr + + + +# Re-export a bunch of unittest functions we support so that people don't +# have to import unittest to get them +# pylint: disable=invalid-name +skip = unittest.skip +skipIf = unittest.skipIf +skipUnless = unittest.skipUnless +SkipTest = unittest.SkipTest +expectedFailure = unittest.expectedFailure +# pylint: enable=invalid-name + +# End unittest re-exports FLAGS = flags.FLAGS -_TEXT_OR_BINARY_TYPES = (six.text_type, six.binary_type) +_TEXT_OR_BINARY_TYPES = (str, bytes) + +# Suppress surplus entries in AssertionError stack traces. +__unittest = True # pylint: disable=invalid-name + + +def expectedFailureIf(condition, reason): # pylint: disable=invalid-name + """Expects the test to fail if the run condition is True. 
+ + Example usage:: + + @expectedFailureIf(sys.version.major == 2, "Not yet working in py2") + def test_foo(self): + ... + + Args: + condition: bool, whether to expect failure or not. + reason: Text, the reason to expect failure. + Returns: + Decorator function + """ + del reason # Unused + if condition: + return unittest.expectedFailure + else: + return lambda f: f + + +class TempFileCleanup(enum.Enum): + # Always cleanup temp files when the test completes. + ALWAYS = 'always' + # Only cleanup temp file if the test passes. This allows easier inspection + # of tempfile contents on test failure. absltest.TEST_TMPDIR.value determines + # where tempfiles are created. + SUCCESS = 'success' + # Never cleanup temp files. + OFF = 'never' # Many of the methods in this module have names like assertSameElements. @@ -67,6 +140,7 @@ def _get_default_test_random_seed(): + # type: () -> int random_seed = 301 value = os.environ.get('TEST_RANDOM_SEED', '') try: @@ -77,11 +151,13 @@ def _get_default_test_random_seed(): def get_default_test_srcdir(): + # type: () -> Text """Returns default test source dir.""" return os.environ.get('TEST_SRCDIR', '') def get_default_test_tmpdir(): + # type: () -> Text """Returns default test temp dir.""" tmpdir = os.environ.get('TEST_TMPDIR', '') if not tmpdir: @@ -91,6 +167,7 @@ def get_default_test_tmpdir(): def _get_default_randomize_ordering_seed(): + # type: () -> int """Returns default seed to use for randomizing test order. This function first checks the --test_randomize_ordering_seed flag, and then @@ -117,11 +194,13 @@ def _get_default_randomize_ordering_seed(): ValueError: Raised when the flag or env value is not one of the options above. 
""" - if FLAGS.test_randomize_ordering_seed is not None: + if FLAGS['test_randomize_ordering_seed'].present: randomize = FLAGS.test_randomize_ordering_seed + elif 'TEST_RANDOMIZE_ORDERING_SEED' in os.environ: + randomize = os.environ['TEST_RANDOMIZE_ORDERING_SEED'] else: - randomize = os.environ.get('TEST_RANDOMIZE_ORDERING_SEED') - if randomize is None: + randomize = '' + if not randomize: return 0 if randomize == 'random': return random.Random().randint(1, 4294967295) @@ -137,35 +216,45 @@ def _get_default_randomize_ordering_seed(): 'Unknown test randomization seed value: {}'.format(randomize)) -flags.DEFINE_integer('test_random_seed', _get_default_test_random_seed(), - 'Random seed for testing. Some test frameworks may ' - 'change the default value of this flag between runs, so ' - 'it is not appropriate for seeding probabilistic tests.', - allow_override_cpp=True) -flags.DEFINE_string('test_srcdir', - get_default_test_srcdir(), - 'Root of directory tree where source files live', - allow_override_cpp=True) -flags.DEFINE_string('test_tmpdir', get_default_test_tmpdir(), - 'Directory for temporary testing files', - allow_override_cpp=True) -flags.DEFINE_string('test_randomize_ordering_seed', None, - 'If positive, use this as a seed to randomize the ' - 'execution order for test cases. If "random", pick a ' - 'random seed to use. If 0 or not set, do not randomize ' - 'test case execution order. 
This flag also overrides ' - 'the TEST_RANDOMIZE_ORDERING_SEED environment variable.') -flags.DEFINE_string('xml_output_file', '', - 'File to store XML test results') +TEST_SRCDIR = flags.DEFINE_string( + 'test_srcdir', + get_default_test_srcdir(), + 'Root of directory tree where source files live', + allow_override_cpp=True) +TEST_TMPDIR = flags.DEFINE_string( + 'test_tmpdir', + get_default_test_tmpdir(), + 'Directory for temporary testing files', + allow_override_cpp=True) + +flags.DEFINE_integer( + 'test_random_seed', + _get_default_test_random_seed(), + 'Random seed for testing. Some test frameworks may ' + 'change the default value of this flag between runs, so ' + 'it is not appropriate for seeding probabilistic tests.', + allow_override_cpp=True) +flags.DEFINE_string( + 'test_randomize_ordering_seed', + '', + 'If positive, use this as a seed to randomize the ' + 'execution order for test cases. If "random", pick a ' + 'random seed to use. If 0 or not set, do not randomize ' + 'test case execution order. This flag also overrides ' + 'the TEST_RANDOMIZE_ORDERING_SEED environment variable.', + allow_override_cpp=True) +flags.DEFINE_string('xml_output_file', '', 'File to store XML test results') # We might need to monkey-patch TestResult so that it stops considering an # unexpected pass as a as a "successful result". For details, see # http://bugs.python.org/issue20165 def _monkey_patch_test_result_for_unexpected_passes(): + # type: () -> None """Workaround for .""" def wasSuccessful(self): + # type: () -> bool """Tells whether or not this result was a success. Any unexpected pass is to be counted as a non-success. 
@@ -179,10 +268,10 @@ def wasSuccessful(self): return (len(self.failures) == len(self.errors) == len(self.unexpectedSuccesses) == 0) - test_result = unittest.result.TestResult() - test_result.addUnexpectedSuccess('test') + test_result = unittest.TestResult() + test_result.addUnexpectedSuccess(unittest.FunctionTestCase(lambda: None)) if test_result.wasSuccessful(): # The bug is present. - unittest.result.TestResult.wasSuccessful = wasSuccessful + unittest.TestResult.wasSuccessful = wasSuccessful if test_result.wasSuccessful(): # Warn the user if our hot-fix failed. sys.stderr.write('unittest.result.TestResult monkey patch to report' ' unexpected passes as failures did not work.\n') @@ -191,12 +280,560 @@ def wasSuccessful(self): _monkey_patch_test_result_for_unexpected_passes() +def _open(filepath, mode, _open_func=open): + # type: (Text, Text, Callable[..., IO]) -> IO + """Opens a file. + + Like open(), but ensure that we can open real files even if tests stub out + open(). + + Args: + filepath: A filepath. + mode: A mode. + _open_func: A built-in open() function. + + Returns: + The opened file object. + """ + return _open_func(filepath, mode, encoding='utf-8') + + +class _TempDir(object): + """Represents a temporary directory for tests. + + Creation of this class is internal. Using its public methods is OK. + + This class implements the `os.PathLike` interface (specifically, + `os.PathLike[str]`). This means, in Python 3, it can be directly passed + to e.g. `os.path.join()`. + """ + + def __init__(self, path): + # type: (Text) -> None + """Module-private: do not instantiate outside module.""" + self._path = path + + @property + def full_path(self): + # type: () -> Text + """Returns the path, as a string, for the directory. + + TIP: Instead of e.g. `os.path.join(temp_dir.full_path)`, you can simply + do `os.path.join(temp_dir)` because `__fspath__()` is implemented. 
+ """ + return self._path + + def __fspath__(self): + # type: () -> Text + """See os.PathLike.""" + return self.full_path + + def create_file(self, file_path=None, content=None, mode='w', encoding='utf8', + errors='strict'): + # type: (Optional[Text], Optional[AnyStr], Text, Text, Text) -> _TempFile + """Create a file in the directory. + + NOTE: If the file already exists, it will be made writable and overwritten. + + Args: + file_path: Optional file path for the temp file. If not given, a unique + file name will be generated and used. Slashes are allowed in the name; + any missing intermediate directories will be created. NOTE: This path + is the path that will be cleaned up, including any directories in the + path, e.g., 'foo/bar/baz.txt' will `rm -r foo` + content: Optional string or bytes to initially write to the file. If not + specified, then an empty file is created. + mode: Mode string to use when writing content. Only used if `content` is + non-empty. + encoding: Encoding to use when writing string content. Only used if + `content` is text. + errors: How to handle text to bytes encoding errors. Only used if + `content` is text. + + Returns: + A _TempFile representing the created file. + """ + tf, _ = _TempFile._create(self._path, file_path, content, mode, encoding, + errors) + return tf + + def mkdir(self, dir_path=None): + # type: (Optional[Text]) -> _TempDir + """Create a directory in the directory. + + Args: + dir_path: Optional path to the directory to create. If not given, + a unique name will be generated and used. + + Returns: + A _TempDir representing the created directory. + """ + if dir_path: + path = os.path.join(self._path, dir_path) + else: + path = tempfile.mkdtemp(dir=self._path) + + # Note: there's no need to clear the directory since the containing + # dir was cleared by the tempdir() function. + os.makedirs(path, exist_ok=True) + return _TempDir(path) + + +class _TempFile(object): + """Represents a tempfile for tests. 
+ + Creation of this class is internal. Using its public methods is OK. + + This class implements the `os.PathLike` interface (specifically, + `os.PathLike[str]`). This means, in Python 3, it can be directly passed + to e.g. `os.path.join()`. + """ + + def __init__(self, path): + # type: (Text) -> None + """Private: use _create instead.""" + self._path = path + + # pylint: disable=line-too-long + @classmethod + def _create(cls, base_path, file_path, content, mode, encoding, errors): + # type: (Text, Optional[Text], AnyStr, Text, Text, Text) -> Tuple[_TempFile, Text] + # pylint: enable=line-too-long + """Module-private: create a tempfile instance.""" + if file_path: + cleanup_path = os.path.join(base_path, _get_first_part(file_path)) + path = os.path.join(base_path, file_path) + os.makedirs(os.path.dirname(path), exist_ok=True) + # The file may already exist, in which case, ensure it's writable so that + # it can be truncated. + if os.path.exists(path) and not os.access(path, os.W_OK): + stat_info = os.stat(path) + os.chmod(path, stat_info.st_mode | stat.S_IWUSR) + else: + os.makedirs(base_path, exist_ok=True) + fd, path = tempfile.mkstemp(dir=str(base_path)) + os.close(fd) + cleanup_path = path + + tf = cls(path) + + if content: + if isinstance(content, str): + tf.write_text(content, mode=mode, encoding=encoding, errors=errors) + else: + tf.write_bytes(content, mode) + + else: + tf.write_bytes(b'') + + return tf, cleanup_path + + @property + def full_path(self): + # type: () -> Text + """Returns the path, as a string, for the file. + + TIP: Instead of e.g. `os.path.join(temp_file.full_path)`, you can simply + do `os.path.join(temp_file)` because `__fspath__()` is implemented. 
+ """ + return self._path + + def __fspath__(self): + # type: () -> Text + """See os.PathLike.""" + return self.full_path + + def read_text(self, encoding='utf8', errors='strict'): + # type: (Text, Text) -> Text + """Return the contents of the file as text.""" + with self.open_text(encoding=encoding, errors=errors) as fp: + return fp.read() + + def read_bytes(self): + # type: () -> bytes + """Return the content of the file as bytes.""" + with self.open_bytes() as fp: + return fp.read() + + def write_text(self, text, mode='w', encoding='utf8', errors='strict'): + # type: (Text, Text, Text, Text) -> None + """Write text to the file. + + Args: + text: Text to write. In Python 2, it can be bytes, which will be + decoded using the `encoding` arg (this is as an aid for code that + is 2 and 3 compatible). + mode: The mode to open the file for writing. + encoding: The encoding to use when writing the text to the file. + errors: The error handling strategy to use when converting text to bytes. + """ + with self.open_text(mode, encoding=encoding, errors=errors) as fp: + fp.write(text) + + def write_bytes(self, data, mode='wb'): + # type: (bytes, Text) -> None + """Write bytes to the file. + + Args: + data: bytes to write. + mode: Mode to open the file for writing. The "b" flag is implicit if + not already present. It must not have the "t" flag. + """ + with self.open_bytes(mode) as fp: + fp.write(data) + + def open_text(self, mode='rt', encoding='utf8', errors='strict'): + # type: (Text, Text, Text) -> ContextManager[TextIO] + """Return a context manager for opening the file in text mode. + + Args: + mode: The mode to open the file in. The "t" flag is implicit if not + already present. It must not have the "b" flag. + encoding: The encoding to use when opening the file. + errors: How to handle decoding errors. + + Returns: + Context manager that yields an open file. + + Raises: + ValueError: if invalid inputs are provided. 
+ """ + if 'b' in mode: + raise ValueError('Invalid mode {!r}: "b" flag not allowed when opening ' + 'file in text mode'.format(mode)) + if 't' not in mode: + mode += 't' + cm = self._open(mode, encoding, errors) + return cm + + def open_bytes(self, mode='rb'): + # type: (Text) -> ContextManager[BinaryIO] + """Return a context manager for opening the file in binary mode. + + Args: + mode: The mode to open the file in. The "b" mode is implicit if not + already present. It must not have the "t" flag. + + Returns: + Context manager that yields an open file. + + Raises: + ValueError: if invalid inputs are provided. + """ + if 't' in mode: + raise ValueError('Invalid mode {!r}: "t" flag not allowed when opening ' + 'file in binary mode'.format(mode)) + if 'b' not in mode: + mode += 'b' + cm = self._open(mode, encoding=None, errors=None) + return cm + + # TODO(b/123775699): Once pytype supports typing.Literal, use overload and + # Literal to express more precise return types. The contained type is + # currently `Any` to avoid [bad-return-type] errors in the open_* methods. + @contextlib.contextmanager + def _open( + self, mode: str, encoding: str = 'utf8', errors: str = 'strict' + ) -> Iterator[Any]: + with io.open( + self.full_path, mode=mode, encoding=encoding, errors=errors) as fp: + yield fp + + +class _method(object): + """A decorator that supports both instance and classmethod invocations. + + Using similar semantics to the @property builtin, this decorator can augment + an instance method to support conditional logic when invoked on a class + object. This breaks support for invoking an instance method via the class + (e.g. Cls.method(self, ...)) but is still situationally useful. 
+ """ + + def __init__(self, finstancemethod): + # type: (Callable[..., Any]) -> None + self._finstancemethod = finstancemethod + self._fclassmethod = None + + def classmethod(self, fclassmethod): + # type: (Callable[..., Any]) -> _method + self._fclassmethod = classmethod(fclassmethod) + return self + + def __doc__(self): + # type: () -> str + if getattr(self._finstancemethod, '__doc__'): + return self._finstancemethod.__doc__ + elif getattr(self._fclassmethod, '__doc__'): + return self._fclassmethod.__doc__ + return '' + + def __get__(self, obj, type_): + # type: (Optional[Any], Optional[Type[Any]]) -> Callable[..., Any] + func = self._fclassmethod if obj is None else self._finstancemethod + return func.__get__(obj, type_) # pytype: disable=attribute-error + + class TestCase(unittest.TestCase): - """Extension of unittest.TestCase providing more powerful assertions.""" + """Extension of unittest.TestCase providing more power.""" + + # When to cleanup files/directories created by our `create_tempfile()` and + # `create_tempdir()` methods after each test case completes. This does *not* + # affect e.g., files created outside of those methods, e.g., using the stdlib + # tempfile module. This can be overridden at the class level, instance level, + # or with the `cleanup` arg of `create_tempfile()` and `create_tempdir()`. See + # `TempFileCleanup` for details on the different values. + # TODO(b/70517332): Remove the type comment and the disable once pytype has + # better support for enums. + tempfile_cleanup = TempFileCleanup.ALWAYS # type: TempFileCleanup # pytype: disable=annotation-type-mismatch maxDiff = 80 * 20 + longMessage = True + + # Exit stacks for per-test and per-class scopes. 
+ _exit_stack = None + _cls_exit_stack = None + + def __init__(self, *args, **kwargs): + super(TestCase, self).__init__(*args, **kwargs) + # This is to work around missing type stubs in unittest.pyi + self._outcome = getattr(self, '_outcome') # type: Optional[_OutcomeType] + + def setUp(self): + super(TestCase, self).setUp() + # NOTE: Only Python 3 contextlib has ExitStack + if hasattr(contextlib, 'ExitStack'): + self._exit_stack = contextlib.ExitStack() + self.addCleanup(self._exit_stack.close) + + @classmethod + def setUpClass(cls): + super(TestCase, cls).setUpClass() + # NOTE: Only Python 3 contextlib has ExitStack and only Python 3.8+ has + # addClassCleanup. + if hasattr(contextlib, 'ExitStack') and hasattr(cls, 'addClassCleanup'): + cls._cls_exit_stack = contextlib.ExitStack() + cls.addClassCleanup(cls._cls_exit_stack.close) + + def create_tempdir(self, name=None, cleanup=None): + # type: (Optional[Text], Optional[TempFileCleanup]) -> _TempDir + """Create a temporary directory specific to the test. + + NOTE: The directory and its contents will be recursively cleared before + creation. This ensures that there is no pre-existing state. + + This creates a named directory on disk that is isolated to this test, and + will be properly cleaned up by the test. This avoids several pitfalls of + creating temporary directories for test purposes, as well as makes it easier + to setup directories and verify their contents. For example:: + + def test_foo(self): + out_dir = self.create_tempdir() + out_log = out_dir.create_file('output.log') + expected_outputs = [ + os.path.join(out_dir, 'data-0.txt'), + os.path.join(out_dir, 'data-1.txt'), + ] + code_under_test(out_dir) + self.assertTrue(os.path.exists(expected_paths[0])) + self.assertTrue(os.path.exists(expected_paths[1])) + self.assertEqual('foo', out_log.read_text()) + + See also: :meth:`create_tempdir` for creating temporary files. + + Args: + name: Optional name of the directory. 
If not given, a unique + name will be generated and used. + cleanup: Optional cleanup policy on when/if to remove the directory (and + all its contents) at the end of the test. If None, then uses + :attr:`tempfile_cleanup`. + + Returns: + A _TempDir representing the created directory; see _TempDir class docs + for usage. + """ + test_path = self._get_tempdir_path_test() + + if name: + path = os.path.join(test_path, name) + cleanup_path = os.path.join(test_path, _get_first_part(name)) + else: + os.makedirs(test_path, exist_ok=True) + path = tempfile.mkdtemp(dir=test_path) + cleanup_path = path + + _rmtree_ignore_errors(cleanup_path) + os.makedirs(path, exist_ok=True) + + self._maybe_add_temp_path_cleanup(cleanup_path, cleanup) + + return _TempDir(path) + + # pylint: disable=line-too-long + def create_tempfile(self, file_path=None, content=None, mode='w', + encoding='utf8', errors='strict', cleanup=None): + # type: (Optional[Text], Optional[AnyStr], Text, Text, Text, Optional[TempFileCleanup]) -> _TempFile + # pylint: enable=line-too-long + """Create a temporary file specific to the test. + + This creates a named file on disk that is isolated to this test, and will + be properly cleaned up by the test. This avoids several pitfalls of + creating temporary files for test purposes, as well as makes it easier + to setup files, their data, read them back, and inspect them when + a test fails. For example:: + + def test_foo(self): + output = self.create_tempfile() + code_under_test(output) + self.assertGreater(os.path.getsize(output), 0) + self.assertEqual('foo', output.read_text()) + + NOTE: This will zero-out the file. This ensures there is no pre-existing + state. + NOTE: If the file already exists, it will be made writable and overwritten. + + See also: :meth:`create_tempdir` for creating temporary directories, and + ``_TempDir.create_file`` for creating files within a temporary directory. + + Args: + file_path: Optional file path for the temp file. 
If not given, a unique + file name will be generated and used. Slashes are allowed in the name; + any missing intermediate directories will be created. NOTE: This path is + the path that will be cleaned up, including any directories in the path, + e.g., ``'foo/bar/baz.txt'`` will ``rm -r foo``. + content: Optional string or + bytes to initially write to the file. If not + specified, then an empty file is created. + mode: Mode string to use when writing content. Only used if `content` is + non-empty. + encoding: Encoding to use when writing string content. Only used if + `content` is text. + errors: How to handle text to bytes encoding errors. Only used if + `content` is text. + cleanup: Optional cleanup policy on when/if to remove the directory (and + all its contents) at the end of the test. If None, then uses + :attr:`tempfile_cleanup`. + + Returns: + A _TempFile representing the created file; see _TempFile class docs for + usage. + """ + test_path = self._get_tempdir_path_test() + tf, cleanup_path = _TempFile._create(test_path, file_path, content=content, + mode=mode, encoding=encoding, + errors=errors) + self._maybe_add_temp_path_cleanup(cleanup_path, cleanup) + return tf + + @_method + def enter_context(self, manager): + # type: (ContextManager[_T]) -> _T + """Returns the CM's value after registering it with the exit stack. + + Entering a context pushes it onto a stack of contexts. When `enter_context` + is called on the test instance (e.g. `self.enter_context`), the context is + exited after the test case's tearDown call. When called on the test class + (e.g. `TestCase.enter_context`), the context is exited after the test + class's tearDownClass call. + + Contexts are exited in the reverse order of entering. They will always + be exited, regardless of test failure/success. + + This is useful to eliminate per-test boilerplate when context managers + are used. 
For example, instead of decorating every test with `@mock.patch`, + simply do `self.foo = self.enter_context(mock.patch(...))' in `setUp()`. + + NOTE: The context managers will always be exited without any error + information. This is an unfortunate implementation detail due to some + internals of how unittest runs tests. + + Args: + manager: The context manager to enter. + """ + if not self._exit_stack: + raise AssertionError( + 'self._exit_stack is not set: enter_context is Py3-only; also make ' + 'sure that AbslTest.setUp() is called.') + return self._exit_stack.enter_context(manager) + + @enter_context.classmethod + def enter_context(cls, manager): # pylint: disable=no-self-argument + # type: (ContextManager[_T]) -> _T + if not cls._cls_exit_stack: + raise AssertionError( + 'cls._cls_exit_stack is not set: cls.enter_context requires ' + 'Python 3.8+; also make sure that AbslTest.setUpClass() is called.') + return cls._cls_exit_stack.enter_context(manager) + + @classmethod + def _get_tempdir_path_cls(cls): + # type: () -> Text + return os.path.join(TEST_TMPDIR.value, + cls.__qualname__.replace('__main__.', '')) + + def _get_tempdir_path_test(self): + # type: () -> Text + return os.path.join(self._get_tempdir_path_cls(), self._testMethodName) + + def _get_tempfile_cleanup(self, override): + # type: (Optional[TempFileCleanup]) -> TempFileCleanup + if override is not None: + return override + return self.tempfile_cleanup + + def _maybe_add_temp_path_cleanup(self, path, cleanup): + # type: (Text, Optional[TempFileCleanup]) -> None + cleanup = self._get_tempfile_cleanup(cleanup) + if cleanup == TempFileCleanup.OFF: + return + elif cleanup == TempFileCleanup.ALWAYS: + self.addCleanup(_rmtree_ignore_errors, path) + elif cleanup == TempFileCleanup.SUCCESS: + self._internal_add_cleanup_on_success(_rmtree_ignore_errors, path) + else: + raise AssertionError('Unexpected cleanup value: {}'.format(cleanup)) + + def _internal_add_cleanup_on_success( + self, + function: 
Callable[..., Any], + *args: Any, + **kwargs: Any, + ) -> None: + """Adds `function` as cleanup when the test case succeeds.""" + outcome = self._outcome + previous_failure_count = ( + len(outcome.result.failures) + + len(outcome.result.errors) + + len(outcome.result.unexpectedSuccesses) + ) + def _call_cleaner_on_success(*args, **kwargs): + if not self._internal_ran_and_passed_when_called_during_cleanup( + previous_failure_count): + return + function(*args, **kwargs) + self.addCleanup(_call_cleaner_on_success, *args, **kwargs) + + def _internal_ran_and_passed_when_called_during_cleanup( + self, + previous_failure_count: int, + ) -> bool: + """Returns whether test is passed. Expected to be called during cleanup.""" + outcome = self._outcome + if sys.version_info[:2] >= (3, 11): + current_failure_count = ( + len(outcome.result.failures) + + len(outcome.result.errors) + + len(outcome.result.unexpectedSuccesses) + ) + return current_failure_count == previous_failure_count + else: + # Before Python 3.11 https://github.com/python/cpython/pull/28180, errors + # were bufferred in _Outcome before calling cleanup. + result = self.defaultTestResult() + self._feedErrorsToResult(result, outcome.errors) # pytype: disable=attribute-error + return result.wasSuccessful() def shortDescription(self): + # type: () -> Text """Formats both the test method name and the first line of its docstring. If no docstring is given, only returns the method name. @@ -208,7 +845,13 @@ def shortDescription(self): Returns: desc: A short description of a test method. """ - desc = str(self) + desc = self.id() + + # Omit the main name so that test name can be directly copy/pasted to + # the command line. 
+ if desc.startswith('__main__.'): + desc = desc[len('__main__.'):] + # NOTE: super() is used here instead of directly invoking # unittest.TestCase.shortDescription(self), because of the # following line that occurs later on: @@ -310,10 +953,10 @@ def assertEmpty(self, container, msg=None): """Asserts that an object has zero length. Args: - container: Anything that implements the collections.Sized interface. + container: Anything that implements the collections.abc.Sized interface. msg: Optional message to report on failure. """ - if not isinstance(container, collections.Sized): + if not isinstance(container, abc.Sized): self.fail('Expected a Sized object, got: ' '{!r}'.format(type(container).__name__), msg) @@ -326,10 +969,10 @@ def assertNotEmpty(self, container, msg=None): """Asserts that an object has non-zero length. Args: - container: Anything that implements the collections.Sized interface. + container: Anything that implements the collections.abc.Sized interface. msg: Optional message to report on failure. """ - if not isinstance(container, collections.Sized): + if not isinstance(container, abc.Sized): self.fail('Expected a Sized object, got: ' '{!r}'.format(type(container).__name__), msg) @@ -342,15 +985,15 @@ def assertLen(self, container, expected_len, msg=None): """Asserts that an object has the expected length. Args: - container: Anything that implements the collections.Sized interface. + container: Anything that implements the collections.abc.Sized interface. expected_len: The expected length of the container. msg: Optional message to report on failure. 
""" - if not isinstance(container, collections.Sized): + if not isinstance(container, abc.Sized): self.fail('Expected a Sized object, got: ' '{!r}'.format(type(container).__name__), msg) if len(container) != expected_len: - container_repr = unittest.util.safe_repr(container) + container_repr = unittest.util.safe_repr(container) # pytype: disable=module-attr self.fail('{} has length of {}, expected {}.'.format( container_repr, len(container), expected_len), msg) @@ -364,7 +1007,7 @@ def assertSequenceAlmostEqual(self, expected_seq, actual_seq, places=None, in the two sequences is more than the given delta. Note that decimal places (from zero) are usually not the same as significant - digits (measured from the most signficant digit). + digits (measured from the most significant digit). If the two sequences compare equal then they will automatically compare almost equal. @@ -383,8 +1026,13 @@ def assertSequenceAlmostEqual(self, expected_seq, actual_seq, places=None, err_list = [] for idx, (exp_elem, act_elem) in enumerate(zip(expected_seq, actual_seq)): try: + # assertAlmostEqual should be called with at most one of `places` and + # `delta`. However, it's okay for assertSequenceAlmostEqual to pass + # both because we want the latter to fail if the former does. + # pytype: disable=wrong-keyword-args self.assertAlmostEqual(exp_elem, act_elem, places=places, msg=msg, delta=delta) + # pytype: enable=wrong-keyword-args except self.failureException as err: err_list.append('At index {}: {}'.format(idx, err)) @@ -413,55 +1061,25 @@ def assertNoCommonElements(self, expected_seq, actual_seq, msg=None): common, expected_seq, actual_seq), msg) def assertItemsEqual(self, expected_seq, actual_seq, msg=None): - """An unordered sequence specific comparison. - - Equivalent to assertCountEqual(). This method is a compatibility layer - for Python 3k, since 2to3 does not convert assertItemsEqual() calls into - assertCountEqual() calls. 
- - Args: - expected_seq: A sequence containing elements we are expecting. - actual_seq: The sequence that we are testing. - msg: The message to be printed if the test fails. - """ - - if not hasattr(super(TestCase, self), 'assertItemsEqual'): - # The assertItemsEqual method was renamed assertCountEqual in Python 3.2 - super(TestCase, self).assertCountEqual(expected_seq, actual_seq, msg) - return - - super(TestCase, self).assertItemsEqual(expected_seq, actual_seq, msg) + """Deprecated, please use assertCountEqual instead. - def assertCountEqual(self, expected_seq, actual_seq, msg=None): - """An unordered sequence specific comparison. - - It asserts that actual_seq and expected_seq have the same element counts. - Equivalent to:: - - self.assertEqual(Counter(iter(actual_seq)), - Counter(iter(expected_seq))) - - Asserts that each element has the same count in both sequences. - Example: - - [0, 1, 1] and [1, 0, 1] compare equal. - - [0, 0, 1] and [0, 1] compare unequal. + This is equivalent to assertCountEqual. Args: expected_seq: A sequence containing elements we are expecting. actual_seq: The sequence that we are testing. msg: The message to be printed if the test fails. - """ - self.assertItemsEqual(expected_seq, actual_seq, msg) + super().assertCountEqual(expected_seq, actual_seq, msg) def assertSameElements(self, expected_seq, actual_seq, msg=None): """Asserts that two sequences have the same elements (in any order). This method, unlike assertCountEqual, doesn't care about any - duplicates in the expected and actual sequences. + duplicates in the expected and actual sequences:: - >> assertSameElements([1, 1, 1, 0, 0, 0], [0, 1]) - # Doesn't raise an AssertionError + # Doesn't raise an AssertionError + assertSameElements([1, 1, 1, 0, 0, 0], [0, 1]) If possible, you should use assertCountEqual instead of assertSameElements. @@ -512,10 +1130,10 @@ def assertSameElements(self, expected_seq, actual_seq, msg=None): # has a different error format. 
However, I find this slightly more readable. def assertMultiLineEqual(self, first, second, msg=None, **kwargs): """Asserts that two multi-line strings are equal.""" - assert isinstance(first, six.string_types), ( - 'First argument is not a string: %r' % (first,)) - assert isinstance(second, six.string_types), ( - 'Second argument is not a string: %r' % (second,)) + assert isinstance(first, + str), ('First argument is not a string: %r' % (first,)) + assert isinstance(second, + str), ('Second argument is not a string: %r' % (second,)) line_limit = kwargs.pop('line_limit', 0) if kwargs: raise TypeError('Unexpected keyword args {}'.format(tuple(kwargs))) @@ -552,11 +1170,12 @@ def assertBetween(self, value, minv, maxv, msg=None): def assertRegexMatch(self, actual_str, regexes, message=None): r"""Asserts that at least one regex in regexes matches str. - If possible you should use assertRegexpMatches, which is a simpler - version of this method. assertRegexpMatches takes a single regular + If possible you should use `assertRegex`, which is a simpler + version of this method. `assertRegex` takes a single regular expression (a string or re compiled object) instead of a list. Notes: + 1. This function uses substring matching, i.e. the matching succeeds if *any* substring of the error message matches *any* regex in the list. This is more convenient for the user than @@ -582,7 +1201,7 @@ def assertRegexMatch(self, actual_str, regexes, message=None): message: The message to be printed if the test fails. 
""" if isinstance(regexes, _TEXT_OR_BINARY_TYPES): - self.fail('regexes is string or bytes; use assertRegexpMatches instead.', + self.fail('regexes is string or bytes; use assertRegex instead.', message) if not regexes: self.fail('No regexes specified.', message) @@ -592,14 +1211,14 @@ def assertRegexMatch(self, actual_str, regexes, message=None): if type(regex) is not regex_type: # pylint: disable=unidiomatic-typecheck self.fail('regexes list must all be the same type.', message) - if regex_type is bytes and isinstance(actual_str, six.text_type): + if regex_type is bytes and isinstance(actual_str, str): regexes = [regex.decode('utf-8') for regex in regexes] - regex_type = six.text_type - elif regex_type is six.text_type and isinstance(actual_str, bytes): + regex_type = str + elif regex_type is str and isinstance(actual_str, bytes): regexes = [regex.encode('utf-8') for regex in regexes] regex_type = bytes - if regex_type is six.text_type: + if regex_type is str: regex = u'(?:%s)' % u')|(?:'.join(regexes) elif regex_type is bytes: regex = b'(?:' + (b')|(?:'.join(regexes)) + b')' @@ -631,7 +1250,7 @@ def assertCommandSucceeds(self, command, regexes=(b'',), env=None, # We need bytes regexes here because `err` is bytes. # Accommodate code which listed their output regexes w/o the b'' prefix by # converting them to bytes for the user. - if isinstance(regexes[0], six.text_type): + if isinstance(regexes[0], str): regexes = [regex.encode('utf-8') for regex in regexes] command_string = get_command_string(command) @@ -677,7 +1296,7 @@ def assertCommandFails(self, command, regexes, env=None, close_fds=True, # We need bytes regexes here because `err` is bytes. # Accommodate code which listed their output regexes w/o the b'' prefix by # converting them to bytes for the user. 
- if isinstance(regexes[0], six.text_type): + if isinstance(regexes[0], str): regexes = [regex.encode('utf-8') for regex in regexes] command_string = get_command_string(command) @@ -717,8 +1336,25 @@ def __exit__(self, exc_type, exc_value, tb): if not issubclass(exc_type, self.expected_exception): return False self.test_func(exc_value) + if exc_value: + self.exception = exc_value.with_traceback(None) return True + @typing.overload + def assertRaisesWithPredicateMatch( + self, expected_exception, predicate) -> _AssertRaisesContext: + # The purpose of this return statement is to work around + # https://github.com/PyCQA/pylint/issues/5273; it is otherwise ignored. + return self._AssertRaisesContext(None, None, None) + + @typing.overload + def assertRaisesWithPredicateMatch( + self, expected_exception, predicate, callable_obj: Callable[..., Any], + *args, **kwargs) -> None: + # The purpose of this return statement is to work around + # https://github.com/PyCQA/pylint/issues/5273; it is otherwise ignored. + return self._AssertRaisesContext(None, None, None) + def assertRaisesWithPredicateMatch(self, expected_exception, predicate, callable_obj=None, *args, **kwargs): """Asserts that exception is thrown and predicate(exception) is true. @@ -747,12 +1383,28 @@ def Check(err): with context: callable_obj(*args, **kwargs) + @typing.overload + def assertRaisesWithLiteralMatch( + self, expected_exception, expected_exception_message + ) -> _AssertRaisesContext: + # The purpose of this return statement is to work around + # https://github.com/PyCQA/pylint/issues/5273; it is otherwise ignored. + return self._AssertRaisesContext(None, None, None) + + @typing.overload + def assertRaisesWithLiteralMatch( + self, expected_exception, expected_exception_message, + callable_obj: Callable[..., Any], *args, **kwargs) -> None: + # The purpose of this return statement is to work around + # https://github.com/PyCQA/pylint/issues/5273; it is otherwise ignored. 
+ return self._AssertRaisesContext(None, None, None) + def assertRaisesWithLiteralMatch(self, expected_exception, expected_exception_message, callable_obj=None, *args, **kwargs): """Asserts that the message in a raised exception equals the given string. - Unlike assertRaisesRegexp, this method takes a literal string, not + Unlike assertRaisesRegex, this method takes a literal string, not a regular expression. with self.assertRaisesWithLiteralMatch(ExType, 'message'): @@ -857,7 +1509,7 @@ def assertContainsExactSubsequence(self, container, subsequence, msg=None): subsequence = list(subsequence) longest_match = 0 - for start in xrange(1 + len(container) - len(subsequence)): + for start in range(1 + len(container) - len(subsequence)): if longest_match == len(subsequence): break index = 0 @@ -875,39 +1527,39 @@ def assertTotallyOrdered(self, *groups, **kwargs): """Asserts that total ordering has been implemented correctly. For example, say you have a class A that compares only on its attribute x. - Comparators other than __lt__ are omitted for brevity. + Comparators other than ``__lt__`` are omitted for brevity:: - class A(object): - def __init__(self, x, y): - self.x = x - self.y = y + class A(object): + def __init__(self, x, y): + self.x = x + self.y = y - def __hash__(self): - return hash(self.x) + def __hash__(self): + return hash(self.x) - def __lt__(self, other): - try: - return self.x < other.x - except AttributeError: - return NotImplemented + def __lt__(self, other): + try: + return self.x < other.x + except AttributeError: + return NotImplemented assertTotallyOrdered will check that instances can be ordered correctly. - For example, + For example:: - self.assertTotallyOrdered( - [None], # None should come before everything else. - [1], # Integers sort earlier. - [A(1, 'a')], - [A(2, 'b')], # 2 is after 1. - [A(3, 'c'), A(3, 'd')], # The second argument is irrelevant. - [A(4, 'z')], - ['foo']) # Strings sort last. 
+ self.assertTotallyOrdered( + [None], # None should come before everything else. + [1], # Integers sort earlier. + [A(1, 'a')], + [A(2, 'b')], # 2 is after 1. + [A(3, 'c'), A(3, 'd')], # The second argument is irrelevant. + [A(4, 'z')], + ['foo']) # Strings sort last. Args: - *groups: A list of groups of elements. Each group of elements is a list - of objects that are equal. The elements in each group must be less - than the elements in the group after it. For example, these groups are - totally ordered: [None], [1], [2, 2], [3]. + *groups: A list of groups of elements. Each group of elements is a list + of objects that are equal. The elements in each group must be less + than the elements in the group after it. For example, these groups are + totally ordered: ``[None]``, ``[1]``, ``[2, 2]``, ``[3]``. **kwargs: optional msg keyword argument can be passed. """ @@ -944,10 +1596,17 @@ def CheckEqual(a, b): self.assertFalse(a != b, self._formatMessage(msg, '%r unexpectedly unequals %r' % (a, b))) - self.assertEqual(hash(a), hash(b), self._formatMessage( - msg, - 'hash %d of %r unexpectedly not equal to hash %d of %r' % - (hash(a), a, hash(b), b))) + + # Objects that compare equal must hash to the same value, but this only + # applies if both objects are hashable. 
+ if (isinstance(a, abc.Hashable) and + isinstance(b, abc.Hashable)): + self.assertEqual( + hash(a), hash(b), + self._formatMessage( + msg, 'hash %d of %r unexpectedly not equal to hash %d of %r' % + (hash(a), a, hash(b), b))) + self.assertFalse(a < b, self._formatMessage(msg, '%r unexpectedly less than %r' % @@ -957,7 +1616,7 @@ def CheckEqual(a, b): '%r unexpectedly less than %r' % (b, a))) self.assertLessEqual(a, b, msg) - self.assertLessEqual(b, a, msg) + self.assertLessEqual(b, a, msg) # pylint: disable=arguments-out-of-order self.assertFalse(a > b, self._formatMessage(msg, '%r unexpectedly greater than %r' % @@ -967,7 +1626,7 @@ def CheckEqual(a, b): '%r unexpectedly greater than %r' % (b, a))) self.assertGreaterEqual(a, b, msg) - self.assertGreaterEqual(b, a, msg) + self.assertGreaterEqual(b, a, msg) # pylint: disable=arguments-out-of-order msg = kwargs.get('msg') @@ -1014,21 +1673,20 @@ def Sorted(list_of_items): if a == b: return - a_items = Sorted(list(six.iteritems(a))) - b_items = Sorted(list(six.iteritems(b))) + a_items = Sorted(list(a.items())) + b_items = Sorted(list(b.items())) unexpected = [] missing = [] different = [] - safe_repr = unittest.util.safe_repr + safe_repr = unittest.util.safe_repr # pytype: disable=module-attr def Repr(dikt): """Deterministic repr for dict.""" # Sort the entries based on their repr, not based on their sort order, # which will be non-deterministic across executions, for many types. 
- entries = sorted((safe_repr(k), safe_repr(v)) - for k, v in six.iteritems(dikt)) + entries = sorted((safe_repr(k), safe_repr(v)) for k, v in dikt.items()) return '{%s}' % (', '.join('%s: %s' % pair for pair in entries)) message = ['%s != %s%s' % (Repr(a), Repr(b), ' (%s)' % msg if msg else '')] @@ -1066,8 +1724,8 @@ def Repr(dikt): def assertUrlEqual(self, a, b, msg=None): """Asserts that urls are equal, ignoring ordering of query params.""" - parsed_a = urllib.parse.urlparse(a) - parsed_b = urllib.parse.urlparse(b) + parsed_a = parse.urlparse(a) + parsed_b = parse.urlparse(b) self.assertEqual(parsed_a.scheme, parsed_b.scheme, msg) self.assertEqual(parsed_a.netloc, parsed_b.netloc, msg) self.assertEqual(parsed_a.path, parsed_b.path, msg) @@ -1075,8 +1733,8 @@ def assertUrlEqual(self, a, b, msg=None): self.assertEqual(sorted(parsed_a.params.split(';')), sorted(parsed_b.params.split(';')), msg) self.assertDictEqual( - urllib.parse.parse_qs(parsed_a.query, keep_blank_values=True), - urllib.parse.parse_qs(parsed_b.query, keep_blank_values=True), msg) + parse.parse_qs(parsed_a.query, keep_blank_values=True), + parse.parse_qs(parsed_b.query, keep_blank_values=True), msg) def assertSameStructure(self, a, b, aname='a', bname='b', msg=None): """Asserts that two values contain the same structural content. @@ -1088,6 +1746,10 @@ def assertSameStructure(self, a, b, aname='a', bname='b', msg=None): the location within the structures where the first difference is found. This may be helpful when comparing large structures. + Mixed Sequence and Set types are supported. Mixed Mapping types are + supported, but the order of the keys will not be considered in the + comparison. + Args: a: The first structure to compare. b: The second structure to compare. @@ -1118,8 +1780,8 @@ def assertJsonEqual(self, first, second, msg=None): using assertSameStructure. Args: - first: A string contining JSON to decode and compare to second. 
- second: A string contining JSON to decode and compare to first. + first: A string containing JSON to decode and compare to second. + second: A string containing JSON to decode and compare to first. msg: Additional text to include in the failure message. """ try: @@ -1140,6 +1802,7 @@ def assertJsonEqual(self, first, second, msg=None): aname='first', bname='second', msg=msg) def _getAssertEqualityFunc(self, first, second): + # type: (Any, Any) -> Callable[..., None] try: return super(TestCase, self)._getAssertEqualityFunc(first, second) except AttributeError: @@ -1158,6 +1821,7 @@ def fail(self, msg=None, prefix=None): def _sorted_list_difference(expected, actual): + # type: (List[_T], List[_T]) -> Tuple[List[_T], List[_T]] """Finds elements in only one or the other of two, sorted input lists. Returns a two-element tuple of lists. The first list contains those @@ -1167,7 +1831,7 @@ def _sorted_list_difference(expected, actual): Args: expected: The list we expected. - actual: The list we actualy got. + actual: The list we actually got. Returns: (missing, unexpected) missing: items in expected that are not in actual. 
@@ -1206,10 +1870,34 @@ def _sorted_list_difference(expected, actual): return missing, unexpected +def _are_both_of_integer_type(a, b): + # type: (object, object) -> bool + return isinstance(a, int) and isinstance(b, int) + + +def _are_both_of_sequence_type(a, b): + # type: (object, object) -> bool + return isinstance(a, abc.Sequence) and isinstance( + b, abc.Sequence) and not isinstance( + a, _TEXT_OR_BINARY_TYPES) and not isinstance(b, _TEXT_OR_BINARY_TYPES) + + +def _are_both_of_set_type(a, b): + # type: (object, object) -> bool + return isinstance(a, abc.Set) and isinstance(b, abc.Set) + + +def _are_both_of_mapping_type(a, b): + # type: (object, object) -> bool + return isinstance(a, abc.Mapping) and isinstance( + b, abc.Mapping) + + def _walk_structure_for_problems(a, b, aname, bname, problem_list): """The recursive comparison behind assertSameStructure.""" if type(a) != type(b) and not ( # pylint: disable=unidiomatic-typecheck - isinstance(a, six.integer_types) and isinstance(b, six.integer_types)): + _are_both_of_integer_type(a, b) or _are_both_of_sequence_type(a, b) or + _are_both_of_set_type(a, b) or _are_both_of_mapping_type(a, b)): # We do not distinguish between int and long types as 99.99% of Python 2 # code should never care. They collapse into a single type in Python 3. problem_list.append('%s is a %r but %s is a %r' % @@ -1217,7 +1905,18 @@ def _walk_structure_for_problems(a, b, aname, bname, problem_list): # If they have different types there's no point continuing return - if isinstance(a, collections.Mapping): + if isinstance(a, abc.Set): + for k in a: + if k not in b: + problem_list.append( + '%s has %r but %s does not' % (aname, k, bname)) + for k in b: + if k not in a: + problem_list.append('%s lacks %r but %s has it' % (aname, k, bname)) + + # NOTE: a or b could be a defaultdict, so we must take care that the traversal + # doesn't modify the data. 
+ elif isinstance(a, abc.Mapping): for k in a: if k in b: _walk_structure_for_problems( @@ -1234,17 +1933,17 @@ def _walk_structure_for_problems(a, b, aname, bname, problem_list): (aname, k, bname, b[k])) # Strings/bytes are Sequences but we'll just do those with regular != - elif (isinstance(a, collections.Sequence) and + elif (isinstance(a, abc.Sequence) and not isinstance(a, _TEXT_OR_BINARY_TYPES)): minlen = min(len(a), len(b)) - for i in xrange(minlen): + for i in range(minlen): _walk_structure_for_problems( a[i], b[i], '%s[%d]' % (aname, i), '%s[%d]' % (bname, i), problem_list) - for i in xrange(minlen, len(a)): + for i in range(minlen, len(a)): problem_list.append('%s has [%i] with value %r but %s does not' % (aname, i, a[i], bname)) - for i in xrange(minlen, len(b)): + for i in range(minlen, len(b)): problem_list.append('%s lacks [%i] but %s has it with value %r' % (aname, i, bname, b[i])) @@ -1261,7 +1960,7 @@ def get_command_string(command): Returns: A string suitable for use as a shell command. """ - if isinstance(command, six.string_types): + if isinstance(command, str): return command else: if os.name == 'nt': @@ -1296,7 +1995,7 @@ def get_command_stderr(command, env=None, close_fds=True): # standard handles. close_fds = False - use_shell = isinstance(command, six.string_types) + use_shell = isinstance(command, str) process = subprocess.Popen( command, close_fds=close_fds, @@ -1310,6 +2009,7 @@ def get_command_stderr(command, env=None, close_fds=True): def _quote_long_string(s): + # type: (Union[Text, bytes, bytearray]) -> Text """Quotes a potentially multi-line string to make the start and end obvious. 
Args: @@ -1328,16 +2028,8 @@ def _quote_long_string(s): '----------->8\n') -class _TestProgramManualRun(unittest.TestProgram): - """A TestProgram which runs the tests manually.""" - - def runTests(self, do_run=False): - """Runs the tests.""" - if do_run: - unittest.TestProgram.runTests(self) - - def print_python_version(): + # type: () -> None # Having this in the test output logs by default helps debugging when all # you've got is the log and no other idea of which Python was used. sys.stderr.write('Running tests under Python {0[0]}.{0[1]}.{0[2]}: ' @@ -1347,57 +2039,50 @@ def print_python_version(): def main(*args, **kwargs): + # type: (Text, Any) -> None """Executes a set of Python unit tests. Usually this function is called without arguments, so the unittest.TestProgram instance will get created with the default settings, - so it will run all test methods of all TestCase classes in the __main__ + so it will run all test methods of all TestCase classes in the ``__main__`` module. Args: - *args: Positional arguments passed through to unittest.TestProgram.__init__. - **kwargs: Keyword arguments passed through to unittest.TestProgram.__init__. + *args: Positional arguments passed through to + ``unittest.TestProgram.__init__``. + **kwargs: Keyword arguments passed through to + ``unittest.TestProgram.__init__``. 
""" print_python_version() _run_in_app(run_tests, args, kwargs) def _is_in_app_main(): + # type: () -> bool """Returns True iff app.run is active.""" f = sys._getframe().f_back # pylint: disable=protected-access while f: - if f.f_code == six.get_function_code(app.run): + if f.f_code == app.run.__code__: return True f = f.f_back return False -class _SavedFlag(object): - """Helper class for saving and restoring a flag value.""" - - def __init__(self, flag): - self.flag = flag - self.value = flag.value - self.present = flag.present - - def restore_flag(self): - self.flag.value = self.value - self.flag.present = self.present - - def _register_sigterm_with_faulthandler(): + # type: () -> None """Have faulthandler dump stacks on SIGTERM. Useful to diagnose timeouts.""" if faulthandler and getattr(faulthandler, 'register', None): - # faulthandler.register is not avaiable on Windows. + # faulthandler.register is not available on Windows. # faulthandler.enable() is already called by app.run. try: - faulthandler.register(signal.SIGTERM, chain=True) + faulthandler.register(signal.SIGTERM, chain=True) # pytype: disable=module-attr except Exception as e: # pylint: disable=broad-except sys.stderr.write('faulthandler.register(SIGTERM) failed ' '%r; ignoring.\n' % e) def _run_in_app(function, args, kwargs): + # type: (Callable[..., None], Sequence[Text], Mapping[Text, Any]) -> None """Executes a set of Python unit tests, ensuring app.run. This is a private function, users should call absltest.main(). @@ -1439,30 +2124,30 @@ def _run_in_app(function, args, kwargs): if _is_in_app_main(): _register_sigterm_with_faulthandler() - # Save command-line flags so the side effects of FLAGS(sys.argv) can be - # undone. - flag_objects = (FLAGS[name] for name in FLAGS) - saved_flags = dict((f.name, _SavedFlag(f)) for f in flag_objects) - # Change the default of alsologtostderr from False to True, so the test # programs's stderr will contain all the log messages. 
# If --alsologtostderr=false is specified in the command-line, or user # has called FLAGS.alsologtostderr = False before, then the value is kept # False. FLAGS.set_default('alsologtostderr', True) - # Remove it from saved flags so it doesn't get restored later. - del saved_flags['alsologtostderr'] - - # The call FLAGS(sys.argv) parses sys.argv, returns the arguments - # without the flags, and -- as a side effect -- modifies flag values in - # FLAGS. We don't want the side effect, because we don't want to - # override flag changes the program did (e.g. in __main__.main) - # after the command-line has been parsed. So we have the for loop below - # to change back flags to their old values. - argv = FLAGS(sys.argv) - for saved_flag in six.itervalues(saved_flags): - saved_flag.restore_flag() + # Here we only want to get the `argv` without the flags. To avoid any + # side effects of parsing flags, we temporarily stub out the `parse` method + stored_parse_methods = {} + noop_parse = lambda _: None + for name in FLAGS: + # Avoid any side effects of parsing flags. + stored_parse_methods[name] = FLAGS[name].parse + # This must be a separate loop since multiple flag names (short_name=) can + # point to the same flag object. 
+ for name in FLAGS: + FLAGS[name].parse = noop_parse + try: + argv = FLAGS(sys.argv) + finally: + for name in FLAGS: + FLAGS[name].parse = stored_parse_methods[name] + sys.stdout.flush() function(argv, args, kwargs) else: @@ -1478,16 +2163,93 @@ def main_function(argv): def _is_suspicious_attribute(testCaseClass, name): + # type: (Type, Text) -> bool """Returns True if an attribute is a method named like a test method.""" if name.startswith('Test') and len(name) > 4 and name[4].isupper(): attr = getattr(testCaseClass, name) if inspect.isfunction(attr) or inspect.ismethod(attr): - args = inspect.getargspec(attr) - return (len(args.args) == 1 and args.args[0] == 'self' - and args.varargs is None and args.keywords is None) + args = inspect.getfullargspec(attr) + return (len(args.args) == 1 and args.args[0] == 'self' and + args.varargs is None and args.varkw is None and + not args.kwonlyargs) return False +def skipThisClass(reason): + # type: (Text) -> Callable[[_T], _T] + """Skip tests in the decorated TestCase, but not any of its subclasses. + + This decorator indicates that this class should skip all its tests, but not + any of its subclasses. Useful for if you want to share testMethod or setUp + implementations between a number of concrete testcase classes. + + Example usage, showing how you can share some common test methods between + subclasses. In this example, only ``BaseTest`` will be marked as skipped, and + not RealTest or SecondRealTest:: + + @absltest.skipThisClass("Shared functionality") + class BaseTest(absltest.TestCase): + def test_simple_functionality(self): + self.assertEqual(self.system_under_test.method(), 1) + + class RealTest(BaseTest): + def setUp(self): + super().setUp() + self.system_under_test = MakeSystem(argument) + + def test_specific_behavior(self): + ... + + class SecondRealTest(BaseTest): + def setUp(self): + super().setUp() + self.system_under_test = MakeSystem(other_arguments) + + def test_other_behavior(self): + ... 
+ + Args: + reason: The reason we have a skip in place. For instance: 'shared test + methods' or 'shared assertion methods'. + + Returns: + Decorator function that will cause a class to be skipped. + """ + if isinstance(reason, type): + raise TypeError('Got {!r}, expected reason as string'.format(reason)) + + def _skip_class(test_case_class): + if not issubclass(test_case_class, unittest.TestCase): + raise TypeError( + 'Decorating {!r}, expected TestCase subclass'.format(test_case_class)) + + # Only shadow the setUpClass method if it is directly defined. If it is + # in the parent class we invoke it via a super() call instead of holding + # a reference to it. + shadowed_setupclass = test_case_class.__dict__.get('setUpClass', None) + + @classmethod + def replacement_setupclass(cls, *args, **kwargs): + # Skip this class if it is the one that was decorated with @skipThisClass + if cls is test_case_class: + raise SkipTest(reason) + if shadowed_setupclass: + # Pass along `cls` so the MRO chain doesn't break. + # The original method is a `classmethod` descriptor, which can't + # be directly called, but `__func__` has the underlying function. + return shadowed_setupclass.__func__(cls, *args, **kwargs) + else: + # Because there's no setUpClass() defined directly on test_case_class, + # we call super() ourselves to continue execution of the inheritance + # chain. + return super(test_case_class, cls).setUpClass(*args, **kwargs) + + test_case_class.setUpClass = replacement_setupclass + return test_case_class + + return _skip_class + + class TestLoader(unittest.TestLoader): """A test loader which supports common test features. 
@@ -1508,10 +2270,10 @@ def __init__(self, *args, **kwds): super(TestLoader, self).__init__(*args, **kwds) seed = _get_default_randomize_ordering_seed() if seed: - self._seed = seed - self._random = random.Random(self._seed) + self._randomize_ordering_seed = seed + self._random = random.Random(self._randomize_ordering_seed) else: - self._seed = None + self._randomize_ordering_seed = None self._random = None def getTestCaseNames(self, testCaseClass): # pylint:disable=invalid-name @@ -1520,19 +2282,22 @@ def getTestCaseNames(self, testCaseClass): # pylint:disable=invalid-name if _is_suspicious_attribute(testCaseClass, name): raise TypeError(TestLoader._ERROR_MSG % name) names = super(TestLoader, self).getTestCaseNames(testCaseClass) - if self._seed is not None: - logging.info('Randomizing test order with seed: %d', self._seed) - logging.info('To reproduce this order, re-run with ' - '--test_randomize_ordering_seed=%d', self._seed) + if self._randomize_ordering_seed is not None: + logging.info( + 'Randomizing test order with seed: %d', self._randomize_ordering_seed) + logging.info( + 'To reproduce this order, re-run with ' + '--test_randomize_ordering_seed=%d', self._randomize_ordering_seed) self._random.shuffle(names) return names def get_default_xml_output_filename(): + # type: () -> Optional[Text] if os.environ.get('XML_OUTPUT_FILE'): return os.environ['XML_OUTPUT_FILE'] elif os.environ.get('RUNNING_UNDER_TEST_DAEMON'): - return os.path.join(os.path.dirname(FLAGS.test_tmpdir), 'test_detail.xml') + return os.path.join(os.path.dirname(TEST_TMPDIR.value), 'test_detail.xml') elif os.environ.get('TEST_XMLOUTPUTDIR'): return os.path.join( os.environ['TEST_XMLOUTPUTDIR'], @@ -1540,13 +2305,18 @@ def get_default_xml_output_filename(): def _setup_filtering(argv): + # type: (MutableSequence[Text]) -> None """Implements the bazel test filtering protocol. 
The following environment variable is used in this method: TESTBRIDGE_TEST_ONLY: string, if set, is forwarded to the unittest - framework to use as a test filter. Its value is split with shlex - before being passed as positional arguments on argv. + framework to use as a test filter. Its value is split with shlex, then: + 1. On Python 3.6 and before, split values are passed as positional + arguments on argv. + 2. On Python 3.7+, split values are passed to unittest's `-k` flag. Tests + are matched by glob patterns or substring. See + https://docs.python.org/3/library/unittest.html#cmdoption-unittest-k Args: argv: the argv to mutate in-place. @@ -1555,10 +2325,39 @@ def _setup_filtering(argv): if argv is None or not test_filter: return - argv[1:1] = shlex.split(test_filter) + filters = shlex.split(test_filter) + if sys.version_info[:2] >= (3, 7): + filters = ['-k=' + test_filter for test_filter in filters] + + argv[1:1] = filters + + +def _setup_test_runner_fail_fast(argv): + # type: (MutableSequence[Text]) -> None + """Implements the bazel test fail fast protocol. + + The following environment variable is used in this method: + + TESTBRIDGE_TEST_RUNNER_FAIL_FAST=<1|0> + + If set to 1, --failfast is passed to the unittest framework to return upon + first failure. + + Args: + argv: the argv to mutate in-place. + """ + + if argv is None: + return + + if os.environ.get('TESTBRIDGE_TEST_RUNNER_FAIL_FAST') != '1': + return + + argv[1:1] = ['--failfast'] def _setup_sharding(custom_loader=None): + # type: (Optional[unittest.TestLoader]) -> unittest.TestLoader """Implements the bazel sharding protocol. The following environment variables are used in this method: @@ -1586,16 +2385,12 @@ def _setup_sharding(custom_loader=None): # to query whether a test binary implements the test sharding protocol. 
if 'TEST_SHARD_STATUS_FILE' in os.environ: try: - f = None - try: - f = open(os.environ['TEST_SHARD_STATUS_FILE'], 'w') + with open(os.environ['TEST_SHARD_STATUS_FILE'], 'w') as f: f.write('') - except IOError: - sys.stderr.write('Error opening TEST_SHARD_STATUS_FILE (%s). Exiting.' - % os.environ['TEST_SHARD_STATUS_FILE']) - sys.exit(1) - finally: - if f is not None: f.close() + except IOError: + sys.stderr.write('Error opening TEST_SHARD_STATUS_FILE (%s). Exiting.' + % os.environ['TEST_SHARD_STATUS_FILE']) + sys.exit(1) base_loader = custom_loader or TestLoader() if 'TEST_TOTAL_SHARDS' not in os.environ: @@ -1614,25 +2409,37 @@ def _setup_sharding(custom_loader=None): # the test case names for this shard. delegate_get_names = base_loader.getTestCaseNames - bucket_iterator = itertools.cycle(xrange(total_shards)) + bucket_iterator = itertools.cycle(range(total_shards)) def getShardedTestCaseNames(testCaseClass): filtered_names = [] - for testcase in sorted(delegate_get_names(testCaseClass)): + # We need to sort the list of tests in order to determine which tests this + # shard is responsible for; however, it's important to preserve the order + # returned by the base loader, e.g. in the case of randomized test ordering. 
+ ordered_names = delegate_get_names(testCaseClass) + for testcase in sorted(ordered_names): bucket = next(bucket_iterator) if bucket == shard_index: filtered_names.append(testcase) - return filtered_names + return [x for x in ordered_names if x in filtered_names] base_loader.getTestCaseNames = getShardedTestCaseNames return base_loader +# pylint: disable=line-too-long def _run_and_get_tests_result(argv, args, kwargs, xml_test_runner_class): - """Executes a set of Python unit tests and returns the result.""" + # type: (MutableSequence[Text], Sequence[Any], MutableMapping[Text, Any], Type) -> unittest.TestResult + # pylint: enable=line-too-long + """Same as run_tests, except it returns the result instead of exiting.""" + + # The entry from kwargs overrides argv. + argv = kwargs.pop('argv', argv) # Set up test filtering if requested in environment. _setup_filtering(argv) + # Set up --failfast as requested in environment + _setup_test_runner_fail_fast(argv) # Shard the (default or custom) loader if sharding is turned on. kwargs['testLoader'] = _setup_sharding(kwargs.get('testLoader', None)) @@ -1644,7 +2451,7 @@ def _run_and_get_tests_result(argv, args, kwargs, xml_test_runner_class): FLAGS.xml_output_file = get_default_xml_output_filename() xml_output_file = FLAGS.xml_output_file - xml_output = None + xml_buffer = None if xml_output_file: xml_output_dir = os.path.dirname(xml_output_file) if xml_output_dir and not os.path.isdir(xml_output_dir): @@ -1654,10 +2461,11 @@ def _run_and_get_tests_result(argv, args, kwargs, xml_test_runner_class): # File exists error can occur with concurrent tests if e.errno != errno.EEXIST: raise - if sys.version_info.major == 2: - xml_output = open(xml_output_file, 'w') - else: - xml_output = open(xml_output_file, 'w', encoding='utf-8') + # Fail early if we can't write to the XML output file. This is so that we + # don't waste people's time running tests that will just fail anyways. 
+ with _open(xml_output_file, 'w'): + pass + # We can reuse testRunner if it supports XML output (e. g. by inheriting # from xml_reporter.TextAndXMLTestRunner). Otherwise we need to use # xml_reporter.TextAndXMLTestRunner. @@ -1671,12 +2479,38 @@ def _run_and_get_tests_result(argv, args, kwargs, xml_test_runner_class): kwargs['testRunner'] = xml_test_runner_class if kwargs.get('testRunner') is None: kwargs['testRunner'] = xml_test_runner_class - kwargs['testRunner'].set_default_xml_stream(xml_output) + # Use an in-memory buffer (not backed by the actual file) to store the XML + # report, because some tools modify the file (e.g., create a placeholder + # with partial information, in case the test process crashes). + xml_buffer = io.StringIO() + kwargs['testRunner'].set_default_xml_stream(xml_buffer) # pytype: disable=attribute-error + + # If we've used a seed to randomize test case ordering, we want to record it + # as a top-level attribute in the `testsuites` section of the XML output. + randomize_ordering_seed = getattr( + kwargs['testLoader'], '_randomize_ordering_seed', None) + setter = getattr(kwargs['testRunner'], 'set_testsuites_property', None) + if randomize_ordering_seed and setter: + setter('test_randomize_ordering_seed', randomize_ordering_seed) + elif kwargs.get('testRunner') is None: + kwargs['testRunner'] = _pretty_print_reporter.TextTestRunner + + if FLAGS.pdb_post_mortem: + runner = kwargs['testRunner'] + # testRunner can be a class or an instance, which must be tested for + # differently. + # Overriding testRunner isn't uncommon, so only enable the debugging + # integration if the runner claims it does; we don't want to accidentally + # clobber something on the runner. + if ((isinstance(runner, type) and + issubclass(runner, _pretty_print_reporter.TextTestRunner)) or + isinstance(runner, _pretty_print_reporter.TextTestRunner)): + runner.run_for_debugging = True # Make sure tmpdir exists. 
- if not os.path.isdir(FLAGS.test_tmpdir): + if not os.path.isdir(TEST_TMPDIR.value): try: - os.makedirs(FLAGS.test_tmpdir) + os.makedirs(TEST_TMPDIR.value) except OSError as e: # Concurrent test might have created the directory. if e.errno != errno.EEXIST: @@ -1684,17 +2518,23 @@ def _run_and_get_tests_result(argv, args, kwargs, xml_test_runner_class): # Let unittest.TestProgram.__init__ do its own argv parsing, e.g. for '-v', # on argv, which is sys.argv without the command-line flags. - kwargs.setdefault('argv', argv) + kwargs['argv'] = argv try: test_program = unittest.TestProgram(*args, **kwargs) return test_program.result finally: - if xml_output: - xml_output.close() + if xml_buffer: + try: + with _open(xml_output_file, 'w') as f: + f.write(xml_buffer.getvalue()) + finally: + xml_buffer.close() -def run_tests(argv, args, kwargs): +def run_tests(argv, args, kwargs): # pylint: disable=line-too-long + # type: (MutableSequence[Text], Sequence[Any], MutableMapping[Text, Any]) -> None + # pylint: enable=line-too-long """Executes a set of Python unit tests. Most users should call absltest.main() instead of run_tests. @@ -1706,10 +2546,32 @@ def run_tests(argv, args, kwargs): Args: argv: sys.argv with the command-line flags removed from the front, i.e. the - argv with which app.run() has called __main__.main. - args: Positional arguments passed through to unittest.TestProgram.__init__. - kwargs: Keyword arguments passed through to unittest.TestProgram.__init__. + argv with which :func:`app.run()` has called + ``__main__.main``. It is passed to + ``unittest.TestProgram.__init__(argv=)``, which does its own flag parsing. + It is ignored if kwargs contains an argv entry. + args: Positional arguments passed through to + ``unittest.TestProgram.__init__``. + kwargs: Keyword arguments passed through to + ``unittest.TestProgram.__init__``. 
""" result = _run_and_get_tests_result( argv, args, kwargs, xml_reporter.TextAndXMLTestRunner) sys.exit(not result.wasSuccessful()) + + +def _rmtree_ignore_errors(path): + # type: (Text) -> None + if os.path.isfile(path): + try: + os.unlink(path) + except OSError: + pass + else: + shutil.rmtree(path, ignore_errors=True) + + +def _get_first_part(path): + # type: (Text) -> Text + parts = path.split(os.sep, 1) + return parts[0] diff --git a/third_party/py/abseil/absl/testing/flagsaver.py b/third_party/py/abseil/absl/testing/flagsaver.py old mode 100755 new mode 100644 index a95b742ba8f791..37926d7aac9e76 --- a/third_party/py/abseil/absl/testing/flagsaver.py +++ b/third_party/py/abseil/absl/testing/flagsaver.py @@ -17,33 +17,40 @@ There are many ways to save and restore. Always use the most convenient method for a given use case. -Here are examples of each method. They all call do_stuff() while FLAGS.someflag -is temporarily set to 'foo'. - - # Use a decorator which can optionally override flags via arguments. - @flagsaver.flagsaver(someflag='foo') - def some_func(): - do_stuff() - - # Use a decorator which does not override flags itself. - @flagsaver.flagsaver - def some_func(): - FLAGS.someflag = 'foo' - do_stuff() - - # Use a context manager which can optionally override flags via arguments. - with flagsaver.flagsaver(someflag='foo'): - do_stuff() - - # Save and restore the flag values yourself. - saved_flag_values = flagsaver.save_flag_values() - try: - FLAGS.someflag = 'foo' - do_stuff() - finally: - flagsaver.restore_flag_values(saved_flag_values) - -We save and restore a shallow copy of each Flag object's __dict__ attribute. +Here are examples of each method. They all call ``do_stuff()`` while +``FLAGS.someflag`` is temporarily set to ``'foo'``:: + + from absl.testing import flagsaver + + # Use a decorator which can optionally override flags via arguments. 
+ @flagsaver.flagsaver(someflag='foo') + def some_func(): + do_stuff() + + # Use a decorator which can optionally override flags with flagholders. + @flagsaver.flagsaver((module.FOO_FLAG, 'foo'), (other_mod.BAR_FLAG, 23)) + def some_func(): + do_stuff() + + # Use a decorator which does not override flags itself. + @flagsaver.flagsaver + def some_func(): + FLAGS.someflag = 'foo' + do_stuff() + + # Use a context manager which can optionally override flags via arguments. + with flagsaver.flagsaver(someflag='foo'): + do_stuff() + + # Save and restore the flag values yourself. + saved_flag_values = flagsaver.save_flag_values() + try: + FLAGS.someflag = 'foo' + do_stuff() + finally: + flagsaver.restore_flag_values(saved_flag_values) + +We save and restore a shallow copy of each Flag object's ``__dict__`` attribute. This preserves all attributes of the flag, such as whether or not it was overridden from its default value. @@ -52,15 +59,10 @@ def some_func(): and then restore flag values, the added flag will be deleted with no errors. """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import functools import inspect from absl import flags -import six FLAGS = flags.FLAGS @@ -69,7 +71,8 @@ def flagsaver(*args, **kwargs): """The main flagsaver interface. See module doc for usage.""" if not args: return _FlagOverrider(**kwargs) - elif len(args) == 1: + # args can be [func] if used as `@flagsaver` instead of `@flagsaver(...)` + if len(args) == 1 and callable(args[0]): if kwargs: raise ValueError( "It's invalid to specify both positional and keyword parameters.") @@ -77,9 +80,18 @@ def flagsaver(*args, **kwargs): if inspect.isclass(func): raise TypeError('@flagsaver.flagsaver cannot be applied to a class.') return _wrap(func, {}) - else: - raise ValueError( - "It's invalid to specify more than one positional parameters.") + # args can be a list of (FlagHolder, value) pairs. 
+ # In which case they augment any specified kwargs. + for arg in args: + if not isinstance(arg, tuple) or len(arg) != 2: + raise ValueError('Expected (FlagHolder, value) pair, found %r' % (arg,)) + holder, value = arg + if not isinstance(holder, flags.FlagHolder): + raise ValueError('Expected (FlagHolder, value) pair, found %r' % (arg,)) + if holder.name in kwargs: + raise ValueError('Cannot set --%s multiple times' % holder.name) + kwargs[holder.name] = value + return _FlagOverrider(**kwargs) def save_flag_values(flag_values=FLAGS): @@ -90,7 +102,7 @@ def save_flag_values(flag_values=FLAGS): be saved. This should almost never need to be overridden. Returns: Dictionary mapping keys to values. Keys are flag names, values are - corresponding __dict__ members. E.g. {'key': value_dict, ...}. + corresponding ``__dict__`` members. E.g. ``{'key': value_dict, ...}``. """ return {name: _copy_flag_dict(flag_values[name]) for name in flag_values} @@ -154,8 +166,7 @@ def __call__(self, func): def __enter__(self): self._saved_flag_values = save_flag_values(FLAGS) try: - for name, value in six.iteritems(self._overrides): - setattr(FLAGS, name, value) + FLAGS._set_attributes(**self._overrides) except: # It may fail because of flag validators. restore_flag_values(self._saved_flag_values, FLAGS) @@ -166,16 +177,16 @@ def __exit__(self, exc_type, exc_value, traceback): def _copy_flag_dict(flag): - """Returns a copy of the flag object's __dict__. + """Returns a copy of the flag object's ``__dict__``. - It's mostly a shallow copy of the __dict__, except it also does a shallow + It's mostly a shallow copy of the ``__dict__``, except it also does a shallow copy of the validator list. Args: flag: flags.Flag, the flag to copy. Returns: - A copy of the flag object's __dict__. + A copy of the flag object's ``__dict__``. """ copy = flag.__dict__.copy() copy['_value'] = flag.value # Ensure correct restore for C++ flags. 
diff --git a/third_party/py/abseil/absl/testing/parameterized.py b/third_party/py/abseil/absl/testing/parameterized.py old mode 100755 new mode 100644 index d9301baad74536..650d6cf767b816 --- a/third_party/py/abseil/absl/testing/parameterized.py +++ b/third_party/py/abseil/absl/testing/parameterized.py @@ -17,152 +17,209 @@ A parameterized test is a method in a test case that is invoked with different argument tuples. -A simple example: - - class AdditionExample(parameterized.TestCase): - @parameterized.parameters( - (1, 2, 3), - (4, 5, 9), - (1, 1, 3)) - def testAddition(self, op1, op2, result): - self.assertEqual(result, op1 + op2) +A simple example:: + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + (1, 2, 3), + (4, 5, 9), + (1, 1, 3)) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) Each invocation is a separate test case and properly isolated just like a normal test method, with its own setUp/tearDown cycle. In the example above, there are three separate testcases, one of which will fail due to an assertion error (1 + 1 != 3). 
-Parameters for invididual test cases can be tuples (with positional parameters) -or dictionaries (with named parameters): +Parameters for individual test cases can be tuples (with positional parameters) +or dictionaries (with named parameters):: - class AdditionExample(parameterized.TestCase): - @parameterized.parameters( - {'op1': 1, 'op2': 2, 'result': 3}, - {'op1': 4, 'op2': 5, 'result': 9}, - ) - def testAddition(self, op1, op2, result): - self.assertEqual(result, op1 + op2) + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + {'op1': 1, 'op2': 2, 'result': 3}, + {'op1': 4, 'op2': 5, 'result': 9}, + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) If a parameterized test fails, the error message will show the -original test name (which is modified internally) and the arguments -for the specific invocation, which are part of the string returned by -the shortDescription() method on test cases. - -The id method of the test, used internally by the unittest framework, -is also modified to show the arguments. To make sure that test names -stay the same across several invocations, object representations like - - >>> class Foo(object): - ... pass - >>> repr(Foo()) - '<__main__.Foo object at 0x23d8610>' - -are turned into '<__main__.Foo>'. For even more descriptive names, -especially in test logs, you can use the named_parameters decorator. In -this case, only tuples or dicts are supported. For tuples, the first parameters +original test name and the parameters for that test. + +The id method of the test, used internally by the unittest framework, is also +modified to show the arguments (but note that the name reported by `id()` +doesn't match the actual test name, see below). To make sure that test names +stay the same across several invocations, object representations like:: + + >>> class Foo(object): + ... pass + >>> repr(Foo()) + '<__main__.Foo object at 0x23d8610>' + +are turned into ``__main__.Foo``. 
When selecting a subset of test cases to run +on the command-line, the test cases contain an index suffix for each argument +in the order they were passed to :func:`parameters` (eg. testAddition0, +testAddition1, etc.) This naming scheme is subject to change; for more reliable +and stable names, especially in test logs, use :func:`named_parameters` instead. + +Tests using :func:`named_parameters` are similar to :func:`parameters`, except +only tuples or dicts of args are supported. For tuples, the first parameter arg has to be a string (or an object that returns an apt name when converted via -str()). For dicts, a value for the key 'testcase_name' must be present and must -be a string (or an object that returns an apt name when converted via str()): - - class NamedExample(parameterized.TestCase): - @parameterized.named_parameters( - ('Normal', 'aa', 'aaa', True), - ('EmptyPrefix', '', 'abc', True), - ('BothEmpty', '', '', True)) - def testStartsWith(self, prefix, string, result): - self.assertEqual(result, string.startswith(prefix)) - - class NamedExample(parameterized.TestCase): - @parameterized.named_parameters( - {'testcase_name': 'Normal', - 'result': True, 'string': 'aaa', 'prefix': 'aa'}, - {'testcase_name': 'EmptyPrefix', - 'result': True, 'string: 'abc', 'prefix': ''}, - {'testcase_name': 'BothEmpty', - 'result': True, 'string': '', 'prefix': ''}) - def testStartsWith(self, prefix, string, result): - self.assertEqual(result, string.startswith(prefix)) +``str()``). 
For dicts, a value for the key ``testcase_name`` must be present and +must be a string (or an object that returns an apt name when converted via +``str()``):: + + class NamedExample(parameterized.TestCase): + @parameterized.named_parameters( + ('Normal', 'aa', 'aaa', True), + ('EmptyPrefix', '', 'abc', True), + ('BothEmpty', '', '', True)) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, string.startswith(prefix)) + + class NamedExample(parameterized.TestCase): + @parameterized.named_parameters( + {'testcase_name': 'Normal', + 'result': True, 'string': 'aaa', 'prefix': 'aa'}, + {'testcase_name': 'EmptyPrefix', + 'result': True, 'string': 'abc', 'prefix': ''}, + {'testcase_name': 'BothEmpty', + 'result': True, 'string': '', 'prefix': ''}) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, string.startswith(prefix)) Named tests also have the benefit that they can be run individually -from the command line: +from the command line:: - $ testmodule.py NamedExample.testStartsWithNormal - . - -------------------------------------------------------------------- - Ran 1 test in 0.000s + $ testmodule.py NamedExample.testStartsWithNormal + . 
+ -------------------------------------------------------------------- + Ran 1 test in 0.000s - OK + OK Parameterized Classes ===================== + If invocation arguments are shared across test methods in a single TestCase class, instead of decorating all test methods -individually, the class itself can be decorated: +individually, the class itself can be decorated:: - @parameterized.parameters( - (1, 2, 3), - (4, 5, 9)) - class ArithmeticTest(parameterized.TestCase): - def testAdd(self, arg1, arg2, result): - self.assertEqual(arg1 + arg2, result) + @parameterized.parameters( + (1, 2, 3), + (4, 5, 9)) + class ArithmeticTest(parameterized.TestCase): + def testAdd(self, arg1, arg2, result): + self.assertEqual(arg1 + arg2, result) - def testSubtract(self, arg1, arg2, result): - self.assertEqual(result - arg1, arg2) + def testSubtract(self, arg1, arg2, result): + self.assertEqual(result - arg1, arg2) Inputs from Iterables ===================== + If parameters should be shared across several test cases, or are dynamically created from other sources, a single non-tuple iterable can be passed into -the decorator. This iterable will be used to obtain the test cases: +the decorator. 
This iterable will be used to obtain the test cases:: - class AdditionExample(parameterized.TestCase): - @parameterized.parameters( - c.op1, c.op2, c.result for c in testcases - ) - def testAddition(self, op1, op2, result): - self.assertEqual(result, op1 + op2) + class AdditionExample(parameterized.TestCase): + @parameterized.parameters( + c.op1, c.op2, c.result for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) Single-Argument Test Methods ============================ + If a test method takes only one argument, the single arguments must not be -wrapped into a tuple: +wrapped into a tuple:: - class NegativeNumberExample(parameterized.TestCase): - @parameterized.parameters( - -1, -3, -4, -5 - ) - def testIsNegative(self, arg): - self.assertTrue(IsNegative(arg)) + class NegativeNumberExample(parameterized.TestCase): + @parameterized.parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) List/tuple as a Single Argument =============================== + If a test method takes a single argument of a list/tuple, it must be wrapped -inside a tuple: +inside a tuple:: + + class ZeroSumExample(parameterized.TestCase): + @parameterized.parameters( + ([-1, 0, 1], ), + ([-2, 0, 2], ), + ) + def testSumIsZero(self, arg): + self.assertEqual(0, sum(arg)) + + +Cartesian product of Parameter Values as Parametrized Test Cases +================================================================ + +If required to test method over a cartesian product of parameters, +`parameterized.product` may be used to facilitate generation of parameters +test combinations:: + + class TestModuloExample(parameterized.TestCase): + @parameterized.product( + num=[0, 20, 80], + modulo=[2, 4], + expected=[0] + ) + def testModuloResult(self, num, modulo, expected): + self.assertEqual(expected, num % modulo) + +This results in 6 test cases being created - one for each combination of the +parameters. 
It is also possible to supply sequences of keyword argument dicts +as elements of the cartesian product:: + + @parameterized.product( + (dict(num=5, modulo=3, expected=2), + dict(num=7, modulo=4, expected=3)), + dtype=(int, float) + ) + def testModuloResult(self, num, modulo, expected, dtype): + self.assertEqual(expected, dtype(num) % modulo) + +This results in 4 test cases being created - for each of the two sets of test +data (supplied as kwarg dicts) and for each of the two data types (supplied as +a named parameter). Multiple keyword argument dicts may be supplied if required. + +Async Support +============= + +If a test needs to call async functions, it can inherit from both +parameterized.TestCase and another TestCase that supports async calls, such +as [asynctest](https://github.com/Martiusweb/asynctest):: + + import asynctest - class ZeroSumExample(parameterized.TestCase): + class AsyncExample(parameterized.TestCase, asynctest.TestCase): @parameterized.parameters( - ([-1, 0, 1], ), - ([-2, 0, 2], ), + ('a', 1), + ('b', 2), ) - def testSumIsZero(self, arg): - self.assertEqual(0, sum(arg)) + async def testSomeAsyncFunction(self, arg, expected): + actual = await someAsyncFunction(arg) + self.assertEqual(actual, expected) """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import collections +from collections import abc import functools +import inspect +import itertools import re import types import unittest from absl.testing import absltest -import six + _ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') _NAMED = object() @@ -174,30 +231,48 @@ class NoTestsError(Exception): """Raised when parameterized decorators do not generate any tests.""" +class DuplicateTestNameError(Exception): + """Raised when a parameterized test has the same test name multiple times.""" + + def __init__(self, test_class_name, new_test_name, original_test_name): + super(DuplicateTestNameError, 
self).__init__( + 'Duplicate parameterized test name in {}: generated test name {!r} ' + '(generated from {!r}) already exists. Consider using ' + 'named_parameters() to give your tests unique names and/or renaming ' + 'the conflicting test method.'.format( + test_class_name, new_test_name, original_test_name)) + + def _clean_repr(obj): return _ADDR_RE.sub(r'<\1>', repr(obj)) def _non_string_or_bytes_iterable(obj): - return (isinstance(obj, collections.Iterable) and - not isinstance(obj, six.text_type) and - not isinstance(obj, six.binary_type)) + return (isinstance(obj, abc.Iterable) and not isinstance(obj, str) and + not isinstance(obj, bytes)) def _format_parameter_list(testcase_params): - if isinstance(testcase_params, collections.Mapping): + if isinstance(testcase_params, abc.Mapping): return ', '.join('%s=%s' % (argname, _clean_repr(value)) - for argname, value in six.iteritems(testcase_params)) + for argname, value in testcase_params.items()) elif _non_string_or_bytes_iterable(testcase_params): return ', '.join(map(_clean_repr, testcase_params)) else: return _format_parameter_list((testcase_params,)) +def _async_wrapped(func): + @functools.wraps(func) + async def wrapper(*args, **kwargs): + return await func(*args, **kwargs) + return wrapper + + class _ParameterizedTestIter(object): """Callable and iterable class for producing new test cases.""" - def __init__(self, test_method, testcases, naming_type): + def __init__(self, test_method, testcases, naming_type, original_name=None): """Returns concrete test functions for a test and a list of parameters. The naming_type is used to determine the name of the concrete @@ -207,13 +282,21 @@ def __init__(self, test_method, testcases, naming_type): Args: test_method: The decorated test method. - testcases: (list of tuple/dict) A list of parameter - tuples/dicts for individual test invocations. + testcases: (list of tuple/dict) A list of parameter tuples/dicts for + individual test invocations. 
naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. + original_name: The original test method name. When decorated on a test + method, None is passed to __init__ and test_method.__name__ is used. + Note test_method.__name__ might be different than the original defined + test method because of the use of other decorators. A more accurate + value is set by TestGeneratorMetaclass.__new__ later. """ self._test_method = test_method self.testcases = testcases self._naming_type = naming_type + if original_name is None: + original_name = test_method.__name__ + self._original_name = original_name self.__name__ = _ParameterizedTestIter.__name__ def __call__(self, *args, **kwargs): @@ -221,23 +304,22 @@ def __call__(self, *args, **kwargs): 'without having inherited from parameterized.' 'TestCase. This is bad because none of ' 'your test cases are actually being run. You may also ' - 'be using a mock annotation before the parameterized ' + 'be using another decorator before the parameterized ' 'one, in which case you should reverse the order.') def __iter__(self): test_method = self._test_method naming_type = self._naming_type - extra_ids = collections.defaultdict(int) def make_bound_param_test(testcase_params): @functools.wraps(test_method) def bound_param_test(self): - if isinstance(testcase_params, collections.Mapping): - test_method(self, **testcase_params) + if isinstance(testcase_params, abc.Mapping): + return test_method(self, **testcase_params) elif _non_string_or_bytes_iterable(testcase_params): - test_method(self, *testcase_params) + return test_method(self, *testcase_params) else: - test_method(self, testcase_params) + return test_method(self, testcase_params) if naming_type is _NAMED: # Signal the metaclass that the name of the test function is unique @@ -245,28 +327,34 @@ def bound_param_test(self): bound_param_test.__x_use_name__ = True testcase_name = None - if isinstance(testcase_params, collections.Mapping): + if isinstance(testcase_params, 
abc.Mapping): if _NAMED_DICT_KEY not in testcase_params: raise RuntimeError( 'Dict for named tests must contain key "%s"' % _NAMED_DICT_KEY) # Create a new dict to avoid modifying the supplied testcase_params. testcase_name = testcase_params[_NAMED_DICT_KEY] - testcase_params = {k: v for k, v in six.iteritems(testcase_params) - if k != _NAMED_DICT_KEY} + testcase_params = { + k: v for k, v in testcase_params.items() if k != _NAMED_DICT_KEY + } elif _non_string_or_bytes_iterable(testcase_params): + if not isinstance(testcase_params[0], str): + raise RuntimeError( + 'The first element of named test parameters is the test name ' + 'suffix and must be a string') testcase_name = testcase_params[0] testcase_params = testcase_params[1:] else: raise RuntimeError( 'Named tests must be passed a dict or non-string iterable.') + test_method_name = self._original_name # Support PEP-8 underscore style for test naming if used. - if (bound_param_test.__name__.startswith('test_') + if (test_method_name.startswith('test_') and testcase_name and not testcase_name.startswith('_')): - bound_param_test.__name__ += '_' + test_method_name += '_' - bound_param_test.__name__ += str(testcase_name) + bound_param_test.__name__ = test_method_name + str(testcase_name) elif naming_type is _ARGUMENT_REPR: # If it's a generator, convert it to a tuple and treat them as # parameters. @@ -276,13 +364,9 @@ def bound_param_test(self): # _ARGUMENT_REPR tests using an indexed suffix. # To keep test names descriptive, only the original method name is used. # To make sure test names are unique, we add a unique descriptive suffix - # __x_extra_id__ for every test. - extra_id = '(%s)' % (_format_parameter_list(testcase_params),) - extra_ids[extra_id] += 1 - while extra_ids[extra_id] > 1: - extra_id = '%s (%d)' % (extra_id, extra_ids[extra_id]) - extra_ids[extra_id] += 1 - bound_param_test.__x_extra_id__ = extra_id + # __x_params_repr__ for every test. 
+ params_repr = '(%s)' % (_format_parameter_list(testcase_params),) + bound_param_test.__x_params_repr__ = params_repr else: raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) @@ -290,27 +374,31 @@ def bound_param_test(self): bound_param_test.__name__, _format_parameter_list(testcase_params)) if test_method.__doc__: bound_param_test.__doc__ += '\n%s' % (test_method.__doc__,) + if inspect.iscoroutinefunction(test_method): + return _async_wrapped(bound_param_test) return bound_param_test return (make_bound_param_test(c) for c in self.testcases) def _modify_class(class_object, testcases, naming_type): - assert not getattr(class_object, '_test_method_ids', None), ( + assert not getattr(class_object, '_test_params_reprs', None), ( 'Cannot add parameters to %s. Either it already has parameterized ' 'methods, or its super class is also a parameterized class.' % ( class_object,)) - class_object._test_method_ids = test_method_ids = {} - for name, obj in six.iteritems(class_object.__dict__.copy()): + # NOTE: _test_params_repr is private to parameterized.TestCase and it's + # metaclass; do not use it outside of those classes. 
+ class_object._test_params_reprs = test_params_reprs = {} + for name, obj in class_object.__dict__.copy().items(): if (name.startswith(unittest.TestLoader.testMethodPrefix) and isinstance(obj, types.FunctionType)): delattr(class_object, name) methods = {} _update_class_dict_for_param_test_case( - methods, test_method_ids, name, - _ParameterizedTestIter(obj, testcases, naming_type)) - for name, meth in six.iteritems(methods): - setattr(class_object, name, meth) + class_object.__name__, methods, test_params_reprs, name, + _ParameterizedTestIter(obj, testcases, naming_type, name)) + for meth_name, meth in methods.items(): + setattr(class_object, meth_name, meth) def _parameter_decorator(naming_type, testcases): @@ -335,16 +423,15 @@ def _apply(obj): if (len(testcases) == 1 and not isinstance(testcases[0], tuple) and - not (naming_type == _NAMED and - isinstance(testcases[0], collections.Mapping))): + not isinstance(testcases[0], abc.Mapping)): # Support using a single non-tuple parameter as a list of test cases. - # Note in named parameters case, the single non-tuple parameter can't be - # Mapping either, which means a single named parameter case. + # Note that the single non-tuple parameter can't be Mapping either, which + # means a single dict parameter case. assert _non_string_or_bytes_iterable(testcases[0]), ( - 'Single parameter argument must be a non-string iterable') + 'Single parameter argument must be a non-string non-Mapping iterable') testcases = testcases[0] - if not isinstance(testcases, collections.Sequence): + if not isinstance(testcases, abc.Sequence): testcases = list(testcases) if not testcases: raise NoTestsError( @@ -396,80 +483,174 @@ def named_parameters(*testcases): return _parameter_decorator(_NAMED, testcases) -class TestGeneratorMetaclass(type): - """Metaclass for test cases with test generators. +def product(*kwargs_seqs, **testgrid): + """A decorator for running tests over cartesian product of parameters values. 
+ + See the module docstring for a usage example. The test will be run for every + possible combination of the parameters. + + Args: + *kwargs_seqs: Each positional parameter is a sequence of keyword arg dicts; + every test case generated will include exactly one kwargs dict from each + positional parameter; these will then be merged to form an overall list + of arguments for the test case. + **testgrid: A mapping of parameter names and their possible values. Possible + values should given as either a list or a tuple. - A test generator is an iterable in a testcase that produces callables. These - callables must be single-argument methods. These methods are injected into - the class namespace and the original iterable is removed. If the name of the - iterable conforms to the test pattern, the injected methods will be picked - up as tests by the unittest framework. + Raises: + NoTestsError: Raised when the decorator generates no tests. - In general, it is supposed to be used in conjuction with the - parameters decorator. + Returns: + A test generator to be handled by TestGeneratorMetaclass. 
""" - def __new__(mcs, class_name, bases, dct): - test_method_ids = dct.setdefault('_test_method_ids', {}) - for name, obj in six.iteritems(dct.copy()): + for name, values in testgrid.items(): + assert isinstance(values, (list, tuple)), ( + 'Values of {} must be given as list or tuple, found {}'.format( + name, type(values))) + + prior_arg_names = set() + for kwargs_seq in kwargs_seqs: + assert ((isinstance(kwargs_seq, (list, tuple))) and + all(isinstance(kwargs, dict) for kwargs in kwargs_seq)), ( + 'Positional parameters must be a sequence of keyword arg' + 'dicts, found {}' + .format(kwargs_seq)) + if kwargs_seq: + arg_names = set(kwargs_seq[0]) + assert all(set(kwargs) == arg_names for kwargs in kwargs_seq), ( + 'Keyword argument dicts within a single parameter must all have the ' + 'same keys, found {}'.format(kwargs_seq)) + assert not (arg_names & prior_arg_names), ( + 'Keyword argument dict sequences must all have distinct argument ' + 'names, found duplicate(s) {}' + .format(sorted(arg_names & prior_arg_names))) + prior_arg_names |= arg_names + + assert not (prior_arg_names & set(testgrid)), ( + 'Arguments supplied in kwargs dicts in positional parameters must not ' + 'overlap with arguments supplied as named parameters; found duplicate ' + 'argument(s) {}'.format(sorted(prior_arg_names & set(testgrid)))) + + # Convert testgrid into a sequence of sequences of kwargs dicts and combine + # with the positional parameters. + # So foo=[1,2], bar=[3,4] --> [[{foo: 1}, {foo: 2}], [{bar: 3, bar: 4}]] + testgrid = (tuple({k: v} for v in vs) for k, vs in testgrid.items()) + testgrid = tuple(kwargs_seqs) + tuple(testgrid) + + # Create all possible combinations of parameters as a cartesian product + # of parameter values. 
+ testcases = [ + dict(itertools.chain.from_iterable(case.items() + for case in cases)) + for cases in itertools.product(*testgrid) + ] + return _parameter_decorator(_ARGUMENT_REPR, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for adding tests generated by parameterized decorators.""" + + def __new__(cls, class_name, bases, dct): + # NOTE: _test_params_repr is private to parameterized.TestCase and it's + # metaclass; do not use it outside of those classes. + test_params_reprs = dct.setdefault('_test_params_reprs', {}) + for name, obj in dct.copy().items(): if (name.startswith(unittest.TestLoader.testMethodPrefix) and _non_string_or_bytes_iterable(obj)): + # NOTE: `obj` might not be a _ParameterizedTestIter in two cases: + # 1. a class-level iterable named test* that isn't a test, such as + # a list of something. Such attributes get deleted from the class. + # + # 2. If a decorator is applied to the parameterized test, e.g. + # @morestuff + # @parameterized.parameters(...) + # def test_foo(...): ... + # + # This is OK so long as the underlying parameterized function state + # is forwarded (e.g. using functool.wraps() and **without** + # accessing explicitly accessing the internal attributes. + if isinstance(obj, _ParameterizedTestIter): + # Update the original test method name so it's more accurate. + # The mismatch might happen when another decorator is used inside + # the parameterized decrators, and the inner decorator doesn't + # preserve its __name__. + obj._original_name = name iterator = iter(obj) dct.pop(name) _update_class_dict_for_param_test_case( - dct, test_method_ids, name, iterator) + class_name, dct, test_params_reprs, name, iterator) # If the base class is a subclass of parameterized.TestCase, inherit its - # _test_method_ids too. + # _test_params_reprs too. 
for base in bases: - # Check if the base has _test_method_ids first, then check if it's a + # Check if the base has _test_params_reprs first, then check if it's a # subclass of parameterized.TestCase. Otherwise when this is called for # the parameterized.TestCase definition itself, this raises because # itself is not defined yet. This works as long as absltest.TestCase does - # not define _test_method_ids. - if getattr(base, '_test_method_ids', None) and issubclass(base, TestCase): - for test_method, test_method_id in six.iteritems(base._test_method_ids): + # not define _test_params_reprs. + base_test_params_reprs = getattr(base, '_test_params_reprs', None) + if base_test_params_reprs and issubclass(base, TestCase): + for test_method, test_method_id in base_test_params_reprs.items(): # test_method may both exists in base and this class. # This class's method overrides base class's. # That's why it should only inherit it if it does not exist. - test_method_ids.setdefault(test_method, test_method_id) + test_params_reprs.setdefault(test_method, test_method_id) - return type.__new__(mcs, class_name, bases, dct) + return type.__new__(cls, class_name, bases, dct) def _update_class_dict_for_param_test_case( - dct, test_method_ids, name, iterator): + test_class_name, dct, test_params_reprs, name, iterator): """Adds individual test cases to a dictionary. Args: + test_class_name: The name of the class tests are added to. dct: The target dictionary. - test_method_ids: The dictionary for mapping names to test IDs. + test_params_reprs: The dictionary for mapping names to test IDs. name: The original name of the test case. iterator: The iterator generating the individual test cases. + + Raises: + DuplicateTestNameError: Raised when a test name occurs multiple times. + RuntimeError: If non-parameterized functions are generated. 
""" for idx, func in enumerate(iterator): assert callable(func), 'Test generators must yield callables, got %r' % ( func,) + if not (getattr(func, '__x_use_name__', None) or + getattr(func, '__x_params_repr__', None)): + raise RuntimeError( + '{}.{} generated a test function without using the parameterized ' + 'decorators. Only tests generated using the decorators are ' + 'supported.'.format(test_class_name, name)) + if getattr(func, '__x_use_name__', False): original_name = func.__name__ new_name = original_name else: original_name = name new_name = '%s%d' % (original_name, idx) - assert new_name not in dct, ( - 'Name of parameterized test case "%s" not unique' % (new_name,)) + + if new_name in dct: + raise DuplicateTestNameError(test_class_name, new_name, original_name) + dct[new_name] = func - test_method_id = original_name + getattr(func, '__x_extra_id__', '') - assert test_method_id not in test_method_ids.values(), ( - 'Id of parameterized test case "%s" not unique' % (test_method_id,)) - test_method_ids[new_name] = test_method_id + test_params_reprs[new_name] = getattr(func, '__x_params_repr__', '') -class TestCase(six.with_metaclass(TestGeneratorMetaclass, absltest.TestCase)): +class TestCase(absltest.TestCase, metaclass=TestGeneratorMetaclass): """Base class for test cases using the parameters decorator.""" + # visibility: private; do not call outside this class. + def _get_params_repr(self): + return self._test_params_reprs.get(self._testMethodName, '') + def __str__(self): - return '%s (%s)' % ( - self._test_method_ids.get(self._testMethodName, self._testMethodName), + params_repr = self._get_params_repr() + if params_repr: + params_repr = ' ' + params_repr + return '{}{} ({})'.format( + self._testMethodName, params_repr, unittest.util.strclass(self.__class__)) def id(self): @@ -481,11 +662,16 @@ def id(self): Returns: The test id. 
""" - return '%s.%s' % ( - unittest.util.strclass(self.__class__), - # When a test method is NOT decorated, it doesn't exist in - # _test_method_ids. Use the _testMethodName directly. - self._test_method_ids.get(self._testMethodName, self._testMethodName)) + base = super(TestCase, self).id() + params_repr = self._get_params_repr() + if params_repr: + # We include the params in the id so that, when reported in the + # test.xml file, the value is more informative than just "test_foo0". + # Use a space to separate them so that it's copy/paste friendly and + # easy to identify the actual test id. + return '{} {}'.format(base, params_repr) + else: + return base # This function is kept CamelCase because it's used as a class's base class. @@ -494,16 +680,16 @@ def CoopTestCase(other_base_class): # pylint: disable=invalid-name This enables the TestCase to be used in combination with other base classes that have custom metaclasses, such as - mox.MoxTestBase. + ``mox.MoxTestBase``. - Only works with metaclasses that do not override type.__new__. + Only works with metaclasses that do not override ``type.__new__``. - Example: + Example:: - from absl.testing import parameterized + from absl.testing import parameterized - class ExampleTest(parameterized.CoopTestCase(OtherTestCase)): - ... + class ExampleTest(parameterized.CoopTestCase(OtherTestCase)): + ... Args: other_base_class: (class) A test case base class. 
diff --git a/third_party/py/abseil/absl/testing/xml_reporter.py b/third_party/py/abseil/absl/testing/xml_reporter.py old mode 100755 new mode 100644 index 9c095e780f391f..591eb7efcbf46f --- a/third_party/py/abseil/absl/testing/xml_reporter.py +++ b/third_party/py/abseil/absl/testing/xml_reporter.py @@ -14,10 +14,7 @@ """A Python test reporter that generates test reports in JUnit XML format.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - +import datetime import re import sys import threading @@ -25,8 +22,7 @@ import traceback import unittest from xml.sax import saxutils - -import six +from absl.testing import _pretty_print_reporter # See http://www.w3.org/TR/REC-xml/#NT-Char @@ -73,11 +69,42 @@ def _escape_cdata(s): Returns: An escaped version of the input string. """ - for char, escaped in six.iteritems(_control_character_conversions): + for char, escaped in _control_character_conversions.items(): s = s.replace(char, escaped) return s.replace(']]>', ']] >') +def _iso8601_timestamp(timestamp): + """Produces an ISO8601 datetime. + + Args: + timestamp: an Epoch based timestamp in seconds. + + Returns: + A iso8601 format timestamp if the input is a valid timestamp, None otherwise + """ + if timestamp is None or timestamp < 0: + return None + return datetime.datetime.fromtimestamp( + timestamp, tz=datetime.timezone.utc).isoformat() + + +def _print_xml_element_header(element, attributes, stream, indentation=''): + """Prints an XML header of an arbitrary element. 
+ + Args: + element: element name (testsuites, testsuite, testcase) + attributes: 2-tuple list with (attributes, values) already escaped + stream: output stream to write test report XML to + indentation: indentation added to the element header + """ + stream.write('%s<%s' % (indentation, element)) + for attribute in attributes: + if (len(attribute) == 2 and attribute[0] is not None and + attribute[1] is not None): + stream.write(' %s="%s"' % (attribute[0], attribute[1])) + stream.write('>\n') + # Copy time.time which ensures the real time is used internally. # This prevents bad interactions with tests that stub out time. _time_copy = time.time @@ -97,6 +124,7 @@ class _TestCaseResult(object): name: The name of the individual test method. full_class_name: The full name of the test class. run_time: The duration (in seconds) it took to run the test. + start_time: Epoch relative timestamp of when test started (in seconds) errors: A list of error 4-tuples. Error tuple entries are 1) a string identifier of either "failure" or "error" 2) an exception_type @@ -110,6 +138,7 @@ class _TestCaseResult(object): def __init__(self, test): self.run_time = -1 + self.start_time = -1 self.skip_reason = None self.errors = [] self.test = test @@ -126,6 +155,10 @@ def __init__(self, test): full_class_name = match.group(2) else: class_name = unittest.util.strclass(test.__class__) + if isinstance(test, unittest.case._SubTest): + # If the test case is a _SubTest, the real TestCase instance is + # available as _SubTest.test_case. + class_name = unittest.util.strclass(test.test_case.__class__) if test_desc.startswith(class_name + '.'): # In a typical unittest.TestCase scenario, test.id() returns with # a class name formatted using unittest.util.strclass. 
@@ -143,6 +176,9 @@ def __init__(self, test): def set_run_time(self, time_in_secs): self.run_time = time_in_secs + def set_start_time(self, time_in_secs): + self.start_time = time_in_secs + def print_xml_summary(self, stream): """Prints an XML Summary of a TestCase. @@ -162,10 +198,15 @@ def print_xml_summary(self, stream): status = 'notrun' result = 'suppressed' - stream.write( - ' \n' % ( - self.name, status, result, self.run_time, self.full_class_name)) + test_case_attributes = [ + ('name', '%s' % self.name), + ('status', '%s' % status), + ('result', '%s' % result), + ('time', '%.3f' % self.run_time), + ('classname', self.full_class_name), + ('timestamp', _iso8601_timestamp(self.start_time)), + ] + _print_xml_element_header('testcase', test_case_attributes, stream, ' ') self._print_testcase_details(stream) stream.write(' \n') @@ -186,6 +227,9 @@ def __init__(self): self.suites = {} self.failure_counts = {} self.error_counts = {} + self.overall_start_time = -1 + self.overall_end_time = -1 + self._testsuites_properties = {} def add_test_case_result(self, test_case_result): suite_name = type(test_case_result.test).__name__ @@ -193,6 +237,11 @@ def add_test_case_result(self, test_case_result): # _ErrorHolder is a special case created by unittest for class / module # level functions. suite_name = test_case_result.full_class_name.rsplit('.')[-1] + if isinstance(test_case_result.test, unittest.case._SubTest): + # If the test case is a _SubTest, the real TestCase instance is + # available as _SubTest.test_case. 
+ suite_name = type(test_case_result.test.test_case).__name__ + self._setup_test_suite(suite_name) self.suites[suite_name].append(test_case_result) for error in test_case_result.errors: @@ -206,24 +255,41 @@ def add_test_case_result(self, test_case_result): break def print_xml_summary(self, stream): - overall_test_count = sum([len(x) for x in self.suites.values()]) + overall_test_count = sum(len(x) for x in self.suites.values()) overall_failures = sum(self.failure_counts.values()) overall_errors = sum(self.error_counts.values()) - overall_time = 0 - for tests in self.suites.values(): - overall_time += sum([x.run_time for x in tests]) - overall_args = (overall_test_count, overall_failures, overall_errors, - overall_time) - stream.write('\n' % overall_args) + overall_attributes = [ + ('name', ''), + ('tests', '%d' % overall_test_count), + ('failures', '%d' % overall_failures), + ('errors', '%d' % overall_errors), + ('time', '%.3f' % (self.overall_end_time - self.overall_start_time)), + ('timestamp', _iso8601_timestamp(self.overall_start_time)), + ] + _print_xml_element_header('testsuites', overall_attributes, stream) + if self._testsuites_properties: + stream.write(' \n') + for name, value in sorted(self._testsuites_properties.items()): + stream.write(' \n' % + (_escape_xml_attr(name), _escape_xml_attr(str(value)))) + stream.write(' \n') + for suite_name in self.suites: suite = self.suites[suite_name] - suite_time = sum([x.run_time for x in suite]) + suite_end_time = max(x.start_time + x.run_time for x in suite) + suite_start_time = min(x.start_time for x in suite) failures = self.failure_counts[suite_name] errors = self.error_counts[suite_name] - args = (suite_name, len(suite), failures, errors, suite_time) - stream.write('\n' % args) + suite_attributes = [ + ('name', '%s' % suite_name), + ('tests', '%d' % len(suite)), + ('failures', '%d' % failures), + ('errors', '%d' % errors), + ('time', '%.3f' % (suite_end_time - suite_start_time)), + ('timestamp', 
_iso8601_timestamp(suite_start_time)),
+      ]
+      _print_xml_element_header('testsuite', suite_attributes, stream)
+
       for test_case_result in suite:
         test_case_result.print_xml_summary(stream)
       stream.write('</testsuite>\n')
@@ -241,8 +307,24 @@ def _setup_test_suite(self, suite_name):
     self.failure_counts[suite_name] = 0
     self.error_counts[suite_name] = 0
 
+  def set_end_time(self, timestamp_in_secs):
+    """Sets the end timestamp of this test suite.
+
+    Args:
+      timestamp_in_secs: timestamp in seconds since epoch
+    """
+    self.overall_end_time = timestamp_in_secs
+
+  def set_start_time(self, timestamp_in_secs):
+    """Sets the start timestamp of this test suite.
+
+    Args:
+      timestamp_in_secs: timestamp in seconds since epoch
+    """
+    self.overall_start_time = timestamp_in_secs
 
-class _TextAndXMLTestResult(unittest.TextTestResult):
+
+class _TextAndXMLTestResult(_pretty_print_reporter.TextTestResult):
   """Private TestResult class that produces both formatted text results and XML.
 
   Used by TextAndXMLTestRunner.
@@ -252,15 +334,17 @@ class _TextAndXMLTestResult(unittest.TextTestResult):
   _TEST_CASE_RESULT_CLASS = _TestCaseResult
 
   def __init__(self, xml_stream, stream, descriptions, verbosity,
-               time_getter=_time_copy):
+               time_getter=_time_copy, testsuites_properties=None):
     super(_TextAndXMLTestResult, self).__init__(stream, descriptions,
                                                 verbosity)
     self.xml_stream = xml_stream
     self.pending_test_case_results = {}
     self.suite = self._TEST_SUITE_RESULT_CLASS()
+    if testsuites_properties:
+      self.suite._testsuites_properties = testsuites_properties
     self.time_getter = time_getter
 
     # This lock guards any mutations on pending_test_case_results.
- self._pending_test_case_results_lock = threading.Lock() + self._pending_test_case_results_lock = threading.RLock() def startTest(self, test): self.start_time = self.time_getter() @@ -278,12 +362,18 @@ def stopTest(self, test): test_id = id(test) run_time = self.time_getter() - self.start_time result.set_run_time(run_time) + result.set_start_time(self.start_time) self.suite.add_test_case_result(result) del self.pending_test_case_results[test_id] + def startTestRun(self): + self.suite.set_start_time(self.time_getter()) + super(_TextAndXMLTestResult, self).startTestRun() + def stopTestRun(self): + self.suite.set_end_time(self.time_getter()) # All pending_test_case_results will be added to the suite and removed from - # the pending_test_case_results dictionary. Grabing the write lock to avoid + # the pending_test_case_results dictionary. Grabbing the write lock to avoid # results from being added during this process to avoid duplicating adds or # accidentally erasing newly appended pending results. 
with self._pending_test_case_results_lock: @@ -295,8 +385,9 @@ def stopTestRun(self): for test_id in self.pending_test_case_results: result = self.pending_test_case_results[test_id] if hasattr(self, 'start_time'): - run_time = self.time_getter() - self.start_time + run_time = self.suite.overall_end_time - self.start_time result.set_run_time(run_time) + result.set_start_time(self.start_time) self.suite.add_test_case_result(result) self.pending_test_case_results.clear() @@ -364,12 +455,14 @@ def addSuccess(self, test): def addError(self, test, err): super(_TextAndXMLTestResult, self).addError(test, err) - error_summary = ('error', err[0], err[1], self._exc_info_to_string(err)) + error_summary = ('error', err[0], err[1], + self._exc_info_to_string(err, test=test)) self.add_pending_test_case_result(test, error_summary=error_summary) def addFailure(self, test, err): super(_TextAndXMLTestResult, self).addFailure(test, err) - error_summary = ('failure', err[0], err[1], self._exc_info_to_string(err)) + error_summary = ('failure', err[0], err[1], + self._exc_info_to_string(err, test=test)) self.add_pending_test_case_result(test, error_summary=error_summary) def addSkip(self, test, reason): @@ -379,7 +472,8 @@ def addSkip(self, test, reason): def addExpectedFailure(self, test, err): super(_TextAndXMLTestResult, self).addExpectedFailure(test, err) if callable(getattr(test, 'recordProperty', None)): - test.recordProperty('EXPECTED_FAILURE', self._exc_info_to_string(err)) + test.recordProperty('EXPECTED_FAILURE', + self._exc_info_to_string(err, test=test)) self.add_pending_test_case_result(test) def addUnexpectedSuccess(self, test): @@ -390,6 +484,19 @@ def addUnexpectedSuccess(self, test): % (test_name)) self.add_pending_test_case_result(test, error_summary=error_summary) + def addSubTest(self, test, subtest, err): # pylint: disable=invalid-name + super(_TextAndXMLTestResult, self).addSubTest(test, subtest, err) + if err is not None: + if issubclass(err[0], 
test.failureException): + error_summary = ('failure', err[0], err[1], + self._exc_info_to_string(err, test=test)) + else: + error_summary = ('error', err[0], err[1], + self._exc_info_to_string(err, test=test)) + else: + error_summary = None + self.add_pending_test_case_result(subtest, error_summary=error_summary) + def printErrors(self): super(_TextAndXMLTestResult, self).printErrors() self.xml_stream.write('\n') @@ -406,6 +513,7 @@ class TextAndXMLTestRunner(unittest.TextTestRunner): _TEST_RESULT_CLASS = _TextAndXMLTestResult _xml_stream = None + _testsuites_properties = {} def __init__(self, xml_stream=None, *args, **kwargs): """Initialize a TextAndXMLTestRunner. @@ -441,5 +549,10 @@ def _makeResult(self): if self._xml_stream is None: return super(TextAndXMLTestRunner, self)._makeResult() else: - return self._TEST_RESULT_CLASS(self._xml_stream, self.stream, - self.descriptions, self.verbosity) + return self._TEST_RESULT_CLASS( + self._xml_stream, self.stream, self.descriptions, self.verbosity, + testsuites_properties=self._testsuites_properties) + + @classmethod + def set_testsuites_property(cls, key, value): + cls._testsuites_properties[key] = value diff --git a/third_party/py/abseil/absl_py.egg-info/PKG-INFO b/third_party/py/abseil/absl_py.egg-info/PKG-INFO index c9b961612ae9ab..124b9d470d4ef5 100644 --- a/third_party/py/abseil/absl_py.egg-info/PKG-INFO +++ b/third_party/py/abseil/absl_py.egg-info/PKG-INFO @@ -1,21 +1,84 @@ -Metadata-Version: 1.1 +Metadata-Version: 2.1 Name: absl-py -Version: 0.1.1 -Summary: Abseil Python Common Libraries +Version: 1.3.0 +Summary: Abseil Python Common Libraries, see https://github.com/abseil/abseil-py. 
Home-page: https://github.com/abseil/abseil-py Author: The Abseil Authors -Author-email: UNKNOWN License: Apache 2.0 -Description: UNKNOWN -Platform: UNKNOWN Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 Classifier: Intended Audience :: Developers Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: OS Independent +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: AUTHORS + +# Abseil Python Common Libraries + +This repository is a collection of Python library code for building Python +applications. The code is collected from Google's own Python code base, and has +been extensively tested and used in production. + +## Features + +* Simple application startup +* Distributed commandline flags system +* Custom logging module with additional features +* Testing utilities + +## Getting Started + +### Installation + +To install the package, simply run: + +```bash +pip install absl-py +``` + +Or install from source: + +```bash +python setup.py install +``` + +### Running Tests + +To run Abseil tests, you can clone the git repo and run +[bazel](https://bazel.build/): + +```bash +git clone https://github.com/abseil/abseil-py.git +cd abseil-py +bazel test absl/... 
+``` + +### Example Code + +Please refer to +[smoke_tests/sample_app.py](https://github.com/abseil/abseil-py/blob/main/smoke_tests/sample_app.py) +as an example to get started. + +## Documentation + +See the [Abseil Python Developer Guide](https://abseil.io/docs/python/). + +## Future Releases + +The current repository includes an initial set of libraries for early adoption. +More components and interoperability with Abseil C++ Common Libraries +will come in future releases. + +## License + +The Abseil Python library is licensed under the terms of the Apache +license. See [LICENSE](LICENSE) for more information. diff --git a/third_party/py/abseil/absl_py.egg-info/SOURCES.txt b/third_party/py/abseil/absl_py.egg-info/SOURCES.txt index 4e12af5403ac90..00081eef5a6cbf 100644 --- a/third_party/py/abseil/absl_py.egg-info/SOURCES.txt +++ b/third_party/py/abseil/absl_py.egg-info/SOURCES.txt @@ -1,3 +1,7 @@ +AUTHORS +LICENSE +MANIFEST.in +README.md setup.py absl/__init__.py absl/app.py @@ -10,10 +14,13 @@ absl/flags/_flag.py absl/flags/_flagvalues.py absl/flags/_helpers.py absl/flags/_validators.py +absl/flags/_validators_classes.py +absl/flags/argparse_flags.py absl/logging/__init__.py absl/logging/converter.py absl/testing/__init__.py absl/testing/_bazelize_command.py +absl/testing/_pretty_print_reporter.py absl/testing/absltest.py absl/testing/flagsaver.py absl/testing/parameterized.py @@ -21,5 +28,4 @@ absl/testing/xml_reporter.py absl_py.egg-info/PKG-INFO absl_py.egg-info/SOURCES.txt absl_py.egg-info/dependency_links.txt -absl_py.egg-info/requires.txt absl_py.egg-info/top_level.txt \ No newline at end of file diff --git a/third_party/py/abseil/absl_py.egg-info/requires.txt b/third_party/py/abseil/absl_py.egg-info/requires.txt deleted file mode 100644 index 64c56a3640157e..00000000000000 --- a/third_party/py/abseil/absl_py.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -six \ No newline at end of file diff --git a/third_party/py/abseil/setup.cfg 
b/third_party/py/abseil/setup.cfg index 861a9f554263ef..8bfd5a12f85b8f 100644 --- a/third_party/py/abseil/setup.cfg +++ b/third_party/py/abseil/setup.cfg @@ -1,5 +1,4 @@ [egg_info] tag_build = tag_date = 0 -tag_svn_revision = 0 diff --git a/third_party/py/abseil/setup.py b/third_party/py/abseil/setup.py index cc596671cb32f1..f947fd7b15a5d1 100644 --- a/third_party/py/abseil/setup.py +++ b/third_party/py/abseil/setup.py @@ -14,11 +14,8 @@ """Abseil setup configuration.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import platform +import os +import sys try: import setuptools @@ -27,35 +24,51 @@ use_setuptools() import setuptools -py_version = platform.python_version_tuple() -if py_version < ('2', '7') or py_version[0] == '3' and py_version < ('3', '4'): - raise RuntimeError('Python version 2.7 or 3.4+ is required.') +if sys.version_info < (3, 6): + raise RuntimeError('Python version 3.6+ is required.') + +setuptools_version = tuple( + int(x) for x in setuptools.__version__.split('.')[:2]) +additional_kwargs = {} +if setuptools_version >= (24, 2): + # `python_requires` was added in 24.2, see + # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires + additional_kwargs['python_requires'] = '>=3.6' + +_README_PATH = os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'README.md') +with open(_README_PATH, 'rb') as fp: + LONG_DESCRIPTION = fp.read().decode('utf-8') setuptools.setup( name='absl-py', - version='0.1.1', - description='Abseil Python Common Libraries', + version='1.3.0', + description=( + 'Abseil Python Common Libraries, ' + 'see https://github.com/abseil/abseil-py.'), + long_description=LONG_DESCRIPTION, + long_description_content_type='text/markdown', author='The Abseil Authors', url='https://github.com/abseil/abseil-py', packages=setuptools.find_packages(exclude=[ '*.tests', '*.tests.*', 'tests.*', 'tests', ]), - install_requires=[ - 
'six', - ], + include_package_data=True, license='Apache 2.0', classifiers=[ 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', ], + **additional_kwargs, ) From 0f41d11b7af3c2856273e4ab5954e043d02d1cd2 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 07:49:41 -0700 Subject: [PATCH 08/68] Add Bzlmod Migration Guide to bazel.build Based on https://docs.google.com/document/d/1JtXIVnXyFZ4bmbiBCr5gsTH4-opZAFf5DMMb-54kES0/edit#heading=h.5mcn15i0e1ch, but with more code snippets as examples and re-organized the guide structure. 
PiperOrigin-RevId: 543742388 Change-Id: If77ff96b7686f206dd09f5c2453151dee149b087 --- site/en/_book.yaml | 2 + site/en/external/migration.md | 803 ++++++++++++++++++++++++++++++++++ site/en/external/module.md | 12 +- site/en/external/overview.md | 53 ++- 4 files changed, 840 insertions(+), 30 deletions(-) create mode 100644 site/en/external/migration.md diff --git a/site/en/_book.yaml b/site/en/_book.yaml index f9528a35e187ea..1715cc2d31cd41 100644 --- a/site/en/_book.yaml +++ b/site/en/_book.yaml @@ -130,6 +130,8 @@ upper_tabs: path: /external/registry - title: Module extensions path: /external/extension + - title: Bzlmod migration guide + path: /external/migration - title: Advanced topics path: /external/advanced - heading: Querying your build diff --git a/site/en/external/migration.md b/site/en/external/migration.md new file mode 100644 index 00000000000000..de9a5ba65155ee --- /dev/null +++ b/site/en/external/migration.md @@ -0,0 +1,803 @@ +Project: /_project.yaml +Book: /_book.yaml +keywords: bzlmod + +# Bzlmod Migration Guide + +{% include "_buttons.html" %} + +Due to the [shortcomings of +WORKSPACE](/external/overview#workspace-shortcomings), Bzlmod is going to +replace the legacy WORKSPACE system in future Bazel releases. This guide helps +you migrate your project to Bzlmod and drop WORKSPACE for fetching external +dependencies. + +## WORKSPACE vs Bzlmod {:#workspace-vs-bzlmod} + +Bazel's WORKSPACE and Bzlmod offer similar features with different syntax. This +section explains how to migrate from specific WORKSPACE functionalities to +Bzlmod. + +### Define the root of a Bazel workspace {:#define-root} + +The WORKSPACE file marks the source root of a Bazel project, this responsibility +is replaced by MODULE.bazel in Bazel version 6.3 and later. 
With Bazel version +prior to 6.3, there should still be a `WORKSPACE` or `WORKSPACE.bazel` file at +your workspace root, maybe with comments like: + +* **WORKSPACE** + + ```python + # This file marks the root of the Bazel workspace. + # See MODULE.bazel for external dependencies setup. + ``` + +### Specify repository name for your workspace {:#specify-repo-name} + +* **WORKSPACE** + + The [`workspace`](/rules/lib/globals/workspace#workspace) function is used + to specify a repository name for your workspace. This allows a target + `//foo:bar` in the workspace to be referenced as `@//foo:bar`. If not specified, the default repository name for your + workspace is `__main__`. + + ```python + ## WORKSPACE + workspace(name = "com_foo_bar") + ``` + +* **Bzlmod** + + It's recommended to reference targets in the same workspace with the + `//foo:bar` syntax without `@`. But if you do need the old syntax + , you can use the module name specified by the + [`module`](/rules/lib/globals/module#module) function as the repository + name. If the module name is different from the needed repository name, you + can use `repo_name` attribute of the + [`module`](/rules/lib/globals/module#module) function to override the + repository name. + + ```python + ## MODULE.bazel + module( + name = "bar", + repo_name = "com_foo_bar", + ) + ``` + +### Fetch external dependencies as Bazel modules {:#fetch-bazel-modules} + +If your dependency is a Bazel project, you should be able to depend on it as a +Bazel module when it also adopts Bzlmod. + +* **WORKSPACE** + + With WORKSPACE, it's common to use the + [`http_archive`](/rules/lib/repo/http#http_archive) or + [`git_repository`](/rules/lib/repo/git#git_repository) repository rules to + download the sources of the Bazel project. 
+
+    ```python
+    ## WORKSPACE
+    load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+    http_archive(
+        name = "bazel_skylib",
+        urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz"],
+        sha256 = "66ffd9315665bfaafc96b52278f57c7e2dd09f5ede279ea6d39b2be471e7e3aa",
+    )
+    load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
+    bazel_skylib_workspace()
+
+    http_archive(
+        name = "rules_java",
+        urls = ["https://github.com/bazelbuild/rules_java/releases/download/6.1.1/rules_java-6.1.1.tar.gz"],
+        sha256 = "76402a50ae6859d50bd7aed8c1b8ef09dae5c1035bb3ca7d276f7f3ce659818a",
+    )
+    load("@rules_java//java:repositories.bzl", "rules_java_dependencies", "rules_java_toolchains")
+    rules_java_dependencies()
+    rules_java_toolchains()
+    ```
+
+    As you can see, it's a common pattern that users need to load transitive
+    dependencies from a macro of the dependency. Assume both `bazel_skylib` and
+    `rules_java` depend on `platform`; the exact version of the `platform`
+    dependency is determined by the order of the macros.
+
+* **Bzlmod**
+
+    With Bzlmod, as long as your dependency is available in [Bazel Central
+    Registry](https://registry.bazel.build) or your custom [Bazel
+    registry](/external/registry), you can simply depend on it with a
+    [`bazel_dep`](/rules/lib/globals/module#bazel_dep) directive.
+
+    ```python
+    ## MODULE.bazel
+    bazel_dep(name = "bazel_skylib", version = "1.4.2")
+    bazel_dep(name = "rules_java", version = "6.1.1")
+    ```
+
+    Bzlmod resolves Bazel module dependencies transitively using the
+    [MVS](https://research.swtch.com/vgo-mvs) algorithm. Therefore, the maximal
+    required version of `platform` is selected automatically.
+
+### Override a dependency as a Bazel module{:#override-modules}
+
+As the root module, you can override Bazel module dependencies in different
+ways.
+
+Please read the [overrides](/external/module#overrides) section for more
+information.
+ +You can find some example usages in the +[examples][override-examples] +repository. + +[override-examples]: https://github.com/bazelbuild/examples/blob/main/bzlmod/02-override_bazel_module + +### Fetch external dependencies with module extensions{:#fetch-deps-module-extensions} + +If your dependency is not a Bazel project or not yet available in any Bazel +registry, you can introduce it using [module extensions](/external/extension). + +* **WORKSPACE** + + Download a file using the [`http_file`](/rules/lib/repo/http#http_file) + repository rule. + + ```python + ## WORKSPACE + load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") + + http_file( + name = "data_file", + url = "http://example.com/file", + sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + ) + ``` + +* **Bzlmod** + + With Bzlmod, you have to move the definition into a `.bzl` file, which also + lets you share the definition between WORKSPACE and Bzlmod during the + migration period. + + ```python + ## repositories.bzl + load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") + def my_data_dependency(): + http_file( + name = "data_file", + url = "http://example.com/file", + sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + ) + ``` + + Implement a module extension to load the dependencies macro. You can define + it in the same `.bzl` file of the macro, but to keep compatibility with + older Bazel versions, it's better to define it in a separate `.bzl` file. + + ```python + ## extensions.bzl + load("//:repositories.bzl", "my_data_dependency") + def _non_module_dependencies_impl(_ctx): + my_data_dependency() + + non_module_dependencies = module_extension( + implementation = _non_module_dependencies_impl, + ) + ``` + + To make the repository visible to the root project, you should declare the + usages of the module extension and the repository in the MODULE.bazel file. 
+ + ```python + ## MODULE.bazel + non_module_dependencies = use_extension("//:extensions.bzl", "non_module_dependencies") + use_repo(non_module_dependencies, "data_file") + ``` + +### Resolve conflict external dependencies with module extension {:#conflict-deps-module-extension} + +A project can provide a macro that introduces external repositories based on +inputs from its callers. But what if there are multiple callers in the +dependency graph and they cause a conflict? + +Assume the project `foo` provides the following macro which takes `version` as +an argument. + +```python +## repositories.bzl in foo {:#repositories.bzl-foo} +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") +def data_deps(version = "1.0"): + http_file( + name = "data_file", + url = "http://example.com/file-%s" % version, + # Omitting the "sha256" attribute for simplicity + ) +``` + +* **WORKSPACE** + + With WORKSPACE, you can load the macro from `@foo` and specify the version + of the data dependency you need. Assume you have another dependency `@bar`, + which also depends on `@foo` but requires a different version of the data + dependency. + + ```python + ## WORKSPACE + + # Introduce @foo and @bar. + ... + + load("@foo//:repositories.bzl", "data_deps") + data_deps(version = "2.0") + + load("@bar//:repositories.bzl", "bar_deps") + bar_deps() # -> which calls data_deps(version = "3.0") + ``` + + In this case, the end user must carefully adjust the order of macros in the + WORKSPACE to get the version they need. This is one of the biggest pain + points with WORKSPACE since it doesn't really provide a sensible way to + resolve dependencies. + +* **Bzlmod** + + With Bzlmod, the author of project `foo` can use module extension to resolve + conflicts. For example, let's assume it makes sense to always select the + maximal required version of the data dependency among all Bazel modules. 
+ + ```python + ## extensions.bzl in foo + load("//:repositories.bzl", "data_deps") + + data = tag_class(attrs={"version": attr.string()}) + + def _data_deps_extension_impl(module_ctx): + # Select the maximal required version in the dependency graph. + version = "1.0" + for mod in module_ctx.modules: + for data in mod.tags.data: + version = max(version, data.version) + data_deps(version) + + data_deps_extension = module_extension( + implementation = _data_deps_extension_impl, + tag_classes = {"data": data}, + ) + ``` + + ```python + ## MODULE.bazel in bar + bazel_dep(name = "foo", version = "1.0") + + foo_data_deps = use_extension("@foo//:extensions.bzl", "data_deps_extension") + foo_data_deps.data(version = "3.0") + use_repo(foo_data_deps, "data_file") + ``` + + ```python + ## MODULE.bazel in root module + bazel_dep(name = "foo", version = "1.0") + bazel_dep(name = "bar", version = "1.0") + + foo_data_deps = use_extension("@foo//:extensions.bzl", "data_deps_extension") + foo_data_deps.data(version = "2.0") + use_repo(foo_data_deps, "data_file") + ``` + + In this case, the root module requires data version `2.0`, while its + dependency `bar` requires `3.0`. The module extension in `foo` can correctly + resolve this conflict and automatically select version `3.0` for the data + dependency. + +### Integrate third party package manager {:#integrate-package-manager} + +Following the last section, since module extension provides a way to collect +information from the dependency graph, perform custom logic to resolve +dependencies and call repository rules to introduce external repositories, this +provides a great way for rules authors to enhance the rulesets that integrate +package managers for specific languages. + +Please read the [module extensions](/external/extension) page to learn more +about how to use module extensions. 
+ +Here is a list of the rulesets that already adopted Bzlmod to fetch dependencies +from different package managers: + +- [rules_jvm_external](https://github.com/bazelbuild/rules_jvm_external/blob/master/docs/bzlmod.md) +- [rules_go](https://github.com/bazelbuild/rules_go/blob/master/docs/go/core/bzlmod.md) +- [rules_python](https://github.com/bazelbuild/rules_python/blob/main/BZLMOD_SUPPORT.md) + +A minimal example that integrates a pseudo package manager is available at the +[examples][pkg-mgr-example] +repository. + +[pkg-mgr-example]: https://github.com/bazelbuild/examples/tree/main/bzlmod/05-integrate_third_party_package_manager + +### Detect toolchains on the host machine {:#detect-toolchain} + +When Bazel build rules need to detect what toolchains are available on your host +machine, they use repository rules to inspect the host machine and generate +toolchain info as external repositories. + +* **WORKSPACE** + + Given the following repository rule to detect a shell toolchain. + + ```python + ## local_config_sh.bzl + def _sh_config_rule_impl(repository_ctx): + sh_path = get_sh_path_from_env("SH_BIN_PATH") + + if not sh_path: + sh_path = detect_sh_from_path() + + if not sh_path: + sh_path = "/shell/binary/not/found" + + repository_ctx.file("BUILD", """ + load("@bazel_tools//tools/sh:sh_toolchain.bzl", "sh_toolchain") + sh_toolchain( + name = "local_sh", + path = "{sh_path}", + visibility = ["//visibility:public"], + ) + toolchain( + name = "local_sh_toolchain", + toolchain = ":local_sh", + toolchain_type = "@bazel_tools//tools/sh:toolchain_type", + ) + """.format(sh_path = sh_path)) + + sh_config_rule = repository_rule( + environ = ["SH_BIN_PATH"], + local = True, + implementation = _sh_config_rule_impl, + ) + ``` + + You can load the repository rule in WORKSPACE. 
+ + ```python + ## WORKSPACE + load("//:local_config_sh.bzl", "sh_config_rule") + sh_config_rule(name = "local_config_sh") + ``` + +* **Bzlmod** + + With Bzlmod, you can introduce the same repository using a module extension, + which is similar to introducing the `@data_file` repository in the last + section. + + ``` + ## local_config_sh_extension.bzl + load("//:local_config_sh.bzl", "sh_config_rule") + + sh_config_extension = module_extension( + implementation = lambda ctx: sh_config_rule(name = "local_config_sh"), + ) + ``` + + Then use the extension in the MODULE.bazel file. + + ```python + ## MODULE.bazel + sh_config_ext = use_extension("//:local_config_sh_extension.bzl", "sh_config_extension") + use_repo(sh_config_ext, "local_config_sh") + ``` + +### Register toolchains & execution platforms {:#register-toolchains} + +Following the last section, after introducing a repository hosting toolchain +information (e.g. `local_config_sh`), you probably want to register the +toolchain. + +* **WORKSPACE** + + With WORKSPACE, you can register the toolchain in the following ways. + + 1. You can register the toolchain the `.bzl` file and load the macro in the + WORKSPACE file. + + ```python + ## local_config_sh.bzl + def sh_configure(): + sh_config_rule(name = "local_config_sh") + native.register_toolchains("@local_config_sh//:local_sh_toolchain") + ``` + + ```python + ## WORKSPACE + load("//:local_config_sh.bzl", "sh_configure") + sh_configure() + ``` + + 2. Or register the toolchain in the WORKSPACE file directly. + + ```python + ## WORKSPACE + load("//:local_config_sh.bzl", "sh_config_rule") + sh_config_rule(name = "local_config_sh") + register_toolchains("@local_config_sh//:local_sh_toolchain") + ``` + +* **Bzlmod** + + With Bzlmod, the + [`register_toolchains`](/rules/lib/globals/module#register_toolchains) and + [`register_execution_platforms`][register_execution_platforms] + APIs are only available in the MODULE.bazel file. 
You cannot call + `native.register_toolchains` in a module extension. + + ```python + ## MODULE.bazel + sh_config_ext = use_extension("//:local_config_sh_extension.bzl", "sh_config_extension") + use_repo(sh_config_ext, "local_config_sh") + register_toolchains("@local_config_sh//:local_sh_toolchain") + ``` + +[register_execution_platforms]: /rules/lib/globals/module#register_execution_platforms + +### Introduce local repositories {:#introduce-local-deps} + +You may need to introduce a dependency as a local repository when you need a +local version of the dependency for debugging or you want to incorporate a +directory in your workspace as external repository. + +* **WORKSPACE** + + With WORKSPACE, this is achieved by two native repository rules, + [`local_repository`](/reference/be/workspace#local_repository) and + [`new_local_repository`](/reference/be/workspace#new_local_repository). + + ```python + ## WORKSPACE + local_repository( + name = "rules_java", + path = "/Users/bazel_user/workspace/rules_java", + ) + ``` + +* **Bzlmod** + + With Bzlmod, you can use + [`local_path_override`](/rules/lib/globals/module#local_path_override) to + override a module with a local path. + + ```python + ## MODULE.bazel + bazel_dep(name = "rules_java") + local_path_override( + module_name = "rules_java", + path = "/Users/bazel_user/workspace/rules_java", + ) + ``` + + Note: With `local_path_override`, you can only introduce a local directory + as a Bazel module, which means it should have a MODULE.bazel file and its + transitive dependencies are taken into consideration during dependency + resolution. In addition, all module override directives can only be used by + the root module. + + It is also possible to introduce a local repository with module extension. 
+
+ However, you cannot call `native.local_repository` in a module extension;
+ there is an ongoing effort on starlarkifying all native repository rules (check
+ [#18285](https://github.com/bazelbuild/bazel/issues/18285) for progress).
+ Then you can call the corresponding starlark `local_repository` in a module
+ extension. It's also trivial to implement a custom version of
+ `local_repository` repository rule if this is a blocking issue for you.
+
+### Bind targets {:#bind-targets}
+
+The [`bind`](/reference/be/workspace#bind) rule in WORKSPACE is deprecated and
+not supported in Bzlmod. It was introduced to give a target an alias in the
+special `//external` package. All users depending on this should migrate away.
+
+For example, if you have
+
+```python
+## WORKSPACE
+bind(
+ name = "openssl",
+ actual = "@my-ssl//src:openssl-lib",
+)
+```
+
+This allows other targets to depend on `//external:openssl`. You can migrate
+away from this by:
+
+* Replace all usages of `//external:openssl` with
+ `@my-ssl//src:openssl-lib`.
+
+* Or use the [`alias`](/reference/be/general#alias) build rule
+ * Define the following target in a package (e.g. `//third_party`)
+
+ ```python
+ ## third_party/BUILD
+ alias(
+ name = "openssl",
+ actual = "@my-ssl//src:openssl-lib",
+ )
+ ```
+
+ * Replace all usages of `//external:openssl` with
+ `//third_party:openssl`.
+
+## Migration {:#migration}
+
+This section provides useful information and guidance for your Bzlmod migration
+process.
+
+### Know your dependencies in WORKSPACE {:#know-deps-in-workspace}
+
+The first step of migration is to understand what dependencies you have. It
+could be hard to figure out what exact dependencies are introduced in the
+WORKSPACE file because transitive dependencies are often loaded with `*_deps`
+macros.
+
+#### Inspect external dependency with workspace resolved file
+
+Fortunately, the flag
+[`--experimental_repository_resolved_file`][resolved_file_flag]
+can help.
This flag essentially generates a "lock file" of all fetched external
+dependencies in your last Bazel command. You can find more details in this [blog
+post](https://blog.bazel.build/2018/07/09/bazel-sync-and-resolved-file.html).
+
+[resolved_file_flag]: /reference/command-line-reference#flag--experimental_repository_resolved_file
+
+It can be used in two ways:
+
+1. To fetch info of external dependencies needed for building certain targets.
+
+ ```shell
+ bazel clean --expunge
+ bazel build --nobuild --experimental_repository_resolved_file=resolved.bzl //foo:bar
+ ```
+
+2. To fetch info of all external dependencies defined in the WORKSPACE file.
+
+ ```shell
+ bazel clean --expunge
+ bazel sync --experimental_repository_resolved_file=resolved.bzl
+ ```
+
+ With the `bazel sync` command, you can fetch all dependencies defined in the
+ WORKSPACE file, which include:
+
+ * `bind` usages
+ * `register_toolchains` & `register_execution_platforms` usages
+
+ However, if your project is cross-platform, bazel sync may break on certain
+ platforms because some repository rules may only run correctly on supported
+ platforms.
+
+After running the command, you should have information of your external
+dependencies in the `resolved.bzl` file.
+
+#### Inspect external dependency with `bazel query`
+
+You may also know `bazel query` can be used for inspecting repository rules with
+
+```shell
+bazel query --output=build //external:
+```
+
+While it is more convenient and much faster, [bazel query can lie about
+external dependency version](https://github.com/bazelbuild/bazel/issues/12947),
+so be careful using it! Querying and inspecting external
+dependencies with Bzlmod is going to be achieved by a [new
+subcommand](https://github.com/bazelbuild/bazel/issues/15365).
+ +#### Built-in default dependencies {:#builtin-default-deps} + +If you check the file generated by `--experimental_repository_resolved_file`, +you are going to find many dependencies that are not defined in your WORKSPACE. +This is because Bazel in fact adds prefixes and suffixes to the user's WORKSPACE +file content to inject some default dependencies, which are usually required by +native rules (e.g. `@bazel_tools`, `@platforms` and `@remote_java_tools`). With +Bzlmod, those dependencies are introduced with a built-in module +[`bazel_tools`][bazel_tools] , which is a default dependency for every other +Bazel module. + +[bazel_tools]: https://github.com/bazelbuild/bazel/blob/master/src/MODULE.tools + +### Hybrid mode for gradual migration {:#hybrid-mode} + +Bzlmod and WORKSPACE can work side by side, which allows migrating dependencies +from the WORKSPACE file to Bzlmod to be a gradual process. + +#### WORKSPACE.bzlmod {:#workspace.bzlmod} + +During the migration, Bazel users may need to switch between builds with and +without Bzlmod enabled. WORKSPACE.bzlmod support is implemented to make the +process smoother. + +WORKSPACE.bzlmod has the exact same syntax as WORKSPACE. When Bzlmod is enabled, +if a WORKSPACE.bzlmod file also exists at the workspace root: + +* `WORKSPACE.bzlmod` takes effect and the content of `WORKSPACE` is ignored. +* No [prefixes or suffixes](/external/migration#builtin-default-deps) are + added to the WORKSPACE.bzlmod file. + +Using the WORKSPACE.bzlmod file can make the migration easier because: + +* When Bzlmod is disabled, you fall back to fetching dependencies from the + original WORKSPACE file. +* When Bzlmod is enabled, you can better track what dependencies are left to + migrate with WORKSPACE.bzlmod. + +Note: WORKSPACE.bzlmod does NOT replace the functionality of WORKSPACE for +identifying the workspace root, therefore you still need a WORKSPACE file at +your workspace root. 
+ +#### Repository visibility {:#repository-visibility} + +Bzlmod is able to control which other repositories are visible from a given +repository, check [repository names and strict +deps](/external/module#repository_names_and_strict_deps) for more details. + +Here is a summary of repository visibilities from different types of +repositories when also taking WORKSPACE into consideration. + +| | From the main repo | From Bazel module repos | From module extension repos | From WORKSPACE repos | +|----------------|--------------------|-------------------------|---------------------------------------------------------------------------------------------------------------------|----------------------| +| The main repo | Visible | If the root module is a direct dependency | If the root module is a direct dependency of the module hosting the module extension | Visible | +| Bazel module repos | Direct deps | Direct deps | Direct deps of the module hosting the module extension | Direct deps of the root module | +| Module extension repos | Direct deps | Direct deps | Direct deps of the module hosting the module extension + all repos generated by the same module extension | Direct deps of the root module | +| WORKSPACE Repos | All visible | Not visible | Not visible | All visible | + +Note: For the root module, if a repository `@foo` is defined in WORKSPACE and +`@foo` is also used as an [apparent repository +name](/external/overview#apparent-repo-name) in MODULE.bazel, then `@foo` +refers to the one introduced in MODULE.bazel. + +Note: For a module extension generated repository `@bar`, if `@foo` is used as +an [apparent repository name](/external/overview#apparent-repo-name) of +another repository generated by the same module extension and direct +dependencies of the module hosting the module extension, then for repository +`@bar`, `@foo` refers to the latter. + +### Migration process {:#migration-process} + +A typical Bzlmod migration process can look like this: + +1. 
Understand what dependencies you have in WORKSPACE.
+1. Add an empty MODULE.bazel file at your project root.
+1. Add an empty WORKSPACE.bzlmod file to override the WORKSPACE file content.
+1. Build your targets with Bzlmod enabled and check which repository is
+ missing.
+1. Check the definition of the missing repository in the resolved dependency
+ file.
+1. Introduce the missing dependency as a Bazel module, through a module
+ extension, or leave it in the WORKSPACE.bzlmod for later migration.
+1. Go back to 4 and repeat until all dependencies are available.
+
+#### Migration tool {:#migration-tool}
+
+There is an interactive Bzlmod migration [helper script][migration_script] that
+can get you started.
+
+[migration_script]: https://github.com/bazelbuild/bazel-central-registry/blob/main/tools/migrate_to_bzlmod.py
+
+The script does the following things:
+
+* Generate and parse the WORKSPACE resolved file.
+* Print repository info from the resolved file in a human readable way.
+* Run bazel build command, detect recognized error messages, and recommend a
+ way to migrate.
+* Check if a dependency is already available in the BCR.
+* Add a dependency to MODULE.bazel file.
+* Add a dependency through a module extension.
+* Add a dependency to WORKSPACE.bzlmod file.
+
+To use it, make sure you have the latest Bazel release installed, and run the
+following command:
+
+```shell
+git clone https://github.com/bazelbuild/bazel-central-registry.git
+cd <your project root>
+<BCR repo root>/tools/migrate_to_bzlmod.py -t <your build targets>
+```
+
+Note: The migration script is not perfect and may not be up-to-date since Bzlmod
+is evolving; always double check if the recommended solution is correct.
+
+## Publish Bazel modules {:#publish-modules}
+
+If your Bazel project is a dependency for other projects, you can publish your
+project in the [Bazel Central Registry](https://registry.bazel.build/).
+
+To be able to check in your project in the BCR, you need a source archive URL of
+the project.
Take note of a few things when creating the source archive:
+
+* **Make sure the archive is pointing to a specific version.**
+
+ The BCR can only accept versioned source archives because Bzlmod needs to
+ conduct version comparison during dependency resolution.
+
+* **Make sure the archive URL is stable.**
+
+ Bazel verifies the content of the archive by a hash value, so you should
+ make sure the checksum of the downloaded file never changes. If the URL is
+ from GitHub, please create and upload a release archive in the release page.
+ GitHub isn't going to guarantee the checksum of source archives generated on
+ demand. In short, URLs in the form of
+ `https://github.com/<org>/<repo>/releases/download/...` are considered stable
+ while `https://github.com/<org>/<repo>/archive/...` are not. Check [GitHub
+ Archive Checksum
+ Outage](https://blog.bazel.build/2023/02/15/github-archive-checksum.html)
+ for more context.
+
+* **Make sure the source tree follows the layout of the original repository.**
+
+ In case your repository is very large and you want to create a distribution
+ archive with reduced size by stripping out unnecessary sources, please make
+ sure the stripped source tree is a subset of the original source tree. This
+ makes it easier for end users to override the module to a non-release
+ version by [`archive_override`](/rules/lib/globals/module#archive_override)
+ and [`git_override`](/rules/lib/globals/module#git_override).
+
+* **Include a test module in a subdirectory that tests your most common
+ APIs.**
+
+ A test module is a Bazel project with its own WORKSPACE and MODULE.bazel
+ file located in a subdirectory of the source archive which depends on the
+ actual module to be published. It should contain examples or some
+ integration tests that cover your most common APIs. Check
+ [test module][test_module] to learn how to set it up.
+
+[test_module]: https://github.com/bazelbuild/bazel-central-registry/tree/main/docs#test-module
+
+When you have your source archive URL ready, follow the [BCR contribution
+guidelines][bcr_contrib_guide] to submit your module to the BCR with a GitHub
+Pull Request.
+
+[bcr_contrib_guide]: https://github.com/bazelbuild/bazel-central-registry/tree/main/docs#contribute-a-bazel-module
+
+It is **highly recommended** to set up the [Publish to
+BCR](https://github.com/bazel-contrib/publish-to-bcr) GitHub App for your
+repository to automate the process of submitting your module to the BCR.
+
+## Best practices {:#best-practices}
+
+This section documents a few best practices you should follow for better
+managing your external dependencies.
+
+#### Split targets into different packages to avoid fetching unnecessary dependencies.
+
+Check [#12835](https://github.com/bazelbuild/bazel/issues/12835), where dev
+dependencies for tests are forced to be fetched unnecessarily for building
+targets that don't need them. This is actually not Bzlmod specific, but
+following this practice makes it easier to specify dev dependencies correctly.
+
+#### Specify dev dependencies
+
+You can set the `dev_dependency` attribute to true for
+[`bazel_dep`](/rules/lib/globals/module#bazel_dep) and
+[`use_extension`](/rules/lib/globals/module#use_extension) directives so that
+they don't propagate to dependent projects. As the root module, you can use the
+[`--ignore_dev_dependency`][ignore_dev_dep_flag] flag to verify if your targets
+still build without dev dependencies.
+
+[ignore_dev_dep_flag]: /reference/command-line-reference#flag--ignore_dev_dependency
+
+{# More best practices here !!! #}
+
+## Community migration progress {:#migration-progress}
+
+You can check the [Bazel Central Registry](https://registry.bazel.build) to find
+out if your dependencies are already available.
Otherwise feel free to join this +[GitHub discussion](https://github.com/bazelbuild/bazel/discussions/18329) to +upvote or post the dependencies that are blocking your migration. + +## Report issues {:#reporting-issues} + +Please check the [Bazel GitHub issue list][bzlmod_github_issue] for known Bzlmod +issues. Feel free to file new issues or feature requests that can help unblock +your migration! + +[bzlmod_github_issue]: https://github.com/bazelbuild/bazel/issues?q=is%3Aopen+is%3Aissue+label%3Aarea-Bzlmod diff --git a/site/en/external/module.md b/site/en/external/module.md index c07a299aa041d5..eda5fc0177da39 100644 --- a/site/en/external/module.md +++ b/site/en/external/module.md @@ -175,16 +175,16 @@ Bazel supports the following non-registry overrides: ## Repository names and strict deps -The [canonical name](/external/overview#canonical_repository_name) of a repo -backing a module is `{{ "" }}module_name{{ "" }}~{{ "" -}}version{{ "" }}` (for example, `bazel_skylib~1.0.3`). For modules with a +The [canonical name](/external/overview#canonical-repo-name) of a repo backing a +module is `{{ "" }}module_name{{ "" }}~{{ "" }}version{{ +"" }}` (for example, `bazel_skylib~1.0.3`). For modules with a non-registry override, replace the `{{ "" }}version{{ "" }}` part with the string `override`. Note that the canonical name format is not an API you should depend on and is subject to change at any time. -The [apparent name](/external/overview#apparent_repository_name) of a repo -backing a module to its direct dependents defaults to its module name, unless -the `repo_name` attribute of the [`bazel_dep`](/rules/lib/globals/module#bazel_dep) +The [apparent name](/external/overview#apparent-repo-name) of a repo backing a +module to its direct dependents defaults to its module name, unless the +`repo_name` attribute of the [`bazel_dep`](/rules/lib/globals/module#bazel_dep) directive says otherwise. Note that this means a module can only find its direct dependencies. 
This helps prevent accidental breakages due to changes in transitive dependencies. diff --git a/site/en/external/overview.md b/site/en/external/overview.md index c2511ecfc76bc9..97714878a0f46e 100644 --- a/site/en/external/overview.md +++ b/site/en/external/overview.md @@ -15,23 +15,24 @@ the traditional, repository-focused [`WORKSPACE` system](#workspace-system), and the newer module-focused [`MODULE.bazel` system](#bzlmod) (codenamed *Bzlmod*, and enabled with the flag `--enable_bzlmod`). The two systems can be used together, but Bzlmod is replacing the `WORKSPACE` system in future Bazel -releases. +releases, check the [Bzlmod migration guide](/external/migration) on how to +migrate. -This article explains the concepts surrounding external dependency management in -Bazel, before going into a bit more detail about the two systems in order. +This document explains the concepts surrounding external dependency management +in Bazel, before going into a bit more detail about the two systems in order. -## Concepts +## Concepts {:#concepts} -### Repository +### Repository {:#repository} A directory with a `WORKSPACE` or `WORKSPACE.bazel` file, containing source files to be used in a Bazel build. Often shortened to just **repo**. -### Main repository +### Main repository {:#main-repository} The repository in which the current Bazel command is being run. -### Workspace +### Workspace {:#workspace} The environment shared by all Bazel commands run in the same main repository. @@ -39,7 +40,7 @@ Note that historically the concepts of "repository" and "workspace" have been conflated; the term "workspace" has often been used to refer to the main repository, and sometimes even used as a synonym of "repository". -### Canonical repository name +### Canonical repository name {:#canonical-repo-name} The canonical name a repository is addressable by. Within the context of a workspace, each repository has a single canonical name. 
A target inside a repo @@ -48,7 +49,7 @@ whose canonical name is `canonical_name` can be addressed by the label The main repository always has the empty string as the canonical name. -### Apparent repository name +### Apparent repository name {:#apparent-repo-name} The name a repository is addressable by in the context of a certain other repo. This can be thought of as a repo's "nickname": The repo with the canonical name @@ -60,7 +61,7 @@ This can be thought of as a repo's "nickname": The repo with the canonical name Conversely, this can be understood as a **repository mapping**: each repo maintains a mapping from "apparent repo name" to a "canonical repo name". -### Repository rule +### Repository rule {:#repo-rule} A schema for repository definitions that tells Bazel how to materialize a repository. For example, it could be "download a zip archive from a certain URL @@ -68,7 +69,7 @@ and extract it", or "fetch a certain Maven artifact and make it available as a `java_import` target", or simply "symlink a local directory". Every repo is **defined** by calling a repo rule with an appropriate number of arguments. -See [Repository rules](/extending/repo) for more information on how to write +See [Repository rules](/extending/repo) for more information about how to write your own repository rules. The most common repo rules by far are @@ -77,17 +78,17 @@ from a URL and extracts it, and [`local_repository`](/reference/be/workspace#local_repository), which symlinks a local directory that is already a Bazel repository. -### Fetching a repository +### Fetch a repository {:#fetch-repository} The action of making a repo available on local disk by running its associated repo rule. The repos defined in a workspace are not available on local disk before they are fetched. -Normally, Bazel will only fetch a repo when it needs something from the repo, +Normally, Bazel only fetches a repo when it needs something from the repo, and the repo hasn't already been fetched. 
If the repo has already been fetched -before, Bazel will only re-fetch it if its definition has changed. +before, Bazel only re-fetches it if its definition has changed. -### Directory layout +### Directory layout {:#directory-layout} After being fetched, the repo can be found in the subdirectory `external` in the [output base](/remote/output-directories), under its canonical name. @@ -99,7 +100,7 @@ canonical name `canonical_name`: ls $(bazel info output_base)/external/{{ '' }} canonical_name {{ '' }} ``` -## Managing external dependencies with Bzlmod {:#bzlmod} +## Manage external dependencies with Bzlmod {:#bzlmod} Bzlmod, the new external dependency subsystem, does not directly work with repo definitions. Instead, it builds a dependency graph from _modules_, runs @@ -137,19 +138,18 @@ requests. Among other things, they allow Bazel to interact with other package management systems while also respecting the dependency graph built out of Bazel modules. -### External links on Bzlmod +### External links on Bzlmod {:#external-links} -* [Bzlmod Migration Guide](https://docs.google.com/document/d/1JtXIVnXyFZ4bmbiBCr5gsTH4-opZAFf5DMMb-54kES0/edit?usp=sharing){:.external} * [Bzlmod usage examples in bazelbuild/examples](https://github.com/bazelbuild/examples/tree/main/bzlmod){:.external} * [Bazel External Dependencies Overhaul](https://docs.google.com/document/d/1moQfNcEIttsk6vYanNKIy3ZuK53hQUFq1b1r0rmsYVg/edit){: .external} (original Bzlmod design doc) * [BazelCon 2021 talk on Bzlmod](https://www.youtube.com/watch?v=TxOCKtU39Fs){: .external} * [Bazel Community Day talk on Bzlmod](https://www.youtube.com/watch?v=MB6xxis9gWI){: .external} -## Defining repos with `WORKSPACE` {:#workspace-system} +## Define repos with `WORKSPACE` {:#workspace-system} -Historically, you can manage external dependencies by defining repos in -the `WORKSPACE` (or `WORKSPACE.bazel`) file. 
This file has a similar syntax to +Historically, you can manage external dependencies by defining repos in the +`WORKSPACE` (or `WORKSPACE.bazel`) file. This file has a similar syntax to `BUILD` files, employing repo rules instead of build rules. The following snippet is an example to use the `http_archive` repo rule in the @@ -168,9 +168,10 @@ The snippet defines a repo whose canonical name is `foo`. In the `WORKSPACE` system, by default, the canonical name of a repo is also its apparent name to all other repos. -See the [full list](/rules/lib/globals/workspace) of functions available in `WORKSPACE` files. +See the [full list](/rules/lib/globals/workspace) of functions available in +`WORKSPACE` files. -### Shortcomings of the `WORKSPACE` system +### Shortcomings of the `WORKSPACE` system {:#workspace-shortcomings} In the years since the `WORKSPACE` system was introduced, users have reported many pain points, including: @@ -189,4 +190,8 @@ many pain points, including: dependencies are specified using `http_archive` with URLs, without any version information. This means that there is no reliable way to perform version resolution in the case of diamond dependencies (`A` depends on - `B` and `C`; `B` and `C` both depend on different versions of `D`). \ No newline at end of file + `B` and `C`; `B` and `C` both depend on different versions of `D`). + +Due to the shortcomings of WORKSPACE, Bzlmod is going to replace the legacy +WORKSPACE system in future Bazel releases. Please read the [Bzlmod migration +guide](/external/migration) on how to migrate to Bzlmod. \ No newline at end of file From 288cd01239fc607c57f26345031fe58779527ba8 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 07:50:12 -0700 Subject: [PATCH 09/68] Bazel DocGen: Enable rewriter_test again. 
This CL can be submitted once the Python version problem has been solved by https://github.com/bazelbuild/bazel/pull/18497 Fixes https://github.com/bazelbuild/bazel/issues/16975 PiperOrigin-RevId: 543742501 Change-Id: Ie4ff1c94f38030cf709bcf60ac5a91beadb9b13d --- .bazelci/postsubmit.yml | 2 -- .bazelci/presubmit.yml | 2 -- 2 files changed, 4 deletions(-) diff --git a/.bazelci/postsubmit.yml b/.bazelci/postsubmit.yml index f0a1eef5a12a4a..c1170c38864445 100644 --- a/.bazelci/postsubmit.yml +++ b/.bazelci/postsubmit.yml @@ -311,8 +311,6 @@ tasks: - "-//src/test/java/com/google/devtools/build/lib/skyframe/rewinding:RewindingTest" - "-//src/test/java/com/google/devtools/build/lib/buildtool:MiscAnalysisTest" - "-//src/test/java/com/google/devtools/build/lib/rules/objc:ObjcRulesTests" - # https://github.com/bazelbuild/bazel/issues/16975 - - "-//scripts/docs:rewriter_test" # https://github.com/bazelbuild/bazel/issues/17007 - "-//src/test/java/com/google/devtools/build/lib/platform:SystemMemoryPressureEventTest" include_json_profile: diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml index 0e8010fb16316d..8d08a220c2ab8d 100644 --- a/.bazelci/presubmit.yml +++ b/.bazelci/presubmit.yml @@ -322,8 +322,6 @@ tasks: - "-//src/test/java/com/google/devtools/build/lib/skyframe/rewinding:RewindingTest" - "-//src/test/java/com/google/devtools/build/lib/buildtool:MiscAnalysisTest" - "-//src/test/java/com/google/devtools/build/lib/rules/objc:ObjcRulesTests" - # https://github.com/bazelbuild/bazel/issues/16975 - - "-//scripts/docs:rewriter_test" # https://github.com/bazelbuild/bazel/issues/17007 - "-//src/test/java/com/google/devtools/build/lib/platform:SystemMemoryPressureEventTest" # Disable the most time-consuming tests on macOS arm64 platform in presubmit. 
From a3525b57f1a4deff805cc81ed0212df4883bfb61 Mon Sep 17 00:00:00 2001 From: Tony Aiuto Date: Tue, 27 Jun 2023 08:28:01 -0700 Subject: [PATCH 10/68] Create the initial version of the packages_used rule `packages_used` writes transitive SBOM data to a file for future processing. By itself, the rule is not useful. It produces input for an SBOM generator (TBD in a future PR). That generator will take the output from packages_used, and merge with the maven lock file to produce a richer SBOM. Most of this will be moved to rules_license after we finish this proof-of-concept within the Bazel sources. ``` bazel build //tools/compliance:all more bazel-bin/tools/compliance/bazel_packages.json ``` Closes #18782. PiperOrigin-RevId: 543751564 Change-Id: I0cf0fcefa3c5477e6c3bdfe064693e13413887a0 --- .bazelci/presubmit.yml | 1 + tools/compliance/BUILD | 20 ++++ tools/compliance/gather_packages.bzl | 46 +++++++- tools/compliance/packages_used_test.py | 54 ++++++++++ tools/compliance/to_json.bzl | 144 +++++++++++++++++++++++++ 5 files changed, 262 insertions(+), 3 deletions(-) create mode 100644 tools/compliance/packages_used_test.py create mode 100644 tools/compliance/to_json.bzl diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml index 8d08a220c2ab8d..a265b1055c235e 100644 --- a/.bazelci/presubmit.yml +++ b/.bazelci/presubmit.yml @@ -50,6 +50,7 @@ tasks: - "//third_party/ijar/..." - "//tools/android/..." - "//tools/aquery_differ/..." + - "//tools/compliance/..." - "//tools/python/..." # Re-enable once fixed: https://github.com/bazelbuild/bazel/issues/8162 - "-//src/java_tools/buildjar/..." 
diff --git a/tools/compliance/BUILD b/tools/compliance/BUILD index ec627a97ecbf78..7e3ea350886d8c 100644 --- a/tools/compliance/BUILD +++ b/tools/compliance/BUILD @@ -1,5 +1,7 @@ # Tools for gathering OSS licenses +load(":gather_packages.bzl", "packages_used") + licenses(["notice"]) # Apache 2.0 filegroup( @@ -10,3 +12,21 @@ filegroup( "@bazel_tools//tools:__subpackages__", ], ) + +packages_used( + name = "bazel_packages", + out = "bazel_packages.json", + target = "//src:bazel_nojdk", +) + +py_test( + name = "packages_used_test", + size = "medium", + srcs = ["packages_used_test.py"], + data = [ + ":bazel_packages.json", + ], + python_version = "PY3", + deps = [ + ], +) diff --git a/tools/compliance/gather_packages.bzl b/tools/compliance/gather_packages.bzl index cbb58f0aeab1f1..34ccbd76cbcdf9 100644 --- a/tools/compliance/gather_packages.bzl +++ b/tools/compliance/gather_packages.bzl @@ -21,6 +21,7 @@ load( ) load("@rules_license//rules_gathering:trace.bzl", "TraceInfo") load(":user_filtered_rule_kinds.bzl", "user_aspect_filters") +load(":to_json.bzl", "labels_to_json", "licenses_to_json", "package_infos_to_json") TransitivePackageInfo = provider( """Transitive list of all SBOM relevant dependencies.""", @@ -98,9 +99,10 @@ def _get_transitive_metadata(ctx, trans_license_info, trans_package_info, trans_ if info.packages: trans_packages.append(info.packages) - if info.traces: - for trace in info.traces: - traces.append("(" + ", ".join([str(ctx.label), ctx.rule.kind, name]) + ") -> " + trace) + if hasattr(info, "traces"): + if info.traces: + for trace in info.traces: + traces.append("(" + ", ".join([str(ctx.label), ctx.rule.kind, name]) + ") -> " + trace) def gather_package_common(target, ctx, provider_factory, metadata_providers, filter_func): """Collect license and other metadata info from myself and my deps. 
@@ -212,3 +214,41 @@ gather_package_info = aspect( provides = [TransitivePackageInfo], apply_to_generating_rules = True, ) + +def _packages_used_impl(ctx): + """Write the TransitivePackageInfo as JSON.""" + tpi = ctx.attr.target[TransitivePackageInfo] + licenses_json = licenses_to_json(tpi.license_info) + package_info_json = package_infos_to_json(tpi.package_info) + packages = labels_to_json(tpi.packages.to_list()) + + # Create a single dict of all the info. + main_template = """{{ + "top_level_target": "{top_level_target}", + "licenses": {licenses}, + "package_info": {package_info}, + "packages": {packages} + \n}}""" + + content = main_template.format( + top_level_target = ctx.attr.target.label, + licenses = licenses_json, + package_info = package_info_json, + packages = packages, + ) + ctx.actions.write( + output = ctx.outputs.out, + content = content, + ) + +packages_used = rule( + doc = """Gather transitive package information for a target and write as JSON.""", + implementation = _packages_used_impl, + attrs = { + "target": attr.label( + aspects = [gather_package_info], + allow_files = True, + ), + "out": attr.output(mandatory = True), + }, +) diff --git a/tools/compliance/packages_used_test.py b/tools/compliance/packages_used_test.py new file mode 100644 index 00000000000000..3b4882abf53d69 --- /dev/null +++ b/tools/compliance/packages_used_test.py @@ -0,0 +1,54 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Smoke test for packages_used.""" + +import json +import os +import unittest + + +def read_data_file(basename): + path = os.path.join( + os.getenv("TEST_SRCDIR"), "io_bazel/tools/compliance", basename + ) + with open(path, "rt", encoding="utf-8") as f: + return f.read() + + +class PackagesUsedTest(unittest.TestCase): + + def test_found_key_licenses(self): + raw_json = read_data_file("bazel_packages.json") + content = json.loads(raw_json) + found_top_level_license = False + found_zlib = False + for l in content["licenses"]: + if l["label"] == "//:license": + found_top_level_license = True + if l["label"] == "//third_party/zlib:license": + found_zlib = True + self.assertTrue(found_top_level_license) + self.assertTrue(found_zlib) + + def test_found_remote_packages(self): + raw_json = read_data_file("bazel_packages.json") + content = json.loads(raw_json) + self.assertIn( + "@remoteapis//:build_bazel_remote_execution_v2_remote_execution_proto", + content["packages"], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/compliance/to_json.bzl b/tools/compliance/to_json.bzl new file mode 100644 index 00000000000000..58525a5d7ea7ff --- /dev/null +++ b/tools/compliance/to_json.bzl @@ -0,0 +1,144 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Utility methods for turning package metadata to JSON. + +These should eventually be part of rules_license. +""" + +def _strip_null_repo(label): + """Removes the null repo name (e.g. @//) from a string. + + The is to make str(label) compatible between bazel 5.x and 6.x + """ + s = str(label) + if s.startswith("@//"): + return s[1:] + elif s.startswith("@@//"): + return s[2:] + return s + +def _bazel_package(label): + """Returns the package containing a label.""" + clean_label = _strip_null_repo(label) + return clean_label[0:-(len(label.name) + 1)] + +_license_template = """{{ + "label": "{label}", + "bazel_package": "{bazel_package}", + "license_kinds": [{kinds}], + "copyright_notice": "{copyright_notice}", + "package_name": "{package_name}", + "package_url": "{package_url}", + "package_version": "{package_version}", + "license_text": "{license_text}" +}}""" + +_kind_template = """{{ + "target": "{kind_path}", + "name": "{kind_name}", + "conditions": {kind_conditions} +}}""" + +def license_info_to_json(license): + """Converts a LicenseInfo to JSON. + + Args: + license: a LicenseInfo + Returns: + JSON representation of license. + """ + kinds = [] + for kind in sorted(license.license_kinds, key = lambda x: x.name): + kinds.append(_kind_template.format( + kind_name = kind.name, + kind_path = kind.label, + kind_conditions = kind.conditions, + )) + + return _license_template.format( + copyright_notice = license.copyright_notice, + kinds = ",".join(kinds), + license_text = license.license_text.path, + package_name = license.package_name, + package_url = license.package_url, + package_version = license.package_version, + label = _strip_null_repo(license.label), + bazel_package = _bazel_package(license.label), + ) + +def licenses_to_json(licenses): + """Converts a list of LicenseInfo to JSON. + + This list is sorted by label for stability. 
+ + Args: + licenses: list(LicenseInfo) + Returns: + JSON representation of licenses + """ + all_licenses = [] + for license in sorted(licenses.to_list(), key = lambda x: x.label): + all_licenses.append(license_info_to_json(license)) + return "[" + ",".join(all_licenses) + "]" + +_package_info_template = """{{ + "target": "{label}", + "bazel_package": "{bazel_package}", + "package_name": "{package_name}", + "package_url": "{package_url}", + "package_version": "{package_version}" +}}""" + +def package_info_to_json(package_info): + """Converts a PackageInfo to json. + + Args: + package_info: a PackageInfo + Returns: + JSON representation of package_info. + """ + return _package_info_template.format( + label = _strip_null_repo(package_info.label), + bazel_package = _bazel_package(package_info.label), + package_name = package_info.package_name, + package_url = package_info.package_url, + package_version = package_info.package_version, + ) + +def package_infos_to_json(packages): + """Converts a list of PackageInfo to JSON. + + This list is sorted by label for stability. + + Args: + packages: list(PackageInfo) + Returns: + JSON representation of packages. + """ + all_packages = [] + for package in sorted(packages.to_list(), key = lambda x: x.label): + all_packages.append(package_info_to_json(package)) + return "[" + ",".join(all_packages) + "]" + +def labels_to_json(labels): + """Converts a list of Labels to JSON. + + This list is sorted for stability. + + Args: + labels: list(Label) + Returns: + JSON representation of the labels. + """ + return "[%s]" % ",".join(['"%s"' % _strip_null_repo(label) for label in sorted(labels)]) From 5e1e210027a0d01fa22483a02b9381ffa17c4872 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 09:01:40 -0700 Subject: [PATCH 11/68] Refactor `FileSystemValueCheckerInferringAncestors` to take in an `InMemoryGraph` instead of `graphValues` and `graphDoneValues`. 
Also introduce `InMemoryGraph#getIfPresent` which returns a `NodeEntry` for a given `SkyKey` if present, otherwise returns null. PiperOrigin-RevId: 543760157 Change-Id: I8d2f5ec5ed5cbf7062e7b448aeb56591fb708fb3 --- ...eSystemValueCheckerInferringAncestors.java | 44 +-- .../build/lib/skyframe/SkyframeExecutor.java | 5 +- .../build/skyframe/InMemoryGraph.java | 7 + .../build/skyframe/InMemoryGraphImpl.java | 6 + ...temValueCheckerInferringAncestorsTest.java | 371 ++++++++++-------- .../skyframe/DeterministicInMemoryGraph.java | 6 + .../skyframe/NotifyingInMemoryGraph.java | 6 + 7 files changed, 256 insertions(+), 189 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java b/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java index bc0e542cb66a30..d78c598012585f 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java @@ -34,8 +34,9 @@ import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.Differencer.DiffWithDelta.Delta; import com.google.devtools.build.skyframe.ImmutableDiff; +import com.google.devtools.build.skyframe.InMemoryGraph; +import com.google.devtools.build.skyframe.InMemoryNodeEntry; import com.google.devtools.build.skyframe.SkyKey; -import com.google.devtools.build.skyframe.SkyValue; import java.io.IOException; import java.util.Collections; import java.util.HashMap; @@ -64,8 +65,8 @@ */ final class FileSystemValueCheckerInferringAncestors { @Nullable private final TimestampGranularityMonitor tsgm; - private final Map graphValues; - private final Map graphDoneValues; + + private final InMemoryGraph inMemoryGraph; private final Map nodeStates; private final SyscallCache syscallCache; private final Set valuesToInvalidate = 
Sets.newConcurrentHashSet(); @@ -115,22 +116,19 @@ boolean signalFinishedChild(boolean needsToBeVisited) { private FileSystemValueCheckerInferringAncestors( @Nullable TimestampGranularityMonitor tsgm, - Map graphValues, - Map graphDoneValues, + InMemoryGraph inMemoryGraph, Map nodeStates, SyscallCache syscallCache) { this.tsgm = tsgm; - this.graphValues = graphValues; - this.graphDoneValues = graphDoneValues; this.nodeStates = nodeStates; this.syscallCache = syscallCache; + this.inMemoryGraph = inMemoryGraph; } @SuppressWarnings("ReferenceEquality") static ImmutableDiff getDiffWithInferredAncestors( @Nullable TimestampGranularityMonitor tsgm, - Map graphValues, - Map graphDoneValues, + InMemoryGraph inMemoryGraph, Iterable modifiedKeys, int nThreads, SyscallCache syscallCache) @@ -163,11 +161,7 @@ static ImmutableDiff getDiffWithInferredAncestors( } return new FileSystemValueCheckerInferringAncestors( - tsgm, - graphValues, - graphDoneValues, - Collections.unmodifiableMap(nodeStates), - syscallCache) + tsgm, inMemoryGraph, Collections.unmodifiableMap(nodeStates), syscallCache) .processEntries(nThreads); } @@ -253,7 +247,8 @@ private boolean visitEntry( NodeVisitState parentState) throws StatFailedException { FileStateKey key = FileStateValue.key(path); - @Nullable FileStateValue fsv = (FileStateValue) graphValues.get(key); + @Nullable InMemoryNodeEntry fsvNode = inMemoryGraph.getIfPresent(key); + @Nullable FileStateValue fsv = fsvNode != null ? (FileStateValue) fsvNode.toValue() : null; if (fsv == null) { visitUnknownEntry(key, isInferredDirectory, parentState); parentState.addMaybeDeletedChild(path.getRootRelativePath().getBaseName()); @@ -298,13 +293,12 @@ private void visitUnknownEntry( // Run stats on unknown files in order to preserve the parent listing if present unless we // already know it has changed. 
Optional parentListingKey = parentListingKey(path); - @Nullable - DirectoryListingStateValue parentListing = - parentListingKey - // Only look for done listings since already invalidated ones will be reevaluated - // anyway. - .map(k -> (DirectoryListingStateValue) graphDoneValues.get(k)) - .orElse(null); + @Nullable DirectoryListingStateValue parentListing = null; + if (parentListingKey.isPresent()) { + @Nullable InMemoryNodeEntry entry = inMemoryGraph.getIfPresent(parentListingKey.get()); + parentListing = + entry != null && entry.isDone() ? (DirectoryListingStateValue) entry.getValue() : null; + } // No listing/we already know it has changed -- nothing to gain from stats anymore. if (parentListing == null || valuesToInvalidate.contains(parentListingKey.get())) { @@ -347,8 +341,12 @@ private boolean listingHasEntriesOutsideOf(RootedPath path, Set allAffec // TODO(192010830): Try looking up BUILD files if there is no listing -- this is a lookup we // can speculatively try since those files are often checked against. @Nullable + InMemoryNodeEntry nodeEntry = inMemoryGraph.getIfPresent(DirectoryListingStateValue.key(path)); + @Nullable DirectoryListingStateValue listing = - (DirectoryListingStateValue) graphDoneValues.get(DirectoryListingStateValue.key(path)); + nodeEntry != null && nodeEntry.isDone() + ? 
(DirectoryListingStateValue) nodeEntry.getValue() + : null; if (listing == null) { return false; } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java index 941ca5be46d8a8..95febd9e950cb5 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java @@ -1370,12 +1370,9 @@ protected Differencer.Diff getDiff( return FileStateValue.key(RootedPath.toRootedPath(pathEntry, pathFragment)); }); - Map valuesMap = memoizingEvaluator.getValues(); - return FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( tsgm, - valuesMap, - memoizingEvaluator.getDoneValues(), + memoizingEvaluator.getInMemoryGraph(), dirtyFileStateSkyKeys, fsvcThreads, syscallCache); diff --git a/src/main/java/com/google/devtools/build/skyframe/InMemoryGraph.java b/src/main/java/com/google/devtools/build/skyframe/InMemoryGraph.java index 9e22b28e2e0638..e2849dbd991570 100644 --- a/src/main/java/com/google/devtools/build/skyframe/InMemoryGraph.java +++ b/src/main/java/com/google/devtools/build/skyframe/InMemoryGraph.java @@ -96,4 +96,11 @@ default int valuesSize() { * instances back to weak interner and uninstall current pool. */ void cleanupInterningPool(); + + /** + * Returns the {@link InMemoryNodeEntry} for a given {@link SkyKey} if present in the graph. + * Otherwise, returns null. 
+ */ + @Nullable + InMemoryNodeEntry getIfPresent(SkyKey key); } diff --git a/src/main/java/com/google/devtools/build/skyframe/InMemoryGraphImpl.java b/src/main/java/com/google/devtools/build/skyframe/InMemoryGraphImpl.java index 3ef855f18a8ef0..ad90b06cc6973d 100644 --- a/src/main/java/com/google/devtools/build/skyframe/InMemoryGraphImpl.java +++ b/src/main/java/com/google/devtools/build/skyframe/InMemoryGraphImpl.java @@ -267,6 +267,12 @@ public void cleanupInterningPool() { LabelInterner.setGlobalPool(null); } + @Override + @Nullable + public InMemoryNodeEntry getIfPresent(SkyKey key) { + return nodeMap.get(key); + } + static final class EdgelessInMemoryGraphImpl extends InMemoryGraphImpl { public EdgelessInMemoryGraphImpl(boolean usePooledInterning) { diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java index 4b5086afec1d10..6dad42c2f80117 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java @@ -46,13 +46,21 @@ import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.Differencer.DiffWithDelta.Delta; import com.google.devtools.build.skyframe.ImmutableDiff; +import com.google.devtools.build.skyframe.InMemoryGraph; +import com.google.devtools.build.skyframe.InMemoryNodeEntry; +import com.google.devtools.build.skyframe.NodeBatch; +import com.google.devtools.build.skyframe.NodeEntry.DirtyType; +import com.google.devtools.build.skyframe.QueryableGraph.Reason; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; +import com.google.devtools.build.skyframe.Version; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import 
com.google.testing.junit.testparameterinjector.TestParameter; import com.google.testing.junit.testparameterinjector.TestParameterInjector; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map.Entry; import javax.annotation.Nullable; import org.junit.After; import org.junit.Before; @@ -73,6 +81,8 @@ public final class FileSystemValueCheckerInferringAncestorsTest { private Root untrackedRoot; private Exception throwOnStat; + private final InMemoryGraph inMemoryGraph = InMemoryGraph.create(); + @TestParameter({"1", "16"}) private int fsvcThreads; @@ -112,10 +122,9 @@ public void getDiffWithInferredAncestors_unknownFileChanged_returnsFileAndDirs() throws Exception { ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of(), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(fileStateValueKey("foo/file")), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("foo/file")), fsvcThreads, syscallCache); @@ -136,13 +145,13 @@ public void getDiffWithInferredAncestors_fileModified_returnsFileWithValues() th FileStateKey key = fileStateValueKey("file"); FileStateValue value = fileStateValue("file"); scratch.overwriteFile("file", "there"); + addDoneNodesAndThenMarkChanged(ImmutableMap.of(key, value)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of(fileStateValueKey("file"), value), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, syscallCache); @@ -156,14 +165,15 @@ public void getDiffWithInferredAncestors_fileModified_returnsFileWithValues() th public void getDiffWithInferredAncestors_fileAdded_returnsFileAndDirListing() throws Exception { 
scratch.file("file"); FileStateKey key = fileStateValueKey("file"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + key, NONEXISTENT_FILE_STATE_NODE, fileStateValueKey(""), fileStateValue(""))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - key, NONEXISTENT_FILE_STATE_NODE, fileStateValueKey(""), fileStateValue("")), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, syscallCache); @@ -179,21 +189,22 @@ public void getDiffWithInferredAncestors_fileWithDirsAdded_returnsFileAndInjects throws Exception { scratch.file("a/b/file"); FileStateKey fileKey = fileStateValueKey("a/b/file"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + fileStateValueKey(""), + fileStateValue(""), + fileStateValueKey("a"), + NONEXISTENT_FILE_STATE_NODE, + fileStateValueKey("a/b"), + NONEXISTENT_FILE_STATE_NODE, + fileKey, + NONEXISTENT_FILE_STATE_NODE)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey(""), - fileStateValue(""), - fileStateValueKey("a"), - NONEXISTENT_FILE_STATE_NODE, - fileStateValueKey("a/b"), - NONEXISTENT_FILE_STATE_NODE, - fileKey, - NONEXISTENT_FILE_STATE_NODE), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(fileKey), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileKey), fsvcThreads, syscallCache); @@ -219,21 +230,22 @@ public void getDiffWithInferredAncestors_addedFileWithReportedDirs_returnsFileAn throws Exception { scratch.file("a/b/file"); FileStateKey fileKey = fileStateValueKey("a/b/file"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + fileStateValueKey(""), + fileStateValue(""), + fileStateValueKey("a"), + NONEXISTENT_FILE_STATE_NODE, + 
fileStateValueKey("a/b"), + NONEXISTENT_FILE_STATE_NODE, + fileKey, + NONEXISTENT_FILE_STATE_NODE)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey(""), - fileStateValue(""), - fileStateValueKey("a"), - NONEXISTENT_FILE_STATE_NODE, - fileStateValueKey("a/b"), - NONEXISTENT_FILE_STATE_NODE, - fileKey, - NONEXISTENT_FILE_STATE_NODE), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(fileKey, fileStateValueKey("a")), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileKey, fileStateValueKey("a")), fsvcThreads, syscallCache); @@ -262,17 +274,18 @@ public void getDiffWithInferredAncestors_addedFileWithReportedDirs_returnsFileAn public void getDiffWithInferredAncestors_fileWithUnknownDirsAdded_returnsFileAndDirs() throws Exception { scratch.file("a/b/c/d"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + fileStateValueKey(""), + fileStateValue(""), + fileStateValueKey("a/b/c/d"), + NONEXISTENT_FILE_STATE_NODE)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey(""), - fileStateValue(""), - fileStateValueKey("a/b/c/d"), - NONEXISTENT_FILE_STATE_NODE), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(fileStateValueKey("a/b/c/d")), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("a/b/c/d")), fsvcThreads, syscallCache); @@ -296,14 +309,15 @@ public void getDiffWithInferredAncestors_addEmptyDir_returnsDirAndParentListing( scratch.dir("dir"); FileStateKey key = fileStateValueKey("dir"); Delta delta = fileStateValueDelta("dir"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + key, NONEXISTENT_FILE_STATE_NODE, fileStateValueKey(""), fileStateValue(""))); ImmutableDiff diff = 
FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - key, NONEXISTENT_FILE_STATE_NODE, fileStateValueKey(""), fileStateValue("")), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, syscallCache); @@ -320,14 +334,14 @@ public void getDiffWithInferredAncestors_deleteFile_returnsFileParentListing() t FileStateKey key = fileStateValueKey("dir/file1"); FileStateValue oldValue = fileStateValue("dir/file1"); file.delete(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(key, oldValue, fileStateValueKey("dir"), fileStateValue("dir"))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - key, oldValue, fileStateValueKey("dir"), fileStateValue("dir")), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, syscallCache); @@ -345,16 +359,18 @@ public void getDiffWithInferredAncestors_deleteFileFromDirWithListing_skipsDirSt FileStateKey key = fileStateValueKey("dir/file1"); FileStateValue oldValue = fileStateValue("dir/file1"); file1.delete(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(key, oldValue, fileStateValueKey("dir"), fileStateValue("dir"))); + addDoneNodes( + ImmutableMap.of( + directoryListingStateValueKey("dir"), + directoryListingStateValue(file("file1"), file("file2")))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - key, oldValue, fileStateValueKey("dir"), fileStateValue("dir")), - /*graphDoneValues=*/ ImmutableMap.of( - directoryListingStateValueKey("dir"), - directoryListingStateValue(file("file1"), file("file2"))), - /*modifiedKeys=*/ 
ImmutableSet.of(key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, syscallCache); @@ -372,19 +388,20 @@ public void getDiffWithInferredAncestors_deleteLastFileFromDir_ignoresInvalidate FileStateKey key = fileStateValueKey("dir/file1"); FileStateValue oldValue = fileStateValue("dir/file1"); file1.delete(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + key, + oldValue, + fileStateValueKey("dir"), + fileStateValue("dir"), + directoryListingStateValueKey("dir"), + directoryListingStateValue(file("file1"), file("file2")))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - key, - oldValue, - fileStateValueKey("dir"), - fileStateValue("dir"), - directoryListingStateValueKey("dir"), - directoryListingStateValue(file("file1"), file("file2"))), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, syscallCache); @@ -404,15 +421,18 @@ public void getDiffWithInferredAncestors_modifyAllUnknownEntriesInDirWithListing .createSymbolicLink(PathFragment.create("file")); FileStateKey fileKey = fileStateValueKey("dir/file"); FileStateKey symlinkKey = fileStateValueKey("dir/symlink"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(fileStateValueKey("dir"), fileStateValue("dir"))); + addDoneNodes( + ImmutableMap.of( + directoryListingStateValueKey("dir"), + directoryListingStateValue(file("file"), symlink("symlink")))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of(fileStateValueKey("dir"), fileStateValue("dir")), - /*graphDoneValues=*/ ImmutableMap.of( - directoryListingStateValueKey("dir"), - directoryListingStateValue(file("file"), symlink("symlink"))), - /*modifiedKeys=*/ ImmutableSet.of(fileKey, 
symlinkKey), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileKey, symlinkKey), fsvcThreads, syscallCache); @@ -434,14 +454,15 @@ public void getDiffWithInferredAncestors_replaceUnknownEntriesInDirWithListing_s FileStateKey file1Key = fileStateValueKey("dir/file1"); FileStateKey file2Key = fileStateValueKey("dir/file2"); DirectoryListingStateValue.Key dirKey = directoryListingStateValueKey("dir"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(fileStateValueKey("dir"), fileStateValue("dir"))); + addDoneNodes(ImmutableMap.of(dirKey, directoryListingStateValue(file("file1"), file("file2")))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of(fileStateValueKey("dir"), fileStateValue("dir")), - /*graphDoneValues=*/ ImmutableMap.of( - dirKey, directoryListingStateValue(file("file1"), file("file2"))), - /*modifiedKeys=*/ ImmutableSet.of(file1Key, file2Key), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(file1Key, file2Key), fsvcThreads, syscallCache); @@ -481,21 +502,22 @@ public void getDiffWithInferredAncestors_deleteAllFilesFromDir_returnsFilesAndDi file1.delete(); file2.delete(); file3.delete(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + key1, + oldValue1, + key2, + oldValue2, + key3, + oldValue3, + fileStateValueKey("dir"), + fileStateValue("dir"))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - key1, - oldValue1, - key2, - oldValue2, - key3, - oldValue3, - fileStateValueKey("dir"), - fileStateValue("dir")), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(key1, key2, key3), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(key1, key2, key3), fsvcThreads, syscallCache); @@ -520,21 +542,22 @@ public void 
getDiffWithInferredAncestors_deleteFileWithDirs_returnsFileAndDirs() FileStateKey abcFileKey = fileStateValueKey("a/b/c/file"); FileStateValue abcFileValue = fileStateValue("a/b/c/file"); scratch.dir("a/b").deleteTree(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + fileStateValueKey("a"), + fileStateValue("a"), + abKey, + abValue, + abcKey, + abcValue, + abcFileKey, + abcFileValue)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey("a"), - fileStateValue("a"), - abKey, - abValue, - abcKey, - abcValue, - abcFileKey, - abcFileValue), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(abcFileKey), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(abcFileKey), fsvcThreads, syscallCache); @@ -562,21 +585,22 @@ public void getDiffWithInferredAncestors_deleteFileWithReportedDirs_returnsFileA FileStateKey abcFileKey = fileStateValueKey("a/b/c/file"); FileStateValue abcFileValue = fileStateValue("a/b/c/file"); scratch.dir("a/b").deleteTree(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + fileStateValueKey("a"), + fileStateValue("a"), + abKey, + abValue, + abcKey, + abcValue, + abcFileKey, + abcFileValue)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey("a"), - fileStateValue("a"), - abKey, - abValue, - abcKey, - abcValue, - abcFileKey, - abcFileValue), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(abcFileKey, abKey), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(abcFileKey, abKey), fsvcThreads, syscallCache); @@ -604,19 +628,20 @@ public void getDiffWithInferredAncestors_deleteFile_infersDirFromModifiedSibling FileStateValue file2Value = fileStateValue("dir/file2"); file1.delete(); 
scratch.overwriteFile("dir/file2", "12"); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + fileStateValueKey("dir"), + fileStateValue("dir"), + file1Key, + file1Value, + file2Key, + file2Value)); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey("dir"), - fileStateValue("dir"), - file1Key, - file1Value, - file2Key, - file2Value), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(file1Key, file2Key, fileStateValueKey("dir")), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(file1Key, file2Key, fileStateValueKey("dir")), fsvcThreads, syscallCache); @@ -639,21 +664,22 @@ public void getDiffWithInferredAncestors_deleteDirReportDirOnly_returnsDir() thr FileStateKey dirKey = fileStateValueKey("dir"); FileStateValue dirValue = fileStateValue("dir"); file1.getParentDirectory().deleteTree(); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of( + file1Key, + file1Value, + file2Key, + file2Value, + dirKey, + dirValue, + fileStateValueKey(""), + fileStateValue(""))); ImmutableDiff diff = FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - file1Key, - file1Value, - file2Key, - file2Value, - dirKey, - dirValue, - fileStateValueKey(""), - fileStateValue("")), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(dirKey), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(dirKey), fsvcThreads, syscallCache); @@ -667,13 +693,14 @@ public void getDiffWithInferredAncestors_deleteDirReportDirOnly_returnsDir() thr @Test public void getDiffWithInferredAncestors_phantomChangeForNonexistentEntry_returnsEmptyDiff() throws Exception { + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(fileStateValueKey("file"), NONEXISTENT_FILE_STATE_NODE)); + ImmutableDiff diff = 
FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - /*graphValues=*/ ImmutableMap.of( - fileStateValueKey("file"), NONEXISTENT_FILE_STATE_NODE), - /*graphDoneValues=*/ ImmutableMap.of(), - /*modifiedKeys=*/ ImmutableSet.of(fileStateValueKey("file")), + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("file")), fsvcThreads, syscallCache); @@ -683,21 +710,19 @@ public void getDiffWithInferredAncestors_phantomChangeForNonexistentEntry_return } @Test - public void getDiffWithInferredAncestors_statFails_fails() { + public void getDiffWithInferredAncestors_statFails_fails() throws Exception { throwOnStat = new IOException("oh no"); - ImmutableMap graphValues = - ImmutableMap.of(fileStateValueKey("file"), NONEXISTENT_FILE_STATE_NODE); - ImmutableSet modifiedKeys = ImmutableSet.of(fileStateValueKey("file")); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(fileStateValueKey("file"), NONEXISTENT_FILE_STATE_NODE)); AbruptExitException e = assertThrows( AbruptExitException.class, () -> FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - graphValues, - /*graphDoneValues=*/ ImmutableMap.of(), - modifiedKeys, + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("file")), fsvcThreads, syscallCache)); @@ -708,20 +733,18 @@ public void getDiffWithInferredAncestors_statFails_fails() { } @Test - public void getDiffWithInferredAncestors_statCrashes_fails() { + public void getDiffWithInferredAncestors_statCrashes_fails() throws Exception { throwOnStat = new RuntimeException("oh no"); - ImmutableMap graphValues = - ImmutableMap.of(fileStateValueKey("file"), NONEXISTENT_FILE_STATE_NODE); - ImmutableSet modifiedKeys = ImmutableSet.of(fileStateValueKey("file")); + addDoneNodesAndThenMarkChanged( + ImmutableMap.of(fileStateValueKey("file"), NONEXISTENT_FILE_STATE_NODE)); assertThrows( IllegalStateException.class, () -> 
FileSystemValueCheckerInferringAncestors.getDiffWithInferredAncestors( - /*tsgm=*/ null, - graphValues, - /*graphDoneValues=*/ ImmutableMap.of(), - modifiedKeys, + /* tsgm= */ null, + inMemoryGraph, + /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("file")), fsvcThreads, syscallCache)); } @@ -758,4 +781,28 @@ private FileStateValue fileStateValue(String relativePath) throws IOException { private Delta fileStateValueDelta(String relativePath) throws IOException { return Delta.justNew(fileStateValue(relativePath)); } + + private void addDoneNodesAndThenMarkChanged(ImmutableMap values) + throws InterruptedException { + for (Entry entry : values.entrySet()) { + InMemoryNodeEntry node = addDoneNode(entry.getKey(), entry.getValue()); + node.markDirty(DirtyType.CHANGE); + } + } + + private void addDoneNodes(ImmutableMap values) throws InterruptedException { + for (Entry entry : values.entrySet()) { + addDoneNode(entry.getKey(), entry.getValue()); + } + } + + @CanIgnoreReturnValue + private InMemoryNodeEntry addDoneNode(SkyKey key, SkyValue value) throws InterruptedException { + NodeBatch batch = inMemoryGraph.createIfAbsentBatch(null, Reason.OTHER, ImmutableList.of(key)); + InMemoryNodeEntry entry = (InMemoryNodeEntry) batch.get(key); + entry.addReverseDepAndCheckIfDone(null); + entry.markRebuilding(); + entry.setValue(value, Version.minimal(), null); + return entry; + } } diff --git a/src/test/java/com/google/devtools/build/skyframe/DeterministicInMemoryGraph.java b/src/test/java/com/google/devtools/build/skyframe/DeterministicInMemoryGraph.java index 000e0e58482abf..31b534c67a75d9 100644 --- a/src/test/java/com/google/devtools/build/skyframe/DeterministicInMemoryGraph.java +++ b/src/test/java/com/google/devtools/build/skyframe/DeterministicInMemoryGraph.java @@ -88,4 +88,10 @@ public void cleanupInterningPool() { public void removeIfDone(SkyKey key) { ((InMemoryGraph) delegate).removeIfDone(key); } + + @Override + @Nullable + public InMemoryNodeEntry 
getIfPresent(SkyKey key) { + return ((InMemoryGraph) delegate).getIfPresent(key); + } } diff --git a/src/test/java/com/google/devtools/build/skyframe/NotifyingInMemoryGraph.java b/src/test/java/com/google/devtools/build/skyframe/NotifyingInMemoryGraph.java index f3f41fe979154e..e9ba9568b711fe 100644 --- a/src/test/java/com/google/devtools/build/skyframe/NotifyingInMemoryGraph.java +++ b/src/test/java/com/google/devtools/build/skyframe/NotifyingInMemoryGraph.java @@ -95,4 +95,10 @@ public void cleanupInterningPool() { public void removeIfDone(SkyKey key) { ((InMemoryGraph) delegate).removeIfDone(key); } + + @Override + @Nullable + public InMemoryNodeEntry getIfPresent(SkyKey key) { + return ((InMemoryGraph) delegate).getIfPresent(key); + } } From 25b0c40a61af657ddc0b5763538c47c65ef67a48 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 09:13:02 -0700 Subject: [PATCH 12/68] Fix action inputs and commands in j2objc_aspect Compared Starlark and Native code with aquery and fixed their differences. Also compared Starlark/Native outputs and parameters of actions which were failing in the full TGP. PiperOrigin-RevId: 543763373 Change-Id: If102cdfa76f3414868d2c1d9d701151997fab23e --- .../common/objc/j2objc_aspect.bzl | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/main/starlark/builtins_bzl/common/objc/j2objc_aspect.bzl b/src/main/starlark/builtins_bzl/common/objc/j2objc_aspect.bzl index ddda3e0e8f422f..2ea3652501d9eb 100644 --- a/src/main/starlark/builtins_bzl/common/objc/j2objc_aspect.bzl +++ b/src/main/starlark/builtins_bzl/common/objc/j2objc_aspect.bzl @@ -57,7 +57,7 @@ def _proto_j2objc_source(ctx, proto_info, proto_sources, objc_file_path): def _get_output_objc_files(actions, srcs, objc_file_root_relative_path, suffix): objc_sources = [] for src in srcs: - src_path = src.short_path.removesuffix("." + src.extension) + src_path = src.path.removesuffix("." 
+ src.extension) objc_source_path = paths.get_relative(objc_file_root_relative_path, src_path) + suffix objc_sources.append(actions.declare_file(objc_source_path)) return objc_sources @@ -97,7 +97,7 @@ def _java_j2objc_source(ctx, java_source_files, java_source_jars): header_tree_artifact_rel_path = _get_header_tree_artifact_rel_path(ctx) translated_header = ctx.actions.declare_directory(header_tree_artifact_rel_path) objc_hdrs.append(translated_header) - header_search_paths.append(translated_header.short_path) + header_search_paths.append(translated_header.path) return struct( target = ctx.label, @@ -239,7 +239,7 @@ def _create_j2objc_transpilation_action( dep_j2objc_mapping_file_provider, transitive_compile_time_jars, j2objc_source): - java_runtime = java_semantics.find_java_runtime_toolchain(ctx) + java_runtime = ctx.toolchains[java_semantics.JAVA_TOOLCHAIN_TYPE].java.java_runtime args = ctx.actions.args() args.use_param_file(param_file_arg = "@%s", use_always = True) @@ -285,7 +285,7 @@ def _create_j2objc_transpilation_action( args.add("--compiled_archive_file_path", compiled_library) boothclasspath_jar = ctx.file._jre_emul_jar - args.add("-Xbootclasspath:" + boothclasspath_jar.short_path) + args.add("-Xbootclasspath:" + boothclasspath_jar.path) module_files = ctx.attr._jre_emul_module.files.to_list() for file in module_files: @@ -304,17 +304,18 @@ def _create_j2objc_transpilation_action( args.add_all(java_source_files) + direct_files = [j2objc_deploy_jar, boothclasspath_jar] + if dead_code_report != None: + direct_files.append(dead_code_report) + if not experimental_j2objc_header_map: + direct_files.append(output_header_mapping_file) + ctx.actions.run( mnemonic = "TranspilingJ2objc", executable = ctx.executable._j2objc_wrapper, arguments = [args], inputs = depset( - [ - j2objc_deploy_jar, - boothclasspath_jar, - dead_code_report, - ] + module_files + java_source_files + java_source_jars + - [output_header_mapping_file] if not experimental_j2objc_header_map 
else [], + direct_files + module_files + java_source_files + java_source_jars, transitive = [ transitive_compile_time_jars, java_runtime.files, @@ -322,9 +323,8 @@ def _create_j2objc_transpilation_action( deps_class_mapping_files, ], ), - outputs = [output_dep_mapping_file, archive_source_mapping_file] + - j2objc_source.objc_srcs + - j2objc_source.objc_hdrs, + outputs = j2objc_source.objc_srcs + j2objc_source.objc_hdrs + + [output_dep_mapping_file, archive_source_mapping_file], toolchain = None, ) @@ -551,12 +551,14 @@ j2objc_aspect = aspect( default = "@" + cc_semantics.get_repo() + "//tools/j2objc:j2objc_header_map_binary", ), "_jre_emul_jar": attr.label( + cfg = "exec", allow_single_file = True, - default = Label("@//third_party/java/j2objc:jre_emul.jar"), + default = Label("@" + cc_semantics.get_repo() + "//third_party/java/j2objc:jre_emul.jar"), ), "_jre_emul_module": attr.label( + cfg = "exec", allow_files = True, - default = Label("@//third_party/java/j2objc:jre_emul_module"), + default = Label("@" + cc_semantics.get_repo() + "//third_party/java/j2objc:jre_emul_module"), ), "_dead_code_report": attr.label( allow_single_file = True, @@ -568,7 +570,7 @@ j2objc_aspect = aspect( ), "_jre_lib": attr.label( allow_files = True, - default = Label("@//third_party/java/j2objc:jre_core_lib"), + default = Label("@" + cc_semantics.get_repo() + "//third_party/java/j2objc:jre_core_lib"), ), "_xcrunwrapper": attr.label( allow_files = True, @@ -591,13 +593,12 @@ j2objc_aspect = aspect( "_j2objc_proto_toolchain": attr.label( default = configuration_field(fragment = "proto", name = "proto_toolchain_for_j2objc"), ), - "_java_toolchain_type": attr.label(default = java_semantics.JAVA_TOOLCHAIN_TYPE), "_cc_toolchain": attr.label( default = "@" + cc_semantics.get_repo() + "//tools/cpp:current_cc_toolchain", ), }, required_providers = [[JavaInfo], [ProtoInfo]], provides = [apple_common.Objc], - toolchains = [java_semantics.JAVA_TOOLCHAIN_TYPE, 
java_semantics.JAVA_RUNTIME_TOOLCHAIN_TYPE] + cc_helper.use_cpp_toolchain(), + toolchains = [java_semantics.JAVA_TOOLCHAIN_TYPE] + cc_helper.use_cpp_toolchain(), fragments = ["apple", "cpp", "j2objc", "objc", "proto"], ) From dba9e43a26d41c2559568e4abfdece57e79de37d Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 09:29:12 -0700 Subject: [PATCH 13/68] Automated rollback of commit 607d0f7335f95aa0ee236ba3c18ce2a232370cdb. *** Reason for rollback *** It changes the output format of --output=jsonproto to ndjson (i.e. which prints out ConfiguredTarget(s) and Configuration(s) separated by a newline) instead of being wrapped by a CqueryResult. Caught by https://github.com/bazelbuild/rules_go/issues/3597. *** Original change description *** Add new output format for cquery `--output=streamed_proto`. * The current state of the output formats of cquery has a few forms now: - (UNCHANGED) `cquery --output=proto|jsonproto|textproto --proto:include_configurations`: A single CqueryResult of the specified `--output` format. - (NEW) `cquery --output=streamed_proto --proto:include_configurations`: Multiple length-delimited `CqueryResult` protos each containing a single `ConfiguredTarget` or `Configuration`. - (UNCHANGED) `cquery --out... 
*** PiperOrigin-RevId: 543767534 Change-Id: Ib63ed4b5bc2b8e4823a4f935c1f7a32a22a80bcc --- .../ConfiguredTargetQueryEnvironment.java | 20 ---- .../lib/query2/cquery/CqueryOptions.java | 5 +- .../cquery/ProtoOutputFormatterCallback.java | 92 ++++++------------- .../devtools/build/lib/query2/cquery/BUILD | 2 - .../ProtoOutputFormatterCallbackTest.java | 88 +----------------- 5 files changed, 34 insertions(+), 173 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQueryEnvironment.java b/src/main/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQueryEnvironment.java index f1d38d8b1c1f31..bde635bcb09d30 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQueryEnvironment.java +++ b/src/main/java/com/google/devtools/build/lib/query2/cquery/ConfiguredTargetQueryEnvironment.java @@ -54,7 +54,6 @@ import com.google.devtools.build.lib.skyframe.SkyframeExecutor; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.WalkableGraph; -import com.google.protobuf.CodedOutputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Collection; @@ -80,12 +79,6 @@ public class ConfiguredTargetQueryEnvironment /** Cquery specific functions. */ public static final ImmutableList CQUERY_FUNCTIONS = getCqueryFunctions(); - /** - * Pseudo-arbitrarily chosen buffer size for output. Chosen to be large enough to fit a handful of - * messages without needing to flush to the underlying output, which may not be buffered. 
- */ - private static final int OUTPUT_BUFFER_SIZE = 16384; - private CqueryOptions cqueryOptions; private final TopLevelArtifactContext topLevelArtifactContext; @@ -233,7 +226,6 @@ private static ImmutableMap getTransitiveConfig eventHandler, cqueryOptions, out, - CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), skyframeExecutor, accessor, aspectResolver, @@ -243,17 +235,6 @@ private static ImmutableMap getTransitiveConfig eventHandler, cqueryOptions, out, - CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), - skyframeExecutor, - accessor, - aspectResolver, - OutputType.DELIMITED_BINARY, - ruleClassProvider), - new ProtoOutputFormatterCallback( - eventHandler, - cqueryOptions, - out, - CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), skyframeExecutor, accessor, aspectResolver, @@ -263,7 +244,6 @@ private static ImmutableMap getTransitiveConfig eventHandler, cqueryOptions, out, - CodedOutputStream.newInstance(out, OUTPUT_BUFFER_SIZE), skyframeExecutor, accessor, aspectResolver, diff --git a/src/main/java/com/google/devtools/build/lib/query2/cquery/CqueryOptions.java b/src/main/java/com/google/devtools/build/lib/query2/cquery/CqueryOptions.java index 78760394db6733..0d85d0db680ea7 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/cquery/CqueryOptions.java +++ b/src/main/java/com/google/devtools/build/lib/query2/cquery/CqueryOptions.java @@ -45,9 +45,8 @@ public enum Transitions { effectTags = {OptionEffectTag.TERMINAL_OUTPUT}, help = "The format in which the cquery results should be printed. Allowed values for cquery " - + "are: label, label_kind, textproto, transitions, proto, streamed_proto, jsonproto. " - + "If you select 'transitions', you also have to specify the " - + "--transitions=(lite|full) option.") + + "are: label, label_kind, textproto, transitions, proto, jsonproto. 
If you select " + + "'transitions', you also have to specify the --transitions=(lite|full) option.") public String outputFormat; @Option( diff --git a/src/main/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallback.java b/src/main/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallback.java index 4fe5f643929827..c22c7ba4052eda 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallback.java +++ b/src/main/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallback.java @@ -15,7 +15,7 @@ import static com.google.common.collect.ImmutableList.toImmutableList; -import com.google.common.base.Preconditions; +import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; @@ -23,7 +23,6 @@ import com.google.devtools.build.lib.actions.BuildConfigurationEvent; import com.google.devtools.build.lib.analysis.AnalysisProtosV2; import com.google.devtools.build.lib.analysis.AnalysisProtosV2.Configuration; -import com.google.devtools.build.lib.analysis.AnalysisProtosV2.CqueryResult; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider; @@ -40,13 +39,11 @@ import com.google.devtools.build.lib.query2.cquery.CqueryTransitionResolver.EvaluateException; import com.google.devtools.build.lib.query2.engine.QueryEnvironment.TargetAccessor; import com.google.devtools.build.lib.query2.proto.proto2api.Build; -import com.google.devtools.build.lib.query2.proto.proto2api.Build.QueryResult; import com.google.devtools.build.lib.query2.query.aspectresolvers.AspectResolver; import com.google.devtools.build.lib.query2.query.output.ProtoOutputFormatter; import com.google.devtools.build.lib.skyframe.BuildConfigurationKey; 
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; -import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; import com.google.protobuf.TextFormat; import com.google.protobuf.util.JsonFormat; @@ -63,7 +60,6 @@ class ProtoOutputFormatterCallback extends CqueryThreadsafeCallback { /** Defines the types of proto output this class can handle. */ public enum OutputType { BINARY("proto"), - DELIMITED_BINARY("streamed_proto"), TEXT("textproto"), JSON("jsonproto"); @@ -106,7 +102,6 @@ public ImmutableList getConfigurations() { } } - private final CodedOutputStream codedOut; private final OutputType outputType; private final AspectResolver resolver; private final SkyframeExecutor skyframeExecutor; @@ -114,6 +109,8 @@ public ImmutableList getConfigurations() { private final JsonFormat.Printer jsonPrinter = JsonFormat.printer(); private final RuleClassProvider ruleClassProvider; + private AnalysisProtosV2.CqueryResult.Builder protoResult; + private final Map partialResultMap; private ConfiguredTarget currentTarget; @@ -121,14 +118,12 @@ public ImmutableList getConfigurations() { ExtendedEventHandler eventHandler, CqueryOptions options, OutputStream out, - CodedOutputStream codedOut, SkyframeExecutor skyframeExecutor, TargetAccessor accessor, AspectResolver resolver, OutputType outputType, RuleClassProvider ruleClassProvider) { super(eventHandler, options, out, skyframeExecutor, accessor, /*uniquifyResults=*/ false); - this.codedOut = codedOut; this.outputType = outputType; this.skyframeExecutor = skyframeExecutor; this.resolver = resolver; @@ -136,41 +131,32 @@ public ImmutableList getConfigurations() { this.partialResultMap = Maps.newHashMap(); } + @Override + public void start() { + protoResult = AnalysisProtosV2.CqueryResult.newBuilder(); + } + @Override public void close(boolean failFast) throws IOException { - if (failFast || printStream == null) { - return; - } - if 
(options.protoIncludeConfigurations) { - for (Configuration configuration : configurationCache.getConfigurations()) { - if (outputType == OutputType.DELIMITED_BINARY) { - // For streamed protos, we wrap each Configuration in its own CqueryResult that will be - // written length delimited to the stream. - writeData( - AnalysisProtosV2.CqueryResult.newBuilder().addConfigurations(configuration).build()); - } else { - writeData(configuration, CqueryResult.CONFIGURATIONS_FIELD_NUMBER); - } + if (!failFast && printStream != null) { + if (options.protoIncludeConfigurations) { + writeData(getProtoResult()); + } else { + // Documentation promises that setting this flag to false means we convert directly + // to the build.proto format. This is hard to test in integration testing due to the way + // proto output is turned readable (codex). So change the following code with caution. + Build.QueryResult.Builder queryResult = Build.QueryResult.newBuilder(); + protoResult.getResultsList().forEach(ct -> queryResult.addTarget(ct.getTarget())); + writeData(queryResult.build()); } + printStream.flush(); } - codedOut.flush(); - outputStream.flush(); - printStream.flush(); } private void writeData(Message message) throws IOException { - writeData(message, 0); - } - - private void writeData(Message message, int fieldNumber) throws IOException { switch (outputType) { case BINARY: - Preconditions.checkState( - fieldNumber != 0, "Cannot have fieldNumber of 0 when outputType is BINARY"); - codedOut.writeMessage(fieldNumber, message); - break; - case DELIMITED_BINARY: - message.writeDelimitedTo(outputStream); + message.writeTo(outputStream); break; case TEXT: TextFormat.printer().print(message, printStream); @@ -189,9 +175,14 @@ public String getName() { return outputType.formatName(); } + @VisibleForTesting + public AnalysisProtosV2.CqueryResult getProtoResult() { + protoResult.addAllConfigurations(configurationCache.getConfigurations()); + return protoResult.build(); + } + @Override - 
public void processOutput(Iterable partialResult) - throws InterruptedException, IOException { + public void processOutput(Iterable partialResult) throws InterruptedException { partialResult.forEach( kct -> partialResultMap.put(kct.getOriginalLabel(), accessor.getTarget(kct))); @@ -248,7 +239,6 @@ public void processOutput(Iterable partialResult) throw new InterruptedException(e.getMessage()); } } - builder.setTarget(targetBuilder); if (options.protoIncludeConfigurations) { @@ -269,33 +259,7 @@ public void processOutput(Iterable partialResult) } } - // There are a few cases that affect the shape of the output: - // 1. --output=proto|textproto|jsonproto --proto:include_configurations => - // Writes a single CqueryResult containing all the ConfiguredTarget(s) and - // Configuration(s) in the specified output format. - // 2. --output=streamed_proto --proto:include_configurations => - // Writes multiple length delimited CqueryResult protos, each containing a single - // ConfiguredTarget or Configuration. - // 3. --output=proto|textproto|jsonproto --noproto:include_configurations => - // Writes a single QueryResult containing all the corresponding Target(s) in the - // specified output format. - // 4.--output=streamed_proto --noproto:include_configurations => - // Writes multiple length delimited Target protos. - if (options.protoIncludeConfigurations) { - if (outputType == OutputType.DELIMITED_BINARY) { - // Case 2. - writeData(AnalysisProtosV2.CqueryResult.newBuilder().addResults(builder).build()); - } else { - // Case 1. - writeData(builder.build(), CqueryResult.RESULTS_FIELD_NUMBER); - } - } else { - // Case 3 & 4. - // Documentation promises that setting this flag to false means we convert directly - // to the build.proto format. This is hard to test in integration testing due to the way - // proto output is turned readable (codex). So change the following code with caution. 
- writeData(builder.build().getTarget(), QueryResult.TARGET_FIELD_NUMBER); - } + protoResult.addResults(builder.build()); } } diff --git a/src/test/java/com/google/devtools/build/lib/query2/cquery/BUILD b/src/test/java/com/google/devtools/build/lib/query2/cquery/BUILD index 54e9b61d842c83..095797ed6c9db7 100644 --- a/src/test/java/com/google/devtools/build/lib/query2/cquery/BUILD +++ b/src/test/java/com/google/devtools/build/lib/query2/cquery/BUILD @@ -137,14 +137,12 @@ java_test( "//src/main/java/com/google/devtools/build/lib/query2/engine", "//src/main/java/com/google/devtools/build/lib/query2/query/aspectresolvers", "//src/main/java/com/google/devtools/build/lib/util:filetype", - "//src/main/java/com/google/devtools/build/lib/util:pair", "//src/main/protobuf:analysis_v2_java_proto", "//src/main/protobuf:build_java_proto", "//src/test/java/com/google/devtools/build/lib/analysis/util", "//third_party:guava", "//third_party:junit4", "//third_party:truth", - "//third_party/protobuf:protobuf_java", ], ) diff --git a/src/test/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallbackTest.java b/src/test/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallbackTest.java index 6f11ee325d8e8d..5819474a4067bb 100644 --- a/src/test/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallbackTest.java +++ b/src/test/java/com/google/devtools/build/lib/query2/cquery/ProtoOutputFormatterCallbackTest.java @@ -18,7 +18,6 @@ import static com.google.devtools.build.lib.packages.Attribute.attr; import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST; -import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.eventbus.EventBus; import com.google.devtools.build.lib.analysis.AnalysisProtosV2; @@ -41,13 +40,6 @@ import com.google.devtools.build.lib.query2.proto.proto2api.Build.ConfiguredRuleInput; import 
com.google.devtools.build.lib.query2.query.aspectresolvers.AspectResolver.Mode; import com.google.devtools.build.lib.util.FileTypeSet; -import com.google.devtools.build.lib.util.Pair; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.ExtensionRegistry; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.InputStream; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; @@ -325,103 +317,31 @@ public void testAlias_withSelect() throws Exception { .containsAtLeast("//test:my_alias_rule", "//test:config1", "//test:target1"); } - @Test - public void testStreamedProtoAndProtoOutputsAreEquivalent() throws Exception { - MockRule depsRule = - () -> - MockRule.define( - "my_rule", - (builder, env) -> - builder.add(attr("deps", LABEL_LIST).allowedFileTypes(FileTypeSet.ANY_FILE))); - ConfiguredRuleClassProvider ruleClassProvider = setRuleClassProviders(depsRule).build(); - helper.useRuleClassProvider(ruleClassProvider); - - writeFile( - "test/BUILD", - "my_rule(name = 'my_rule',", - " deps = select({", - " ':garfield': ['lasagna.java', 'naps.java'],", - " '//conditions:default': ['mondays.java']", - " })", - ")", - "config_setting(", - " name = 'garfield',", - " values = {'foo': 'cat'}", - ")"); - getHelper().useConfiguration("--foo=cat"); - AnalysisProtosV2.CqueryResult protoOutput = getOutput("//test:my_rule", ruleClassProvider); - ImmutableList streamedProtoOutput = - getStreamedOutput("//test:my_rule", ruleClassProvider); - AnalysisProtosV2.CqueryResult.Builder combinedStreamedProtoBuilder = - AnalysisProtosV2.CqueryResult.newBuilder(); - for (AnalysisProtosV2.CqueryResult result : streamedProtoOutput) { - if (!result.getResultsList().isEmpty()) { - combinedStreamedProtoBuilder.addAllResults(result.getResultsList()); - } - if (!result.getConfigurationsList().isEmpty()) { - 
combinedStreamedProtoBuilder.addAllConfigurations(result.getConfigurationsList()); - } - } - assertThat(protoOutput) - .ignoringRepeatedFieldOrder() - .isEqualTo(combinedStreamedProtoBuilder.build()); - } - private MockRule getSimpleRule() { return () -> MockRule.define("simple_rule"); } private AnalysisProtosV2.CqueryResult getOutput( String queryExpression, RuleClassProvider ruleClassProvider) throws Exception { - CodedInputStream codedIn = - getInputStreamsWithData(queryExpression, ruleClassProvider, OutputType.BINARY).getSecond(); - return AnalysisProtosV2.CqueryResult.parser() - .parseFrom(codedIn, ExtensionRegistry.getEmptyRegistry()); - } - - private ImmutableList getStreamedOutput( - String queryExpression, RuleClassProvider ruleClassProvider) throws Exception { - InputStream in = - getInputStreamsWithData(queryExpression, ruleClassProvider, OutputType.DELIMITED_BINARY) - .getFirst(); - ImmutableList.Builder builder = new ImmutableList.Builder<>(); - AnalysisProtosV2.CqueryResult result; - while ((result = - AnalysisProtosV2.CqueryResult.parser() - .parseDelimitedFrom(in, ExtensionRegistry.getEmptyRegistry())) - != null) { - builder.add(result); - } - return builder.build(); - } - - private Pair getInputStreamsWithData( - String queryExpression, RuleClassProvider ruleClassProvider, OutputType outputType) - throws Exception { QueryExpression expression = QueryParser.parse(queryExpression, getDefaultFunctions()); Set targetPatternSet = new LinkedHashSet<>(); expression.collectTargetPatterns(targetPatternSet); helper.setQuerySettings(Setting.NO_IMPLICIT_DEPS); PostAnalysisQueryEnvironment env = ((ConfiguredTargetQueryHelper) helper).getPostAnalysisQueryEnvironment(targetPatternSet); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - CodedOutputStream codedOut = CodedOutputStream.newInstance(out); + ProtoOutputFormatterCallback callback = new ProtoOutputFormatterCallback( reporter, options, - out, - codedOut, + /* out= */ null, 
getHelper().getSkyframeExecutor(), env.getAccessor(), options.aspectDeps.createResolver( getHelper().getPackageManager(), NullEventHandler.INSTANCE), - outputType, + OutputType.BINARY, ruleClassProvider); env.evaluateQuery(expression, callback); - codedOut.flush(); - ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); - CodedInputStream codedIn = CodedInputStream.newInstance(in); - return new Pair<>(in, codedIn); + return callback.getProtoResult(); } } From 044a14cca2747aeff258fc71eaeb153c08cb34d5 Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 09:34:15 -0700 Subject: [PATCH 14/68] Fix rpath for symlinks in cc_shared_library The rpath were set based on the path of the linker inputs relative to the output being created by the linking action. However, for shared objects after creating the linking action in most cases we also create a symlink. The dynamic loader takes into account the location of the symlink to resolve $ORIGIN in the rpath, not the target of the symlink. The RPATH was therefore wrong in most cases. This misunderstanding of how the dynamic loader treats symlinks also influenced changes introduced in https://github.com/bazelbuild/bazel/commit/95ae4dbfe34fee7b033c4b7918e1afdc1cee017a. All the features introduced in that change are not needed: - No need to switch from RUNPATH to RPATH - No need to stop RPATHs from being added to a shared library - cc_test can avoid linking everything shared library (just like cc_binary) - We don't ever need to link indirect shared libraries into a cc_binary except on Windows This CL also fixes the name of the symlink for a shared library which was mangled so that the original name is preserved. This is important because if a distribution artifact is linked against the library with the mangled name, even if the shared library was installed in the system and the RPATH set correctly, the dynamic loader won't find it. 
Fixes #18790 RELNOTES:none PiperOrigin-RevId: 543768869 Change-Id: I48a2a6553e97cd611814051e731c874552d1de27 --- .../build/lib/rules/cpp/CcLinkingHelper.java | 48 ++++-- .../lib/rules/cpp/CppLinkActionBuilder.java | 23 ++- .../build/lib/rules/cpp/CppRuleClasses.java | 31 ---- .../rules/cpp/LibrariesToLinkCollector.java | 158 ++++++++++-------- .../builtins_bzl/common/cc/cc_binary.bzl | 18 +- .../test_cc_shared_library/BUILD.builtin_test | 4 +- .../cc_shared_library_integration_test.sh | 3 +- .../test_cc_shared_library/starlark_tests.bzl | 2 +- .../test_cc_shared_library/testenv.sh | 1 + .../BUILD.builtin_test | 2 +- tools/cpp/unix_cc_toolchain_config.bzl | 25 --- 11 files changed, 144 insertions(+), 171 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcLinkingHelper.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcLinkingHelper.java index 58e697645c9628..3ac8de69735581 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcLinkingHelper.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcLinkingHelper.java @@ -694,7 +694,8 @@ private boolean createDynamicLinkAction( "-Wl,-soname=" + SolibSymlinkAction.getDynamicLibrarySoname( linkerOutput.getRootRelativePath(), - /* preserveName= */ false, + /* preserveName= */ dynamicLinkType + != LinkTargetType.NODEPS_DYNAMIC_LIBRARY, actionConstructionContext.getConfiguration().getMnemonic())); } } @@ -799,6 +800,10 @@ private boolean createDynamicLinkAction( if (dynamicLinkActionBuilder.getAllLtoBackendArtifacts() != null) { ccLinkingOutputs.addAllLtoArtifacts(dynamicLinkActionBuilder.getAllLtoBackendArtifacts()); } + Artifact implLibraryLinkArtifact = getDynamicLibrarySolibSymlinkOutput(linkerOutput); + if (implLibraryLinkArtifact != null) { + dynamicLinkActionBuilder.setDynamicLibrarySolibSymlinkOutput(implLibraryLinkArtifact); + } CppLinkAction dynamicLinkAction = dynamicLinkActionBuilder.build(); if (dynamicLinkType.isExecutable()) { 
ccLinkingOutputs.setExecutable(linkerOutput); @@ -824,14 +829,16 @@ private boolean createDynamicLinkAction( } libraryToLinkBuilder.setDynamicLibrary(dynamicLibrary.getArtifact()); } else { - Artifact implLibraryLinkArtifact = - SolibSymlinkAction.getDynamicLibrarySymlink( - /* actionRegistry= */ actionRegistry, - /* actionConstructionContext= */ actionConstructionContext, - ccToolchain.getSolibDirectory(), - dynamicLibrary.getArtifact(), - /* preserveName= */ false, - /* prefixConsumer= */ false); + if (dynamicLinkType == LinkTargetType.NODEPS_DYNAMIC_LIBRARY) { + implLibraryLinkArtifact = + SolibSymlinkAction.getDynamicLibrarySymlink( + /* actionRegistry= */ actionRegistry, + /* actionConstructionContext= */ actionConstructionContext, + ccToolchain.getSolibDirectory(), + dynamicLibrary.getArtifact(), + /* preserveName= */ false, + /* prefixConsumer= */ false); + } libraryToLinkBuilder.setDynamicLibrary(implLibraryLinkArtifact); libraryToLinkBuilder.setResolvedSymlinkDynamicLibrary(dynamicLibrary.getArtifact()); @@ -842,7 +849,8 @@ private boolean createDynamicLinkAction( /* actionConstructionContext= */ actionConstructionContext, ccToolchain.getSolibDirectory(), interfaceLibrary.getArtifact(), - /* preserveName= */ false, + // Need to preserve name for transitive shared libraries that may be distributed. 
+ /* preserveName= */ dynamicLinkType != LinkTargetType.NODEPS_DYNAMIC_LIBRARY, /* prefixConsumer= */ false); libraryToLinkBuilder.setInterfaceLibrary(libraryLinkArtifact); libraryToLinkBuilder.setResolvedSymlinkInterfaceLibrary(interfaceLibrary.getArtifact()); @@ -1001,4 +1009,24 @@ private static List convertLibraryToLinkListToLinker } return librariesToLinkBuilder.build(); } + + @Nullable + private Artifact getDynamicLibrarySolibSymlinkOutput(Artifact linkerOutputArtifact) { + if (dynamicLinkType != LinkTargetType.DYNAMIC_LIBRARY + || neverlink + || featureConfiguration.isEnabled(CppRuleClasses.COPY_DYNAMIC_LIBRARIES_TO_BINARY)) { + return null; + } + return SolibSymlinkAction.getDynamicLibrarySymlink( + /* actionRegistry= */ actionRegistry, + /* actionConstructionContext= */ actionConstructionContext, + ccToolchain.getSolibDirectory(), + linkerOutputArtifact, + // For transitive shared libraries we want to preserve the name of the original library so + // that distribution artifacts can be linked against it and not against the mangled name. + // This makes it possible to find the library on the system if the RPATH has been set + // properly. + /* preserveName= */ true, + /* prefixConsumer= */ false); + } } diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java index f364008afda6fe..0140fc1fa8c2ee 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java @@ -147,6 +147,10 @@ public Artifact create( private boolean isStampingEnabled; private final Map executionInfo = new LinkedHashMap<>(); + // We have to add the dynamicLibrarySolibOutput to the CppLinkActionBuilder so that it knows how + // to set up the RPATH properly with respect to the symlink itself and not the original library. 
+ private Artifact dynamicLibrarySolibSymlinkOutput; + /** * Creates a builder that builds {@link CppLinkAction}s. * @@ -814,7 +818,8 @@ public CppLinkAction build() throws InterruptedException, RuleErrorException { nonExpandedLinkerInputs, needWholeArchive, ruleErrorConsumer, - ((RuleContext) actionConstructionContext).getWorkspaceName()); + ((RuleContext) actionConstructionContext).getWorkspaceName(), + dynamicLibrarySolibSymlinkOutput); CollectedLibrariesToLink collectedLibrariesToLink = librariesToLinkCollector.collectLibrariesToLink(); @@ -831,13 +836,6 @@ public CppLinkAction build() throws InterruptedException, RuleErrorException { userLinkFlags.addAll(cppConfiguration.getLtoIndexOptions()); } - NestedSet runtimeLibrarySearchDirectories = - collectedLibrariesToLink.getRuntimeLibrarySearchDirectories(); - if (linkType.getActionName().equals(CppActionNames.CPP_LINK_DYNAMIC_LIBRARY) - && featureConfiguration.isEnabled( - CppRuleClasses.EXCLUDE_BAZEL_RPATHS_IN_TRANSITIVE_LIBS_FEATURE_NAME)) { - runtimeLibrarySearchDirectories = null; - } variables = LinkBuildVariables.setupVariables( ((RuleContext) actionConstructionContext).getStarlarkThread(), @@ -865,7 +863,7 @@ public CppLinkAction build() throws InterruptedException, RuleErrorException { ltoOutputRootPrefix, defFile != null ? 
defFile.getExecPathString() : null, fdoContext, - runtimeLibrarySearchDirectories, + collectedLibrariesToLink.getRuntimeLibrarySearchDirectories(), collectedLibrariesToLink.getLibrariesToLink(), collectedLibrariesToLink.getLibrarySearchDirectories(), /* addIfsoRelatedVariables= */ true); @@ -1563,4 +1561,11 @@ public CppLinkActionBuilder addExecutionInfo(Map executionInfo) this.executionInfo.putAll(executionInfo); return this; } + + @CanIgnoreReturnValue + public CppLinkActionBuilder setDynamicLibrarySolibSymlinkOutput( + Artifact dynamicLibrarySolibSymlinkOutput) { + this.dynamicLibrarySolibSymlinkOutput = dynamicLibrarySolibSymlinkOutput; + return this; + } } diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java index fab3d348f40815..89289d652c4dca 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java @@ -517,37 +517,6 @@ public static ToolchainTypeRequirement ccToolchainTypeRequirement(RuleDefinition */ public static final String LEGACY_IS_CC_TEST_FEATURE_NAME = "legacy_is_cc_test"; - /** - * By default Bazel will be embed runtime search directories (RPATHS) in transitive shared - * libraries, however, for Linux they are wrong in most cases since the runfiles directory where - * the transitive shared library will live will not be known at the time of linking. The runfiles - * directory is decided by dependent rules, we don't know where those dependents will live. For - * Mac (where it works differently by searching from the library's path instead of the main - * binary's) the loader paths (not rpaths) are correct so the paths will work. - * - *

      This feature controls whether Bazel will embed those rpaths into the transitive shared - * library. - */ - public static final String EXCLUDE_BAZEL_RPATHS_IN_TRANSITIVE_LIBS_FEATURE_NAME = - "exclude_bazel_rpaths_in_transitive_libs"; - - /** - * With this feature enabled cc_binary will link all its dynamic_deps, even the ones it depends on - * transitively, linking indirect deps might be necessary because if the RPATHs haven't been set - * up properly in those dynamic_deps then the loader won't be able to find those libraries unless - * they are also linked. For a production binary this is probably not the desired behavior and you - * can switch it off by disabling this feature, for the binary to work you have to make sure that - * the RPATHs in all shared libraries are set up properly though. The default toolchains have this - * behavior switched off for cc_binaries by default. The behavior for cc_tests with dynamic_deps - * on all platforms and Windows cc_binaries is hardcoded to always link every transitive library. - * - *

      This feature controls the behavior for shared libraries depended on via dynamic_deps and - * doesn't control the behavior of the dynamic dependencies created by cc_libraries and used for - * cc_tests or cc_binaries(linkstatic=0). - */ - public static final String LINK_INDIRECT_DYNAMIC_DEPS_IN_BINARY_FEATURE_NAME = - "link_indirect_dynamic_deps_in_binary"; - /** Ancestor for all rules that do include scanning. */ public static final class CcIncludeScanningRule implements RuleDefinition { private final boolean addGrepIncludes; diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java index abf3c1babf78be..0704dde0238ed9 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java @@ -61,6 +61,7 @@ public class LibrariesToLinkCollector { private final RuleErrorConsumer ruleErrorConsumer; private final Artifact output; private final String workspaceName; + private final Artifact dynamicLibrarySolibSymlinkOutput; public LibrariesToLinkCollector( boolean isNativeDeps, @@ -79,7 +80,8 @@ public LibrariesToLinkCollector( Iterable linkerInputs, boolean needWholeArchive, RuleErrorConsumer ruleErrorConsumer, - String workspaceName) { + String workspaceName, + Artifact dynamicLibrarySolibSymlinkOutput) { this.isNativeDeps = isNativeDeps; this.cppConfiguration = cppConfiguration; this.ccToolchainProvider = toolchain; @@ -95,6 +97,7 @@ public LibrariesToLinkCollector( this.ruleErrorConsumer = ruleErrorConsumer; this.output = output; this.workspaceName = workspaceName; + this.dynamicLibrarySolibSymlinkOutput = dynamicLibrarySolibSymlinkOutput; needToolchainLibrariesRpath = toolchainLibrariesSolibDir != null @@ -164,81 +167,88 @@ private NestedSet collectToolchainRuntimeLibrarySearchDirectories( } private ImmutableList 
findPotentialSolibParents() { - // The runtime location of the solib directory relative to the binary depends on four factors: - // - // * whether the binary is contained in the main repository or an external repository; - // * whether the binary is executed directly or from a runfiles tree; - // * whether the binary is staged as a symlink (sandboxed execution; local execution if the - // binary is in the runfiles of another target) or a regular file (remote execution) - the - // dynamic linker follows sandbox and runfiles symlinks into its location under the - // unsandboxed execroot, which thus becomes the effective $ORIGIN; - // * whether --experimental_sibling_repository_layout is enabled or not. - // - // The rpaths emitted into the binary thus have to cover the following cases (assuming that - // the binary target is located in the pkg `pkg` and has name `file`) for the directory used - // as $ORIGIN by the dynamic linker and the directory containing the solib directories: - // - // 1. main, direct, symlink: - // $ORIGIN: $EXECROOT/pkg - // solib root: $EXECROOT - // 2. main, direct, regular file: - // $ORIGIN: $EXECROOT/pkg - // solib root: $EXECROOT/pkg/file.runfiles/main_repo - // 3. main, runfiles, symlink: - // $ORIGIN: $EXECROOT/pkg - // solib root: $EXECROOT - // 4. main, runfiles, regular file: - // $ORIGIN: other_target.runfiles/main_repo/pkg - // solib root: other_target.runfiles/main_repo - // 5a. external, direct, symlink: - // $ORIGIN: $EXECROOT/external/other_repo/pkg - // solib root: $EXECROOT - // 5b. external, direct, symlink, with --experimental_sibling_repository_layout: - // $ORIGIN: $EXECROOT/../other_repo/pkg - // solib root: $EXECROOT/../other_repo - // 6a. external, direct, regular file: - // $ORIGIN: $EXECROOT/external/other_repo/pkg - // solib root: $EXECROOT/external/other_repo/pkg/file.runfiles/main_repo - // 6b. 
external, direct, regular file, with --experimental_sibling_repository_layout: - // $ORIGIN: $EXECROOT/../other_repo/pkg - // solib root: $EXECROOT/../other_repo/pkg/file.runfiles/other_repo - // 7a. external, runfiles, symlink: - // $ORIGIN: $EXECROOT/external/other_repo/pkg - // solib root: $EXECROOT - // 7b. external, runfiles, symlink, with --experimental_sibling_repository_layout: - // $ORIGIN: $EXECROOT/../other_repo/pkg - // solib root: $EXECROOT/../other_repo - // 8a. external, runfiles, regular file: - // $ORIGIN: other_target.runfiles/some_repo/pkg - // solib root: other_target.runfiles/main_repo - // 8b. external, runfiles, regular file, with --experimental_sibling_repository_layout: - // $ORIGIN: other_target.runfiles/some_repo/pkg - // solib root: other_target.runfiles/some_repo - // - // Cases 1, 3, 4, 5, 7, and 8b are covered by an rpath that walks up the root relative path. - // Cases 2 and 6 covered by walking into file.runfiles/main_repo. - // Case 8a is covered by walking up some_repo/pkg and then into main_repo. - boolean isExternal = - output.getRunfilesPath().startsWith(LabelConstants.EXTERNAL_RUNFILES_PATH_PREFIX); - boolean usesLegacyRepositoryLayout = output.getRoot().isLegacy(); ImmutableList.Builder solibParents = ImmutableList.builder(); - // Handles cases 1, 3, 4, 5, and 7. - solibParents.add("../".repeat(output.getRootRelativePath().segmentCount() - 1)); - // Handle cases 2 and 6. 
- String solibRepositoryName; - if (isExternal && !usesLegacyRepositoryLayout) { - // Case 6b - solibRepositoryName = output.getRunfilesPath().getSegment(1); - } else { - // Cases 2 and 6a - solibRepositoryName = workspaceName; + ImmutableList.Builder outputs = ImmutableList.builder(); + outputs.add(output); + if (dynamicLibrarySolibSymlinkOutput != null) { + outputs.add(dynamicLibrarySolibSymlinkOutput); } - solibParents.add(output.getFilename() + ".runfiles/" + solibRepositoryName + "/"); - if (isExternal && usesLegacyRepositoryLayout) { - // Handles case 8a. The runfiles path is of the form ../some_repo/pkg/file and we need to - // walk up some_repo/pkg and then down into main_repo. - solibParents.add( - "../".repeat(output.getRunfilesPath().segmentCount() - 2) + workspaceName + "/"); + for (Artifact output : outputs.build()) { + // The runtime location of the solib directory relative to the binary depends on four factors: + // + // * whether the binary is contained in the main repository or an external repository; + // * whether the binary is executed directly or from a runfiles tree; + // * whether the binary is staged as a symlink (sandboxed execution; local execution if the + // binary is in the runfiles of another target) or a regular file (remote execution) - the + // dynamic linker follows sandbox and runfiles symlinks into its location under the + // unsandboxed execroot, which thus becomes the effective $ORIGIN; + // * whether --experimental_sibling_repository_layout is enabled or not. + // + // The rpaths emitted into the binary thus have to cover the following cases (assuming that + // the binary target is located in the pkg `pkg` and has name `file`) for the directory used + // as $ORIGIN by the dynamic linker and the directory containing the solib directories: + // + // 1. main, direct, symlink: + // $ORIGIN: $EXECROOT/pkg + // solib root: $EXECROOT + // 2. 
main, direct, regular file: + // $ORIGIN: $EXECROOT/pkg + // solib root: $EXECROOT/pkg/file.runfiles/main_repo + // 3. main, runfiles, symlink: + // $ORIGIN: $EXECROOT/pkg + // solib root: $EXECROOT + // 4. main, runfiles, regular file: + // $ORIGIN: other_target.runfiles/main_repo/pkg + // solib root: other_target.runfiles/main_repo + // 5a. external, direct, symlink: + // $ORIGIN: $EXECROOT/external/other_repo/pkg + // solib root: $EXECROOT + // 5b. external, direct, symlink, with --experimental_sibling_repository_layout: + // $ORIGIN: $EXECROOT/../other_repo/pkg + // solib root: $EXECROOT/../other_repo + // 6a. external, direct, regular file: + // $ORIGIN: $EXECROOT/external/other_repo/pkg + // solib root: $EXECROOT/external/other_repo/pkg/file.runfiles/main_repo + // 6b. external, direct, regular file, with --experimental_sibling_repository_layout: + // $ORIGIN: $EXECROOT/../other_repo/pkg + // solib root: $EXECROOT/../other_repo/pkg/file.runfiles/other_repo + // 7a. external, runfiles, symlink: + // $ORIGIN: $EXECROOT/external/other_repo/pkg + // solib root: $EXECROOT + // 7b. external, runfiles, symlink, with --experimental_sibling_repository_layout: + // $ORIGIN: $EXECROOT/../other_repo/pkg + // solib root: $EXECROOT/../other_repo + // 8a. external, runfiles, regular file: + // $ORIGIN: other_target.runfiles/some_repo/pkg + // solib root: other_target.runfiles/main_repo + // 8b. external, runfiles, regular file, with --experimental_sibling_repository_layout: + // $ORIGIN: other_target.runfiles/some_repo/pkg + // solib root: other_target.runfiles/some_repo + // + // Cases 1, 3, 4, 5, 7, and 8b are covered by an rpath that walks up the root relative path. + // Cases 2 and 6 covered by walking into file.runfiles/main_repo. + // Case 8a is covered by walking up some_repo/pkg and then into main_repo. 
+ boolean isExternal = + output.getRunfilesPath().startsWith(LabelConstants.EXTERNAL_RUNFILES_PATH_PREFIX); + boolean usesLegacyRepositoryLayout = output.getRoot().isLegacy(); + // Handles cases 1, 3, 4, 5, and 7. + solibParents.add("../".repeat(output.getRootRelativePath().segmentCount() - 1)); + // Handle cases 2 and 6. + String solibRepositoryName; + if (isExternal && !usesLegacyRepositoryLayout) { + // Case 6b + solibRepositoryName = output.getRunfilesPath().getSegment(1); + } else { + // Cases 2 and 6a + solibRepositoryName = workspaceName; + } + solibParents.add(output.getFilename() + ".runfiles/" + solibRepositoryName + "/"); + if (isExternal && usesLegacyRepositoryLayout) { + // Handles case 8a. The runfiles path is of the form ../some_repo/pkg/file and we need to + // walk up some_repo/pkg and then down into main_repo. + solibParents.add( + "../".repeat(output.getRunfilesPath().segmentCount() - 2) + workspaceName + "/"); + } } return solibParents.build(); diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl index b889808d747b19..b6724ecbf1f725 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl @@ -409,22 +409,8 @@ def _filter_libraries_that_are_linked_dynamically(ctx, feature_configuration, cc # Unlike Unix on Windows every dynamic dependency must be linked to the # main binary, even indirect ones that are dependencies of direct - # dynamic dependencies of this binary. So even though linking indirect - # dynamic dependencies is not needed for Unix, we link them here for tests too - # because we cannot know whether the shared libraries were linked with - # RUNPATH or RPATH. 
If they were linked with the former, then the loader - # won't search in the runfiles directory of this binary for the library, it - # will only search in the RUNPATH set at the time of linking the shared - # library and we cannot possibly know at that point the runfiles directory - # of all of its dependents. - link_indirect_deps = ( - ctx.attr._is_test or - cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "targets_windows") or - cc_common.is_enabled( - feature_configuration = feature_configuration, - feature_name = "link_indirect_dynamic_deps_in_binary", - ) - ) + # dynamic dependencies of this binary. + link_indirect_deps = cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "targets_windows") direct_dynamic_dep_labels = {dep[CcSharedLibraryInfo].linker_input.owner: True for dep in ctx.attr.dynamic_deps} topologically_sorted_labels_set = {label: True for label in topologically_sorted_labels} for dynamic_linker_input_owner, unused_linker_input in unused_dynamic_linker_inputs.items(): diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test index 3df2f8b868258a..11203e74e90a03 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test @@ -25,7 +25,6 @@ licenses(["notice"]) package( default_visibility = ["//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library:__subpackages__"], - features = ["exclude_bazel_rpaths_in_transitive_libs"], ) py_test( @@ -46,7 +45,6 @@ cc_binary( name = "binary", srcs = ["main.cc"], dynamic_deps = ["foo_so"], - features = ["use_rpath_instead_of_runpath"], deps = [ ":foo", ], @@ -133,7 +131,7 @@ cc_shared_library( "bar_so", 
"//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3:diff_pkg_so" ], - features = ["windows_export_all_symbols", "use_rpath_instead_of_runpath"], + features = ["windows_export_all_symbols"], exports_filter = [ ":indirect_dep2", ], diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/cc_shared_library_integration_test.sh b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/cc_shared_library_integration_test.sh index 8bc6c1e88e7921..cf2769994b5c4a 100755 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/cc_shared_library_integration_test.sh +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/cc_shared_library_integration_test.sh @@ -52,7 +52,8 @@ function test_shared_library_symbols() { function test_shared_library_user_link_flags() { foo_so=$(find . -name libfoo_so.so) - objdump -x $foo_so | grep RPATH | grep "kittens" > /dev/null \ + # $RPATH defined in testenv.sh + objdump -x $foo_so | grep $RPATH | grep "kittens" > /dev/null \ || (echo "Expected to have RUNPATH contain 'kittens' (set by user_link_flags)" \ && exit 1) } diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl index b04d475d7725e4..27913fa891e73c 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl @@ -68,7 +68,7 @@ def _linking_order_test_impl(env, target): if "qux2.pic.o" in user_libs: found_bar = False for arg in args: - if "libbar" in arg: + if "-lbar_so" in arg: found_bar = True elif "qux2.pic.o" in arg: env.expect.where( diff --git 
a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/testenv.sh b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/testenv.sh index 1572f31ddafb68..5e946c88ca2052 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/testenv.sh +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/testenv.sh @@ -14,3 +14,4 @@ # limitations under the License. LDD_BINARY="ldd" +RPATH="RUNPATH" diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3/BUILD.builtin_test b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3/BUILD.builtin_test index 7d15613e6746b2..39d541f8f37624 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3/BUILD.builtin_test +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3/BUILD.builtin_test @@ -16,7 +16,7 @@ cc_library( cc_shared_library( name = "diff_pkg_so", - features = ["windows_export_all_symbols", "exclude_bazel_rpaths_in_transitive_libs"], + features = ["windows_export_all_symbols"], deps = [ ":diff_pkg", ], diff --git a/tools/cpp/unix_cc_toolchain_config.bzl b/tools/cpp/unix_cc_toolchain_config.bzl index cdbec472aa8a85..a4fb2f606e7eb8 100644 --- a/tools/cpp/unix_cc_toolchain_config.bzl +++ b/tools/cpp/unix_cc_toolchain_config.bzl @@ -609,10 +609,6 @@ def _impl(ctx): ], ) - link_indirect_dynamic_deps_in_binary_feature = feature( - name = "link_indirect_dynamic_deps_in_binary", - ) - fission_support_feature = feature( name = "fission_support", flag_sets = [ @@ -1363,23 +1359,6 @@ def _impl(ctx): # TODO(#8303): Mac crosstool should also declare every feature. 
if is_linux: - use_rpath_instead_of_runpath_feature = feature( - name = "use_rpath_instead_of_runpath", - flag_sets = [ - flag_set( - actions = all_link_actions + lto_index_actions, - flag_groups = [ - flag_group( - flags = ["-Wl,--disable-new-dtags"], - ), - ], - ), - ], - ) - exclude_bazel_rpaths_in_transitive_libs_feature = feature( - name = "exclude_bazel_rpaths_in_transitive_libs", - ) - # Linux artifact name patterns are the default. artifact_name_patterns = [] features = [ @@ -1404,9 +1383,6 @@ def _impl(ctx): linkstamps_feature, output_execpath_flags_feature, runtime_library_search_directories_feature, - exclude_bazel_rpaths_in_transitive_libs_feature, - use_rpath_instead_of_runpath_feature, - link_indirect_dynamic_deps_in_binary_feature, library_search_directories_feature, libtool_feature, archiver_flags_feature, @@ -1469,7 +1445,6 @@ def _impl(ctx): default_link_flags_feature, user_link_flags_feature, default_link_libs_feature, - link_indirect_dynamic_deps_in_binary_feature, fdo_optimize_feature, dbg_feature, opt_feature, From 6e90569c889a01166d652ab794b22070764a351d Mon Sep 17 00:00:00 2001 From: Sam Shadwell Date: Tue, 27 Jun 2023 11:05:17 -0700 Subject: [PATCH 15/68] Document precedence of JAVABIN on java_binary rule Ran into some confusion around this at work. We recently changed our Bazel repo so that everything was compiling/running with Java 17, but some folks would get issues when `bazel run`ing `java_binary` targets. Error messages indicated they were still running on a Java 8 VM despite the `--java_runtime_version` flag. 
Looking at where the stub template is filled in, I noticed that JAVABIN environment variable takes precedence over the java_executable: https://cs.opensource.google/bazel/bazel/+/master:src/main/starlark/builtins_bzl/bazel/java/bazel_java_binary.bzl;l=183?q=javabin&ss=bazel%2Fbazel&start=11 The folks running into issues set their `JAVABIN` in `.zshrc` files, so that was taking precedence over the Bazel flags and incorrectly running the Java 17 compiled code on a Java 8 VM. Seems reasonable that `JAVABIN` take precedence, but I figure this should probably be documented somewhere to save other folks the trouble of digging into the guts to figure out what was going on. Closes #18564. PiperOrigin-RevId: 543796927 Change-Id: Ia0eaa6af41ae3589874f5810190aedefc39942bd --- .../build/lib/bazel/rules/java/BazelJavaBinaryRule.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/java/BazelJavaBinaryRule.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/java/BazelJavaBinaryRule.java index c89c619cf96689..87bd95df34ff60 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/java/BazelJavaBinaryRule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/java/BazelJavaBinaryRule.java @@ -120,7 +120,9 @@ public Metadata getMetadata() {

      Builds a Java archive ("jar file"), plus a wrapper shell script with the same name as the rule. The wrapper shell script uses a classpath that includes, among other things, a jar file for each - library on which the binary depends. + library on which the binary depends. When running the wrapper shell script, any nonempty + JAVABIN environment variable will take precedence over the version specified via + Bazel's --java_runtime_version flag.

      The wrapper script accepts several unique flags. Refer to From 24db557a994e2da4273f8ee11e08485365f4ad1f Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 11:56:16 -0700 Subject: [PATCH 16/68] Clarify that ActionOwner.getMnemonic() is actually the mnemonic of the build configuration. RELNOTES: None. PiperOrigin-RevId: 543811424 Change-Id: I36f85c1de2b6a64ad4650b1e5601300e7e1a46b1 --- .../devtools/build/lib/actions/ActionOwner.java | 11 +++++++---- .../devtools/build/lib/runtime/UiStateTracker.java | 9 ++++++--- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/actions/ActionOwner.java b/src/main/java/com/google/devtools/build/lib/actions/ActionOwner.java index 643ee3591b9b5b..65f487cbd739e8 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/ActionOwner.java +++ b/src/main/java/com/google/devtools/build/lib/actions/ActionOwner.java @@ -42,7 +42,7 @@ public abstract class ActionOwner { /* label= */ null, Location.BUILTIN, /* targetKind= */ "empty target kind", - /* mnemonic= */ "system", + /* buildConfigurationMnemonic= */ "system", /* configurationChecksum= */ "system", /* buildConfigurationEvent= */ null, /* isToolConfiguration= */ false, @@ -78,7 +78,7 @@ public static ActionOwner createDummy( @Nullable Label label, Location location, String targetKind, - String mnemonic, + String buildConfigurationMnemonic, String configurationChecksum, @Nullable BuildConfigurationEvent buildConfigurationEvent, boolean isToolConfiguration, @@ -90,7 +90,10 @@ public static ActionOwner createDummy( location, targetKind, BuildConfigurationInfo.AutoBuildConfigurationInfo.create( - mnemonic, configurationChecksum, buildConfigurationEvent, isToolConfiguration), + buildConfigurationMnemonic, + configurationChecksum, + buildConfigurationEvent, + isToolConfiguration), executionPlatform, aspectDescriptors, execProperties); @@ -112,7 +115,7 @@ public static ActionOwner createDummy( public abstract 
BuildConfigurationInfo getBuildConfigurationInfo(); /** Returns the mnemonic for the configuration for this {@link ActionOwner}. */ - public final String getMnemonic() { + public final String getBuildConfigurationMnemonic() { return getBuildConfigurationInfo().getMnemonic(); } diff --git a/src/main/java/com/google/devtools/build/lib/runtime/UiStateTracker.java b/src/main/java/com/google/devtools/build/lib/runtime/UiStateTracker.java index 45e0fe26b574a3..e2d55004a5fdde 100644 --- a/src/main/java/com/google/devtools/build/lib/runtime/UiStateTracker.java +++ b/src/main/java/com/google/devtools/build/lib/runtime/UiStateTracker.java @@ -527,7 +527,8 @@ void actionStarted(ActionStartedEvent event) { getActionState(action, actionId, event.getNanoTimeStart()); - if (action.getOwner() != null && action.getOwner().getMnemonic().equals("TestRunner")) { + if (action.getOwner() != null + && action.getOwner().getBuildConfigurationMnemonic().equals("TestRunner")) { Label owner = action.getOwner().getLabel(); if (owner != null) { Set testActionsForOwner = testActions.get(owner); @@ -601,7 +602,8 @@ void actionCompletion(ActionCompletionEvent event) { checkNotNull(activeActions.remove(actionId), "%s not active after %s", actionId, event); - if (action.getOwner() != null && action.getOwner().getMnemonic().equals("TestRunner")) { + if (action.getOwner() != null + && action.getOwner().getBuildConfigurationMnemonic().equals("TestRunner")) { Label owner = action.getOwner().getLabel(); if (owner != null) { Set testActionsForOwner = testActions.get(owner); @@ -748,7 +750,8 @@ private String describeActionProgress(ActionState action, int desiredWidth) { protected String describeAction( ActionState actionState, long nanoTime, int desiredWidth, Set toSkip) { ActionExecutionMetadata action = actionState.action; - if (action.getOwner() != null && action.getOwner().getMnemonic().equals("TestRunner")) { + if (action.getOwner() != null + && 
action.getOwner().getBuildConfigurationMnemonic().equals("TestRunner")) { Label owner = action.getOwner().getLabel(); if (owner != null) { Set allRelatedActions = testActions.get(owner); From 604a9ef6332d49110d14d427317bd726225fff1d Mon Sep 17 00:00:00 2001 From: Googler Date: Tue, 27 Jun 2023 13:36:17 -0700 Subject: [PATCH 17/68] Output contents of RepoMappingManifest files when using --include_file_write_contents This is so that we can write out the files in bazel for android, which converts the aquery results to a ninja file. PiperOrigin-RevId: 543837984 Change-Id: I9926a282214b293eb46ac376f42b21104e6af43b --- .../google/devtools/build/lib/analysis/BUILD | 2 + .../analysis/RepoMappingManifestAction.java | 27 ++++- .../lib/analysis/SourceManifestAction.java | 8 +- .../actions/AbstractFileWriteAction.java | 10 ++ .../lib/analysis/actions/FileWriteAction.java | 12 ++- .../google/devtools/build/lib/query2/BUILD | 1 + ...ctionGraphTextOutputFormatterCallback.java | 22 ++--- .../lib/query2/aquery/AqueryOptions.java | 4 +- .../actiongraph/v2/ActionGraphDump.java | 16 ++- .../build/lib/skyframe/actiongraph/v2/BUILD | 1 + .../analysis/SourceManifestActionTest.java | 24 ++--- src/test/shell/integration/BUILD | 10 ++ .../shell/integration/bazel_aquery_test.sh | 98 +++++++++++++++++++ 13 files changed, 188 insertions(+), 47 deletions(-) create mode 100755 src/test/shell/integration/bazel_aquery_test.sh diff --git a/src/main/java/com/google/devtools/build/lib/analysis/BUILD b/src/main/java/com/google/devtools/build/lib/analysis/BUILD index 32d208771ecb0c..a7ff85cb5efa80 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/BUILD +++ b/src/main/java/com/google/devtools/build/lib/analysis/BUILD @@ -1048,6 +1048,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/actions:commandline_item", "//src/main/java/com/google/devtools/build/lib/cmdline", "//src/main/java/com/google/devtools/build/lib/collect/nestedset", + 
"//src/main/java/com/google/devtools/build/lib/events", "//src/main/java/com/google/devtools/build/lib/packages", "//src/main/java/com/google/devtools/build/lib/util", "//src/main/java/net/starlark/java/eval", @@ -1400,6 +1401,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/actions", "//src/main/java/com/google/devtools/build/lib/actions:artifacts", "//src/main/java/com/google/devtools/build/lib/collect/nestedset", + "//src/main/java/com/google/devtools/build/lib/events", "//third_party:guava", "//third_party:jsr305", ], diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java b/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java index 347999e4e5c1f5..ea80019fbbe203 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/RepoMappingManifestAction.java @@ -15,6 +15,7 @@ import static com.google.common.collect.ImmutableSortedMap.toImmutableSortedMap; import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Comparator.comparing; import com.google.common.collect.ImmutableSet; @@ -26,7 +27,6 @@ import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.CommandLineItem.MapFn; -import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.analysis.actions.AbstractFileWriteAction; import com.google.devtools.build.lib.analysis.actions.DeterministicWriter; import com.google.devtools.build.lib.cmdline.Label; @@ -35,8 +35,11 @@ import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; +import 
com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.packages.Package; import com.google.devtools.build.lib.util.Fingerprint; +import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.io.PrintWriter; import java.util.Map.Entry; import java.util.UUID; @@ -44,7 +47,8 @@ import net.starlark.java.eval.EvalException; /** Creates a manifest file describing the repos and mappings relevant for a runfile tree. */ -public final class RepoMappingManifestAction extends AbstractFileWriteAction { +public final class RepoMappingManifestAction extends AbstractFileWriteAction + implements AbstractFileWriteAction.FileContentsProvider { private static final UUID MY_UUID = UUID.fromString("458e351c-4d30-433d-b927-da6cddd4737f"); @@ -119,9 +123,24 @@ protected void computeKey( fp.addString(workspaceName); } + /** + * Get the contents of a file internally using an in memory output stream. + * + * @return returns the file contents as a string. + */ @Override - public DeterministicWriter newDeterministicWriter(ActionExecutionContext ctx) - throws InterruptedException, ExecException { + public String getFileContents(@Nullable EventHandler eventHandler) throws IOException { + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + newDeterministicWriter().writeOutputFile(stream); + return stream.toString(UTF_8); + } + + @Override + public DeterministicWriter newDeterministicWriter(ActionExecutionContext ctx) { + return newDeterministicWriter(); + } + + public DeterministicWriter newDeterministicWriter() { return out -> { PrintWriter writer = new PrintWriter(out, /* autoFlush= */ false, ISO_8859_1); diff --git a/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java b/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java index 89827c5ca432c1..e469c0bbefe5a8 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java +++ 
b/src/main/java/com/google/devtools/build/lib/analysis/SourceManifestAction.java @@ -55,7 +55,8 @@ *

      This action carefully avoids building the manifest content in memory because it can be large. */ @Immutable // if all ManifestWriter implementations are immutable -public final class SourceManifestAction extends AbstractFileWriteAction { +public final class SourceManifestAction extends AbstractFileWriteAction + implements AbstractFileWriteAction.FileContentsProvider { private static final String GUID = "07459553-a3d0-4d37-9d78-18ed942470f4"; @@ -193,7 +194,8 @@ public void writeOutputFile(OutputStream out, @Nullable EventHandler eventHandle * * @return returns the file contents as a string. */ - public String getFileContentsAsString(@Nullable EventHandler eventHandler) throws IOException { + @Override + public String getFileContents(@Nullable EventHandler eventHandler) throws IOException { ByteArrayOutputStream stream = new ByteArrayOutputStream(); writeOutputFile(stream, eventHandler); return stream.toString(UTF_8); @@ -201,7 +203,7 @@ public String getFileContentsAsString(@Nullable EventHandler eventHandler) throw @Override public String getStarlarkContent() throws IOException { - return getFileContentsAsString(null); + return getFileContents(null); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java b/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java index 4d2c82baf8b7fc..a0da40e1ad3ba0 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/actions/AbstractFileWriteAction.java @@ -25,6 +25,9 @@ import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.SpawnResult; import com.google.devtools.build.lib.collect.nestedset.NestedSet; +import com.google.devtools.build.lib.events.EventHandler; +import java.io.IOException; +import javax.annotation.Nullable; /** * Abstract Action to write to a file. 
@@ -104,4 +107,11 @@ public boolean isRemotable() { return true; } + /** + * This interface is used to get the contents of the file to output to aquery when using + * --include_file_write_contents. + */ + public interface FileContentsProvider { + String getFileContents(@Nullable EventHandler eventHandler) throws IOException; + } } diff --git a/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java b/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java index 76f367962af42a..556c12c582b38a 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java @@ -25,6 +25,7 @@ import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; +import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.OnDemandString; import java.io.ByteArrayInputStream; @@ -49,7 +50,8 @@ * BinaryFileWriteAction}. */ @Immutable // if fileContents is immutable -public final class FileWriteAction extends AbstractFileWriteAction { +public final class FileWriteAction extends AbstractFileWriteAction + implements AbstractFileWriteAction.FileContentsProvider { private static final String GUID = "332877c7-ca9f-4731-b387-54f620408522"; @@ -206,6 +208,14 @@ public String toString() { } } + /** + * @see #getFilecontents() + */ + @Override + public String getFileContents(@Nullable EventHandler eventHandler) { + return getFileContents(); + } + /** * Returns the string contents to be written. 
* diff --git a/src/main/java/com/google/devtools/build/lib/query2/BUILD b/src/main/java/com/google/devtools/build/lib/query2/BUILD index 177b075a8412ff..b4c8fb93825587 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/BUILD +++ b/src/main/java/com/google/devtools/build/lib/query2/BUILD @@ -30,6 +30,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/actions:artifacts", "//src/main/java/com/google/devtools/build/lib/actions:commandline_item", "//src/main/java/com/google/devtools/build/lib/actions:file_metadata", + "//src/main/java/com/google/devtools/build/lib/analysis:actions/abstract_file_write_action", "//src/main/java/com/google/devtools/build/lib/analysis:actions/parameter_file_write_action", "//src/main/java/com/google/devtools/build/lib/analysis:actions/substitution", "//src/main/java/com/google/devtools/build/lib/analysis:actions/template_expansion_action", diff --git a/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphTextOutputFormatterCallback.java b/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphTextOutputFormatterCallback.java index adcd0c5a432a28..cb7763a5a3cb4f 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphTextOutputFormatterCallback.java +++ b/src/main/java/com/google/devtools/build/lib/query2/aquery/ActionGraphTextOutputFormatterCallback.java @@ -31,8 +31,7 @@ import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.analysis.AspectValue; import com.google.devtools.build.lib.analysis.ConfiguredTargetValue; -import com.google.devtools.build.lib.analysis.SourceManifestAction; -import com.google.devtools.build.lib.analysis.actions.FileWriteAction; +import com.google.devtools.build.lib.analysis.actions.AbstractFileWriteAction; import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction; import com.google.devtools.build.lib.analysis.actions.Substitution; import 
com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction; @@ -336,22 +335,13 @@ private void writeAction(ActionAnalysisMetadata action, PrintStream printStream) stringBuilder.append(" ]\n"); } - if (options.includeFileWriteContents && action instanceof FileWriteAction) { - FileWriteAction fileWriteAction = (FileWriteAction) action; + if (options.includeFileWriteContents + && action instanceof AbstractFileWriteAction.FileContentsProvider) { + String contents = + ((AbstractFileWriteAction.FileContentsProvider) action).getFileContents(eventHandler); stringBuilder .append(" FileWriteContents: [") - .append( - Base64.getEncoder().encodeToString(fileWriteAction.getFileContents().getBytes(UTF_8))) - .append("]\n"); - } - if (options.includeFileWriteContents && action instanceof SourceManifestAction) { - SourceManifestAction sourceManifestAction = (SourceManifestAction) action; - stringBuilder - .append(" FileWriteContents: [") - .append( - Base64.getEncoder() - .encodeToString( - sourceManifestAction.getFileContentsAsString(eventHandler).getBytes(UTF_8))) + .append(Base64.getEncoder().encodeToString(contents.getBytes(UTF_8))) .append("]\n"); } diff --git a/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java b/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java index 0fed2c2058133b..81071863f75873 100644 --- a/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java +++ b/src/main/java/com/google/devtools/build/lib/query2/aquery/AqueryOptions.java @@ -64,8 +64,8 @@ public class AqueryOptions extends CommonQueryOptions { documentationCategory = OptionDocumentationCategory.QUERY, effectTags = {OptionEffectTag.TERMINAL_OUTPUT}, help = - "Include the file contents for the FileWrite and SourceSymlinkManifest actions" - + " (potentially large). ") + "Include the file contents for the FileWrite, SourceSymlinkManifest, and " + + "RepoMappingManifest actions (potentially large). 
") public boolean includeFileWriteContents; @Option( diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/ActionGraphDump.java b/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/ActionGraphDump.java index fa1fbd40b98f42..2dcb8728a33c13 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/ActionGraphDump.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/ActionGraphDump.java @@ -30,8 +30,7 @@ import com.google.devtools.build.lib.analysis.AspectValue; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.ConfiguredTargetValue; -import com.google.devtools.build.lib.analysis.SourceManifestAction; -import com.google.devtools.build.lib.analysis.actions.FileWriteAction; +import com.google.devtools.build.lib.analysis.actions.AbstractFileWriteAction; import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction; import com.google.devtools.build.lib.analysis.actions.Substitution; import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction; @@ -198,14 +197,11 @@ private void dumpSingleAction(ConfiguredTarget configuredTarget, ActionAnalysisM actionBuilder.addAllArguments(commandAction.getArguments()); } - if (includeFileWriteContents && action instanceof FileWriteAction) { - FileWriteAction fileWriteAction = (FileWriteAction) action; - actionBuilder.setFileContents(fileWriteAction.getFileContents()); - } - - if (includeFileWriteContents && action instanceof SourceManifestAction) { - SourceManifestAction sourceManifestAction = (SourceManifestAction) action; - actionBuilder.setFileContents(sourceManifestAction.getFileContentsAsString(eventHandler)); + if (includeFileWriteContents + && action instanceof AbstractFileWriteAction.FileContentsProvider) { + String contents = + ((AbstractFileWriteAction.FileContentsProvider) action).getFileContents(eventHandler); + 
actionBuilder.setFileContents(contents); } // Include the content of param files in output. diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/BUILD b/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/BUILD index e60c0caad44d3f..3d396fd61ee7d3 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/BUILD +++ b/src/main/java/com/google/devtools/build/lib/skyframe/actiongraph/v2/BUILD @@ -18,6 +18,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/actions", "//src/main/java/com/google/devtools/build/lib/actions:artifacts", "//src/main/java/com/google/devtools/build/lib/actions:commandline_item", + "//src/main/java/com/google/devtools/build/lib/analysis:actions/abstract_file_write_action", "//src/main/java/com/google/devtools/build/lib/analysis:actions/parameter_file_write_action", "//src/main/java/com/google/devtools/build/lib/analysis:actions/substitution", "//src/main/java/com/google/devtools/build/lib/analysis:actions/template_expansion_action", diff --git a/src/test/java/com/google/devtools/build/lib/analysis/SourceManifestActionTest.java b/src/test/java/com/google/devtools/build/lib/analysis/SourceManifestActionTest.java index a8aa13bdf98c54..a6b4e4f17eafd3 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/SourceManifestActionTest.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/SourceManifestActionTest.java @@ -163,18 +163,20 @@ public boolean isRemotable() { @Test public void testManifestWriterIntegration() throws Exception { MockManifestWriter mockWriter = new MockManifestWriter(); - new SourceManifestAction( - mockWriter, - NULL_ACTION_OWNER, - manifestOutputFile, - new Runfiles.Builder("TESTING", false).addSymlinks(fakeManifest).build()) - .getFileContentsAsString(reporter); + String manifestContents = + new SourceManifestAction( + mockWriter, + NULL_ACTION_OWNER, + manifestOutputFile, + new Runfiles.Builder("TESTING", 
false).addSymlinks(fakeManifest).build()) + .getFileContents(reporter); assertThat(mockWriter.unconsumedInputs()).isEqualTo(0); + assertThat(manifestContents).isEmpty(); } @Test public void testSimpleFileWriting() throws Exception { - String manifestContents = createSymlinkAction().getFileContentsAsString(reporter); + String manifestContents = createSymlinkAction().getFileContents(reporter); assertThat(manifestContents) .isEqualTo( "TESTING/trivial/BUILD /workspace/trivial/BUILD\n" @@ -188,7 +190,7 @@ public void testSimpleFileWriting() throws Exception { */ @Test public void testSourceOnlyFormatting() throws Exception { - String manifestContents = createSourceOnlyAction().getFileContentsAsString(reporter); + String manifestContents = createSourceOnlyAction().getFileContents(reporter); assertThat(manifestContents) .isEqualTo( "TESTING/trivial/BUILD\n" @@ -207,7 +209,7 @@ public void testSwigLibrariesTriggerInitDotPyInclusion() throws Exception { Path swiggedFile = scratch.file("swig/fakeLib.so"); Artifact swigDotSO = ActionsTestUtil.createArtifact(swiggedLibPath, swiggedFile); fakeManifest.put(swiggedFile.relativeTo(rootDirectory), swigDotSO); - String manifestContents = createSymlinkAction().getFileContentsAsString(reporter); + String manifestContents = createSymlinkAction().getFileContents(reporter); assertThat(manifestContents).containsMatch(".*TESTING/swig/__init__.py .*"); assertThat(manifestContents).containsMatch("fakeLib.so"); } @@ -219,7 +221,7 @@ public void testNoPythonOrSwigLibrariesDoNotTriggerInitDotPyInclusion() throws E Path nonPythonFile = scratch.file("not_python/blob_of_data"); Artifact nonPython = ActionsTestUtil.createArtifact(nonPythonPath, nonPythonFile); fakeManifest.put(nonPythonFile.relativeTo(rootDirectory), nonPython); - String manifestContents = createSymlinkAction().getFileContentsAsString(reporter); + String manifestContents = createSymlinkAction().getFileContents(reporter); 
assertThat(manifestContents).doesNotContain("not_python/__init__.py \n"); assertThat(manifestContents).containsMatch("blob_of_data"); } @@ -367,7 +369,7 @@ public void testUnresolvedSymlink() throws Exception { assertThat(inputs).isEqualTo(action.getInputs()); assertThat(inputs.toList()).isEqualTo(action.getInputs().toList()); - assertThat(action.getFileContentsAsString(reporter)) + assertThat(action.getFileContents(reporter)) .isEqualTo( "TESTING/BUILD /workspace/trivial/BUILD\n" + "TESTING/absolute_symlink /absolute/path\n" diff --git a/src/test/shell/integration/BUILD b/src/test/shell/integration/BUILD index 0a866cbd6b6c59..902eca78092354 100644 --- a/src/test/shell/integration/BUILD +++ b/src/test/shell/integration/BUILD @@ -320,6 +320,16 @@ sh_test( ], ) +sh_test( + name = "bazel_aquery_test", + size = "large", + srcs = ["bazel_aquery_test.sh"], + data = [ + ":test-deps", + "@bazel_tools//tools/bash/runfiles", + ], +) + sh_test( name = "bazel_command_log_test", size = "medium", diff --git a/src/test/shell/integration/bazel_aquery_test.sh b/src/test/shell/integration/bazel_aquery_test.sh new file mode 100755 index 00000000000000..14bd05ed58667d --- /dev/null +++ b/src/test/shell/integration/bazel_aquery_test.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# +# Copyright 2018 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# --- begin runfiles.bash initialization v2 --- +# Copy-pasted from the Bazel Bash runfiles library v2. 
+set -uo pipefail; f=bazel_tools/tools/bash/runfiles/runfiles.bash +source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \ + source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \ + source "$0.runfiles/$f" 2>/dev/null || \ + source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \ + source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \ + { echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e +# --- end runfiles.bash initialization v2 --- + +source "$(rlocation "io_bazel/src/test/shell/integration_test_setup.sh")" \ + || { echo "integration_test_setup.sh not found!" >&2; exit 1; } + +case "$(uname -s | tr [:upper:] [:lower:])" in +msys*|mingw*|cygwin*) + declare -r is_macos=false + declare -r is_windows=true + ;; +darwin) + declare -r is_macos=true + declare -r is_windows=false + ;; +*) + declare -r is_macos=false + declare -r is_windows=false + ;; +esac + +if "$is_windows"; then + export MSYS_NO_PATHCONV=1 + export MSYS2_ARG_CONV_EXCL="*" +fi + +add_to_bazelrc "build --package_path=%workspace%" + + +function test_repo_mapping_manifest() { + local pkg="${FUNCNAME[0]}" + local pkg2="${FUNCNAME[0]}_pkg2" + mkdir -p "$pkg" || fail "mkdir -p $pkg" + cat > "$pkg/WORKSPACE" < "$pkg/BUILD" < "$pkg2/BUILD" <output 2> "$TEST_log" || fail "Expected success" + cat output >> "$TEST_log" + assert_contains "^file_contents:.*pkg2,__main__,__main__" output + + bazel aquery --output=text --include_file_write_contents "//:foo" | \ + sed -nr '/Mnemonic: RepoMappingManifest/,/^ *$/p' >output \ + 2> "$TEST_log" || fail "Expected success" + cat output >> "$TEST_log" + assert_contains "^ *FileWriteContents: \[.*\]" output + # Verify file contents if we can decode base64-encoded data. 
+ if which base64 >/dev/null; then + sed -nr 's/^ *FileWriteContents: \[(.*)\]/echo \1 | base64 -d/p' output | \ + sh | tee -a "$TEST_log" | assert_contains "pkg2,__main__,__main__" - + fi +} + +run_suite "${PRODUCT_NAME} action graph query tests" From 1c32e87ed7cbde87add35f002763687dce7d1189 Mon Sep 17 00:00:00 2001 From: Tony Aiuto Date: Tue, 27 Jun 2023 22:55:15 -0700 Subject: [PATCH 18/68] Add SBOM generator - first cut This adds an `sbom` rule and uses it to produce one for Bazel. ``` bazel build //tools/compliance:all more bazel-bin/tools/compliance/bazel_sbom.json ``` This is no where near complete. It just establishes the rule and the helper to have an end to end path. One or more followup PRs will add more fields to the SBOM. Closes #18789. PiperOrigin-RevId: 543956376 Change-Id: I9ab593561e58cf4e6a4d2ba8ade3b00bcc947fe5 --- tools/compliance/BUILD | 19 ++++++ tools/compliance/sbom.bzl | 77 ++++++++++++++++++++++ tools/compliance/write_sbom.py | 115 +++++++++++++++++++++++++++++++++ 3 files changed, 211 insertions(+) create mode 100644 tools/compliance/sbom.bzl create mode 100644 tools/compliance/write_sbom.py diff --git a/tools/compliance/BUILD b/tools/compliance/BUILD index 7e3ea350886d8c..a5b56fc462b279 100644 --- a/tools/compliance/BUILD +++ b/tools/compliance/BUILD @@ -1,6 +1,13 @@ # Tools for gathering OSS licenses load(":gather_packages.bzl", "packages_used") +load(":sbom.bzl", "sbom") + +# TODO(aiuto): fix copybara transforms. 
+#package( +# default_package_metadata = ["//:license"], +# default_visibility = ["//visibility:private"], +#) licenses(["notice"]) # Apache 2.0 @@ -30,3 +37,15 @@ py_test( deps = [ ], ) + +sbom( + name = "bazel_sbom", + out = "bazel_sbom.json", + target = "//src:bazel_nojdk", +) + +py_binary( + name = "write_sbom_private", + srcs = ["write_sbom.py"], + main = "write_sbom.py", +) diff --git a/tools/compliance/sbom.bzl b/tools/compliance/sbom.bzl new file mode 100644 index 00000000000000..37465038e5b6b7 --- /dev/null +++ b/tools/compliance/sbom.bzl @@ -0,0 +1,77 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Generate an SBOM for a target.""" + +load(":gather_packages.bzl", "packages_used") + +def _sbom_impl(ctx): + # Gather all licenses and write information to one place + + # Now turn the big blob of data into something consumable. 
+ outputs = [ctx.outputs.out] + args = ctx.actions.args() + inps = ctx.files.packages_used + if len(inps) != 1: + fail("Got too many inputs for packages_used") + inputs = [inps[0]] + args.add("--packages_used", inps[0].path) + args.add("--out", ctx.outputs.out.path) + ctx.actions.run( + mnemonic = "CreateSBOM", + progress_message = "Creating SBOM for %s" % ctx.label, + inputs = inputs, + outputs = outputs, + executable = ctx.executable._sbom_generator, + arguments = [args], + ) + return [DefaultInfo(files = depset(outputs))] + +_sbom = rule( + implementation = _sbom_impl, + attrs = { + "packages_used": attr.label( + allow_files = True, + mandatory = True, + ), + "out": attr.output(mandatory = True), + "_sbom_generator": attr.label( + default = Label("//tools/compliance:write_sbom_private"), + executable = True, + allow_files = True, + cfg = "exec", + ), + }, +) + +def sbom(name, target, out = None): + """Wrapper for sbom rule. + + Args: + name: name + target: Target to create sbom for + out: output file name + """ + packages = "_packages_" + name + packages_used( + name = packages, + target = target, + out = packages + ".json", + ) + if not out: + out = name + "_sbom.json" + _sbom( + name = name, + out = out, + packages_used = ":" + packages + ".json", + ) diff --git a/tools/compliance/write_sbom.py b/tools/compliance/write_sbom.py new file mode 100644 index 00000000000000..fd7a004afc91ab --- /dev/null +++ b/tools/compliance/write_sbom.py @@ -0,0 +1,115 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""SBOM generator. + +This tool takes input from several sources and weaves together an SBOM. + +Inputs: + - the output of packages_used. This is a JSON block of license, package_info + and other declarations, plus a list of all remote packages referenced. + - TODO: the maven lock file + - FUTURE: other packgage lock files + - FUTURE: a user provided override of package URL to corrected information + +This tool is private to the sbom() rule. +""" + +import argparse +import datetime +import json + + +def create_sbom(package_info: dict) -> dict: # pylint: disable=g-bare-generic + """Creates a dict representing an SBOM. + + Args: + package_info: dict of data from packages_used output + Returns: + dict of SBOM data + """ + now = datetime.datetime.now(datetime.timezone.utc) + ret = { + "spdxVersion": "SPDX-2.3", + "dataLicense": "CC0-1.0", + "SPDXID": "SPDXRef-DOCUMENT", + "documentNamespace": ( + "https://spdx.google/be852459-4c54-4c50-9d2f-0e48890418fc" + ), + "name": package_info["top_level_target"], + "creationInfo": { + "licenseListVersion": "", + "creators": [ + "Tool: github.com/bazelbuild/bazel/tools/compliance/write_sbom", + "Organization: Google LLC", + ], + "created": now.isoformat(), + }, + } + + packages = [] + relationships = [] + + relationships.append({ + "spdxElementId": "SPDXRef-DOCUMENT", + "relatedSpdxElement": "SPDXRef-Package-main", + "relationshipType": "DESCRIBES" + }) + + for pkg in package_info["packages"]: + packages.append( + { + "name": pkg, + # TODO(aiuto): Fill in the rest + # "SPDXID": "SPDXRef-GooglePackage-4c7dc29872b9c418", + # "supplier": "Organization: Google LLC", + # "downloadLocation": "NOASSERTION", + # "licenseConcluded": "License-da09db95a268defe", + # "copyrightText": "" + } + ) + relationships.append( + { + "spdxElementId": "SPDXRef-Package-main", + # "relatedSpdxElement": "SPDXRef-GooglePackage-4c7dc29872b9c418", + 
"relationshipType": "CONTAINS", + } + ) + + ret["packages"] = packages + ret["relationships"] = relationships + return ret + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Helper for creating SBOMs", fromfile_prefix_chars="@" + ) + parser.add_argument( + "--out", required=True, help="The output file, mandatory." + ) + parser.add_argument( + "--packages_used", + required=True, + help="JSON list of transitive package data for a target", + ) + opts = parser.parse_args() + + with open(opts.packages_used, "rt", encoding="utf-8") as inp: + package_info = json.loads(inp.read()) + with open(opts.out, "w", encoding="utf-8") as out: + out.write(json.dumps(create_sbom(package_info), indent=2)) + + +if __name__ == "__main__": + main() From bdcd3110587359a03c372ad5187d18793ea262fd Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 00:47:03 -0700 Subject: [PATCH 19/68] RELNOTES[INC]: Remove deprecated and unused `--experimental_show_artifacts` flag. PiperOrigin-RevId: 543976073 Change-Id: Ia961518c6bc9f068d225ce9b8289d8ac8eab7b09 --- .../lib/bazel/rules/BazelRulesModule.java | 8 ++++ .../lib/buildtool/BuildRequestOptions.java | 12 ------ .../lib/buildtool/BuildResultPrinter.java | 37 ------------------- .../build/lib/buildtool/ExecutionTool.java | 10 ----- 4 files changed, 8 insertions(+), 59 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java index a0fd5fbb6df893..e494a37d720f23 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRulesModule.java @@ -395,6 +395,14 @@ public static class BuildGraveyardOptions extends OptionsBase { effectTags = {OptionEffectTag.UNKNOWN}, help = "No-op.") public boolean parallelAqueryOutput; + + @Option( + name = "experimental_show_artifacts", + defaultValue = "false", + 
documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, + effectTags = {OptionEffectTag.AFFECTS_OUTPUTS}, + help = "Deprecated no-op.") + public boolean showArtifacts; } /** This is where deprecated Bazel-specific options only used by the build command go to die. */ diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java b/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java index 540f86c151ba7a..6485cb0a2c3615 100644 --- a/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java +++ b/src/main/java/com/google/devtools/build/lib/buildtool/BuildRequestOptions.java @@ -203,18 +203,6 @@ public class BuildRequestOptions extends OptionsBase { + " under the threshold.") public int maxResultTargets; - @Option( - name = "experimental_show_artifacts", - defaultValue = "false", - documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, - effectTags = {OptionEffectTag.AFFECTS_OUTPUTS}, - help = - "Output a list of all top level artifacts produced by this build." - + "Use output format suitable for tool consumption. " - + "This flag is temporary and intended to facilitate Android Studio integration. 
" - + "This output format will likely change in the future or disappear completely.") - public boolean showArtifacts; - @Option( name = "announce", defaultValue = "false", diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/BuildResultPrinter.java b/src/main/java/com/google/devtools/build/lib/buildtool/BuildResultPrinter.java index b42577ec016867..03f03d4b3757d5 100644 --- a/src/main/java/com/google/devtools/build/lib/buildtool/BuildResultPrinter.java +++ b/src/main/java/com/google/devtools/build/lib/buildtool/BuildResultPrinter.java @@ -29,8 +29,6 @@ import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.analysis.configuredtargets.OutputFileConfiguredTarget; import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.collect.nestedset.NestedSet; -import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.exec.ExecutionOptions; import com.google.devtools.build.lib.runtime.BlazeRuntime; import com.google.devtools.build.lib.runtime.CommandEnvironment; @@ -360,41 +358,6 @@ private static String formatArtifactForShowResults( return " " + prettyPrinter.getPrettyPath(artifact.getPath().asFragment()); } - /** - * Prints a flat list of all artifacts built by the passed top-level targets. - * - *

      This corresponds to the --experimental_show_artifacts flag. - */ - void showArtifacts( - BuildRequest request, - Collection configuredTargets, - Collection aspects) { - - TopLevelArtifactContext context = request.getTopLevelArtifactContext(); - Collection targetsToPrint = filterTargetsToPrint(configuredTargets); - - NestedSetBuilder artifactsBuilder = NestedSetBuilder.stableOrder(); - targetsToPrint.forEach( - t -> - artifactsBuilder.addTransitive( - TopLevelArtifactHelper.getAllArtifactsToBuild(t, context).getImportantArtifacts())); - - aspects.forEach( - a -> - artifactsBuilder.addTransitive( - TopLevelArtifactHelper.getAllArtifactsToBuild(a, context).getImportantArtifacts())); - - OutErr outErr = request.getOutErr(); - outErr.printErrLn("Build artifacts:"); - - NestedSet artifacts = artifactsBuilder.build(); - for (Artifact artifact : artifacts.toList()) { - if (!artifact.isSourceArtifact()) { - outErr.printErrLn(">>>" + artifact.getPath()); - } - } - } - /** * Returns a list of configured targets that should participate in printing. 
* diff --git a/src/main/java/com/google/devtools/build/lib/buildtool/ExecutionTool.java b/src/main/java/com/google/devtools/build/lib/buildtool/ExecutionTool.java index 506c1875343b7a..3a871c2901af85 100644 --- a/src/main/java/com/google/devtools/build/lib/buildtool/ExecutionTool.java +++ b/src/main/java/com/google/devtools/build/lib/buildtool/ExecutionTool.java @@ -576,16 +576,6 @@ void nonCatastrophicFinalizations( buildResultListener.getAnalyzedAspects()); } - try (SilentCloseable c = Profiler.instance().profile("Show artifacts")) { - if (request.getBuildOptions().showArtifacts) { - BuildResultPrinter buildResultPrinter = new BuildResultPrinter(env); - buildResultPrinter.showArtifacts( - request, - buildResultListener.getAnalyzedTargets(), - buildResultListener.getAnalyzedAspects().values()); - } - } - if (explanationHandler != null) { uninstallExplanationHandler(explanationHandler); try { From 29086ee9738424ee935308f5daa0424e0b41e610 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 01:16:22 -0700 Subject: [PATCH 20/68] Add some extra info around cycle checking. 
PiperOrigin-RevId: 543981692 Change-Id: Ia8412b24b72e7b807883b6a614ea530543b62fa9 --- .../devtools/build/skyframe/ParallelEvaluator.java | 4 ++++ .../devtools/build/skyframe/SimpleCycleDetector.java | 10 ++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/google/devtools/build/skyframe/ParallelEvaluator.java b/src/main/java/com/google/devtools/build/skyframe/ParallelEvaluator.java index cd6bd0eacdd0aa..4e865c350416f5 100644 --- a/src/main/java/com/google/devtools/build/skyframe/ParallelEvaluator.java +++ b/src/main/java/com/google/devtools/build/skyframe/ParallelEvaluator.java @@ -216,6 +216,9 @@ private EvaluationResult waitForCompletionAndConstructRe "Scheduler exception only thrown for catastrophe in keep_going evaluation: %s", e); catastrophe = true; + // For b/287183296 + logger.atInfo().withCause(e).log( + "Catastrophic exception in --keep_going mode while evaluating SkyKey: %s", errorKey); } } Preconditions.checkState( @@ -549,6 +552,7 @@ private EvaluationResult constructResult( } } if (!cycleRoots.isEmpty()) { + logger.atInfo().log("Detecting cycles with roots: %s", cycleRoots); cycleDetector.checkForCycles(cycleRoots, result, evaluatorContext); } Preconditions.checkState( diff --git a/src/main/java/com/google/devtools/build/skyframe/SimpleCycleDetector.java b/src/main/java/com/google/devtools/build/skyframe/SimpleCycleDetector.java index e75ee88cb2c919..42280fa7057043 100644 --- a/src/main/java/com/google/devtools/build/skyframe/SimpleCycleDetector.java +++ b/src/main/java/com/google/devtools/build/skyframe/SimpleCycleDetector.java @@ -150,7 +150,7 @@ private static ErrorInfo checkForCycles(SkyKey root, ParallelEvaluatorContext ev // Find out which children have errors. Similar logic to that in Evaluate#run(). 
List errorDeps = getChildrenErrorsForCycle( - key, directDeps.getAllElementsAsIterable(), entry, evaluatorContext); + key, directDeps.getAllElementsAsIterable(), entry, evaluatorContext, removedDeps); checkState( !errorDeps.isEmpty(), "Node %s was not successfully evaluated, but had no child errors. NodeEntry: %s", @@ -366,7 +366,8 @@ private static List getChildrenErrorsForCycle( SkyKey parent, Iterable children, NodeEntry entryForDebugging, - ParallelEvaluatorContext evaluatorContext) + ParallelEvaluatorContext evaluatorContext, + Set removedDepsForDebugging) throws InterruptedException { List allErrors = new ArrayList<>(); boolean foundCycle = false; @@ -388,10 +389,11 @@ private static List getChildrenErrorsForCycle( } checkState( foundCycle, - "Key %s with entry %s had no cycle beneath it: %s", + "Key %s with entry %s had no cycle beneath it: %s; Removed deps: %s", parent, entryForDebugging, - allErrors); + allErrors, + removedDepsForDebugging); return allErrors; } From 15c412eb9aa38e1b81f7dd2047849bbb55417a83 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 05:12:00 -0700 Subject: [PATCH 21/68] Automated rollback of commit 52dbdc7a92cedfa212ef681f88e0b733cb5280e0. 
*** Reason for rollback *** https://github.com/bazelbuild/bazel/issues/18771 PiperOrigin-RevId: 544025702 Change-Id: I5c036cda4536f86088f259391cdb7c58ef04df6d --- .../lib/actions/ActionLookupKeyOrProxy.java | 1 - .../analysis/TransitiveDependencyState.java | 2 +- .../producers/AttributeConfiguration.java | 14 - .../producers/ConfigConditionsProducer.java | 17 +- .../ConfiguredTargetAndDataProducer.java | 13 +- .../producers/DependencyProducer.java | 24 -- .../producers/PrerequisiteParameters.java | 19 +- .../producers/PrerequisitesProducer.java | 26 +- .../TargetAndConfigurationProducer.java | 77 +++-- .../skyframe/ConfiguredTargetFunction.java | 8 +- .../lib/skyframe/ConfiguredTargetKey.java | 271 ++++++++++++++---- .../build/lib/skyframe/SkyframeExecutor.java | 5 +- .../google/devtools/build/lib/analysis/BUILD | 1 + .../analysis/ConfigurableAttributesTest.java | 71 +++++ .../analysis/util/BuildViewForTesting.java | 2 +- .../lib/skyframe/ConfiguredTargetKeyTest.java | 93 +++++- .../lib/starlark/StarlarkIntegrationTest.java | 6 +- 17 files changed, 474 insertions(+), 176 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/actions/ActionLookupKeyOrProxy.java b/src/main/java/com/google/devtools/build/lib/actions/ActionLookupKeyOrProxy.java index 48f76839214673..92b189cf5e8c3b 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/ActionLookupKeyOrProxy.java +++ b/src/main/java/com/google/devtools/build/lib/actions/ActionLookupKeyOrProxy.java @@ -29,7 +29,6 @@ * are subclasses of {@link ActionLookupKeyOrProxy}. This allows callers to easily find the value * key, while remaining agnostic to what action lookup values actually exist. */ -// TODO(b/261521010): this layer of indirection is no longer needed and may be cleaned up. 
public interface ActionLookupKeyOrProxy extends ArtifactOwner { /** * Returns the {@link BuildConfigurationKey} for the configuration associated with this key, or diff --git a/src/main/java/com/google/devtools/build/lib/analysis/TransitiveDependencyState.java b/src/main/java/com/google/devtools/build/lib/analysis/TransitiveDependencyState.java index 111b352f5ab504..ac9c872ea089cf 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/TransitiveDependencyState.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/TransitiveDependencyState.java @@ -62,7 +62,7 @@ public final class TransitiveDependencyState { * *

      More ideally, those properties would be conveyed via providers of those dependencies, but * doing so would adversely affect resting heap usage whereas {@link ConfiguredTargetAndData} is - * ephemeral. Distributed implementations will include these properties in an extra provider. It + * ephemeral. Distributed implementations will include these properties in an extra providers. It * won't affect memory because the underlying package won't exist on the node loading it remotely. * *

      It's valid to obtain {@link Package}s of dependencies from this map instead of creating an diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/AttributeConfiguration.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/AttributeConfiguration.java index 106f2ef3836c48..c927e3c38467a0 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/AttributeConfiguration.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/AttributeConfiguration.java @@ -28,13 +28,6 @@ enum Kind { * target is known, it should be verified to be a {@link PackageGroup}. */ VISIBILITY, - /** - * The configuration is null. - * - *

      This is only applied when the dependency is in the same package as the parent and it is - * not configurable. - */ - NULL_CONFIGURATION, /** * There is a single configuration. * @@ -53,8 +46,6 @@ enum Kind { abstract void visibility(); - abstract void nullConfiguration(); - abstract BuildConfigurationKey unary(); abstract ImmutableMap split(); @@ -62,7 +53,6 @@ enum Kind { public int count() { switch (kind()) { case VISIBILITY: - case NULL_CONFIGURATION: case UNARY: return 1; case SPLIT: @@ -75,10 +65,6 @@ static AttributeConfiguration ofVisibility() { return AutoOneOf_AttributeConfiguration.visibility(); } - static AttributeConfiguration ofNullConfiguration() { - return AutoOneOf_AttributeConfiguration.nullConfiguration(); - } - static AttributeConfiguration ofUnary(BuildConfigurationKey key) { return AutoOneOf_AttributeConfiguration.unary(key); } diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfigConditionsProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfigConditionsProducer.java index 7b71751f9bd179..c9d91a559ce06a 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfigConditionsProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfigConditionsProducer.java @@ -14,7 +14,7 @@ package com.google.devtools.build.lib.analysis.producers; import com.google.common.collect.ImmutableMap; -import com.google.devtools.build.lib.analysis.InconsistentNullConfigException; +import com.google.devtools.build.lib.analysis.InvalidVisibilityDependencyException; import com.google.devtools.build.lib.analysis.TargetAndConfiguration; import com.google.devtools.build.lib.analysis.TransitiveDependencyState; import com.google.devtools.build.lib.analysis.config.ConfigConditions; @@ -130,12 +130,17 @@ public void acceptConfiguredTargetAndDataError(ConfiguredValueCreationException } @Override - public void 
acceptConfiguredTargetAndDataError(InconsistentNullConfigException error) { - // A config label was evaluated with a null configuration. This should never happen as - // ConfigConditions are only present if the parent is a Rule, then always evaluated with the - // parent configuration. + public void acceptConfiguredTargetAndDataError(InvalidVisibilityDependencyException error) { + // After removing the rule transition from dependency resolution, a ConfiguredTargetKey in + // Skyframe with a null BuildConfigurationKey will only be used to request visibility + // dependencies. This will never be the case for ConfigConditions, which are always requested + // with the parent configuration. At the moment, nothing throws + // InvalidVisibilityDependencyException. + // + // TODO(b/261521010): update this comment once rule transitions are removed from dependency + // resolution. throw new IllegalArgumentException( - "ConfigCondition dependency should never be evaluated with a null configuration.", error); + "ConfigCondition dependency should never be marked visibility.", error); } private StateMachine constructConfigConditions(Tasks tasks, ExtendedEventHandler listener) { diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfiguredTargetAndDataProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfiguredTargetAndDataProducer.java index 50c475a1824509..2ca4a755ff50b2 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfiguredTargetAndDataProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/ConfiguredTargetAndDataProducer.java @@ -16,7 +16,7 @@ import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.ConfiguredTargetValue; -import com.google.devtools.build.lib.analysis.InconsistentNullConfigException; +import 
com.google.devtools.build.lib.analysis.InvalidVisibilityDependencyException; import com.google.devtools.build.lib.analysis.TransitiveDependencyState; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.events.ExtendedEventHandler; @@ -42,14 +42,14 @@ public final class ConfiguredTargetAndDataProducer implements StateMachine, Consumer, StateMachine.ValueOrException2Sink< - ConfiguredValueCreationException, InconsistentNullConfigException> { + ConfiguredValueCreationException, InvalidVisibilityDependencyException> { /** Interface for accepting values produced by this class. */ public interface ResultSink { void acceptConfiguredTargetAndData(ConfiguredTargetAndData value, int index); void acceptConfiguredTargetAndDataError(ConfiguredValueCreationException error); - void acceptConfiguredTargetAndDataError(InconsistentNullConfigException error); + void acceptConfiguredTargetAndDataError(InvalidVisibilityDependencyException error); } // -------------------- Input -------------------- @@ -86,8 +86,9 @@ public StateMachine step(Tasks tasks, ExtendedEventHandler listener) { tasks.lookUp( key.toKey(), ConfiguredValueCreationException.class, - InconsistentNullConfigException.class, - (ValueOrException2Sink) + InvalidVisibilityDependencyException.class, + (ValueOrException2Sink< + ConfiguredValueCreationException, InvalidVisibilityDependencyException>) this); return this::fetchConfigurationAndPackage; } @@ -96,7 +97,7 @@ public StateMachine step(Tasks tasks, ExtendedEventHandler listener) { public void acceptValueOrException2( @Nullable SkyValue value, @Nullable ConfiguredValueCreationException error, - @Nullable InconsistentNullConfigException visibilityError) { + @Nullable InvalidVisibilityDependencyException visibilityError) { if (value != null) { var configuredTargetValue = (ConfiguredTargetValue) value; this.configuredTarget = configuredTargetValue.getConfiguredTarget(); diff --git 
a/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java index e6bddce74310ef..2940409889ba9d 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/DependencyProducer.java @@ -30,16 +30,11 @@ import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.analysis.config.DependencyEvaluationException; import com.google.devtools.build.lib.analysis.starlark.StarlarkTransition.TransitionException; -import com.google.devtools.build.lib.causes.LoadingFailedCause; import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.packages.Aspect; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.AttributeTransitionData; -import com.google.devtools.build.lib.packages.NoSuchTargetException; -import com.google.devtools.build.lib.packages.Target; -import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.skyframe.BuildConfigurationKey; import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; import com.google.devtools.build.lib.skyframe.ConfiguredValueCreationException; @@ -117,25 +112,6 @@ public StateMachine step(Tasks tasks, ExtendedEventHandler listener) { AttributeConfiguration.ofVisibility(), /* executionPlatformLabel= */ null); } - Target parentTarget = parameters.target(); - if (parentTarget.getLabel().getPackageIdentifier().equals(toLabel.getPackageIdentifier())) { - try { - Target toTarget = parentTarget.getPackage().getTarget(toLabel.getName()); - if (!toTarget.isConfigurable()) { - return computePrerequisites( - 
AttributeConfiguration.ofNullConfiguration(), /* executionPlatformLabel= */ null); - } - } catch (NoSuchTargetException e) { - parameters - .transitiveState() - .addTransitiveCause(new LoadingFailedCause(toLabel, e.getDetailedExitCode())); - listener.handle( - Event.error( - TargetUtils.getLocationMaybe(parentTarget), - TargetUtils.formatMissingEdge(parentTarget, toLabel, e, kind.getAttribute()))); - } - } - // The logic of `DependencyResolver.computeDependencyLabels` implies that // `parameters.configurationKey()` is non-null for everything that follows. BuildConfigurationKey configurationKey = checkNotNull(parameters.configurationKey()); diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java index 7ab5fd1300888c..4cdd28db3de305 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisiteParameters.java @@ -25,7 +25,6 @@ import com.google.devtools.build.lib.packages.Aspect; import com.google.devtools.build.lib.packages.ConfiguredAttributeMapper; import com.google.devtools.build.lib.packages.Rule; -import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.skyframe.BuildConfigurationKey; import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; import javax.annotation.Nullable; @@ -34,7 +33,7 @@ /** Common parameters for computing prerequisites. 
*/ public final class PrerequisiteParameters { private final ConfiguredTargetKey configuredTargetKey; - private final Target target; + @Nullable private final Rule associatedRule; private final ImmutableList aspects; private final StarlarkTransitionCache transitionCache; @@ -45,14 +44,14 @@ public final class PrerequisiteParameters { public PrerequisiteParameters( ConfiguredTargetKey configuredTargetKey, - Target target, + @Nullable Rule associatedRule, Iterable aspects, StarlarkTransitionCache transitionCache, @Nullable ToolchainCollection toolchainContexts, @Nullable ConfiguredAttributeMapper attributeMap, TransitiveDependencyState transitiveState) { this.configuredTargetKey = configuredTargetKey; - this.target = target; + this.associatedRule = associatedRule; this.aspects = ImmutableList.copyOf(aspects); this.transitionCache = transitionCache; this.toolchainContexts = toolchainContexts; @@ -64,13 +63,9 @@ public Label label() { return configuredTargetKey.getLabel(); } - public Target target() { - return target; - } - @Nullable public Rule associatedRule() { - return target.getAssociatedRule(); + return associatedRule; } @Nullable @@ -96,8 +91,12 @@ public ConfiguredAttributeMapper attributeMap() { return attributeMap; } + @Nullable public Location location() { - return target.getLocation(); + if (associatedRule == null) { + return null; + } + return associatedRule.getLocation(); } public BuildEventId eventId() { diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisitesProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisitesProducer.java index 43a286440cb2d9..295eedb29706f3 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisitesProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/PrerequisitesProducer.java @@ -15,7 +15,6 @@ import static com.google.common.base.Preconditions.checkState; import static 
com.google.devtools.build.lib.analysis.AspectResolutionHelpers.computeAspectCollection; -import static com.google.devtools.build.lib.analysis.producers.AttributeConfiguration.Kind.VISIBILITY; import static com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData.SPLIT_DEP_ORDERING; import static java.util.Arrays.sort; @@ -23,11 +22,9 @@ import com.google.devtools.build.lib.analysis.AspectCollection; import com.google.devtools.build.lib.analysis.DuplicateException; import com.google.devtools.build.lib.analysis.InconsistentAspectOrderException; -import com.google.devtools.build.lib.analysis.InconsistentNullConfigException; import com.google.devtools.build.lib.analysis.InvalidVisibilityDependencyException; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.analysis.config.DependencyEvaluationException; -import com.google.devtools.build.lib.analysis.configuredtargets.PackageGroupConfiguredTarget; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.packages.Aspect; @@ -104,7 +101,6 @@ interface ResultSink { public StateMachine step(Tasks tasks, ExtendedEventHandler listener) { switch (configuration.kind()) { case VISIBILITY: - case NULL_CONFIGURATION: tasks.enqueue( new ConfiguredTargetAndDataProducer( getPrerequisiteKey(/* configurationKey= */ null), @@ -145,18 +141,9 @@ public void acceptConfiguredTargetAndData(ConfiguredTargetAndData value, int ind } @Override - public void acceptConfiguredTargetAndDataError(InconsistentNullConfigException error) { + public void acceptConfiguredTargetAndDataError(InvalidVisibilityDependencyException error) { hasError = true; - if (configuration.kind() == VISIBILITY) { - // The target was configurable, but used as a visibility dependency. This is invalid because - // only `PackageGroup`s are accepted as visibility dependencies and those are not - // configurable. 
Propagates the exception with more precise information. - sink.acceptPrerequisitesError(new InvalidVisibilityDependencyException(label)); - return; - } - // `configuration.kind()` was `NULL_CONFIGURATION`. This is only used when the target is in the - // same package as the parent and not configurable so this should never happen. - throw new IllegalStateException(error); + sink.acceptPrerequisitesError(error); } @Override @@ -170,15 +157,6 @@ private StateMachine computeConfiguredAspects(Tasks tasks, ExtendedEventHandler return DONE; } - if (configuration.kind() == VISIBILITY) { - // Verifies that the dependency is a `package_group`. The value is always at index 0 because - // the `VISIBILITY` configuration is always unary. - if (!(configuredTargets[0].getConfiguredTarget() instanceof PackageGroupConfiguredTarget)) { - sink.acceptPrerequisitesError(new InvalidVisibilityDependencyException(label)); - return DONE; - } - } - cleanupValues(); AspectCollection aspects; diff --git a/src/main/java/com/google/devtools/build/lib/analysis/producers/TargetAndConfigurationProducer.java b/src/main/java/com/google/devtools/build/lib/analysis/producers/TargetAndConfigurationProducer.java index b64d5fa7c0a507..c8fdc99f0de7c1 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/producers/TargetAndConfigurationProducer.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/producers/TargetAndConfigurationProducer.java @@ -22,6 +22,7 @@ import com.google.devtools.build.lib.actions.ActionLookupKey; import com.google.devtools.build.lib.analysis.ConfiguredTargetValue; import com.google.devtools.build.lib.analysis.InconsistentNullConfigException; +import com.google.devtools.build.lib.analysis.InvalidVisibilityDependencyException; import com.google.devtools.build.lib.analysis.TargetAndConfiguration; import com.google.devtools.build.lib.analysis.TransitiveDependencyState; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; @@ -34,6 +35,7 @@ 
import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.packages.NoSuchTargetException; +import com.google.devtools.build.lib.packages.PackageGroup; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.RuleTransitionData; import com.google.devtools.build.lib.packages.Target; @@ -52,8 +54,13 @@ * Computes the target and configuration for a configured target key. * *

      If the key has a configuration and the target is configurable, attempts to apply a rule side - * transition. If the configuration changes, delegates to a target with the new configuration. If - * the target is not configurable, directly delegates to the null configuration. + * transition. If the target is not configurable, directly transitions to the null configuration. If + * the resulting configuration already has an owner, delegates to the owner instead of recomputing + * the configured target. + * + *

      If the key does not have a configuration, it was requested as a visibility dependency. + * Verifies that the {@link Target} is a {@link PackageGroup}, throwing {@link + * InvalidVisibilityDependencyException} if that's not the case. */ public final class TargetAndConfigurationProducer implements StateMachine, Consumer, TargetProducer.ResultSink { @@ -72,6 +79,7 @@ public abstract static class TargetAndConfigurationError { /** Tags the error type. */ public enum Kind { CONFIGURED_VALUE_CREATION, + INVALID_VISIBILITY_DEPENDENCY, INCONSISTENT_NULL_CONFIG } @@ -79,6 +87,8 @@ public enum Kind { public abstract ConfiguredValueCreationException configuredValueCreation(); + public abstract InvalidVisibilityDependencyException invalidVisibilityDependency(); + public abstract InconsistentNullConfigException inconsistentNullConfig(); private static TargetAndConfigurationError of(ConfiguredValueCreationException e) { @@ -86,6 +96,15 @@ private static TargetAndConfigurationError of(ConfiguredValueCreationException e .configuredValueCreation(e); } + // TODO(b/261521010): enable this error once Rule transitions are removed from dependency + // resolution. + // private static TargetAndConfigurationError of(InvalidVisibilityDependencyException e) { + // return AutoOneOf_TargetAndConfigurationProducer_TargetAndConfigurationError + // .invalidVisibilityDependency(e); + // } + + // TODO(b/261521010): delete this error once Rule transitions are removed from dependency + // resolution. private static TargetAndConfigurationError of(InconsistentNullConfigException e) { return AutoOneOf_TargetAndConfigurationProducer_TargetAndConfigurationError .inconsistentNullConfig(e); @@ -154,29 +173,41 @@ private StateMachine determineConfiguration(Tasks tasks, ExtendedEventHandler li if (configurationKey == null) { if (target.isConfigurable()) { // We somehow ended up in a target that requires a non-null configuration but with a key - // that doesn't have a configuration. 
This is always an error, but we need to bubble this - // up to the parent to provide more context. + // that doesn't have a configuration. This is always an error, but we need to analyze the + // dependencies of the latter target to realize that. Short-circuit the evaluation to avoid + // doing useless work and running code with a null configuration that's not prepared for it. sink.acceptTargetAndConfigurationError( TargetAndConfigurationError.of(new InconsistentNullConfigException())); return DONE; } + // TODO(b/261521010): after removing the rule transition from dependency resolution, the logic + // here changes. + // + // A null configuration key will only be used for visibility dependencies so when that's + // true, a check that the target is a PackageGroup will be performed, throwing + // InvalidVisibilityDependencyException on failure. + // + // The ConfiguredTargetKey cannot fan-in in this case. sink.acceptTargetAndConfiguration( new TargetAndConfiguration(target, /* configuration= */ null), preRuleTransitionKey); return DONE; } + // This may happen for top-level ConfiguredTargets. + // + // TODO(b/261521010): this may also happen for targets that are not top-level after removing + // rule transitions from dependency resolution. Update this comment. if (!target.isConfigurable()) { - // If target is not configurable, but requested with a configuration. Delegates to a key with - // the null configuration. This is expected to be uncommon. The common case of a - // non-configurable target is an input file, but those are usually package local and requested - // correctly with the null configuration. 
- delegateTo( - tasks, - ConfiguredTargetKey.builder() - .setLabel(preRuleTransitionKey.getLabel()) - .setExecutionPlatformLabel(preRuleTransitionKey.getExecutionPlatformLabel()) - .build() - .toKey()); + var nullConfiguredTargetKey = + ConfiguredTargetKey.builder().setDelegate(preRuleTransitionKey).build(); + ActionLookupKey delegate = nullConfiguredTargetKey.toKey(); + if (!delegate.equals(preRuleTransitionKey)) { + // Delegates to the key that already owns the null configuration. + delegateTo(tasks, delegate); + return DONE; + } + sink.acceptTargetAndConfiguration( + new TargetAndConfiguration(target, /* configuration= */ null), nullConfiguredTargetKey); return DONE; } @@ -236,15 +267,17 @@ private StateMachine processTransitionedKey(Tasks tasks, ExtendedEventHandler li } if (!configurationKey.equals(preRuleTransitionKey.getConfigurationKey())) { - delegateTo( - tasks, + fullKey = ConfiguredTargetKey.builder() - .setLabel(preRuleTransitionKey.getLabel()) - .setExecutionPlatformLabel(preRuleTransitionKey.getExecutionPlatformLabel()) + .setDelegate(preRuleTransitionKey) .setConfigurationKey(configurationKey) - .build() - .toKey()); - return DONE; + .build(); + ActionLookupKey delegate = fullKey.toKey(); + if (!delegate.equals(preRuleTransitionKey)) { + // Delegates to the key that already owns this configuration. 
+ delegateTo(tasks, delegate); + return DONE; + } } else { fullKey = preRuleTransitionKey; } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetFunction.java index 9b9756ce198f20..8fa7842a22039d 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetFunction.java @@ -228,6 +228,7 @@ public SkyValue compute(SkyKey key, Environment env) throws ReportedException, UnreportedException, DependencyException, InterruptedException { State state = env.getState(() -> new State(storeTransitivePackages)); ConfiguredTargetKey configuredTargetKey = (ConfiguredTargetKey) key.argument(); + Preconditions.checkArgument(!configuredTargetKey.isProxy(), configuredTargetKey); SkyframeBuildView view = buildViewProvider.getSkyframeBuildView(); if (shouldUnblockCpuWorkWhenFetchingDeps) { @@ -248,9 +249,6 @@ public SkyValue compute(SkyKey key, Environment env) } catch (InconsistentNullConfigException e) { // TODO(b/267529852): see if we can remove this. It's not clear the conditions that trigger // InconsistentNullConfigException are even possible. - // - // TODO(b/261521010): propagate this exception once the parent side rule transition is - // deleted. The caller should handle it correctly. return new NonRuleConfiguredTargetValue( new EmptyConfiguredTarget(configuredTargetKey), computeDependenciesState.transitivePackages()); @@ -509,6 +507,10 @@ private void computeTargetAndConfiguration( storedEvents.handle(Event.error(e.getLocation(), e.getMessage())); } throw new ReportedException(e); + case INVALID_VISIBILITY_DEPENDENCY: + // Bubbles the error up to the parent ConfiguredTargetFunction where it will be reported + // with additional context. 
+ throw new DependencyException(error.invalidVisibilityDependency()); case INCONSISTENT_NULL_CONFIG: throw error.inconsistentNullConfig(); } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKey.java b/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKey.java index 513221089fae90..e2282048cbbc6e 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKey.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKey.java @@ -14,11 +14,13 @@ package com.google.devtools.build.lib.skyframe; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.devtools.build.lib.util.HashCodes.hashObjects; import com.google.common.base.MoreObjects; import com.google.devtools.build.lib.actions.ActionLookupKey; +import com.google.devtools.build.lib.actions.ActionLookupKeyOrProxy; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.cmdline.Label; @@ -41,9 +43,17 @@ * dependency resolution and the rule analysis. * *

      In practice, a ({@link Label} and post-transition {@link BuildConfigurationKey}) pair plus a - * possible execution platform override {@link Label} with special constraints described as follows. + * possible execution platform override {@link Label} with special constraints. To elaborate, in + * order of highest to lowest potential for concern: * - *

      A build should not request keys with equal ({@link Label}, {@link BuildConfigurationValue}) + *

      1. The {@link BuildConfigurationKey} must be post-transition and thus ready for immediate use + * in dependency resolution and analysis. In practice, this means that if the rule has an + * incoming-edge transition (cfg in {@link RuleClass}) or there are global trimming transitions, + * THOSE TRANSITIONS MUST ALREADY BE DONE before creating the key. Failure to do so will lead to + * build graphs with ConfiguredTarget that have seemingly impossible {@link BuildConfigurationValue} + * (due to the skipped transitions). + * + *

      2. A build should not request keys with equal ({@link Label}, {@link BuildConfigurationValue}) * pairs but different execution platform override {@link Label} if the invoked rule will register * actions. (This is potentially OK if all outputs of all registered actions incorporate the * execution platform in their name unless the build also requests keys without an override that @@ -53,40 +63,32 @@ * *

      Note that this key may be used to look up the generating action of an artifact. * + *

      The {@link ConfiguredTargetKey} is not a {@link SkyKey} and must be cast to one using {@link + * ActionLookupKeyOrProxy#toKey}. + * *

      TODO(blaze-configurability-team): Consider just using BuildOptions over a * BuildConfigurationKey. */ -public class ConfiguredTargetKey implements ActionLookupKey { +public abstract class ConfiguredTargetKey implements ActionLookupKeyOrProxy { /** * Cache so that the number of ConfiguredTargetKey instances is {@code O(configured targets)} and * not {@code O(edges between configured targets)}. */ - private static final SkyKey.SkyKeyInterner interner = SkyKey.newInterner(); + private static final SkyKey.SkyKeyInterner interner = SkyKey.newInterner(); - private final Label label; @Nullable private final BuildConfigurationKey configurationKey; private final transient int hashCode; - private ConfiguredTargetKey( - Label label, @Nullable BuildConfigurationKey configurationKey, int hashCode) { - this.label = label; + private ConfiguredTargetKey(@Nullable BuildConfigurationKey configurationKey, int hashCode) { this.configurationKey = configurationKey; this.hashCode = hashCode; } - @Override - public final SkyFunctionName functionName() { - return SkyFunctions.CONFIGURED_TARGET; - } - - @Override - public SkyKeyInterner getSkyKeyInterner() { - return interner; - } - - @Override - public Label getLabel() { - return label; + public Builder toBuilder() { + return builder() + .setConfigurationKey(configurationKey) + .setLabel(getLabel()) + .setExecutionPlatformLabel(getExecutionPlatformLabel()); } @Nullable @@ -95,26 +97,22 @@ public final BuildConfigurationKey getConfigurationKey() { return configurationKey; } - @Override - public final ConfiguredTargetKey toKey() { - return this; - } + public abstract Label getExecutionPlatformLabel(); - @Nullable - public Label getExecutionPlatformLabel() { - return null; + @Override + public final int hashCode() { + return hashCode; } - public final String prettyPrint() { - if (getLabel() == null) { - return "null"; - } - return String.format("%s (%s)", getLabel(), formatConfigurationKey(configurationKey)); + public boolean isProxy() 
{ + return false; } - @Override - public final int hashCode() { - return hashCode; + private static int computeHashCode( + Label label, + @Nullable BuildConfigurationKey configurationKey, + @Nullable Label executionPlatformLabel) { + return hashObjects(label, configurationKey, executionPlatformLabel); } @Override @@ -132,8 +130,19 @@ && getLabel().equals(other.getLabel()) && Objects.equals(getExecutionPlatformLabel(), other.getExecutionPlatformLabel()); } + public String prettyPrint() { + if (getLabel() == null) { + return "null"; + } + return String.format("%s (%s)", getLabel(), formatConfigurationKey(configurationKey)); + } + + private static ConfiguredTargetKey intern(ConfiguredTargetKey key) { + return (ConfiguredTargetKey) interner.intern((SkyKey) key); + } + @Override - public final String toString() { + public String toString() { // TODO(b/162809183): consider reverting to less verbose toString when bug is resolved. MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this).add("label", getLabel()).add("config", configurationKey); @@ -143,7 +152,47 @@ public final String toString() { return helper.toString(); } - private static final class ToolchainDependencyConfiguredTargetKey extends ConfiguredTargetKey { + private static final class RealConfiguredTargetKey extends ConfiguredTargetKey + implements ActionLookupKey { + private final Label label; + + private RealConfiguredTargetKey( + Label label, @Nullable BuildConfigurationKey configurationKey, int hashCode) { + super(configurationKey, hashCode); + this.label = label; + } + + static ConfiguredTargetKey create( + Label label, @Nullable BuildConfigurationKey configurationKey) { + int hashCode = computeHashCode(label, configurationKey, /* executionPlatformLabel= */ null); + return intern(new RealConfiguredTargetKey(label, configurationKey, hashCode)); + } + + @Override + public final SkyFunctionName functionName() { + return SkyFunctions.CONFIGURED_TARGET; + } + + @Override + public SkyKeyInterner 
getSkyKeyInterner() { + return interner; + } + + @Override + public Label getLabel() { + return label; + } + + @Nullable + @Override + public Label getExecutionPlatformLabel() { + return null; + } + } + + private static final class ToolchainDependencyConfiguredTargetKey extends ConfiguredTargetKey + implements ActionLookupKey { + private final Label label; private final Label executionPlatformLabel; private ToolchainDependencyConfiguredTargetKey( @@ -151,21 +200,119 @@ private ToolchainDependencyConfiguredTargetKey( @Nullable BuildConfigurationKey configurationKey, int hashCode, Label executionPlatformLabel) { - super(label, configurationKey, hashCode); + super(configurationKey, hashCode); + this.label = label; this.executionPlatformLabel = checkNotNull(executionPlatformLabel); } + private static ConfiguredTargetKey create( + Label label, + @Nullable BuildConfigurationKey configurationKey, + Label executionPlatformLabel) { + int hashCode = computeHashCode(label, configurationKey, executionPlatformLabel); + return intern( + new ToolchainDependencyConfiguredTargetKey( + label, configurationKey, hashCode, executionPlatformLabel)); + } + + @Override + public SkyFunctionName functionName() { + return SkyFunctions.CONFIGURED_TARGET; + } + + @Override + public Label getLabel() { + return label; + } + @Override public Label getExecutionPlatformLabel() { return executionPlatformLabel; } + + @Override + public SkyKeyInterner getSkyKeyInterner() { + return interner; + } } - public Builder toBuilder() { - return builder() - .setConfigurationKey(configurationKey) - .setLabel(getLabel()) - .setExecutionPlatformLabel(getExecutionPlatformLabel()); + // This class implements SkyKey only so that it can share the interner. It should never be used as + // a SkyKey. 
+ private static final class ProxyConfiguredTargetKey extends ConfiguredTargetKey + implements SkyKey { + private final ConfiguredTargetKey delegate; + + private static ConfiguredTargetKey create( + ConfiguredTargetKey delegate, @Nullable BuildConfigurationKey configurationKey) { + int hashCode = + computeHashCode( + delegate.getLabel(), configurationKey, delegate.getExecutionPlatformLabel()); + return intern(new ProxyConfiguredTargetKey(delegate, configurationKey, hashCode)); + } + + private ProxyConfiguredTargetKey( + ConfiguredTargetKey delegate, + @Nullable BuildConfigurationKey configurationKey, + int hashCode) { + super(configurationKey, hashCode); + checkArgument( + !delegate.isProxy(), "Proxy keys must not be nested: %s %s", delegate, configurationKey); + this.delegate = delegate; + } + + @Override + public SkyFunctionName functionName() { + // ProxyConfiguredTargetKey is never used directly by Skyframe. It must always be cast using + // toKey. + throw new UnsupportedOperationException(); + } + + @Override + public Label getLabel() { + return delegate.getLabel(); + } + + @Override + @Nullable + public Label getExecutionPlatformLabel() { + return delegate.getExecutionPlatformLabel(); + } + + @Override + public ActionLookupKey toKey() { + return (ActionLookupKey) delegate; + } + + @Override + public boolean isProxy() { + return true; + } + + @Override + public Builder toBuilder() { + return new Builder().setDelegate(delegate).setConfigurationKey(getConfigurationKey()); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("delegate", delegate) + .add("config", getConfigurationKey()) + .toString(); + } + + @Override + public String prettyPrint() { + return super.prettyPrint() + + " virtual(" + + formatConfigurationKey(getConfigurationKey()) + + ")"; + } + + @Override + public SkyKeyInterner getSkyKeyInterner() { + return interner; + } } /** Returns a new {@link Builder} to create instances of {@link 
ConfiguredTargetKey}. */ @@ -188,6 +335,7 @@ public static final class Builder { private Label label = null; private BuildConfigurationKey configurationKey = null; private Label executionPlatformLabel = null; + private ConfiguredTargetKey delegate; private Builder() {} @@ -221,24 +369,33 @@ public Builder setExecutionPlatformLabel(@Nullable Label executionPlatformLabel) return this; } + /** + * If set, creates a {@link ProxyConfiguredTargetKey}. + * + *

      It's invalid to set a label or execution platform label if this is set. Those will be + * defined by the corresponding values of {@code delegate}. + */ + @CanIgnoreReturnValue + public Builder setDelegate(ConfiguredTargetKey delegate) { + this.delegate = delegate; + return this; + } + /** Builds a new {@link ConfiguredTargetKey} based on the supplied data. */ public ConfiguredTargetKey build() { - int hashCode = computeHashCode(label, configurationKey, executionPlatformLabel); - return interner.intern( - executionPlatformLabel == null - ? new ConfiguredTargetKey(label, configurationKey, hashCode) - : new ToolchainDependencyConfiguredTargetKey( - label, configurationKey, hashCode, executionPlatformLabel)); + if (this.delegate != null) { + checkArgument(label == null); + checkArgument(executionPlatformLabel == null); + return ProxyConfiguredTargetKey.create(delegate, configurationKey); + } + if (this.executionPlatformLabel != null) { + return ToolchainDependencyConfiguredTargetKey.create( + label, configurationKey, executionPlatformLabel); + } + return RealConfiguredTargetKey.create(label, configurationKey); } } - private static int computeHashCode( - Label label, - @Nullable BuildConfigurationKey configurationKey, - @Nullable Label executionPlatformLabel) { - return hashObjects(label, configurationKey, executionPlatformLabel); - } - private static String formatConfigurationKey(@Nullable BuildConfigurationKey key) { if (key == null) { return "null"; diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java index 95febd9e950cb5..656117e4c225c4 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java @@ -88,7 +88,7 @@ import com.google.devtools.build.lib.analysis.ConfiguredTargetValue; import com.google.devtools.build.lib.analysis.Dependency; 
import com.google.devtools.build.lib.analysis.DependencyKind; -import com.google.devtools.build.lib.analysis.InconsistentNullConfigException; +import com.google.devtools.build.lib.analysis.InvalidVisibilityDependencyException; import com.google.devtools.build.lib.analysis.PlatformOptions; import com.google.devtools.build.lib.analysis.TargetAndConfiguration; import com.google.devtools.build.lib.analysis.TargetConfiguredEvent; @@ -3886,7 +3886,8 @@ public void acceptConfiguredTargetAndData(ConfiguredTargetAndData value, int ind public void acceptConfiguredTargetAndDataError(ConfiguredValueCreationException error) {} @Override - public void acceptConfiguredTargetAndDataError(InconsistentNullConfigException error) {} + public void acceptConfiguredTargetAndDataError( + InvalidVisibilityDependencyException error) {} }; EvaluationResult result; diff --git a/src/test/java/com/google/devtools/build/lib/analysis/BUILD b/src/test/java/com/google/devtools/build/lib/analysis/BUILD index 9c4dff27ffa6a8..aa6e0028a31696 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/BUILD +++ b/src/test/java/com/google/devtools/build/lib/analysis/BUILD @@ -101,6 +101,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/analysis:config/transitions/transition_collector", "//src/main/java/com/google/devtools/build/lib/analysis:config/transitions/transition_factory", "//src/main/java/com/google/devtools/build/lib/analysis:configured_target", + "//src/main/java/com/google/devtools/build/lib/analysis:configured_target_value", "//src/main/java/com/google/devtools/build/lib/analysis:dependency", "//src/main/java/com/google/devtools/build/lib/analysis:dependency_key", "//src/main/java/com/google/devtools/build/lib/analysis:dependency_kind", diff --git a/src/test/java/com/google/devtools/build/lib/analysis/ConfigurableAttributesTest.java b/src/test/java/com/google/devtools/build/lib/analysis/ConfigurableAttributesTest.java index ef6be77deebae6..280593f8edbf3d 100644 --- 
a/src/test/java/com/google/devtools/build/lib/analysis/ConfigurableAttributesTest.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/ConfigurableAttributesTest.java @@ -13,6 +13,7 @@ // limitations under the License. package com.google.devtools.build.lib.analysis; +import static com.google.common.testing.GcFinalization.awaitClear; import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.packages.Attribute.attr; import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST; @@ -32,9 +33,12 @@ import com.google.devtools.build.lib.packages.RuleClass.ToolchainResolutionMode; import com.google.devtools.build.lib.packages.Type; import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; +import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; +import com.google.devtools.build.lib.skyframe.SkyframeExecutorWrappingWalkableGraph; import com.google.devtools.build.lib.testutil.TestRuleClassProvider; import com.google.devtools.build.lib.util.FileTypeSet; import java.io.IOException; +import java.lang.ref.WeakReference; import java.util.Collection; import java.util.Set; import org.junit.Before; @@ -1927,4 +1931,71 @@ public void selectWithLabelKeysInMacro() throws Exception { /*expected:*/ ImmutableList.of("bin java/foo/libb.jar", "bin java/foo/libb2.jar"), /*not expected:*/ ImmutableList.of("bin java/foo/liba.jar", "bin java/foo/liba2.jar")); } + + @Test + public void proxyKeysAreRetained() throws Exception { + // This test case verifies that when a ProxyConfiguredTargetKey is created, it is retained. 
+ scratch.file( + "conditions/BUILD", + "constraint_setting(name = 'animal')", + "constraint_value(name = 'manatee', constraint_setting = 'animal')", + "constraint_value(name = 'koala', constraint_setting = 'animal')", + "platform(", + " name = 'manatee_platform',", + " constraint_values = [':manatee'],", + ")", + "platform(", + " name = 'koala_platform',", + " constraint_values = [':koala'],", + ")"); + scratch.file( + "check/BUILD", + "filegroup(name = 'adep', srcs = ['afile'])", + "filegroup(name = 'bdep', srcs = ['bfile'])", + "filegroup(name = 'hello',", + " srcs = select({", + " '//conditions:manatee': [':adep'],", + " '//conditions:koala': [':bdep'],", + " }))"); + + useConfiguration("--experimental_platforms=//conditions:manatee_platform"); + ConfiguredTarget hello = getConfiguredTarget("//check:hello"); + + var koalaLabel = Label.parseCanonical("//conditions:koala"); + + // Shakes the interner to try to get any non-strongly reachable keys to fall out. This should + // cause the ProxyConfiguredTargetKey created for "//conditions:koala" to fall out if it's not + // otherwise retained. + // + // Creates and inserts a canary key into the interner that can be used to detect eviction of + // weak keys. + var canaryKey = new WeakReference<>(ConfiguredTargetKey.builder().setLabel(koalaLabel).build()); + awaitClear(canaryKey); + // Once we get here we know that the canaryKey is no longer in the weak interner. Due to the + // collection properties of weak references, that implies the interner now has no weakly + // reachable keys at all. + + // Since //conditions:koala is a ConfigCondition, so it would be requested by //check:hello + // using //check:hello's configuration. + var koalaOwner = + ConfiguredTargetKey.builder() + .setLabel(koalaLabel) + .setConfigurationKey(hello.getConfigurationKey()) + .build(); + // Uses a WalkableGraph lookup to ensure there is an existing //conditions:koala instance that + // was created using koalaOwner. 
+ var walkableGraph = SkyframeExecutorWrappingWalkableGraph.of(skyframeExecutor); + var koala = (ConfiguredTargetValue) walkableGraph.getValue(koalaOwner.toKey()); + assertThat(koala).isNotNull(); + + // constraint_value has a NoConfigTransition rule transition so a corresponding proxy key + // should exist. + ConfiguredTargetKey koalaKey = + ConfiguredTargetKey.builder() + .setLabel(koalaLabel) + .setConfigurationKey(koala.getConfiguredTarget().getConfigurationKey()) + .build(); + assertThat(koalaKey.isProxy()).isTrue(); + assertThat(koalaKey.toKey()).isEqualTo(koalaOwner); + } } diff --git a/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewForTesting.java b/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewForTesting.java index 8b2ad5419fb708..91e7395e9688ff 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewForTesting.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewForTesting.java @@ -425,7 +425,7 @@ private OrderedSetMultimap getPrerequis eventHandler, new PrerequisiteParameters( ConfiguredTargetKey.fromConfiguredTarget(target), - state.targetAndConfiguration.getTarget(), + state.targetAndConfiguration.getTarget().getAssociatedRule(), /* aspects= */ ImmutableList.of(), skyframeBuildView.getStarlarkTransitionCache(), toolchainContexts, diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKeyTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKeyTest.java index c391e943715024..c44c5b4b2d3e80 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKeyTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/ConfiguredTargetKeyTest.java @@ -13,24 +13,106 @@ // limitations under the License. 
package com.google.devtools.build.lib.skyframe; +import static com.google.common.truth.Truth.assertThat; + +import com.google.common.collect.ImmutableList; +import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.BuildOptions.MapBackedChecksumCache; import com.google.devtools.build.lib.analysis.config.BuildOptions.OptionsChecksumCache; +import com.google.devtools.build.lib.analysis.config.CoreOptions; import com.google.devtools.build.lib.analysis.util.BuildViewTestCase; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.skyframe.serialization.testutils.SerializationTester; +import com.google.testing.junit.testparameterinjector.TestParameter; import com.google.testing.junit.testparameterinjector.TestParameterInjector; +import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(TestParameterInjector.class) public final class ConfiguredTargetKeyTest extends BuildViewTestCase { + private static final AtomicInteger nextId = new AtomicInteger(); + + @Test + public void testDelegation( + @TestParameter boolean useNullConfig, @TestParameter boolean isToolchainKey) { + var baseKey = createKey(useNullConfig, isToolchainKey); + + assertThat(baseKey.isProxy()).isFalse(); + assertThat(baseKey.toKey()).isSameInstanceAs(baseKey); + + BuildConfigurationKey newConfigurationKey = getNewUniqueConfigurationKey(); + var delegatingKey = + ConfiguredTargetKey.builder() + .setDelegate(baseKey) + .setConfigurationKey(newConfigurationKey) + .build(); + assertThat(delegatingKey.isProxy()).isTrue(); + assertThat(delegatingKey.toKey()).isSameInstanceAs(baseKey); + assertThat(delegatingKey.getLabel()).isSameInstanceAs(baseKey.getLabel()); + assertThat(delegatingKey.getConfigurationKey()).isSameInstanceAs(newConfigurationKey); + assertThat(delegatingKey.getExecutionPlatformLabel()) + .isSameInstanceAs(baseKey.getExecutionPlatformLabel()); + 
+ // Building a key with the same parameters as the delegating key returns the delegating key. + var similarKey = + ConfiguredTargetKey.builder() + .setLabel(delegatingKey.getLabel()) + .setConfigurationKey(delegatingKey.getConfigurationKey()) + .setExecutionPlatformLabel(delegatingKey.getExecutionPlatformLabel()) + .build(); + assertThat(similarKey).isSameInstanceAs(delegatingKey); + } + + @Test + public void existingKey_inhibitsDelegation( + @TestParameter boolean useNullConfig, @TestParameter boolean isToolchainKey) { + var baseKey = createKey(useNullConfig, isToolchainKey); + + BuildConfigurationKey newConfigurationKey = getNewUniqueConfigurationKey(); + + var existingKey = + ConfiguredTargetKey.builder() + .setLabel(baseKey.getLabel()) + .setConfigurationKey(newConfigurationKey) + .setExecutionPlatformLabel(baseKey.getExecutionPlatformLabel()) + .build(); + + var delegatingKey = + ConfiguredTargetKey.builder() + .setDelegate(baseKey) + .setConfigurationKey(newConfigurationKey) + .build(); + + assertThat(delegatingKey).isSameInstanceAs(existingKey); + } + @Test public void testCodec() throws Exception { var nullConfigKey = createKey(/* useNullConfig= */ true, /* isToolchainKey= */ false); var keyWithConfig = createKey(/* useNullConfig= */ false, /* isToolchainKey= */ false); var toolchainKey = createKey(/* useNullConfig= */ false, /* isToolchainKey= */ true); - new SerializationTester(nullConfigKey, keyWithConfig, toolchainKey) + var delegatingToNullConfig = + ConfiguredTargetKey.builder() + .setDelegate(nullConfigKey) + .setConfigurationKey(targetConfigKey) + .build(); + var delegatingToKeyWithConfig = + ConfiguredTargetKey.builder().setDelegate(keyWithConfig).build(); + var delegatingToToolchainKey = + ConfiguredTargetKey.builder() + .setDelegate(toolchainKey) + .setConfigurationKey(getNewUniqueConfigurationKey()) + .build(); + + new SerializationTester( + nullConfigKey, + keyWithConfig, + toolchainKey, + delegatingToNullConfig, + delegatingToKeyWithConfig, + 
delegatingToToolchainKey) .addDependency(OptionsChecksumCache.class, new MapBackedChecksumCache()) .runTests(); } @@ -45,4 +127,13 @@ private ConfiguredTargetKey createKey(boolean useNullConfig, boolean isToolchain } return key.build(); } + + private BuildConfigurationKey getNewUniqueConfigurationKey() { + BuildOptions newOptions = targetConfigKey.getOptions().clone(); + var coreOptions = newOptions.get(CoreOptions.class); + coreOptions.affectedByStarlarkTransition = + ImmutableList.of("//fake:id" + nextId.getAndIncrement()); + assertThat(newOptions.checksum()).isNotEqualTo(targetConfigKey.getOptions().checksum()); + return BuildConfigurationKey.withoutPlatformMapping(newOptions); + } } diff --git a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkIntegrationTest.java b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkIntegrationTest.java index b698efac1d597f..ee5f77ff6760be 100644 --- a/src/test/java/com/google/devtools/build/lib/starlark/StarlarkIntegrationTest.java +++ b/src/test/java/com/google/devtools/build/lib/starlark/StarlarkIntegrationTest.java @@ -2908,10 +2908,8 @@ public void testPrintFromTransitionImpl() throws Exception { scratch.file( "test/rules.bzl", "def _transition_impl(settings, attr):", - " foo = settings['//command_line_option:foo']", - " print('printing from transition impl', foo)", - " foo = foo if foo.endswith('meowmeowmeow') else foo + 'meow'", - " return {'//command_line_option:foo': foo}", + " print('printing from transition impl', settings['//command_line_option:foo'])", + " return {'//command_line_option:foo': " + "settings['//command_line_option:foo']+'meow'}", "my_transition = transition(", " implementation = _transition_impl,", " inputs = ['//command_line_option:foo'],", From 1a2b792e580ec2e1d23f22e16f23b3f957d2acbe Mon Sep 17 00:00:00 2001 From: Chi Wang Date: Wed, 28 Jun 2023 05:52:55 -0700 Subject: [PATCH 22/68] Implement lease extension Add flag `--experimental_remote_cache_lease_extension`, which when 
set, Bazel will create a background thread periodically sending `FindMissingBlobs` requests to CAS during the build. 1. All the outputs that were not downloaded are within the scope of lease extension. The outputs are acquired from skyframe by traversing the action graph. 2. Lease extension starts after any action was built and ends after execution phase ended. The frequency is related to `--experimental_remote_cache_ttl`. 3. Lease extensions are performed on action basis, not by collecting all outputs and issue one giant `FindMissingBlobs`. - Collecting all outputs might increase memory watermark and cause OOM. - Sending one `FindMissingBlobs` request per action may increase the overhead of network roundtrip, but the cost should be saturated given that the lease extension happens at background and is not wall time critical. 4. For an incremental build, the same applies: lease extension starts after any action was executed. - We don't want lease extension blocking action execution, nor affecting build performance. - Since we have TTL based cache discarding, any expired blobs will be discarded. - Leases of blobs that are not downloaded, still used by this build (because they are referenced by skyframe) will be extended as normal. Part of #16660. Closes #17944. 
PiperOrigin-RevId: 544032753 Change-Id: Iafe8b96c48abbb2e67302cd7a2f06f97ab43f825 --- .../build/lib/actions/ActionCacheUtils.java | 14 + .../build/lib/actions/FileArtifactValue.java | 14 +- .../google/devtools/build/lib/remote/BUILD | 4 + .../build/lib/remote/LeaseService.java | 41 ++- .../lib/remote/RemoteLeaseExtension.java | 282 ++++++++++++++++++ .../build/lib/remote/RemoteModule.java | 15 +- .../build/lib/remote/RemoteOutputService.java | 14 +- .../lib/remote/options/RemoteOptions.java | 11 + .../google/devtools/build/lib/remote/BUILD | 1 + .../BuildWithoutTheBytesIntegrationTest.java | 59 ++++ .../lib/remote/util/IntegrationTestUtils.java | 4 + 11 files changed, 449 insertions(+), 10 deletions(-) create mode 100644 src/main/java/com/google/devtools/build/lib/remote/RemoteLeaseExtension.java diff --git a/src/main/java/com/google/devtools/build/lib/actions/ActionCacheUtils.java b/src/main/java/com/google/devtools/build/lib/actions/ActionCacheUtils.java index 49065220a98869..a7e6b7bd93fe4a 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/ActionCacheUtils.java +++ b/src/main/java/com/google/devtools/build/lib/actions/ActionCacheUtils.java @@ -14,12 +14,26 @@ package com.google.devtools.build.lib.actions; import com.google.devtools.build.lib.actions.cache.ActionCache; +import java.util.AbstractMap.SimpleEntry; +import java.util.Map.Entry; import javax.annotation.Nullable; /** Utility functions for {@link ActionCache}. */ public class ActionCacheUtils { private ActionCacheUtils() {} + @Nullable + public static Entry getCacheEntryWithKey( + ActionCache actionCache, Action action) { + for (Artifact output : action.getOutputs()) { + ActionCache.Entry entry = actionCache.get(output.getExecPathString()); + if (entry != null) { + return new SimpleEntry<>(output.getExecPathString(), entry); + } + } + return null; + } + /** Checks whether one of existing output paths is already used as a key. 
*/ @Nullable public static ActionCache.Entry getCacheEntry(ActionCache actionCache, Action action) { diff --git a/src/main/java/com/google/devtools/build/lib/actions/FileArtifactValue.java b/src/main/java/com/google/devtools/build/lib/actions/FileArtifactValue.java index 39936bb605002d..44413c283563b6 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/FileArtifactValue.java +++ b/src/main/java/com/google/devtools/build/lib/actions/FileArtifactValue.java @@ -627,6 +627,12 @@ public long getExpireAtEpochMilli() { return -1; } + /** + * Extends the expiration time for this metadata. If it was constructed without known expiration + * time (i.e. expireAtEpochMilli < 0), this extension does nothing. + */ + public void extendExpireAtEpochMilli(long expireAtEpochMilli) {} + public boolean isAlive(Instant now) { return true; } @@ -655,7 +661,7 @@ public final String toString() { /** A remote artifact that expires at a particular time. */ private static final class RemoteFileArtifactValueWithExpiration extends RemoteFileArtifactValue { - private final long expireAtEpochMilli; + private long expireAtEpochMilli; private RemoteFileArtifactValueWithExpiration( byte[] digest, @@ -672,6 +678,12 @@ public long getExpireAtEpochMilli() { return expireAtEpochMilli; } + @Override + public void extendExpireAtEpochMilli(long expireAtEpochMilli) { + Preconditions.checkState(expireAtEpochMilli > this.expireAtEpochMilli); + this.expireAtEpochMilli = expireAtEpochMilli; + } + @Override public boolean isAlive(Instant now) { return now.toEpochMilli() < expireAtEpochMilli; diff --git a/src/main/java/com/google/devtools/build/lib/remote/BUILD b/src/main/java/com/google/devtools/build/lib/remote/BUILD index f600dd196b5705..5f43aa85253d8d 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/BUILD +++ b/src/main/java/com/google/devtools/build/lib/remote/BUILD @@ -56,6 +56,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib:runtime/command_line_path_factory", 
"//src/main/java/com/google/devtools/build/lib/actions", "//src/main/java/com/google/devtools/build/lib/actions:action_input_helper", + "//src/main/java/com/google/devtools/build/lib/actions:action_lookup_data", "//src/main/java/com/google/devtools/build/lib/actions:artifacts", "//src/main/java/com/google/devtools/build/lib/actions:execution_requirements", "//src/main/java/com/google/devtools/build/lib/actions:file_metadata", @@ -99,7 +100,9 @@ java_library( "//src/main/java/com/google/devtools/build/lib/remote/options", "//src/main/java/com/google/devtools/build/lib/remote/util", "//src/main/java/com/google/devtools/build/lib/remote/zstd", + "//src/main/java/com/google/devtools/build/lib/skyframe:action_execution_value", "//src/main/java/com/google/devtools/build/lib/skyframe:mutable_supplier", + "//src/main/java/com/google/devtools/build/lib/skyframe:sky_functions", "//src/main/java/com/google/devtools/build/lib/skyframe:tree_artifact_value", "//src/main/java/com/google/devtools/build/lib/util", "//src/main/java/com/google/devtools/build/lib/util:abrupt_exit_exception", @@ -116,6 +119,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/worker:worker_options", "//src/main/java/com/google/devtools/build/lib/worker:worker_spawn_runner", "//src/main/java/com/google/devtools/build/skyframe", + "//src/main/java/com/google/devtools/build/skyframe:skyframe-objects", "//src/main/java/com/google/devtools/common/options", "//src/main/protobuf:failure_details_java_proto", "//src/main/protobuf:spawn_java_proto", diff --git a/src/main/java/com/google/devtools/build/lib/remote/LeaseService.java b/src/main/java/com/google/devtools/build/lib/remote/LeaseService.java index 0e1d65d628e645..f9ef0e05530082 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/LeaseService.java +++ b/src/main/java/com/google/devtools/build/lib/remote/LeaseService.java @@ -21,31 +21,60 @@ import com.google.devtools.build.lib.skyframe.TreeArtifactValue; import 
com.google.devtools.build.skyframe.MemoizingEvaluator; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import javax.annotation.Nullable; /** A lease service that manages the lease of remote blobs. */ public class LeaseService { private final MemoizingEvaluator memoizingEvaluator; @Nullable private final ActionCache actionCache; + private final AtomicBoolean leaseExtensionStarted = new AtomicBoolean(false); + @Nullable LeaseExtension leaseExtension; - public LeaseService(MemoizingEvaluator memoizingEvaluator, @Nullable ActionCache actionCache) { + public LeaseService( + MemoizingEvaluator memoizingEvaluator, + @Nullable ActionCache actionCache, + @Nullable LeaseExtension leaseExtension) { this.memoizingEvaluator = memoizingEvaluator; this.actionCache = actionCache; + this.leaseExtension = leaseExtension; } - /** Clean up internal state when files are evicted from remote CAS. */ - public void handleMissingInputs(Set missingActionInputs) { - if (missingActionInputs.isEmpty()) { - return; + public void finalizeAction() { + if (leaseExtensionStarted.compareAndSet(false, true)) { + if (leaseExtension != null) { + leaseExtension.start(); + } + } + } + + public void finalizeExecution(Set missingActionInputs) { + if (leaseExtension != null) { + leaseExtension.stop(); + } + + if (!missingActionInputs.isEmpty()) { + handleMissingInputs(); } + } + + /** + * An interface whose implementations extend the leases of remote outputs referenced by skyframe. + */ + public interface LeaseExtension { + void start(); + + void stop(); + } + /** Clean up internal state when files are evicted from remote CAS. */ + private void handleMissingInputs() { // If any outputs are evicted, remove all remote metadata from skyframe and local action cache. // // With TTL based discarding and lease extension, remote cache eviction error won't happen if // remote cache can guarantee the TTL. 
However, if it happens, it usually means the remote cache // is under high load and it could possibly evict more blobs that Bazel wouldn't aware of. // Following builds could still fail for the same error (caused by different blobs). - memoizingEvaluator.delete( key -> { if (key.functionName().equals(SkyFunctions.ACTION_EXECUTION)) { diff --git a/src/main/java/com/google/devtools/build/lib/remote/RemoteLeaseExtension.java b/src/main/java/com/google/devtools/build/lib/remote/RemoteLeaseExtension.java new file mode 100644 index 00000000000000..507d97ee06014c --- /dev/null +++ b/src/main/java/com/google/devtools/build/lib/remote/RemoteLeaseExtension.java @@ -0,0 +1,282 @@ +// Copyright 2023 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package com.google.devtools.build.lib.remote; + +import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.RequestMetadata; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.flogger.GoogleLogger; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.devtools.build.lib.actions.Action; +import com.google.devtools.build.lib.actions.ActionCacheUtils; +import com.google.devtools.build.lib.actions.ActionLookupData; +import com.google.devtools.build.lib.actions.ActionLookupValue; +import com.google.devtools.build.lib.actions.Artifact; +import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact; +import com.google.devtools.build.lib.actions.FileArtifactValue; +import com.google.devtools.build.lib.actions.FileArtifactValue.RemoteFileArtifactValue; +import com.google.devtools.build.lib.actions.cache.ActionCache; +import com.google.devtools.build.lib.actions.cache.ActionCache.Entry; +import com.google.devtools.build.lib.actions.cache.ActionCache.Entry.SerializableTreeArtifactValue; +import com.google.devtools.build.lib.concurrent.ExecutorUtil; +import com.google.devtools.build.lib.profiler.Profiler; +import com.google.devtools.build.lib.remote.LeaseService.LeaseExtension; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; +import com.google.devtools.build.lib.remote.util.DigestUtil; +import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; +import com.google.devtools.build.lib.skyframe.ActionExecutionValue; +import com.google.devtools.build.lib.skyframe.SkyFunctions; +import com.google.devtools.build.skyframe.MemoizingEvaluator; +import 
com.google.devtools.build.skyframe.SkyFunctionName; +import com.google.devtools.build.skyframe.SkyKey; +import com.google.devtools.build.skyframe.SkyValue; +import java.io.IOException; +import java.time.Duration; +import java.time.Instant; +import java.util.Map; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Predicate; +import javax.annotation.Nullable; + +/** A {@link LeaseExtension} implementation that uses REAPI. */ +public class RemoteLeaseExtension implements LeaseExtension { + private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); + private static final Predicate ACTION_FILTER = + SkyFunctionName.functionIs(SkyFunctions.ACTION_EXECUTION); + + private final ScheduledExecutorService scheduledExecutor = + Executors.newSingleThreadScheduledExecutor( + new ThreadFactoryBuilder().setNameFormat("lease-extension-%d").build()); + + private final ReentrantLock lock = new ReentrantLock(); + + private final MemoizingEvaluator memoizingEvaluator; + @Nullable private final ActionCache actionCache; + private final RemoteCache remoteCache; + private final Duration remoteCacheTtl; + private final RemoteActionExecutionContext context; + + public RemoteLeaseExtension( + MemoizingEvaluator memoizingEvaluator, + @Nullable ActionCache actionCache, + String buildRequestId, + String commandId, + RemoteCache remoteCache, + Duration remoteCacheTtl) { + this.memoizingEvaluator = memoizingEvaluator; + this.actionCache = actionCache; + this.remoteCache = remoteCache; + this.remoteCacheTtl = remoteCacheTtl; + RequestMetadata requestMetadata = + TracingMetadataUtils.buildMetadata(buildRequestId, commandId, "lease-extension", null); + this.context = RemoteActionExecutionContext.create(requestMetadata); + } + + @Override + public void start() { + // Immediately extend leases for outputs that are already known to skyframe. 
For clean build, + // the set of outputs is empty. For incremental build, it contains outputs that were not + // invalidated after skyframe's dirtiness check. + var unused = scheduledExecutor.schedule(this::extendLeases, 0, MILLISECONDS); + } + + private void extendLeases() { + // Acquire the lock to prevent multiple doExtendLeases() running. + lock.lock(); + try (var silentCloseable = Profiler.instance().profile("doExtendLeases")) { + doExtendLeases(); + } finally { + lock.unlock(); + } + } + + private void doExtendLeases() { + var valuesMap = memoizingEvaluator.getValues(); + // We will extend leases for all known outputs so the earliest time when one output could be + // expired is (now + ttl). + var earliestExpiration = Instant.now().plus(remoteCacheTtl); + + try { + for (var entry : valuesMap.entrySet()) { + SkyKey key = entry.getKey(); + SkyValue value = entry.getValue(); + if (value != null && ACTION_FILTER.test(key)) { + var action = getActionFromSkyKey(key); + var actionExecutionValue = (ActionExecutionValue) value; + var remoteFiles = collectRemoteFiles(actionExecutionValue); + if (!remoteFiles.isEmpty()) { + // Lease extensions are performed on action basis, not by collecting all outputs and + // issue one giant `FindMissingBlobs` call to avoid increasing memory footprint. Since + // this happens in the background, increased network calls are acceptable. + try (var silentCloseable1 = Profiler.instance().profile(action.describe())) { + extendLeaseForAction(action, remoteFiles, earliestExpiration.toEpochMilli()); + } + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return; + } catch (Throwable e) { + logger.atWarning().withCause(e).log("Failed to extend the lease"); + } + + // Only extend the leases again when one of the outputs is about to expire. 
+ var now = Instant.now(); + Duration delay; + if (earliestExpiration.isAfter(now)) { + delay = Duration.between(now, earliestExpiration); + } else { + delay = Duration.ZERO; + } + var unused = scheduledExecutor.schedule(this::extendLeases, delay.toMillis(), MILLISECONDS); + } + + private static boolean isRemoteMetadataWithTtl(FileArtifactValue metadata) { + return metadata.isRemote() && ((RemoteFileArtifactValue) metadata).getExpireAtEpochMilli() >= 0; + } + + private ImmutableList> collectRemoteFiles( + ActionExecutionValue actionExecutionValue) { + var result = ImmutableList.>builder(); + for (var entry : actionExecutionValue.getAllFileValues().entrySet()) { + if (isRemoteMetadataWithTtl(entry.getValue())) { + result.add(entry); + } + } + + for (var treeMetadata : actionExecutionValue.getAllTreeArtifactValues().values()) { + for (var entry : treeMetadata.getChildValues().entrySet()) { + if (isRemoteMetadataWithTtl(entry.getValue())) { + result.add(entry); + } + } + } + + return result.build(); + } + + /** Returns {@code true} iff the outputs of the action */ + private void extendLeaseForAction( + Action action, + ImmutableList> remoteFiles, + long expireAtEpochMilli) + throws IOException, InterruptedException { + ImmutableSet missingDigests; + try (var silentCloseable = Profiler.instance().profile("findMissingDigests")) { + // We assume remote server will extend the leases for all referenced blobs by a + // FindMissingBlobs call. + missingDigests = + getFromFuture( + remoteCache.findMissingDigests( + context, + Iterables.transform( + remoteFiles, remoteFile -> buildDigest(remoteFile.getValue())))); + } + + var token = getActionCacheToken(action); + for (var remoteFile : remoteFiles) { + var artifact = remoteFile.getKey(); + var metadata = (RemoteFileArtifactValue) remoteFile.getValue(); + // Only extend the lease for the remote output if it is still alive remotely. 
+ if (!missingDigests.contains(buildDigest(metadata))) { + metadata.extendExpireAtEpochMilli(expireAtEpochMilli); + if (token != null) { + if (artifact instanceof TreeFileArtifact) { + token.extendOutputTreeFile((TreeFileArtifact) artifact, expireAtEpochMilli); + } else { + token.extendOutputFile(artifact, expireAtEpochMilli); + } + } + } + } + + if (actionCache != null && token != null && token.dirty) { + // Only update the action cache entry if the token was updated because it usually involves + // serialization. + actionCache.put(token.key, token.entry); + } + } + + @Override + public void stop() { + if (ExecutorUtil.uninterruptibleShutdownNow(scheduledExecutor)) { + Thread.currentThread().interrupt(); + } + } + + private static Digest buildDigest(FileArtifactValue metadata) { + return DigestUtil.buildDigest(metadata.getDigest(), metadata.getSize()); + } + + private Action getActionFromSkyKey(SkyKey key) throws InterruptedException { + var actionLookupData = (ActionLookupData) key.argument(); + var actionLookupValue = + (ActionLookupValue) + checkNotNull( + memoizingEvaluator.getExistingValue(actionLookupData.getActionLookupKey().toKey())); + return actionLookupValue.getAction(actionLookupData.getActionIndex()); + } + + @Nullable + private ActionCacheToken getActionCacheToken(Action action) { + if (actionCache != null) { + var actionCacheEntryWithKey = ActionCacheUtils.getCacheEntryWithKey(actionCache, action); + if (actionCacheEntryWithKey != null) { + return new ActionCacheToken( + actionCacheEntryWithKey.getKey(), actionCacheEntryWithKey.getValue()); + } + } + + return null; + } + + private static class ActionCacheToken { + final String key; + final ActionCache.Entry entry; + private boolean dirty; + + ActionCacheToken(String key, Entry entry) { + this.key = key; + this.entry = entry; + } + + void extendOutputFile(Artifact artifact, long expireAtEpochMilli) { + var metadata = entry.getOutputFile(artifact); + if (metadata != null) { + 
metadata.extendExpireAtEpochMilli(expireAtEpochMilli); + dirty = true; + } + } + + void extendOutputTreeFile(TreeFileArtifact treeFile, long expireAtEpochMilli) { + SerializableTreeArtifactValue treeMetadata = entry.getOutputTree(treeFile.getParent()); + if (treeMetadata != null) { + var metadata = treeMetadata.childValues().get(treeFile.getTreeRelativePathString()); + if (metadata != null) { + metadata.extendExpireAtEpochMilli(expireAtEpochMilli); + dirty = true; + } + } + } + } +} diff --git a/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java b/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java index 0ced5278475a2f..2c3930733d33a5 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java +++ b/src/main/java/com/google/devtools/build/lib/remote/RemoteModule.java @@ -62,6 +62,7 @@ import com.google.devtools.build.lib.exec.ModuleActionContextRegistry; import com.google.devtools.build.lib.exec.SpawnStrategyRegistry; import com.google.devtools.build.lib.profiler.Profiler; +import com.google.devtools.build.lib.remote.LeaseService.LeaseExtension; import com.google.devtools.build.lib.remote.RemoteServerCapabilities.ServerCapabilitiesRequirement; import com.google.devtools.build.lib.remote.circuitbreaker.CircuitBreakerFactory; import com.google.devtools.build.lib.remote.common.RemoteCacheClient; @@ -1026,10 +1027,22 @@ public void executorInit(CommandEnvironment env, BuildRequest request, ExecutorB builder.setActionInputPrefetcher(actionInputFetcher); actionContextProvider.setActionInputFetcher(actionInputFetcher); + LeaseExtension leaseExtension = null; + if (remoteOptions.remoteCacheLeaseExtension) { + leaseExtension = + new RemoteLeaseExtension( + env.getSkyframeExecutor().getEvaluator(), + env.getBlazeWorkspace().getPersistentActionCache(), + env.getBuildRequestId(), + env.getCommandId().toString(), + actionContextProvider.getRemoteCache(), + remoteOptions.remoteCacheTtl); + } var leaseService = new 
LeaseService( env.getSkyframeExecutor().getEvaluator(), - env.getBlazeWorkspace().getPersistentActionCache()); + env.getBlazeWorkspace().getPersistentActionCache(), + leaseExtension); remoteOutputService.setRemoteOutputChecker(remoteOutputChecker); remoteOutputService.setActionInputFetcher(actionInputFetcher); diff --git a/src/main/java/com/google/devtools/build/lib/remote/RemoteOutputService.java b/src/main/java/com/google/devtools/build/lib/remote/RemoteOutputService.java index afee4118704c83..da4f5c0cf45ad9 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/RemoteOutputService.java +++ b/src/main/java/com/google/devtools/build/lib/remote/RemoteOutputService.java @@ -19,9 +19,11 @@ import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.Subscribe; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionExecutionMetadata; +import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionInputMap; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactPathResolver; @@ -133,8 +135,12 @@ public void finalizeBuild(boolean buildSuccessful) { @Subscribe public void onExecutionPhaseCompleteEvent(ExecutionPhaseCompleteEvent event) { - if (leaseService != null && actionInputFetcher != null) { - leaseService.handleMissingInputs(actionInputFetcher.getMissingActionInputs()); + if (leaseService != null) { + var missingActionInputs = ImmutableSet.of(); + if (actionInputFetcher != null) { + missingActionInputs = actionInputFetcher.getMissingActionInputs(); + } + leaseService.finalizeExecution(missingActionInputs); } } @@ -150,6 +156,10 @@ public void finalizeAction(Action action, OutputMetadataStore outputMetadataStor if (actionInputFetcher != null) { 
actionInputFetcher.finalizeAction(action, outputMetadataStore); } + + if (leaseService != null) { + leaseService.finalizeAction(); + } } @Override diff --git a/src/main/java/com/google/devtools/build/lib/remote/options/RemoteOptions.java b/src/main/java/com/google/devtools/build/lib/remote/options/RemoteOptions.java index 6f3fe9176b957f..022fec36748f95 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/options/RemoteOptions.java +++ b/src/main/java/com/google/devtools/build/lib/remote/options/RemoteOptions.java @@ -687,6 +687,17 @@ public RemoteOutputsStrategyConverter() { + " seconds.") public Duration remoteFailureWindowInterval; + @Option( + name = "experimental_remote_cache_lease_extension", + defaultValue = "false", + documentationCategory = OptionDocumentationCategory.REMOTE, + effectTags = {OptionEffectTag.UNKNOWN}, + help = + "If set to true, Bazel will extend the lease for outputs of remote actions during the" + + " build by sending `FindMissingBlobs` calls periodically to remote cache. The" + + " frequency is based on the value of `--experimental_remote_cache_ttl`.") + public boolean remoteCacheLeaseExtension; + // The below options are not configurable by users, only tests. // This is part of the effort to reduce the overall number of flags. 
diff --git a/src/test/java/com/google/devtools/build/lib/remote/BUILD b/src/test/java/com/google/devtools/build/lib/remote/BUILD index 2f3da6d42a7562..58f0345d83c454 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/BUILD +++ b/src/test/java/com/google/devtools/build/lib/remote/BUILD @@ -169,6 +169,7 @@ java_test( "//src/main/java/com/google/devtools/build/lib/authandtls/credentialhelper:credential_module", "//src/main/java/com/google/devtools/build/lib/dynamic", "//src/main/java/com/google/devtools/build/lib/remote", + "//src/main/java/com/google/devtools/build/lib/remote/util", "//src/main/java/com/google/devtools/build/lib/standalone", "//src/main/java/com/google/devtools/build/lib/util:os", "//src/main/java/com/google/devtools/build/lib/vfs", diff --git a/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTest.java b/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTest.java index ffecdfb109b082..412bdd0bc90e21 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTest.java +++ b/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTest.java @@ -22,10 +22,12 @@ import static org.junit.Assume.assumeFalse; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.BuildFailedException; import com.google.devtools.build.lib.authandtls.credentialhelper.CredentialModule; import com.google.devtools.build.lib.dynamic.DynamicExecutionModule; +import com.google.devtools.build.lib.remote.util.DigestUtil; import com.google.devtools.build.lib.remote.util.IntegrationTestUtils.WorkerInstance; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.BlazeRuntime; @@ -796,4 +798,61 @@ public void downloadToplevel_symlinkTree() throws Exception { 
assertValidOutputFile("foo-link/file-2", "2"); assertValidOutputFile("foo-link/file-3", "3"); } + + @Test + public void leaseExtension() throws Exception { + // Test that Bazel will extend the leases for remote output by sending FindMissingBlobs calls + // periodically to remote server. The test assumes remote server will set mtime of referenced + // blobs to `now`. + write( + "BUILD", + "genrule(", + " name = 'foo',", + " srcs = [],", + " outs = ['out/foo.txt'],", + " cmd = 'echo -n foo > $@',", + ")", + "genrule(", + " name = 'foobar',", + " srcs = [':foo'],", + " outs = ['out/foobar.txt'],", + // We need the action lasts more than --experimental_remote_cache_ttl so Bazel has the + // chance to extend the lease + " cmd = 'sleep 2 && cat $(location :foo) > $@ && echo bar >> $@',", + ")"); + addOptions("--experimental_remote_cache_ttl=1s", "--experimental_remote_cache_lease_extension"); + var content = "foo".getBytes(UTF_8); + var hashCode = getFileSystem().getDigestFunction().getHashFunction().hashBytes(content); + var digest = DigestUtil.buildDigest(hashCode.asBytes(), content.length).getHash(); + // Calculate the blob path in CAS. This is specific to the remote worker. See + // {@link DiskCacheClient#getPath()}. + var blobPath = + getFileSystem() + .getPath(worker.getCasPath()) + .getChild("cas") + .getChild(digest.substring(0, 2)) + .getChild(digest); + var mtimes = Sets.newConcurrentHashSet(); + // Observe the mtime of the blob in background. + var thread = + new Thread( + () -> { + while (!Thread.currentThread().isInterrupted()) { + try { + mtimes.add(blobPath.getLastModifiedTime()); + } catch (IOException ignored) { + // Intentionally ignored + } + } + }); + thread.start(); + + buildTarget("//:foobar"); + waitDownloads(); + + thread.interrupt(); + thread.join(); + // We should be able to observe more than 1 mtime if the server extends the lease. 
+ assertThat(mtimes.size()).isGreaterThan(1); + } } diff --git a/src/test/java/com/google/devtools/build/lib/remote/util/IntegrationTestUtils.java b/src/test/java/com/google/devtools/build/lib/remote/util/IntegrationTestUtils.java index 0eca00a81e273c..7074f41e20750c 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/util/IntegrationTestUtils.java +++ b/src/test/java/com/google/devtools/build/lib/remote/util/IntegrationTestUtils.java @@ -257,5 +257,9 @@ private static void deleteDir(PathFragment path) throws IOException { public int getPort() { return port; } + + public PathFragment getCasPath() { + return casPath; + } } } From e449322d7c01d66b720aea4c8ca2d0d0d52e15f7 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 06:13:12 -0700 Subject: [PATCH 23/68] Change `worker_max_instances` default value for consistency with `worker_max_multiplex_instances` The resulting default value won't change since `MultiResourceConverter` map `null` and `auto` to same value. PiperOrigin-RevId: 544036500 Change-Id: Ib87dffcf2e96ce1fc64f978005ff9aeeffdffe96 --- .../com/google/devtools/build/lib/worker/WorkerOptions.java | 6 +++--- .../devtools/common/options/processor/OptionProcessor.java | 3 +-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java b/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java index a121d1c7cb611d..24413867637e7c 100644 --- a/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java +++ b/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java @@ -50,7 +50,7 @@ public static class MultiResourceConverter extends Converter.Contextless convert(String input) throws OptionsParsingException { // TODO(steinman): Make auto value return a reasonable multiplier of host capacity. 
- if (input == null || "null".equals(input) || "auto".equals(input)) { + if (input == null || input.equals("null") || input.equals("auto")) { return Maps.immutableEntry(null, null); } int pos = input.indexOf('='); @@ -59,7 +59,7 @@ public Map.Entry convert(String input) throws OptionsParsingExc } String name = input.substring(0, pos); String value = input.substring(pos + 1); - if ("auto".equals(value)) { + if (value.equals("auto")) { return Maps.immutableEntry(name, null); } @@ -75,7 +75,7 @@ public String getTypeDescription() { @Option( name = "worker_max_instances", converter = MultiResourceConverter.class, - defaultValue = "auto", + defaultValue = "null", documentationCategory = OptionDocumentationCategory.EXECUTION_STRATEGY, effectTags = {OptionEffectTag.EXECUTION, OptionEffectTag.HOST_MACHINE_RESOURCE_OPTIMIZATIONS}, help = diff --git a/src/main/java/com/google/devtools/common/options/processor/OptionProcessor.java b/src/main/java/com/google/devtools/common/options/processor/OptionProcessor.java index fc148014e36b91..8ee41190bd648a 100644 --- a/src/main/java/com/google/devtools/common/options/processor/OptionProcessor.java +++ b/src/main/java/com/google/devtools/common/options/processor/OptionProcessor.java @@ -492,8 +492,7 @@ private static boolean hasSpecialNullDefaultValue(Option annotation) { // Production multiple options that still have default value. // Mostly due to backward compatibility reasons. 
"runs_per_test", - "flaky_test_attempts", - "worker_max_instances"); + "flaky_test_attempts"); private static boolean isMultipleOptionDefaultValueException(Option annotation) { return MULTIPLE_OPTIONS_DEFAULT_VALUE_EXCEPTIONS.contains(annotation.name()); From d055c468118b66a3533f713a180f95eef7e61912 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 06:17:16 -0700 Subject: [PATCH 24/68] Set BAZEL_VC for Bazel's integration tests To work around [test failure](https://buildkite.com/bazel/bazel-bazel-github-presubmit/builds/16353#018901e5-b0f1-41be-a6f4-e12878711ad3) on https://github.com/bazelbuild/bazel/pull/18608. This PR enables Bazel to detect the latest VC build tools installed, but apparently there is a test case which is failing with the new toolchain. Setting --test_env=BAZEL_VC ensures the integrations tests also uses the older VC build tools for now. PiperOrigin-RevId: 544037250 Change-Id: I4625da17ff2168acbe63813aa7a01e49e0cb459a --- .bazelci/postsubmit.yml | 1 + .bazelci/presubmit.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.bazelci/postsubmit.yml b/.bazelci/postsubmit.yml index c1170c38864445..ae5c3a1c2e5043 100644 --- a/.bazelci/postsubmit.yml +++ b/.bazelci/postsubmit.yml @@ -338,6 +338,7 @@ tasks: - "--host_copt=-w" - "--test_tag_filters=-no_windows,-slow" - "--test_env=JAVA_HOME" + - "--test_env=BAZEL_VC" - "--test_env=TEST_INSTALL_BASE=$HOME/bazeltest_install_base" - "--test_env=TEST_REPOSITORY_HOME=C:/b/bazeltest_external" test_targets: diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml index a265b1055c235e..b0f2f25b21430e 100644 --- a/.bazelci/presubmit.yml +++ b/.bazelci/presubmit.yml @@ -405,6 +405,7 @@ tasks: - "--host_copt=-w" - "--test_tag_filters=-no_windows,-slow" - "--test_env=JAVA_HOME" + - "--test_env=BAZEL_VC" - "--test_env=TEST_INSTALL_BASE=$HOME/bazeltest_install_base" - "--test_env=TEST_REPOSITORY_HOME=C:/b/bazeltest_external" test_targets: From 56370834dc7dd0477d0436ae0a11fd9ab3271044 Mon Sep 17 
00:00:00 2001 From: Chi Wang Date: Wed, 28 Jun 2023 06:23:43 -0700 Subject: [PATCH 25/68] Fix remote execution tests by replacing `mock` with `spy` because we rely on the real method and these methods are final. Before mockito 5, it uses sublcass mockmaker which cannot mock final method/class. So when we call final methods on these mocked classes, we are calling into the real methods. Now, it uses inline mockmaker which **can** mock final method/class. Calling final methods on these mocked classes will return `null` if we didn't provide the stub. This change fixes that by using `spy`. Closes #18800. PiperOrigin-RevId: 544038459 Change-Id: Id75a5cb3b9d14e38d6bec918449e5aee671471eb --- .../build/lib/remote/GrpcCacheClientTest.java | 11 ++-- ...SpawnRunnerWithGrpcRemoteExecutorTest.java | 52 +++++++++---------- 2 files changed, 30 insertions(+), 33 deletions(-) diff --git a/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java b/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java index 26e374d05a958c..df3087daf62c1b 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java +++ b/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java @@ -21,7 +21,7 @@ import static org.mockito.AdditionalAnswers.answerVoid; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.spy; import build.bazel.remote.execution.v2.Action; import build.bazel.remote.execution.v2.ActionCacheGrpc.ActionCacheImplBase; @@ -811,10 +811,9 @@ public void updateActionResult( } } }); - ByteStreamImplBase mockByteStreamImpl = Mockito.mock(ByteStreamImplBase.class); + ByteStreamImplBase mockByteStreamImpl = spy(ByteStreamImplBase.class); serviceRegistry.addService(mockByteStreamImpl); - when(mockByteStreamImpl.write(ArgumentMatchers.>any())) - .thenAnswer( + doAnswer( new Answer>() { private int 
numErrors = 4; @@ -865,7 +864,9 @@ public void onError(Throwable t) { } }; } - }); + }) + .when(mockByteStreamImpl) + .write(any()); doAnswer( answerVoid( (QueryWriteStatusRequest request, diff --git a/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java b/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java index 0663ba4aeff8a4..faa020f21e6311 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java +++ b/src/test/java/com/google/devtools/build/lib/remote/RemoteSpawnRunnerWithGrpcRemoteExecutorTest.java @@ -22,6 +22,7 @@ import static org.mockito.AdditionalAnswers.answerVoid; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -730,9 +731,8 @@ public void findMissingBlobs( }; serviceRegistry.addService(ServerInterceptors.intercept(cas, new RequestHeadersValidator())); - ByteStreamImplBase mockByteStreamImpl = Mockito.mock(ByteStreamImplBase.class); - when(mockByteStreamImpl.write(ArgumentMatchers.>any())) - .thenAnswer(blobWriteAnswer("xyz".getBytes(UTF_8))); + ByteStreamImplBase mockByteStreamImpl = spy(ByteStreamImplBase.class); + doAnswer(blobWriteAnswer("xyz".getBytes(UTF_8))).when(mockByteStreamImpl).write(any()); serviceRegistry.addService( ServerInterceptors.intercept(mockByteStreamImpl, new RequestHeadersValidator())); @@ -808,7 +808,7 @@ public void getActionResult( .setResponse(Any.pack(ExecuteResponse.newBuilder().setResult(actionResult).build())) .build(); - ExecutionImplBase mockExecutionImpl = Mockito.mock(ExecutionImplBase.class); + ExecutionImplBase mockExecutionImpl = spy(ExecutionImplBase.class); // Flow of this test: // - call execute, get retriable gRPC error // - retry: call execute, get retriable Operation error @@ -816,7 +816,7 @@ 
public void getActionResult( // - retry: call waitExecute, get a retriable gRPC error // - retry: call waitExecute, get retriable Operation error // - retry: call execute, get successful operation, ignore further errors. - Mockito.doAnswer(answerWith(null, Status.UNAVAILABLE)) + doAnswer(answerWith(null, Status.UNAVAILABLE)) .doAnswer(answerWith(operationWithExecuteError, Status.OK)) .doAnswer(answerWith(unfinishedOperation, Status.UNAVAILABLE)) .doAnswer(answerWith(opSuccess, Status.UNAVAILABLE)) // last status should be ignored. @@ -824,7 +824,7 @@ public void getActionResult( .execute( ArgumentMatchers.any(), ArgumentMatchers.>any()); - Mockito.doAnswer(answerWith(null, Status.UNAVAILABLE)) + doAnswer(answerWith(null, Status.UNAVAILABLE)) .doAnswer(answerWith(operationWithExecuteError, Status.OK)) .when(mockExecutionImpl) .waitExecution( @@ -854,11 +854,12 @@ public void findMissingBlobs( } }); - ByteStreamImplBase mockByteStreamImpl = Mockito.mock(ByteStreamImplBase.class); - when(mockByteStreamImpl.write(ArgumentMatchers.>any())) - .thenAnswer(blobWriteAnswerError()) // Error on the input file. - .thenAnswer(blobWriteAnswerError()) // Error on the input file again. - .thenAnswer(blobWriteAnswer("xyz".getBytes(UTF_8))); // Upload input file successfully. + ByteStreamImplBase mockByteStreamImpl = spy(ByteStreamImplBase.class); + doAnswer(blobWriteAnswerError()) // Error on the input file. + .doAnswer(blobWriteAnswerError()) // Error on the input file again. + .doAnswer(blobWriteAnswer("xyz".getBytes(UTF_8))) // Upload input file successfully. 
+ .when(mockByteStreamImpl) + .write(any()); doAnswer( answerVoid( (QueryWriteStatusRequest request, @@ -872,7 +873,7 @@ public void findMissingBlobs( })) .when(mockByteStreamImpl) .queryWriteStatus(any(), any()); - Mockito.doAnswer( + doAnswer( invocationOnMock -> { @SuppressWarnings("unchecked") StreamObserver responseObserver = @@ -952,26 +953,22 @@ public void getActionResult( .setResponse(Any.pack(ExecuteResponse.newBuilder().setResult(actionResult).build())) .build(); - ExecutionImplBase mockExecutionImpl = Mockito.mock(ExecutionImplBase.class); + ExecutionImplBase mockExecutionImpl = spy(ExecutionImplBase.class); // Flow of this test: // - call execute, get an Operation, then a retriable gRPC error // - retry: call waitExecute, get NOT_FOUND (operation lost) // - retry: call execute, get NOT_FOUND (operation lost) // - retry: call execute, get an Operation, then a retriable gRPC error // - retry: call waitExecute, get successful operation, ignore further errors. - Mockito.doAnswer(answerWith(unfinishedOperation, Status.UNAVAILABLE)) + doAnswer(answerWith(unfinishedOperation, Status.UNAVAILABLE)) .doAnswer(answerWith(unfinishedOperation, Status.NOT_FOUND)) .doAnswer(answerWith(unfinishedOperation, Status.UNAVAILABLE)) .when(mockExecutionImpl) - .execute( - ArgumentMatchers.any(), - ArgumentMatchers.>any()); - Mockito.doAnswer(answerWith(unfinishedOperation, Status.NOT_FOUND)) + .execute(any(), any()); + doAnswer(answerWith(unfinishedOperation, Status.NOT_FOUND)) .doAnswer(answerWith(opSuccess, Status.UNAVAILABLE)) // This error is ignored. 
.when(mockExecutionImpl) - .waitExecution( - ArgumentMatchers.any(), - ArgumentMatchers.>any()); + .waitExecution(any(), any()); serviceRegistry.addService(mockExecutionImpl); serviceRegistry.addService( @@ -990,10 +987,9 @@ public void findMissingBlobs( } }); - ByteStreamImplBase mockByteStreamImpl = Mockito.mock(ByteStreamImplBase.class); - when(mockByteStreamImpl.write(ArgumentMatchers.>any())) - .thenAnswer(blobWriteAnswer("xyz".getBytes(UTF_8))); // Upload input file successfully. - Mockito.doAnswer( + ByteStreamImplBase mockByteStreamImpl = spy(ByteStreamImplBase.class); + doAnswer(blobWriteAnswer("xyz".getBytes(UTF_8))).when(mockByteStreamImpl).write(any()); + doAnswer( invocationOnMock -> { @SuppressWarnings("unchecked") StreamObserver responseObserver = @@ -1514,17 +1510,17 @@ public void read(ReadRequest request, StreamObserver responseObser .build(); final WaitExecutionRequest waitExecutionRequest = WaitExecutionRequest.newBuilder().setName(opName).build(); - ExecutionImplBase mockExecutionImpl = Mockito.mock(ExecutionImplBase.class); + ExecutionImplBase mockExecutionImpl = spy(ExecutionImplBase.class); // Flow of this test: // - call execute, get an unfinished Operation, then the stream completes // - call waitExecute, get an unfinished Operation, then the stream completes // - call waitExecute, get a finished Operation - Mockito.doAnswer(answerWith(unfinishedOperation, Status.OK)) + doAnswer(answerWith(unfinishedOperation, Status.OK)) .when(mockExecutionImpl) .execute( ArgumentMatchers.any(), ArgumentMatchers.>any()); - Mockito.doAnswer(answerWith(unfinishedOperation, Status.OK)) + doAnswer(answerWith(unfinishedOperation, Status.OK)) .doAnswer(answerWith(completeOperation, Status.OK)) .when(mockExecutionImpl) .waitExecution( From e609d60df95e1da8508fe24e1b0857d5e0f4b146 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 06:54:33 -0700 Subject: [PATCH 26/68] Remove GraphNodeAspect -1 native aspect PiperOrigin-RevId: 544044482 Change-Id: 
I3be0bc6fba70e5e46e2c31e46a9b10a13f2f8a5b --- .../build/lib/bazel/rules/CcRules.java | 5 +- .../devtools/build/lib/bazel/rules/cpp/BUILD | 1 - .../bazel/rules/cpp/BazelCppRuleClasses.java | 13 +-- .../build/lib/rules/cpp/GraphNodeAspect.java | 81 ------------------- .../build/lib/rules/cpp/GraphNodeInfo.java | 40 --------- 5 files changed, 3 insertions(+), 137 deletions(-) delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeAspect.java delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeInfo.java diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/CcRules.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/CcRules.java index 021c560c3c12bd..f7b562ee9cd27c 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/CcRules.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/CcRules.java @@ -43,7 +43,6 @@ import com.google.devtools.build.lib.rules.cpp.DebugPackageProvider; import com.google.devtools.build.lib.rules.cpp.FdoPrefetchHintsRule; import com.google.devtools.build.lib.rules.cpp.FdoProfileRule; -import com.google.devtools.build.lib.rules.cpp.GraphNodeAspect; import com.google.devtools.build.lib.rules.cpp.PropellerOptimizeRule; import com.google.devtools.build.lib.rules.platform.PlatformRules; import com.google.devtools.build.lib.starlarkbuildapi.cpp.CcBootstrap; @@ -63,7 +62,6 @@ private CcRules() { @Override public void init(ConfiguredRuleClassProvider.Builder builder) { - GraphNodeAspect graphNodeAspect = new GraphNodeAspect(); BazelCcModule bazelCcModule = new BazelCcModule(); // TODO(gnish): This is only required for cc_toolchain_suite rule, // because it does not have AppleConfiguration fragment. 
@@ -76,7 +74,6 @@ public void init(ConfiguredRuleClassProvider.Builder builder) { builder.addBzlToplevel("cc_proto_aspect", Starlark.NONE); builder.addBuildInfoFactory(new CppBuildInfo()); - builder.addNativeAspectClass(graphNodeAspect); builder.addRuleDefinition(new CcToolchainRule()); builder.addRuleDefinition(new CcToolchainSuiteRule()); builder.addRuleDefinition(new CcToolchainAliasRule()); @@ -87,7 +84,7 @@ public void init(ConfiguredRuleClassProvider.Builder builder) { builder.addRuleDefinition(new BazelCppRuleClasses.CcDeclRule()); builder.addRuleDefinition(new BazelCppRuleClasses.CcBaseRule()); builder.addRuleDefinition(new BazelCppRuleClasses.CcRule()); - builder.addRuleDefinition(new BazelCppRuleClasses.CcBinaryBaseRule(graphNodeAspect)); + builder.addRuleDefinition(new BazelCppRuleClasses.CcBinaryBaseRule()); builder.addRuleDefinition(new BazelCcBinaryRule()); builder.addRuleDefinition(new CcSharedLibraryRule()); builder.addRuleDefinition(new BazelCcTestRule()); diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BUILD b/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BUILD index cdfa3447fded8d..b46e763878c654 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BUILD +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BUILD @@ -35,7 +35,6 @@ java_library( "//src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp", "//src/main/java/com/google/devtools/build/lib/util:filetype", "//src/main/java/com/google/devtools/build/lib/util:os", - "//src/main/java/net/starlark/java/eval", "//third_party:guava", "//third_party:jsr305", ], diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BazelCppRuleClasses.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BazelCppRuleClasses.java index 71f9612e9ff7ae..b638612a3c7d0f 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BazelCppRuleClasses.java +++ 
b/src/main/java/com/google/devtools/build/lib/bazel/rules/cpp/BazelCppRuleClasses.java @@ -57,7 +57,6 @@ import com.google.devtools.build.lib.rules.cpp.CppFileTypes; import com.google.devtools.build.lib.rules.cpp.CppRuleClasses; import com.google.devtools.build.lib.rules.cpp.CppRuleClasses.CcIncludeScanningRule; -import com.google.devtools.build.lib.rules.cpp.GraphNodeAspect; import com.google.devtools.build.lib.util.FileTypeSet; import javax.annotation.Nullable; @@ -485,12 +484,6 @@ public Metadata getMetadata() { /** Helper rule class. */ public static final class CcBinaryBaseRule implements RuleDefinition { - private final GraphNodeAspect graphNodeAspect; - - public CcBinaryBaseRule(GraphNodeAspect graphNodeAspect) { - this.graphNodeAspect = graphNodeAspect; - } - @Override public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) { return builder @@ -511,8 +504,7 @@ public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) .allowedRuleClasses(DEPS_ALLOWED_RULES) .allowedFileTypes(CppFileTypes.LINKER_SCRIPT) .skipAnalysisTimeFileTypeCheck() - .mandatoryProviders(StarlarkProviderIdentifier.forKey(CcInfo.PROVIDER.getKey())) - .aspect(graphNodeAspect, GraphNodeAspect.ASPECT_PARAMETERS)) + .mandatoryProviders(StarlarkProviderIdentifier.forKey(CcInfo.PROVIDER.getKey()))) .add( attr("dynamic_deps", LABEL_LIST) .allowedFileTypes(FileTypeSet.NO_FILE) @@ -541,8 +533,7 @@ public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) attr("malloc", LABEL) .value(env.getToolsLabel("//tools/cpp:malloc")) .allowedFileTypes() - .allowedRuleClasses("cc_library") - .aspect(graphNodeAspect, GraphNodeAspect.ASPECT_PARAMETERS)) + .allowedRuleClasses("cc_library")) .add(attr(":default_malloc", LABEL).value(CppRuleClasses.DEFAULT_MALLOC)) /* Whether to encode build information into the binary. 
Possible values: diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeAspect.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeAspect.java deleted file mode 100644 index 68d42d9929f1dc..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeAspect.java +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2019 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -package com.google.devtools.build.lib.rules.cpp; - - -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; -import com.google.devtools.build.lib.analysis.AnalysisUtils; -import com.google.devtools.build.lib.analysis.ConfiguredAspect; -import com.google.devtools.build.lib.analysis.ConfiguredAspectFactory; -import com.google.devtools.build.lib.analysis.ConfiguredTarget; -import com.google.devtools.build.lib.analysis.RuleContext; -import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.cmdline.RepositoryName; -import com.google.devtools.build.lib.packages.AspectDefinition; -import com.google.devtools.build.lib.packages.AspectParameters; -import com.google.devtools.build.lib.packages.NativeAspectClass; -import com.google.devtools.build.lib.packages.Rule; -import javax.annotation.Nullable; - -/** - * Aspect for constructing a tree of labels that is 
used to prune static libraries that are already - * linked dynamically into a cc_binary. TODO(b/145508948): Try to remove this class in the future. - */ -public final class GraphNodeAspect extends NativeAspectClass implements ConfiguredAspectFactory { - // When the dynamic_deps attribute is not set, we return null. We would only want the graph to be - // analyzed with the aspect in the cases that we have set dynamic_deps. Otherwise it would be a - // waste of memory in the cases where we don't need the aspect. If we return null, the aspect is - // not used analyze anything. - // See - // https://github.com/bazelbuild/bazel/blob/df52777aac8cbfc7719af9f0dbb23335e59c42df/src/main/java/com/google/devtools/build/lib/packages/Attribute.java#L114 - public static final Function ASPECT_PARAMETERS = - new Function() { - @Nullable - @Override - public AspectParameters apply(Rule rule) { - return rule.isAttributeValueExplicitlySpecified("dynamic_deps") - ? AspectParameters.EMPTY - : null; - } - }; - - @Override - public AspectDefinition getDefinition(AspectParameters aspectParameters) { - return new AspectDefinition.Builder(this) - .propagateAlongAllAttributes() - .requireStarlarkProviders(CcInfo.PROVIDER.id()) - .build(); - } - - @Override - public ConfiguredAspect create( - Label targetLabel, - ConfiguredTarget ct, - RuleContext ruleContext, - AspectParameters params, - RepositoryName toolsRepository) - throws ActionConflictException, InterruptedException { - ImmutableList.Builder children = ImmutableList.builder(); - if (ruleContext.attributes().has("deps")) { - children.addAll( - AnalysisUtils.getProviders(ruleContext.getPrerequisites("deps"), GraphNodeInfo.class)); - } - return new ConfiguredAspect.Builder(ruleContext) - .addProvider( - GraphNodeInfo.class, new GraphNodeInfo(ruleContext.getLabel(), children.build())) - .build(); - } -} diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeInfo.java 
b/src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeInfo.java deleted file mode 100644 index 9c663eddb79cf4..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/GraphNodeInfo.java +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -package com.google.devtools.build.lib.rules.cpp; - -import com.google.common.collect.ImmutableList; -import com.google.devtools.build.lib.analysis.TransitiveInfoProvider; -import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; -import java.util.List; - -/** Provider used to propagate information for {@link GraphNodeAspect}. */ -@Immutable -public final class GraphNodeInfo implements TransitiveInfoProvider { - private final Label label; - private final ImmutableList children; - - public GraphNodeInfo(Label label, List children) { - this.label = label; - this.children = children == null ? null : ImmutableList.copyOf(children); - } - - public Label getLabel() { - return label; - } - - public List getChildren() { - return children; - } -} From e4cb191bd99410a2147b8aea81d48dba94a3f5a4 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 07:40:16 -0700 Subject: [PATCH 27/68] Remove flag `experimental_worker_multiplex` from documentation. 
Instead of this flag we should use `worker_multiplex` PiperOrigin-RevId: 544053771 Change-Id: I1d3a5a54b8990271f61b026bca9a15aa3fa5e908 --- site/en/remote/multiplex.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/site/en/remote/multiplex.md b/site/en/remote/multiplex.md index 9491289a6b2e6c..f7c0bee538b2c3 100644 --- a/site/en/remote/multiplex.md +++ b/site/en/remote/multiplex.md @@ -73,7 +73,7 @@ strategy needs to be specified, either at the ruleset level (for example, example, `--dynamic_local_strategy=worker,standalone`.) No additional flags are necessary, and `supports-multiplex-workers` takes precedence over `supports-workers`, if both are set. You can turn off multiplex workers -globally by passing `--noexperimental_worker_multiplex`. +globally by passing `--noworker_multiplex`. A ruleset is encouraged to use multiplex workers if possible, to reduce memory pressure and improve performance. However, multiplex workers are not currently From 281bef004d8cec14dc986d3001db884b646039fc Mon Sep 17 00:00:00 2001 From: Jay Conrod Date: Wed, 28 Jun 2023 07:47:59 -0700 Subject: [PATCH 28/68] WindowsSubprocess: fix potential null pointer exception WindowsSubprocess sets its stdoutStream and stderrStream fields to null when output is redirected to a file. WindowsSubprocess.close should check whether those fields are null before calling ProcessInputStream.close. Closes #18759. 
PiperOrigin-RevId: 544055326 Change-Id: I91488a45997002ede4d94b2ca4da56f058504fe0 --- .../devtools/build/lib/shell/WindowsSubprocess.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/shell/WindowsSubprocess.java b/src/main/java/com/google/devtools/build/lib/shell/WindowsSubprocess.java index dbe3a23c414553..f3a1ae1c0dd7af 100644 --- a/src/main/java/com/google/devtools/build/lib/shell/WindowsSubprocess.java +++ b/src/main/java/com/google/devtools/build/lib/shell/WindowsSubprocess.java @@ -247,8 +247,13 @@ public void waitFor() throws InterruptedException { @Override public synchronized void close() { if (nativeProcess != WindowsProcesses.INVALID) { - stdoutStream.close(); - stderrStream.close(); + // stdoutStream and stderrStream are null if they are redirected to files. + if (stdoutStream != null) { + stdoutStream.close(); + } + if (stderrStream != null) { + stderrStream.close(); + } long process = nativeProcess; nativeProcess = WindowsProcesses.INVALID; WindowsProcesses.deleteProcess(process); From 0d721039b679b48098c84cc1d9ec9be959a59865 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 07:54:04 -0700 Subject: [PATCH 29/68] Fix SkyframeTests Similar to https://github.com/bazelbuild/bazel/commit/56370834dc7dd0477d0436ae0a11fd9ab3271044, use `spy` instead of `mock` for the test that relies on real method. The tests rely on `FileArtifactValue#addTo` which is a final method. Before Mockito 5, when we call this method on a mocked object, we call into the real method. With Mockito 5, we can mock the final method and now they are doing nothing which breaks the tests. 
PiperOrigin-RevId: 544056698 Change-Id: Ie4340ee9cf2615928cc3df4b11be891ef9ddb3d2 --- .../build/lib/skyframe/TreeArtifactValueTest.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/TreeArtifactValueTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/TreeArtifactValueTest.java index d855ed3efa6c0c..e950edc6189563 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/TreeArtifactValueTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/TreeArtifactValueTest.java @@ -17,8 +17,8 @@ import static com.google.common.truth.Truth8.assertThat; import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; @@ -734,9 +734,9 @@ private static FileArtifactValue metadataWithId(int id) { } private static FileArtifactValue metadataWithIdNoDigest(int id) { - FileArtifactValue value = mock(FileArtifactValue.class); - when(value.getDigest()).thenReturn(null); - when(value.getModifiedTime()).thenReturn((long) id); + FileArtifactValue value = spy(FileArtifactValue.class); + doReturn(null).when(value).getDigest(); + doReturn((long) id).when(value).getModifiedTime(); return value; } } From 2a2a4743ad624f7f238b7771b8ceedb7649e8f5c Mon Sep 17 00:00:00 2001 From: Fabian Meumertzheim Date: Wed, 28 Jun 2023 08:05:33 -0700 Subject: [PATCH 30/68] Prevent most side effects of yanked modules Yanked module versions no longer contribute dependency requirements or emit `DEBUG` messages for `print()` statements. Since the module files of yanked modules are still evaluated to learn their compatibility levels, they can still fail to execute. Closes #18698. 
PiperOrigin-RevId: 544059396 Change-Id: I8a37d5c7975947cd717f6e56d97cce467f22178e --- .../lib/bazel/BazelRepositoryModule.java | 3 +- .../devtools/build/lib/bazel/bzlmod/BUILD | 1 + .../bazel/bzlmod/BazelDepGraphFunction.java | 4 +- .../bzlmod/BazelModuleResolutionFunction.java | 147 +--------------- .../build/lib/bazel/bzlmod/InterimModule.java | 9 +- .../lib/bazel/bzlmod/ModuleFileFunction.java | 67 +++++++- .../lib/bazel/bzlmod/YankedVersionsUtil.java | 157 ++++++++++++++++++ .../RunfilesRepoMappingManifestTest.java | 4 +- .../StarlarkRuleTransitionProviderTest.java | 4 +- .../lib/analysis/util/AnalysisTestCase.java | 5 +- .../bzlmod/BazelDepGraphFunctionTest.java | 2 +- .../bzlmod/BazelLockFileFunctionTest.java | 4 +- .../BazelModuleResolutionFunctionTest.java | 92 +++++++++- .../bzlmod/BzlmodRepoRuleFunctionTest.java | 2 +- .../build/lib/bazel/bzlmod/DiscoveryTest.java | 6 +- .../bzlmod/ModuleExtensionResolutionTest.java | 2 +- .../bazel/bzlmod/ModuleFileFunctionTest.java | 6 +- .../query2/testutil/SkyframeQueryHelper.java | 5 +- .../lib/rules/LabelBuildSettingTest.java | 4 +- .../repository/RepositoryDelegatorTest.java | 3 +- .../StarlarkDocExtractTest.java | 4 +- .../lib/skyframe/BzlLoadFunctionTest.java | 4 +- .../PrepareDepsOfPatternsFunctionTest.java | 4 +- .../RepositoryMappingFunctionTest.java | 4 +- ...isteredExecutionPlatformsFunctionTest.java | 4 +- .../RegisteredToolchainsFunctionTest.java | 4 +- 26 files changed, 365 insertions(+), 186 deletions(-) create mode 100644 src/main/java/com/google/devtools/build/lib/bazel/bzlmod/YankedVersionsUtil.java diff --git a/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java b/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java index 82cb4a3f754dd6..38fede1b7f3aa9 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java @@ -48,6 +48,7 @@ import 
com.google.devtools.build.lib.bazel.bzlmod.RepoSpec; import com.google.devtools.build.lib.bazel.bzlmod.SingleExtensionEvalFunction; import com.google.devtools.build.lib.bazel.bzlmod.SingleExtensionUsagesFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.commands.FetchCommand; import com.google.devtools.build.lib.bazel.commands.ModqueryCommand; import com.google.devtools.build.lib.bazel.commands.SyncCommand; @@ -590,7 +591,7 @@ public ImmutableList getPrecomputedValues() { BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, bazelCompatibilityMode), PrecomputedValue.injected(BazelLockFileFunction.LOCKFILE_MODE, bazelLockfileMode), PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, allowedYankedVersions)); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, allowedYankedVersions)); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BUILD b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BUILD index b04cd1d270c42e..37b23eb6a56616 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BUILD +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BUILD @@ -176,6 +176,7 @@ java_library( "SingleExtensionUsagesFunction.java", "StarlarkBazelModule.java", "TypeCheckedTag.java", + "YankedVersionsUtil.java", ], deps = [ ":common", diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunction.java index 94ea47b7fdb4d8..9e3ed795734e92 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunction.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunction.java @@ -173,7 +173,7 @@ static BzlmodFlagsAndEnvVars getFlagsAndEnvVars(Environment env) throws Interrup (ClientEnvironmentValue) env.getValue( ClientEnvironmentFunction.key( - 
BazelModuleResolutionFunction.BZLMOD_ALLOWED_YANKED_VERSIONS_ENV)); + YankedVersionsUtil.BZLMOD_ALLOWED_YANKED_VERSIONS_ENV)); if (allowedYankedVersionsFromEnv == null) { return null; } @@ -185,7 +185,7 @@ static BzlmodFlagsAndEnvVars getFlagsAndEnvVars(Environment env) throws Interrup toImmutableMap(e -> e.getKey(), e -> ((LocalPathOverride) e.getValue()).getPath())); ImmutableList yankedVersions = - ImmutableList.copyOf(BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.get(env)); + ImmutableList.copyOf(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.get(env)); Boolean ignoreDevDeps = ModuleFileFunction.IGNORE_DEV_DEPS.get(env); String compatabilityMode = BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.get(env).name(); diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunction.java index aebce5ce7ba5d5..2b84a14500fb24 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunction.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunction.java @@ -15,27 +15,21 @@ package com.google.devtools.build.lib.bazel.bzlmod; -import com.google.common.base.Splitter; import com.google.common.base.Strings; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.devtools.build.lib.analysis.BlazeVersionInfo; import com.google.devtools.build.lib.bazel.BazelVersion; import com.google.devtools.build.lib.bazel.bzlmod.InterimModule.DepSpec; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileValue.RootModuleFileValue; -import com.google.devtools.build.lib.bazel.bzlmod.Version.ParseException; import 
com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; -import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.server.FailureDetails.ExternalDeps.Code; -import com.google.devtools.build.lib.skyframe.ClientEnvironmentFunction; -import com.google.devtools.build.lib.skyframe.ClientEnvironmentValue; import com.google.devtools.build.lib.skyframe.PrecomputedValue.Precomputed; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; @@ -43,10 +37,8 @@ import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import javax.annotation.Nullable; /** @@ -59,22 +51,11 @@ public class BazelModuleResolutionFunction implements SkyFunction { new Precomputed<>("check_direct_dependency"); public static final Precomputed BAZEL_COMPATIBILITY_MODE = new Precomputed<>("bazel_compatibility_mode"); - public static final Precomputed> ALLOWED_YANKED_VERSIONS = - new Precomputed<>("allowed_yanked_versions"); - - public static final String BZLMOD_ALLOWED_YANKED_VERSIONS_ENV = "BZLMOD_ALLOW_YANKED_VERSIONS"; @Override @Nullable public SkyValue compute(SkyKey skyKey, Environment env) throws BazelModuleResolutionFunctionException, InterruptedException { - - ClientEnvironmentValue allowedYankedVersionsFromEnv = - (ClientEnvironmentValue) - env.getValue(ClientEnvironmentFunction.key(BZLMOD_ALLOWED_YANKED_VERSIONS_ENV)); - if (allowedYankedVersionsFromEnv == null) { - return null; - } RootModuleFileValue root = 
(RootModuleFileValue) env.getValue(ModuleFileValue.KEY_FOR_ROOT_MODULE); if (root == null) { @@ -104,12 +85,7 @@ public SkyValue compute(SkyKey skyKey, Environment env) Objects.requireNonNull(BAZEL_COMPATIBILITY_MODE.get(env)), env.getListener()); - verifyYankedVersions( - resolvedDepGraph, - parseYankedVersions( - allowedYankedVersionsFromEnv.getValue(), - Objects.requireNonNull(ALLOWED_YANKED_VERSIONS.get(env))), - env.getListener()); + checkNoYankedVersions(resolvedDepGraph); ImmutableMap finalDepGraph = computeFinalDepGraph(resolvedDepGraph, root.getOverrides(), env.getListener()); @@ -194,125 +170,10 @@ public static void checkBazelCompatibility( } } - /** - * Parse a set of allowed yanked version from command line flag (--allowed_yanked_versions) and - * environment variable (ALLOWED_YANKED_VERSIONS). If `all` is specified, return Optional.empty(); - * otherwise returns the set of parsed modulel key. - */ - private Optional> parseYankedVersions( - String allowedYankedVersionsFromEnv, List allowedYankedVersionsFromFlag) - throws BazelModuleResolutionFunctionException { - ImmutableSet.Builder allowedYankedVersionBuilder = new ImmutableSet.Builder<>(); - if (allowedYankedVersionsFromEnv != null) { - if (parseModuleKeysFromString( - allowedYankedVersionsFromEnv, - allowedYankedVersionBuilder, - String.format( - "envirnoment variable %s=%s", - BZLMOD_ALLOWED_YANKED_VERSIONS_ENV, allowedYankedVersionsFromEnv))) { - return Optional.empty(); - } - } - for (String allowedYankedVersions : allowedYankedVersionsFromFlag) { - if (parseModuleKeysFromString( - allowedYankedVersions, - allowedYankedVersionBuilder, - String.format("command line flag --allow_yanked_versions=%s", allowedYankedVersions))) { - return Optional.empty(); - } - } - return Optional.of(allowedYankedVersionBuilder.build()); - } - - /** - * Parse of a comma-separated list of module version(s) of the form '@' or - * 'all' from the string. Returns true if 'all' is present, otherwise returns false. 
- */ - private boolean parseModuleKeysFromString( - String input, ImmutableSet.Builder allowedYankedVersionBuilder, String context) + private static void checkNoYankedVersions(ImmutableMap depGraph) throws BazelModuleResolutionFunctionException { - ImmutableList moduleStrs = ImmutableList.copyOf(Splitter.on(',').split(input)); - - for (String moduleStr : moduleStrs) { - if (moduleStr.equals("all")) { - return true; - } - - if (moduleStr.isEmpty()) { - continue; - } - - String[] pieces = moduleStr.split("@", 2); - - if (pieces.length != 2) { - throw new BazelModuleResolutionFunctionException( - ExternalDepsException.withMessage( - Code.VERSION_RESOLUTION_ERROR, - "Parsing %s failed, module versions must be of the form '@'", - context), - Transience.PERSISTENT); - } - - if (!RepositoryName.VALID_MODULE_NAME.matcher(pieces[0]).matches()) { - throw new BazelModuleResolutionFunctionException( - ExternalDepsException.withMessage( - Code.VERSION_RESOLUTION_ERROR, - "Parsing %s failed, invalid module name '%s': valid names must 1) only contain" - + " lowercase letters (a-z), digits (0-9), dots (.), hyphens (-), and" - + " underscores (_); 2) begin with a lowercase letter; 3) end with a lowercase" - + " letter or digit.", - context, - pieces[0]), - Transience.PERSISTENT); - } - - Version version; - try { - version = Version.parse(pieces[1]); - } catch (ParseException e) { - throw new BazelModuleResolutionFunctionException( - ExternalDepsException.withCauseAndMessage( - Code.VERSION_RESOLUTION_ERROR, - e, - "Parsing %s failed, invalid version specified for module: %s", - context, - pieces[1]), - Transience.PERSISTENT); - } - - allowedYankedVersionBuilder.add(ModuleKey.create(pieces[0], version)); - } - return false; - } - - private static void verifyYankedVersions( - ImmutableMap depGraph, - Optional> allowedYankedVersions, - ExtendedEventHandler eventHandler) - throws BazelModuleResolutionFunctionException, InterruptedException { - // Check whether all resolved modules are 
either not yanked or allowed. Modules with a - // NonRegistryOverride are ignored as their metadata is not available whatsoever. for (InterimModule m : depGraph.values()) { - if (m.getKey().equals(ModuleKey.ROOT) || m.getRegistry() == null) { - continue; - } - Optional> yankedVersions; - try { - yankedVersions = m.getRegistry().getYankedVersions(m.getKey().getName(), eventHandler); - } catch (IOException e) { - eventHandler.handle( - Event.warn( - String.format( - "Could not read metadata file for module %s: %s", m.getKey(), e.getMessage()))); - continue; - } - if (yankedVersions.isEmpty()) { - continue; - } - String yankedInfo = yankedVersions.get().get(m.getVersion()); - if (yankedInfo != null - && allowedYankedVersions.isPresent() - && !allowedYankedVersions.get().contains(m.getKey())) { + if (m.getYankedInfo().isPresent()) { throw new BazelModuleResolutionFunctionException( ExternalDepsException.withMessage( Code.VERSION_RESOLUTION_ERROR, @@ -322,7 +183,7 @@ private static void verifyYankedVersions( + "continue using this version, allow it using the --allow_yanked_versions " + "flag or the BZLMOD_ALLOW_YANKED_VERSIONS env variable.", m.getKey(), - yankedInfo), + m.getYankedInfo().get()), Transience.PERSISTENT); } } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/InterimModule.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/InterimModule.java index 06f5051a7c0c3a..b4a6a3d8fe50c7 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/InterimModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/InterimModule.java @@ -50,6 +50,9 @@ public abstract class InterimModule extends ModuleBase { /** List of bazel compatible versions that would run/fail this module */ public abstract ImmutableList getBazelCompatibility(); + /** The reason why this module was yanked or empty if it hasn't been yanked. */ + public abstract Optional getYankedInfo(); + /** The specification of a dependency. 
*/ @AutoValue public abstract static class DepSpec { @@ -102,7 +105,8 @@ public static Builder builder() { .setName("") .setVersion(Version.EMPTY) .setKey(ModuleKey.ROOT) - .setCompatibilityLevel(0); + .setCompatibilityLevel(0) + .setYankedInfo(Optional.empty()); } /** @@ -133,6 +137,9 @@ public abstract static class Builder { /** Optional; defaults to {@link #setName}. */ public abstract Builder setRepoName(String value); + /** Optional; defaults to {@link Optional#empty()}. */ + public abstract Builder setYankedInfo(Optional value); + public abstract Builder setBazelCompatibility(ImmutableList value); abstract ImmutableList.Builder bazelCompatibilityBuilder(); diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunction.java index cccc0d79b7c627..02e62f19f8d904 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunction.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunction.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.devtools.build.lib.actions.FileValue; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileValue.NonRootModuleFileValue; @@ -28,6 +29,8 @@ import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.rules.repository.RepositoryDirectoryValue; import com.google.devtools.build.lib.server.FailureDetails.ExternalDeps.Code; +import com.google.devtools.build.lib.skyframe.ClientEnvironmentFunction; +import com.google.devtools.build.lib.skyframe.ClientEnvironmentValue; import com.google.devtools.build.lib.skyframe.PrecomputedValue; import com.google.devtools.build.lib.skyframe.PrecomputedValue.Precomputed; import com.google.devtools.build.lib.util.Fingerprint; @@ -102,10 +105,29 
@@ public SkyValue compute(SkyKey skyKey, Environment env) return computeForRootModule(starlarkSemantics, env); } + ClientEnvironmentValue allowedYankedVersionsFromEnv = + (ClientEnvironmentValue) + env.getValue( + ClientEnvironmentFunction.key( + YankedVersionsUtil.BZLMOD_ALLOWED_YANKED_VERSIONS_ENV)); + if (allowedYankedVersionsFromEnv == null) { + return null; + } + + Optional> allowedYankedVersions; + try { + allowedYankedVersions = + YankedVersionsUtil.parseAllowedYankedVersions( + allowedYankedVersionsFromEnv.getValue(), + Objects.requireNonNull(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.get(env))); + } catch (ExternalDepsException e) { + throw new ModuleFileFunctionException(e, SkyFunctionException.Transience.PERSISTENT); + } + ModuleFileValue.Key moduleFileKey = (ModuleFileValue.Key) skyKey; ModuleKey moduleKey = moduleFileKey.getModuleKey(); GetModuleFileResult getModuleFileResult = - getModuleFile(moduleKey, moduleFileKey.getOverride(), env); + getModuleFile(moduleKey, moduleFileKey.getOverride(), allowedYankedVersions, env); if (getModuleFileResult == null) { return null; } @@ -119,6 +141,8 @@ public SkyValue compute(SkyKey skyKey, Environment env) moduleKey, // Dev dependencies should always be ignored if the current module isn't the root module /* ignoreDevDeps= */ true, + // We try to prevent most side effects of yanked modules, in particular print(). + /* printIsNoop= */ getModuleFileResult.yankedInfo != null, starlarkSemantics, env); @@ -139,6 +163,23 @@ public SkyValue compute(SkyKey skyKey, Environment env) module.getVersion()); } + if (getModuleFileResult.yankedInfo != null) { + // Yanked modules should not have observable side effects such as adding dependency + // requirements, so we drop those from the constructed module. We do have to preserve the + // compatibility level as it influences the set of versions the yanked version can be updated + // to during selection. 
+ return NonRootModuleFileValue.create( + InterimModule.builder() + .setKey(module.getKey()) + .setName(module.getName()) + .setVersion(module.getVersion()) + .setCompatibilityLevel(module.getCompatibilityLevel()) + .setRegistry(module.getRegistry()) + .setYankedInfo(Optional.of(getModuleFileResult.yankedInfo)) + .build(), + moduleFileHash); + } + return NonRootModuleFileValue.create(module, moduleFileHash); } @@ -159,6 +200,7 @@ private SkyValue computeForRootModule(StarlarkSemantics starlarkSemantics, Envir /* registry= */ null, ModuleKey.ROOT, /* ignoreDevDeps= */ Objects.requireNonNull(IGNORE_DEV_DEPS.get(env)), + /* printIsNoop= */ false, starlarkSemantics, env); InterimModule module = moduleFileGlobals.buildModule(); @@ -205,6 +247,7 @@ private ModuleFileGlobals execModuleFile( @Nullable Registry registry, ModuleKey moduleKey, boolean ignoreDevDeps, + boolean printIsNoop, StarlarkSemantics starlarkSemantics, Environment env) throws ModuleFileFunctionException, InterruptedException { @@ -223,7 +266,11 @@ private ModuleFileGlobals execModuleFile( Program program = Program.compileFile(starlarkFile, predeclaredEnv); // TODO(wyv): check that `program` has no `def`, `if`, etc StarlarkThread thread = new StarlarkThread(mu, starlarkSemantics); - thread.setPrintHandler(Event.makeDebugPrintHandler(env.getListener())); + if (printIsNoop) { + thread.setPrintHandler((t, msg) -> {}); + } else { + thread.setPrintHandler(Event.makeDebugPrintHandler(env.getListener())); + } Starlark.execFileProgram(program, predeclaredEnv, thread); } catch (SyntaxError.Exception e) { Event.replayEventsOn(env.getListener(), e.errors()); @@ -237,13 +284,19 @@ private ModuleFileGlobals execModuleFile( private static class GetModuleFileResult { ModuleFile moduleFile; + // `yankedInfo` is non-null if and only if the module has been yanked and hasn't been + // allowlisted. + @Nullable String yankedInfo; // `registry` can be null if this module has a non-registry override. 
@Nullable Registry registry; } @Nullable private GetModuleFileResult getModuleFile( - ModuleKey key, @Nullable ModuleOverride override, Environment env) + ModuleKey key, + @Nullable ModuleOverride override, + Optional> allowedYankedVersions, + Environment env) throws ModuleFileFunctionException, InterruptedException { // If there is a non-registry override for this module, we need to fetch the corresponding repo // first and read the module file from there. @@ -303,6 +356,10 @@ private GetModuleFileResult getModuleFile( } result.moduleFile = moduleFile.get(); result.registry = registry; + result.yankedInfo = + YankedVersionsUtil.getYankedInfo( + registry, key, allowedYankedVersions, env.getListener()) + .orElse(null); return result; } catch (IOException e) { throw errorf( @@ -346,5 +403,9 @@ static final class ModuleFileFunctionException extends SkyFunctionException { ModuleFileFunctionException(ExternalDepsException cause) { super(cause, Transience.TRANSIENT); } + + ModuleFileFunctionException(ExternalDepsException cause, Transience transience) { + super(cause, transience); + } } } diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/YankedVersionsUtil.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/YankedVersionsUtil.java new file mode 100644 index 00000000000000..b6b5018b151286 --- /dev/null +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/YankedVersionsUtil.java @@ -0,0 +1,157 @@ +// Copyright 2022 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +package com.google.devtools.build.lib.bazel.bzlmod; + +import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.devtools.build.lib.cmdline.RepositoryName; +import com.google.devtools.build.lib.events.Event; +import com.google.devtools.build.lib.events.ExtendedEventHandler; +import com.google.devtools.build.lib.server.FailureDetails; +import com.google.devtools.build.lib.skyframe.PrecomputedValue; +import java.io.IOException; +import java.util.List; +import java.util.Optional; + +/** Utility class to parse and evaluate yanked version specifications and exceptions. */ +public final class YankedVersionsUtil { + + public static final PrecomputedValue.Precomputed> ALLOWED_YANKED_VERSIONS = + new PrecomputedValue.Precomputed<>("allowed_yanked_versions"); + public static final String BZLMOD_ALLOWED_YANKED_VERSIONS_ENV = "BZLMOD_ALLOW_YANKED_VERSIONS"; + + /** + * Parses a set of allowed yanked versions from the command line flag (--allow_yanked_versions) and + * the environment variable (BZLMOD_ALLOW_YANKED_VERSIONS). If `all` is specified, return Optional.empty(); + * otherwise returns the set of parsed module keys. 
+ */ + static Optional> parseAllowedYankedVersions( + String allowedYankedVersionsFromEnv, List allowedYankedVersionsFromFlag) + throws ExternalDepsException { + ImmutableSet.Builder allowedYankedVersionBuilder = new ImmutableSet.Builder<>(); + if (allowedYankedVersionsFromEnv != null) { + if (parseModuleKeysFromString( + allowedYankedVersionsFromEnv, + allowedYankedVersionBuilder, + String.format( + "environment variable %s=%s", + BZLMOD_ALLOWED_YANKED_VERSIONS_ENV, allowedYankedVersionsFromEnv))) { + return Optional.empty(); + } + } + for (String allowedYankedVersions : allowedYankedVersionsFromFlag) { + if (parseModuleKeysFromString( + allowedYankedVersions, + allowedYankedVersionBuilder, + String.format("command line flag --allow_yanked_versions=%s", allowedYankedVersions))) { + return Optional.empty(); + } + } + return Optional.of(allowedYankedVersionBuilder.build()); + } + + /** + * Returns the reason for the given module being yanked, or {@code Optional.empty()} if the module + * is not yanked or explicitly allowed despite being yanked. + */ + static Optional getYankedInfo( + Registry registry, + ModuleKey key, + Optional> allowedYankedVersions, + ExtendedEventHandler eventHandler) + throws InterruptedException { + Optional> yankedVersions; + try { + yankedVersions = registry.getYankedVersions(key.getName(), eventHandler); + } catch (IOException e) { + eventHandler.handle( + Event.warn( + String.format( + "Could not read metadata file for module %s: %s", key, e.getMessage()))); + // This is failing open: If we can't read the metadata file, we allow yanked modules to be + // fetched. 
+ return Optional.empty(); + } + if (yankedVersions.isEmpty()) { + return Optional.empty(); + } + String yankedInfo = yankedVersions.get().get(key.getVersion()); + if (yankedInfo != null + && allowedYankedVersions.isPresent() + && !allowedYankedVersions.get().contains(key)) { + return Optional.of(yankedInfo); + } else { + return Optional.empty(); + } + } + + /** + * Parse of a comma-separated list of module version(s) of the form '@' or + * 'all' from the string. Returns true if 'all' is present, otherwise returns false. + */ + private static boolean parseModuleKeysFromString( + String input, ImmutableSet.Builder allowedYankedVersionBuilder, String context) + throws ExternalDepsException { + ImmutableList moduleStrs = ImmutableList.copyOf(Splitter.on(',').split(input)); + + for (String moduleStr : moduleStrs) { + if (moduleStr.equals("all")) { + return true; + } + + if (moduleStr.isEmpty()) { + continue; + } + + String[] pieces = moduleStr.split("@", 2); + + if (pieces.length != 2) { + throw ExternalDepsException.withMessage( + FailureDetails.ExternalDeps.Code.VERSION_RESOLUTION_ERROR, + "Parsing %s failed, module versions must be of the form '@'", + context); + } + + if (!RepositoryName.VALID_MODULE_NAME.matcher(pieces[0]).matches()) { + throw ExternalDepsException.withMessage( + FailureDetails.ExternalDeps.Code.VERSION_RESOLUTION_ERROR, + "Parsing %s failed, invalid module name '%s': valid names must 1) only contain" + + " lowercase letters (a-z), digits (0-9), dots (.), hyphens (-), and" + + " underscores (_); 2) begin with a lowercase letter; 3) end with a lowercase" + + " letter or digit.", + context, + pieces[0]); + } + + Version version; + try { + version = Version.parse(pieces[1]); + } catch (Version.ParseException e) { + throw ExternalDepsException.withCauseAndMessage( + FailureDetails.ExternalDeps.Code.VERSION_RESOLUTION_ERROR, + e, + "Parsing %s failed, invalid version specified for module: %s", + context, + pieces[1]); + } + + 
allowedYankedVersionBuilder.add(ModuleKey.create(pieces[0], version)); + } + return false; + } + + private YankedVersionsUtil() {} +} diff --git a/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java b/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java index 637a60ae7c32c3..67802e376c768a 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/RunfilesRepoMappingManifestTest.java @@ -26,6 +26,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -64,8 +65,7 @@ protected ImmutableList extraPrecomputedValues() throws Exception { PrecomputedValue.injected( BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, BazelCompatibilityMode.ERROR), PrecomputedValue.injected(BazelLockFileFunction.LOCKFILE_MODE, LockfileMode.OFF), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of())); + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of())); } @Override diff --git a/src/test/java/com/google/devtools/build/lib/analysis/StarlarkRuleTransitionProviderTest.java b/src/test/java/com/google/devtools/build/lib/analysis/StarlarkRuleTransitionProviderTest.java index 2cba454ff7dead..e53fdd5054ea7e 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/StarlarkRuleTransitionProviderTest.java +++ 
b/src/test/java/com/google/devtools/build/lib/analysis/StarlarkRuleTransitionProviderTest.java @@ -29,6 +29,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -71,8 +72,7 @@ protected ImmutableList extraPrecomputedValues() { ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( diff --git a/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisTestCase.java b/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisTestCase.java index 9d1b863d90a029..a2672c0eafba21 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisTestCase.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/util/AnalysisTestCase.java @@ -47,6 +47,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import 
com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -250,7 +251,7 @@ protected void useRuleClassProvider(ConfiguredRuleClassProvider ruleClassProvide BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, BazelCompatibilityMode.ERROR), @@ -301,7 +302,7 @@ private void reinitializeSkyframeExecutor() { BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, BazelCompatibilityMode.WARNING), diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunctionTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunctionTest.java index e64e0bef52605b..120283f50f273a 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelDepGraphFunctionTest.java @@ -148,7 +148,7 @@ public void setup() throws Exception { BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.set( differencer, BazelCompatibilityMode.ERROR); BazelLockFileFunction.LOCKFILE_MODE.set(differencer, LockfileMode.OFF); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, 
ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelLockFileFunctionTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelLockFileFunctionTest.java index 4efcebc1ece290..2388c6f9498759 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelLockFileFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelLockFileFunctionTest.java @@ -203,7 +203,7 @@ public SkyValue compute(SkyKey skyKey, Environment env) ModuleFileFunction.REGISTRIES.set(differencer, ImmutableList.of()); ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, true); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of()); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.set( differencer, BazelCompatibilityMode.ERROR); BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES.set( @@ -283,7 +283,7 @@ public void moduleWithFlags() throws Exception { ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, true); ModuleFileFunction.REGISTRIES.set(differencer, registries); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of("my_dep_1", override)); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, yankedVersions); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, yankedVersions); BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES.set( differencer, CheckDirectDepsMode.ERROR); BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.set( diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunctionTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunctionTest.java index 
47d15dcc2e1e65..f240b5f30cf273 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BazelModuleResolutionFunctionTest.java @@ -131,7 +131,7 @@ public void setup() throws Exception { BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.set( differencer, BazelCompatibilityMode.ERROR); BazelLockFileFunction.LOCKFILE_MODE.set(differencer, LockfileMode.OFF); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); } @Test @@ -282,7 +282,7 @@ public void testYankedVersionCheckSuccess() throws Exception { @Test public void testYankedVersionCheckIgnoredByAll() throws Exception { setupModulesForYankedVersion(); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of("all")); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of("all")); EvaluationResult result = evaluator.evaluate(ImmutableList.of(BazelModuleResolutionValue.KEY), evaluationContext); assertThat(result.hasError()).isFalse(); @@ -291,8 +291,7 @@ public void testYankedVersionCheckIgnoredByAll() throws Exception { @Test public void testYankedVersionCheckIgnoredBySpecific() throws Exception { setupModulesForYankedVersion(); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set( - differencer, ImmutableList.of("b@1.0")); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of("b@1.0")); EvaluationResult result = evaluator.evaluate(ImmutableList.of(BazelModuleResolutionValue.KEY), evaluationContext); assertThat(result.hasError()).isFalse(); @@ -301,8 +300,7 @@ public void testYankedVersionCheckIgnoredBySpecific() throws Exception { @Test public void testBadYankedVersionFormat() throws Exception { setupModulesForYankedVersion(); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set( - 
differencer, ImmutableList.of("b~1.0")); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of("b~1.0")); EvaluationResult result = evaluator.evaluate(ImmutableList.of(BazelModuleResolutionValue.KEY), evaluationContext); assertThat(result.hasError()).isTrue(); @@ -329,4 +327,86 @@ private void setupModulesForYankedVersion() throws Exception { .addYankedVersion("b", ImmutableMap.of(Version.parse("1.0"), "1.0 is a bad version!")); ModuleFileFunction.REGISTRIES.set(differencer, ImmutableList.of(registry.getUrl())); } + + @Test + public void testYankedVersionSideEffects_equalCompatibilityLevel() throws Exception { + scratch.file( + rootDirectory.getRelative("MODULE.bazel").getPathString(), + "module(name='mod', version='1.0')", + "bazel_dep(name = 'a', version = '1.0')", + "bazel_dep(name = 'b', version = '1.1')"); + + FakeRegistry registry = + registryFactory + .newFakeRegistry("/bar") + .addModule( + createModuleKey("a", "1.0"), + "module(name='a', version='1.0')", + "bazel_dep(name='b', version='1.0')") + .addModule(createModuleKey("c", "1.0"), "module(name='c', version='1.0')") + .addModule(createModuleKey("c", "1.1"), "module(name='c', version='1.1')") + .addModule( + createModuleKey("b", "1.0"), + "module(name='b', version='1.0', compatibility_level = 2)", + "bazel_dep(name='c', version='1.1')", + "print('hello from yanked version')") + .addModule( + createModuleKey("b", "1.1"), + "module(name='b', version='1.1', compatibility_level = 2)", + "bazel_dep(name='c', version='1.0')") + .addYankedVersion("b", ImmutableMap.of(Version.parse("1.0"), "1.0 is a bad version!")); + + ModuleFileFunction.REGISTRIES.set(differencer, ImmutableList.of(registry.getUrl())); + EvaluationResult result = + evaluator.evaluate(ImmutableList.of(BazelModuleResolutionValue.KEY), evaluationContext); + + assertThat(result.hasError()).isFalse(); + assertThat(result.get(BazelModuleResolutionValue.KEY).getResolvedDepGraph().keySet()) + .containsExactly( + ModuleKey.ROOT, 
+ createModuleKey("a", "1.0"), + createModuleKey("b", "1.1"), + createModuleKey("c", "1.0")); + assertDoesNotContainEvent("hello from yanked version"); + } + + @Test + public void testYankedVersionSideEffects_differentCompatibilityLevel() throws Exception { + scratch.file( + rootDirectory.getRelative("MODULE.bazel").getPathString(), + "module(name='mod', version='1.0')", + "bazel_dep(name = 'a', version = '1.0')", + "bazel_dep(name = 'b', version = '1.1')"); + + FakeRegistry registry = + registryFactory + .newFakeRegistry("/bar") + .addModule( + createModuleKey("a", "1.0"), + "module(name='a', version='1.0')", + "bazel_dep(name='b', version='1.0')") + .addModule(createModuleKey("c", "1.0"), "module(name='c', version='1.0')") + .addModule(createModuleKey("c", "1.1"), "module(name='c', version='1.1')") + .addModule( + createModuleKey("b", "1.0"), + "module(name='b', version='1.0', compatibility_level = 2)", + "bazel_dep(name='c', version='1.1')", + "print('hello from yanked version')") + .addModule( + createModuleKey("b", "1.1"), + "module(name='b', version='1.1', compatibility_level = 3)", + "bazel_dep(name='c', version='1.0')") + .addYankedVersion("b", ImmutableMap.of(Version.parse("1.0"), "1.0 is a bad version!")); + + ModuleFileFunction.REGISTRIES.set(differencer, ImmutableList.of(registry.getUrl())); + EvaluationResult result = + evaluator.evaluate(ImmutableList.of(BazelModuleResolutionValue.KEY), evaluationContext); + + assertThat(result.hasError()).isTrue(); + assertThat(result.getError().toString()) + .contains( + "a@1.0 depends on b@1.0 with compatibility level 2, but depends on b@1.1 with" + + " compatibility level 3 which is different"); + assertDoesNotContainEvent("hello from yanked version"); + } } diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BzlmodRepoRuleFunctionTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BzlmodRepoRuleFunctionTest.java index 190521f59625ab..8e100021b95972 100644 --- 
a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BzlmodRepoRuleFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/BzlmodRepoRuleFunctionTest.java @@ -140,7 +140,7 @@ public void setup() throws Exception { ModuleFileFunction.REGISTRIES.set(differencer, ImmutableList.of()); ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, false); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of()); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES.set( differencer, CheckDirectDepsMode.WARNING); BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.set( diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/DiscoveryTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/DiscoveryTest.java index fda410324d68c6..47aca2e2b01d55 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/DiscoveryTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/DiscoveryTest.java @@ -41,6 +41,7 @@ import com.google.devtools.build.lib.rules.repository.RepositoryFunction; import com.google.devtools.build.lib.skyframe.BazelSkyframeExecutorConstants; import com.google.devtools.build.lib.skyframe.BzlmodRepoRuleFunction; +import com.google.devtools.build.lib.skyframe.ClientEnvironmentFunction; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib.skyframe.FileFunction; @@ -178,6 +179,9 @@ private void setUpWithBuiltinModules(ImmutableMap b .put( BzlmodRepoRuleValue.BZLMOD_REPO_RULE, new BzlmodRepoRuleFunction(ruleClassProvider, directories)) + .put( + SkyFunctions.CLIENT_ENVIRONMENT_VARIABLE, + new ClientEnvironmentFunction(new AtomicReference<>(ImmutableMap.of()))) 
.buildOrThrow(), differencer); @@ -196,7 +200,7 @@ private void setUpWithBuiltinModules(ImmutableMap b RepositoryDelegatorFunction.RESOLVED_FILE_FOR_VERIFICATION.set(differencer, Optional.empty()); ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, false); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of()); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionResolutionTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionResolutionTest.java index 134a7e1b4636b0..73cf61c2bb7461 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionResolutionTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionResolutionTest.java @@ -284,7 +284,7 @@ public void setup() throws Exception { RepositoryDelegatorFunction.RESOLVED_FILE_FOR_VERIFICATION.set(differencer, Optional.empty()); ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, false); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of()); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); ModuleFileFunction.REGISTRIES.set(differencer, ImmutableList.of(registry.getUrl())); BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES.set( differencer, CheckDirectDepsMode.WARNING); diff --git a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunctionTest.java b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunctionTest.java index d04da12c560e00..8ff80a284e55f5 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunctionTest.java +++ 
b/src/test/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleFileFunctionTest.java @@ -43,6 +43,7 @@ import com.google.devtools.build.lib.rules.repository.RepositoryFunction; import com.google.devtools.build.lib.skyframe.BazelSkyframeExecutorConstants; import com.google.devtools.build.lib.skyframe.BzlmodRepoRuleFunction; +import com.google.devtools.build.lib.skyframe.ClientEnvironmentFunction; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib.skyframe.FileFunction; @@ -152,6 +153,9 @@ private void setUpWithBuiltinModules(ImmutableMap b .put( BzlmodRepoRuleValue.BZLMOD_REPO_RULE, new BzlmodRepoRuleFunction(ruleClassProvider, directories)) + .put( + SkyFunctions.CLIENT_ENVIRONMENT_VARIABLE, + new ClientEnvironmentFunction(new AtomicReference<>(ImmutableMap.of()))) .buildOrThrow(), differencer); @@ -170,7 +174,7 @@ private void setUpWithBuiltinModules(ImmutableMap b RepositoryDelegatorFunction.RESOLVED_FILE_FOR_VERIFICATION.set(differencer, Optional.empty()); ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, false); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of()); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); } @Test diff --git a/src/test/java/com/google/devtools/build/lib/query2/testutil/SkyframeQueryHelper.java b/src/test/java/com/google/devtools/build/lib/query2/testutil/SkyframeQueryHelper.java index 08d819a4fd384c..ce8918d669c4e4 100644 --- a/src/test/java/com/google/devtools/build/lib/query2/testutil/SkyframeQueryHelper.java +++ b/src/test/java/com/google/devtools/build/lib/query2/testutil/SkyframeQueryHelper.java @@ -29,6 +29,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; 
import com.google.devtools.build.lib.bazel.bzlmod.ModuleKey; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -374,7 +375,7 @@ protected SkyframeExecutor createSkyframeExecutor(ConfiguredRuleClassProvider ru BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, BazelCompatibilityMode.ERROR), @@ -416,7 +417,7 @@ protected SkyframeExecutor createSkyframeExecutor(ConfiguredRuleClassProvider ru CheckDirectDepsMode.WARNING)) .add( PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of())) + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of())) .add( PrecomputedValue.injected( BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, diff --git a/src/test/java/com/google/devtools/build/lib/rules/LabelBuildSettingTest.java b/src/test/java/com/google/devtools/build/lib/rules/LabelBuildSettingTest.java index 0d88e1b9f9d35b..c43e1b88c18be5 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/LabelBuildSettingTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/LabelBuildSettingTest.java @@ -25,6 +25,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import 
com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -54,8 +55,7 @@ protected ImmutableList extraPrecomputedValues() { ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( diff --git a/src/test/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorTest.java b/src/test/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorTest.java index 91c261a465f263..8a951990f9d899 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/repository/RepositoryDelegatorTest.java @@ -36,6 +36,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BzlmodRepoRuleValue; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -273,7 +274,7 @@ public void setupDelegator() throws Exception { 
RepositoryDelegatorFunction.RESOLVED_FILE_FOR_VERIFICATION.set(differencer, Optional.empty()); ModuleFileFunction.IGNORE_DEV_DEPS.set(differencer, false); ModuleFileFunction.MODULE_OVERRIDES.set(differencer, ImmutableMap.of()); - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); + YankedVersionsUtil.ALLOWED_YANKED_VERSIONS.set(differencer, ImmutableList.of()); BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES.set( differencer, CheckDirectDepsMode.WARNING); BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE.set( diff --git a/src/test/java/com/google/devtools/build/lib/rules/starlarkdocextract/StarlarkDocExtractTest.java b/src/test/java/com/google/devtools/build/lib/rules/starlarkdocextract/StarlarkDocExtractTest.java index eed32f0a6922c4..baacd649254b22 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/starlarkdocextract/StarlarkDocExtractTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/starlarkdocextract/StarlarkDocExtractTest.java @@ -31,6 +31,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BzlmodTestUtil; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -77,8 +78,7 @@ protected ImmutableList extraPrecomputedValues() { ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + 
PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/BzlLoadFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/BzlLoadFunctionTest.java index f247b5d7fc3000..89d1ad224dfc9f 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/BzlLoadFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/BzlLoadFunctionTest.java @@ -26,6 +26,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -109,8 +110,7 @@ protected ImmutableList extraPrecomputedValues() { ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/PrepareDepsOfPatternsFunctionTest.java 
b/src/test/java/com/google/devtools/build/lib/skyframe/PrepareDepsOfPatternsFunctionTest.java index 0def6666f83855..1853972778e0dd 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/PrepareDepsOfPatternsFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/PrepareDepsOfPatternsFunctionTest.java @@ -27,6 +27,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; import com.google.devtools.build.lib.bazel.bzlmod.ModuleKey; import com.google.devtools.build.lib.bazel.bzlmod.Version; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -243,8 +244,7 @@ protected ImmutableList extraPrecomputedValues() { PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.BAZEL_COMPATIBILITY_MODE, BazelCompatibilityMode.ERROR), PrecomputedValue.injected(BazelLockFileFunction.LOCKFILE_MODE, LockfileMode.OFF)); diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/RepositoryMappingFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/RepositoryMappingFunctionTest.java index 06672b18ddc308..96fada31eed435 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/RepositoryMappingFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/RepositoryMappingFunctionTest.java @@ -31,6 +31,7 @@ import 
com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; import com.google.devtools.build.lib.bazel.bzlmod.Version; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -82,8 +83,7 @@ protected ImmutableList extraPrecomputedValues() thro ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredExecutionPlatformsFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredExecutionPlatformsFunctionTest.java index 162eb7cbbe6095..2a3ef2fbab270a 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredExecutionPlatformsFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredExecutionPlatformsFunctionTest.java @@ -29,6 +29,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import 
com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -109,8 +110,7 @@ protected ImmutableList extraPrecomputedValues() { ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredToolchainsFunctionTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredToolchainsFunctionTest.java index 49b331c039d1d6..9bb149377d1c81 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredToolchainsFunctionTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/toolchains/RegisteredToolchainsFunctionTest.java @@ -27,6 +27,7 @@ import com.google.devtools.build.lib.bazel.bzlmod.BazelModuleResolutionFunction; import com.google.devtools.build.lib.bazel.bzlmod.FakeRegistry; import com.google.devtools.build.lib.bazel.bzlmod.ModuleFileFunction; +import com.google.devtools.build.lib.bazel.bzlmod.YankedVersionsUtil; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.BazelCompatibilityMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.CheckDirectDepsMode; import com.google.devtools.build.lib.bazel.repository.RepositoryOptions.LockfileMode; @@ -64,8 +65,7 @@ protected 
ImmutableList extraPrecomputedValues() { ModuleFileFunction.REGISTRIES, ImmutableList.of(registry.getUrl())), PrecomputedValue.injected(ModuleFileFunction.IGNORE_DEV_DEPS, false), PrecomputedValue.injected(ModuleFileFunction.MODULE_OVERRIDES, ImmutableMap.of()), - PrecomputedValue.injected( - BazelModuleResolutionFunction.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), + PrecomputedValue.injected(YankedVersionsUtil.ALLOWED_YANKED_VERSIONS, ImmutableList.of()), PrecomputedValue.injected( BazelModuleResolutionFunction.CHECK_DIRECT_DEPENDENCIES, CheckDirectDepsMode.WARNING), PrecomputedValue.injected( From 54ae805ab0fba506319fffc1b7402e4623fa7e19 Mon Sep 17 00:00:00 2001 From: Yannic Bonenberger Date: Wed, 28 Jun 2023 08:10:38 -0700 Subject: [PATCH 31/68] Fix some errors from `-Werror=sign-compare` Closes #18783. PiperOrigin-RevId: 544060603 Change-Id: I1ec59f74f2c85ee7a927024842cd94825d5e211b --- src/main/native/darwin/fsevents.cc | 2 +- src/main/tools/process-tools-darwin.cc | 20 ++++++++++++++++---- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/main/native/darwin/fsevents.cc b/src/main/native/darwin/fsevents.cc index 7dfdcc0fc5dc46..76e933cbd07978 100644 --- a/src/main/native/darwin/fsevents.cc +++ b/src/main/native/darwin/fsevents.cc @@ -57,7 +57,7 @@ void FsEventsDiffAwarenessCallback(ConstFSEventStreamRef streamRef, JNIEventsDiffAwareness *info = static_cast(clientCallBackInfo); pthread_mutex_lock(&(info->mutex)); - for (int i = 0; i < numEvents; i++) { + for (size_t i = 0; i < numEvents; i++) { if ((eventFlags[i] & kFSEventStreamEventFlagMustScanSubDirs) != 0) { // Either we lost events or they were coalesced. 
Assume everything changed // and give up, which matches the fsevents documentation in that the diff --git a/src/main/tools/process-tools-darwin.cc b/src/main/tools/process-tools-darwin.cc index 5843397f3aadf4..f2421823f03ef3 100644 --- a/src/main/tools/process-tools-darwin.cc +++ b/src/main/tools/process-tools-darwin.cc @@ -24,7 +24,9 @@ #include "src/main/tools/logging.h" #include "src/main/tools/process-tools.h" -int WaitForProcessToTerminate(pid_t pid) { +namespace { + +int WaitForProcessToTerminate(uintptr_t ident) { int kq; if ((kq = kqueue()) == -1) { return -1; @@ -34,7 +36,7 @@ int WaitForProcessToTerminate(pid_t pid) { // reports any pending such events, so this is not racy even if the // process happened to exit before we got to installing the kevent. struct kevent kc; - EV_SET(&kc, pid, EVFILT_PROC, EV_ADD | EV_ENABLE, NOTE_EXIT, 0, 0); + EV_SET(&kc, ident, EVFILT_PROC, EV_ADD | EV_ENABLE, NOTE_EXIT, 0, 0); int nev; struct kevent ke; @@ -48,9 +50,9 @@ int WaitForProcessToTerminate(pid_t pid) { if (nev != 1) { DIE("Expected only one event from the kevent call; got %d", nev); } - if (ke.ident != pid) { + if (ke.ident != ident) { DIE("Expected PID in the kevent to be %" PRIdMAX " but got %" PRIdMAX, - (intmax_t)pid, (intmax_t)ke.ident); + (intmax_t)ident, (intmax_t)ke.ident); } if (!(ke.fflags & NOTE_EXIT)) { DIE("Expected the kevent to be for an exit condition"); @@ -59,6 +61,16 @@ int WaitForProcessToTerminate(pid_t pid) { return close(kq); } +} // namespace + +int WaitForProcessToTerminate(pid_t pid) { + if (pid < 0) { + DIE("PID must be >= 0, got %" PRIdMAX, static_cast(pid)); + } + + return WaitForProcessToTerminate((uintptr_t)pid); +} + int WaitForProcessGroupToTerminate(pid_t pgid) { int name[] = {CTL_KERN, KERN_PROC, KERN_PROC_PGRP, pgid}; From d14a56fb67babf6cfa21006ceeee41c9662221b5 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 09:06:27 -0700 Subject: [PATCH 32/68] Flip j2objc_library and expose its Starkarkified providers I've 
fixed relevant tests. I've added and exposed 2 simple Starlark functions relevant for unions of j2objc providers in Native code: j2objc_mapping_file_info_union and j2objc_entry_class_info_union. PiperOrigin-RevId: 544073703 Change-Id: I7785a13e29b441f075f69cc174882f3250ec238e --- .../build/lib/rules/objc/AppleBinary.java | 4 +- .../lib/rules/objc/AppleStarlarkCommon.java | 3 +- .../lib/rules/objc/CompilationSupport.java | 35 ++++-- .../rules/objc/MultiArchBinarySupport.java | 54 +++++++-- .../lib/rules/objc/ObjcStarlarkInternal.java | 27 ----- .../starlark/builtins_bzl/common/exports.bzl | 4 +- .../builtins_bzl/common/objc/objc_common.bzl | 34 ++++++ .../builtins_bzl/common/objc/objc_library.bzl | 13 +- .../devtools/build/lib/rules/objc/BUILD | 1 + .../rules/objc/BazelJ2ObjcLibraryTest.java | 113 +++++++++++------- .../build/lib/rules/objc/ObjcLibraryTest.java | 18 ++- 11 files changed, 204 insertions(+), 102 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/AppleBinary.java b/src/main/java/com/google/devtools/build/lib/rules/objc/AppleBinary.java index b7f8f8c481b377..7ef86f865543ec 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/AppleBinary.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/AppleBinary.java @@ -20,7 +20,6 @@ import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; import com.google.devtools.build.lib.analysis.OutputGroupInfo; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; @@ -39,6 +38,7 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; +import net.starlark.java.eval.EvalException; /** Native support for Apple binary rules. 
*/ public class AppleBinary { @@ -70,7 +70,7 @@ public static AppleLinkingOutputs linkMultiArchBinary( Iterable extraRequestedFeatures, Iterable extraDisabledFeatures, boolean isStampingEnabled) - throws InterruptedException, RuleErrorException, ActionConflictException { + throws InterruptedException, RuleErrorException, EvalException { Map, List> splitDeps = ruleContext.getSplitPrerequisiteConfiguredTargetAndTargets("deps"); Map, List> splitToolchains = diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/AppleStarlarkCommon.java b/src/main/java/com/google/devtools/build/lib/rules/objc/AppleStarlarkCommon.java index d1da07021e1892..2e2e1c1da626c1 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/AppleStarlarkCommon.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/AppleStarlarkCommon.java @@ -19,7 +19,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; @@ -283,7 +282,7 @@ public StructImpl linkMultiArchBinary( Sequence.cast(extraDisabledFeatures, String.class, "extra_disabled_features"), isStampingEnabled); return createStarlarkLinkingOutputs(linkingOutputs, thread); - } catch (RuleErrorException | ActionConflictException exception) { + } catch (RuleErrorException exception) { throw new EvalException(exception); } } diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java b/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java index 0286cdbf8c1378..0797d796663546 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java +++ 
b/src/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java @@ -50,11 +50,13 @@ import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.LabelSyntaxException; +import com.google.devtools.build.lib.collect.nestedset.Depset; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.packages.BuildType; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; +import com.google.devtools.build.lib.packages.StarlarkInfo; import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; import com.google.devtools.build.lib.rules.apple.XcodeConfigInfo; @@ -536,14 +538,14 @@ public CompilationSupport registerLinkActions( Object linkingInfoProvider, ObjcProvider secondaryObjcProvider, CcLinkingContext secondaryCcLinkingContext, - J2ObjcMappingFileProvider j2ObjcMappingFileProvider, - J2ObjcEntryClassProvider j2ObjcEntryClassProvider, + StarlarkInfo j2ObjcMappingFileProvider, + StarlarkInfo j2ObjcEntryClassProvider, ExtraLinkArgs extraLinkArgs, Iterable extraLinkInputs, Iterable extraRequestedFeatures, Iterable extraDisabledFeatures, boolean isStampingEnabled) - throws InterruptedException, RuleErrorException { + throws InterruptedException, RuleErrorException, EvalException { ObjcProvider objcProviderWithLinkingInfo = null; CcLinkingContext ccLinkingContextWithLinkingInfo = null; checkState( @@ -824,30 +826,39 @@ private static ImmutableSet getForceLoadArtifacts(ObjcProvider objcPro .build(); } + private NestedSet getField(StarlarkInfo provider, String fieldName, Class type) + throws EvalException { + return Depset.cast(provider.getValue(fieldName), type, 
fieldName); + } + /** Returns true if this build should strip J2Objc dead code. */ - private boolean stripJ2ObjcDeadCode(J2ObjcEntryClassProvider j2ObjcEntryClassProvider) { + private boolean stripJ2ObjcDeadCode(StarlarkInfo j2ObjcEntryClassProvider) throws EvalException { J2ObjcConfiguration j2objcConfiguration = buildConfiguration.getFragment(J2ObjcConfiguration.class); + NestedSet entryClasses = + getField(j2ObjcEntryClassProvider, "entry_classes", String.class); + // Only perform J2ObjC dead code stripping if flag --j2objc_dead_code_removal is specified and // users have specified entry classes. - return j2objcConfiguration.removeDeadCode() - && !j2ObjcEntryClassProvider.getEntryClasses().isEmpty(); + return j2objcConfiguration.removeDeadCode() && !entryClasses.isEmpty(); } /** Registers actions to perform J2Objc dead code removal. */ private void registerJ2ObjcDeadCodeRemovalActions( ObjcProvider objcProvider, - J2ObjcMappingFileProvider j2ObjcMappingFileProvider, - J2ObjcEntryClassProvider j2ObjcEntryClassProvider) { + StarlarkInfo j2ObjcMappingFileProvider, + StarlarkInfo j2ObjcEntryClassProvider) + throws EvalException { ObjcConfiguration objcConfiguration = buildConfiguration.getFragment(ObjcConfiguration.class); - NestedSet entryClasses = j2ObjcEntryClassProvider.getEntryClasses(); + NestedSet entryClasses = + getField(j2ObjcEntryClassProvider, "entry_classes", String.class); NestedSet j2ObjcDependencyMappingFiles = - j2ObjcMappingFileProvider.getDependencyMappingFiles(); + getField(j2ObjcMappingFileProvider, "dependency_mapping_files", Artifact.class); NestedSet j2ObjcHeaderMappingFiles = - j2ObjcMappingFileProvider.getHeaderMappingFiles(); + getField(j2ObjcMappingFileProvider, "header_mapping_files", Artifact.class); NestedSet j2ObjcArchiveSourceMappingFiles = - j2ObjcMappingFileProvider.getArchiveSourceMappingFiles(); + getField(j2ObjcMappingFileProvider, "archive_source_mapping_files", Artifact.class); for (Artifact j2objcArchive : 
objcProvider.get(ObjcProvider.J2OBJC_LIBRARY).toList()) { Artifact prunedJ2ObjcArchive = intermediateArtifacts.j2objcPrunedArchive(j2objcArchive); diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/MultiArchBinarySupport.java b/src/main/java/com/google/devtools/build/lib/rules/objc/MultiArchBinarySupport.java index 2dd7c5a2333414..824a51c10051bc 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/MultiArchBinarySupport.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/MultiArchBinarySupport.java @@ -38,6 +38,9 @@ import com.google.devtools.build.lib.packages.BuiltinProvider; import com.google.devtools.build.lib.packages.Info; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; +import com.google.devtools.build.lib.packages.StarlarkInfo; +import com.google.devtools.build.lib.packages.StarlarkProvider; +import com.google.devtools.build.lib.packages.StarlarkProviderIdentifier; import com.google.devtools.build.lib.packages.StructImpl; import com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; @@ -55,11 +58,14 @@ import com.google.devtools.build.lib.rules.objc.CompilationSupport.ExtraLinkArgs; import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; import com.google.devtools.build.lib.vfs.PathFragment; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import net.starlark.java.eval.Dict; +import net.starlark.java.eval.EvalException; +import net.starlark.java.eval.StarlarkList; /** Support utility for creating multi-arch Apple binaries. 
*/ public class MultiArchBinarySupport { @@ -114,6 +120,28 @@ public MultiArchBinarySupport(RuleContext ruleContext, CppSemantics cppSemantics this.cppSemantics = cppSemantics; } + private StarlarkInfo getStarlarkUnionedJ2objcProvider( + String providerName, + String unionFunctionName, + Iterable infoCollections) + throws RuleErrorException, InterruptedException { + ImmutableList providers = + getTypedProviders( + infoCollections, + StarlarkProviderIdentifier.forKey( + new StarlarkProvider.Key( + Label.parseCanonicalUnchecked("@_builtins//:common/objc/providers.bzl"), + providerName))); + + Object starlarkFunc = ruleContext.getStarlarkDefinedBuiltin(unionFunctionName); + ruleContext.initStarlarkRuleContext(); + return (StarlarkInfo) + ruleContext.callStarlarkOrThrowRuleError( + starlarkFunc, + ImmutableList.of(StarlarkList.immutableCopyOf(providers)), + new HashMap<>()); + } + /** * Registers actions to link a single-platform/architecture Apple binary in a specific * configuration. @@ -141,16 +169,17 @@ public Artifact registerConfigurationSpecificLinkActions( boolean isStampingEnabled, Iterable infoCollections, Map> outputMapCollector) - throws RuleErrorException, InterruptedException { + throws RuleErrorException, InterruptedException, EvalException { IntermediateArtifacts intermediateArtifacts = new IntermediateArtifacts(ruleContext, dependencySpecificConfiguration.config()); - J2ObjcMappingFileProvider j2ObjcMappingFileProvider = - J2ObjcMappingFileProvider.union( - getTypedProviders(infoCollections, J2ObjcMappingFileProvider.PROVIDER)); - J2ObjcEntryClassProvider j2ObjcEntryClassProvider = - new J2ObjcEntryClassProvider.Builder() - .addTransitive(getTypedProviders(infoCollections, J2ObjcEntryClassProvider.PROVIDER)) - .build(); + + StarlarkInfo j2ObjcEntryClassProvider = + getStarlarkUnionedJ2objcProvider( + "J2ObjcEntryClassInfo", "j2objc_entry_class_info_union", infoCollections); + + StarlarkInfo j2ObjcMappingFileProvider = + 
getStarlarkUnionedJ2objcProvider( + "J2ObjcMappingFileInfo", "j2objc_mapping_file_info_union", infoCollections); CompilationSupport compilationSupport = new CompilationSupport.Builder(ruleContext, cppSemantics) @@ -658,6 +687,15 @@ private static ImmutableList getTypedProviders( .collect(toImmutableList()); } + private static ImmutableList getTypedProviders( + Iterable infoCollections, + StarlarkProviderIdentifier identifier) { + return stream(infoCollections) + .filter(infoCollection -> infoCollection.get(identifier) != null) + .map(infoCollection -> (StarlarkInfo) infoCollection.get(identifier)) + .collect(toImmutableList()); + } + /** Returns providers from a list of {@link ConfiguredTargetAndData} */ public static List getProvidersFromCtads( List ctads) { diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcStarlarkInternal.java b/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcStarlarkInternal.java index 2aeb5be1bca62f..d40fc4e2a395ce 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcStarlarkInternal.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcStarlarkInternal.java @@ -16,7 +16,6 @@ import static com.google.common.collect.ImmutableList.toImmutableList; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; @@ -25,7 +24,6 @@ import com.google.devtools.build.lib.analysis.LocationExpander; import com.google.devtools.build.lib.analysis.TemplateVariableInfo; import com.google.devtools.build.lib.analysis.starlark.StarlarkRuleContext; -import com.google.devtools.build.lib.packages.NativeInfo; import com.google.devtools.build.lib.packages.Type; import com.google.devtools.build.lib.rules.cpp.CcCompilationContext; import com.google.devtools.build.lib.rules.cpp.CcLinkingContext; @@ -238,31 +236,6 @@ public CompilationArtifacts j2objcCreateCompilationArtifacts( 
intermediateArtifacts); } - @StarlarkMethod( - name = "j2objc_providers_from_deps", - documented = false, - parameters = { - @Param(name = "ctx", positional = false, named = true), - }) - public Sequence createj2objcProvidersFromDeps(StarlarkRuleContext starlarkRuleContext) - throws EvalException { - J2ObjcMappingFileProvider j2ObjcMappingFileProvider = - J2ObjcMappingFileProvider.union( - starlarkRuleContext - .getRuleContext() - .getPrerequisites("deps", J2ObjcMappingFileProvider.PROVIDER)); - J2ObjcEntryClassProvider j2ObjcEntryClassProvider = - new J2ObjcEntryClassProvider.Builder() - .addTransitive( - starlarkRuleContext - .getRuleContext() - .getPrerequisites("deps", J2ObjcEntryClassProvider.PROVIDER)) - .build(); - - return StarlarkList.immutableCopyOf( - ImmutableList.of(j2ObjcEntryClassProvider, j2ObjcMappingFileProvider)); - } - @StarlarkMethod( name = "create_compilation_context", documented = false, diff --git a/src/main/starlark/builtins_bzl/common/exports.bzl b/src/main/starlark/builtins_bzl/common/exports.bzl index 2456597095678b..48d732bc0b38f9 100755 --- a/src/main/starlark/builtins_bzl/common/exports.bzl +++ b/src/main/starlark/builtins_bzl/common/exports.bzl @@ -72,7 +72,7 @@ exported_rules = { "java_lite_proto_library": java_lite_proto_library, "objc_import": objc_import, "objc_library": objc_library, - "-j2objc_library": j2objc_library, + "+j2objc_library": j2objc_library, "proto_library": proto_library, "cc_shared_library": cc_shared_library, "cc_binary": cc_binary, @@ -94,4 +94,6 @@ exported_to_java = { "get_cc_toolchain_provider": get_cc_toolchain_provider, "cc_toolchain_build_variables": cc_helper.cc_toolchain_build_variables, "apple_cc_toolchain_build_variables": objc_common.apple_cc_toolchain_build_variables, + "j2objc_mapping_file_info_union": objc_common.j2objc_mapping_file_info_union, + "j2objc_entry_class_info_union": objc_common.j2objc_entry_class_info_union, } diff --git a/src/main/starlark/builtins_bzl/common/objc/objc_common.bzl 
b/src/main/starlark/builtins_bzl/common/objc/objc_common.bzl index 65e283090a2c39..eb8c806102133d 100644 --- a/src/main/starlark/builtins_bzl/common/objc/objc_common.bzl +++ b/src/main/starlark/builtins_bzl/common/objc/objc_common.bzl @@ -15,6 +15,7 @@ """Common functionality for Objc rules.""" load(":common/cc/cc_info.bzl", "CcInfo") +load(":common/objc/providers.bzl", "J2ObjcEntryClassInfo", "J2ObjcMappingFileInfo") objc_internal = _builtins.internal.objc_internal apple_common = _builtins.toplevel.apple_common @@ -460,6 +461,37 @@ def _apple_cc_toolchain_build_variables(xcode_config): return apple_cc_toolchain_build_variables +# TODO(bazel-team): Delete this function when MultiArchBinarySupport is starlarkified. +def _j2objc_mapping_file_info_union(providers): + transitive_header_mapping_files = [] + transitive_class_mapping_files = [] + transitive_dependency_mapping_files = [] + transitive_archive_source_mapping_files = [] + + for provider in providers: + transitive_header_mapping_files.append(provider.header_mapping_files) + transitive_class_mapping_files.append(provider.class_mapping_files) + transitive_dependency_mapping_files.append(provider.dependency_mapping_files) + transitive_archive_source_mapping_files.append(provider.archive_source_mapping_files) + + return J2ObjcMappingFileInfo( + header_mapping_files = depset([], transitive = transitive_header_mapping_files), + class_mapping_files = depset([], transitive = transitive_class_mapping_files), + dependency_mapping_files = depset([], transitive = transitive_dependency_mapping_files), + archive_source_mapping_files = depset([], transitive = transitive_archive_source_mapping_files), + ) + +# TODO(bazel-team): Delete this function when MultiArchBinarySupport is starlarkified. 
+def _j2objc_entry_class_info_union(providers): + transitive_entry_classes = [] + + for provider in providers: + transitive_entry_classes.append(provider.entry_classes) + + return J2ObjcEntryClassInfo( + entry_classes = depset([], transitive = transitive_entry_classes), + ) + objc_common = struct( create_context_and_provider = _create_context_and_provider, to_string_with_minimum_components = _to_string_with_minimum_components, @@ -468,4 +500,6 @@ objc_common = struct( apple_cc_toolchain_build_variables = _apple_cc_toolchain_build_variables, is_apple_platform = _is_apple_platform, get_common_vars = _get_common_vars, + j2objc_mapping_file_info_union = _j2objc_mapping_file_info_union, + j2objc_entry_class_info_union = _j2objc_entry_class_info_union, ) diff --git a/src/main/starlark/builtins_bzl/common/objc/objc_library.bzl b/src/main/starlark/builtins_bzl/common/objc/objc_library.bzl index 1ea52415dd5eb7..038e7f84229c00 100644 --- a/src/main/starlark/builtins_bzl/common/objc/objc_library.bzl +++ b/src/main/starlark/builtins_bzl/common/objc/objc_library.bzl @@ -17,9 +17,10 @@ load("@_builtins//:common/cc/cc_helper.bzl", "cc_helper") load("@_builtins//:common/objc/compilation_support.bzl", "compilation_support") load("@_builtins//:common/objc/attrs.bzl", "common_attrs") -load("@_builtins//:common/objc/objc_common.bzl", "extensions") +load("@_builtins//:common/objc/objc_common.bzl", "extensions", "objc_common") load("@_builtins//:common/objc/semantics.bzl", "semantics") load("@_builtins//:common/objc/transitions.bzl", "apple_crosstool_transition") +load(":common/objc/providers.bzl", "J2ObjcEntryClassInfo", "J2ObjcMappingFileInfo") load(":common/cc/cc_info.bzl", "CcInfo") objc_internal = _builtins.internal.objc_internal @@ -78,7 +79,11 @@ def _objc_library_impl(ctx): compilation_support.validate_attributes(common_variables) - j2objc_providers = objc_internal.j2objc_providers_from_deps(ctx = ctx) + j2objc_mapping_file_infos = [dep[J2ObjcMappingFileInfo] for dep in 
ctx.attr.deps if J2ObjcMappingFileInfo in dep] + j2objc_mapping_file_info = objc_common.j2objc_mapping_file_info_union(providers = j2objc_mapping_file_infos) + + j2objc_entry_class_infos = [dep[J2ObjcEntryClassInfo] for dep in ctx.attr.deps if J2ObjcEntryClassInfo in dep] + j2objc_entry_class_info = objc_common.j2objc_entry_class_info_union(providers = j2objc_entry_class_infos) objc_provider = common_variables.objc_provider @@ -105,8 +110,8 @@ def _objc_library_impl(ctx): linking_context = linking_context, ), objc_provider, - j2objc_providers[0], - j2objc_providers[1], + j2objc_mapping_file_info, + j2objc_entry_class_info, instrumented_files_info, OutputGroupInfo(**output_groups), ] diff --git a/src/test/java/com/google/devtools/build/lib/rules/objc/BUILD b/src/test/java/com/google/devtools/build/lib/rules/objc/BUILD index 716881324a8774..76666c6b13a512 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/objc/BUILD +++ b/src/test/java/com/google/devtools/build/lib/rules/objc/BUILD @@ -32,6 +32,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/analysis:config/build_configuration", "//src/main/java/com/google/devtools/build/lib/analysis:configured_target", "//src/main/java/com/google/devtools/build/lib/cmdline", + "//src/main/java/com/google/devtools/build/lib/collect/nestedset", "//src/main/java/com/google/devtools/build/lib/packages", "//src/main/java/com/google/devtools/build/lib/rules/apple", "//src/main/java/com/google/devtools/build/lib/rules/cpp", diff --git a/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java b/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java index 0440e6aadfc907..af0941594d4928 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java @@ -43,7 +43,10 @@ import 
com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.RepositoryName; -import com.google.devtools.build.lib.packages.NativeAspectClass; +import com.google.devtools.build.lib.collect.nestedset.Depset; +import com.google.devtools.build.lib.packages.Provider; +import com.google.devtools.build.lib.packages.StarlarkProvider; +import com.google.devtools.build.lib.packages.StructImpl; import com.google.devtools.build.lib.packages.util.MockObjcSupport; import com.google.devtools.build.lib.rules.apple.ApplePlatform.PlatformType; import com.google.devtools.build.lib.rules.apple.AppleToolchain; @@ -74,8 +77,21 @@ */ @RunWith(JUnit4.class) public class BazelJ2ObjcLibraryTest extends J2ObjcLibraryTest { - protected NativeAspectClass getJ2ObjcAspect() { - return ruleClassProvider.getNativeAspectClass(J2ObjcAspect.NAME); + + private static final Provider.Key starlarkJ2objcMappingFileProviderKey = + new StarlarkProvider.Key( + Label.parseCanonicalUnchecked("@_builtins//:common/objc/providers.bzl"), + "J2ObjcMappingFileInfo"); + + private StructImpl getJ2ObjcMappingFileInfoFromTarget(ConfiguredTarget configuredTarget) + throws Exception { + return (StructImpl) configuredTarget.get(starlarkJ2objcMappingFileProviderKey); + } + + private ImmutableList getArtifacts(StructImpl j2ObjcMappingFileInfo, String attribute) + throws Exception { + Depset filesDepset = (Depset) j2ObjcMappingFileInfo.getValue(attribute); + return filesDepset.toList(Artifact.class); } /** @@ -244,9 +260,11 @@ public void testJ2ObjcHeaderMapExportedInJavaLibrary() throws Exception { ")"); ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget("//java/com/google/transpile:dummy"); - J2ObjcMappingFileProvider provider = target.get(J2ObjcMappingFileProvider.PROVIDER); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList headerMappingFilesList = + 
getArtifacts(j2ObjcMappingFileInfo, "header_mapping_files"); - assertThat(provider.getHeaderMappingFiles().getSingleton().getRootRelativePath().toString()) + assertThat(headerMappingFilesList.get(0).getRootRelativePath().toString()) .isEqualTo("java/com/google/transpile/dummy.mapping.j2objc"); } @@ -266,9 +284,11 @@ public void testDepsJ2ObjcHeaderMapExportedInJavaLibraryWithNoSourceFile() throw ")"); ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget("//java/com/google/transpile:dummy"); - J2ObjcMappingFileProvider provider = target.get(J2ObjcMappingFileProvider.PROVIDER); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList headerMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "header_mapping_files"); - assertThat(provider.getHeaderMappingFiles().getSingleton().getRootRelativePath().toString()) + assertThat(headerMappingFilesList.get(0).getRootRelativePath().toString()) .isEqualTo("java/com/google/dep/dep.mapping.j2objc"); } @@ -296,15 +316,13 @@ public void testJ2ObjcProtoClassMappingFilesExportedInJavaLibrary() throws Excep ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget( "//java/com/google/dummy/test/proto:test"); - J2ObjcMappingFileProvider provider = target.get(J2ObjcMappingFileProvider.PROVIDER); - Artifact classMappingFile = - getGenfilesArtifact( - "test.clsmap.properties", - getConfiguredTarget( - "//java/com/google/dummy/test/proto:test_proto", getAppleCrosstoolConfiguration()), - getJ2ObjcAspect()); - - assertThat(provider.getClassMappingFiles().toList()).containsExactly(classMappingFile); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList classMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "class_mapping_files"); + + assertThat(classMappingFilesList.get(0).getExecPathString()) + .containsMatch( + "/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin/java/com/google/dummy/test/proto/test.clsmap.properties"); } @Test @@ -327,19 
+345,20 @@ public void testJavaProtoLibraryWithProtoLibrary() throws Exception { ")"); ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget("//x:test"); - ConfiguredTarget test = getConfiguredTarget("//x:test_proto", getAppleCrosstoolConfiguration()); - J2ObjcMappingFileProvider provider = target.get(J2ObjcMappingFileProvider.PROVIDER); - Artifact classMappingFile = - getGenfilesArtifact("test.clsmap.properties", test, getJ2ObjcAspect()); - assertThat(provider.getClassMappingFiles().toList()).containsExactly(classMappingFile); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList classMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "class_mapping_files"); + assertThat(classMappingFilesList.get(0).getExecPathString()) + .containsMatch( + "/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin/x/test.clsmap.properties"); ObjcProvider objcProvider = target.get(ObjcProvider.STARLARK_CONSTRUCTOR); CcCompilationContext ccCompilationContext = target.get(CcInfo.PROVIDER).getCcCompilationContext(); - Artifact headerFile = getGenfilesArtifact("test.j2objc.pb.h", test, getJ2ObjcAspect()); - Artifact sourceFile = getGenfilesArtifact("test.j2objc.pb.m", test, getJ2ObjcAspect()); - assertThat(ccCompilationContext.getDeclaredIncludeSrcs().toList()).contains(headerFile); - assertThat(objcProvider.get(ObjcProvider.SOURCE).toList()).contains(sourceFile); + assertThat(ccCompilationContext.getDeclaredIncludeSrcs().toList().toString()) + .containsMatch("/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin]x/test.j2objc.pb.h"); + assertThat(objcProvider.get(ObjcProvider.SOURCE).toList().toString()) + .containsMatch("/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin]x/test.j2objc.pb.m,"); } @Test @@ -374,25 +393,25 @@ public void testJavaProtoLibraryWithProtoLibrary_external() throws Exception { " deps = ['@bla//foo:test_java_proto'])"); ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget("//x:test"); - ConfiguredTarget test 
= - getConfiguredTarget("@bla//foo:test_proto", getAppleCrosstoolConfiguration()); - J2ObjcMappingFileProvider provider = target.get(J2ObjcMappingFileProvider.PROVIDER); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList classMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "class_mapping_files"); - Artifact classMappingFile = - getGenfilesArtifact("../external/bla/foo/test.clsmap.properties", test, getJ2ObjcAspect()); - assertThat(provider.getClassMappingFiles().toList()).containsExactly(classMappingFile); + assertThat(classMappingFilesList.get(0).getExecPathString()) + .containsMatch( + "/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin/external/bla/foo/test.clsmap.properties"); ObjcProvider objcProvider = target.get(ObjcProvider.STARLARK_CONSTRUCTOR); CcCompilationContext ccCompilationContext = target.get(CcInfo.PROVIDER).getCcCompilationContext(); - Artifact headerFile = - getGenfilesArtifact("../external/bla/foo/test.j2objc.pb.h", test, getJ2ObjcAspect()); - Artifact sourceFile = - getGenfilesArtifact("../external/bla/foo/test.j2objc.pb.m", test, getJ2ObjcAspect()); - assertThat(ccCompilationContext.getDeclaredIncludeSrcs().toList()).contains(headerFile); - assertThat(objcProvider.get(ObjcProvider.SOURCE).toList()).contains(sourceFile); + assertThat(ccCompilationContext.getDeclaredIncludeSrcs().toList().toString()) + .containsMatch( + "/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin]external/bla/foo/test.j2objc.pb.h"); + assertThat(objcProvider.get(ObjcProvider.SOURCE).toList().toString()) + .containsMatch( + "/applebin_macos-darwin_x86_64-fastbuild-ST-[^/]*/bin]external/bla/foo/test.j2objc.pb.m"); assertThat(ccCompilationContext.getIncludeDirs()) .contains( getConfiguration(target) @@ -411,11 +430,15 @@ public void testJ2ObjcInfoExportedInJavaImport() throws Exception { ")"); ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget("//java/com/google/transpile:dummy"); - J2ObjcMappingFileProvider 
provider = target.get(J2ObjcMappingFileProvider.PROVIDER); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList headerMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "header_mapping_files"); + ImmutableList classMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "class_mapping_files"); - assertThat(provider.getHeaderMappingFiles().getSingleton().getRootRelativePath().toString()) + assertThat(headerMappingFilesList.get(0).getRootRelativePath().toString()) .isEqualTo("java/com/google/transpile/dummy.mapping.j2objc"); - assertThat(provider.getClassMappingFiles().toList()).isEmpty(); + assertThat(classMappingFilesList).isEmpty(); } protected void checkObjcArchiveAndLinkActions( @@ -581,12 +604,14 @@ public void testJ2ObjcHeaderMappingAction() throws Exception { ConfiguredTarget target = getJ2ObjCAspectConfiguredTarget( "//java/com/google/transpile:lib1"); - J2ObjcMappingFileProvider mappingFileProvider = target.get(J2ObjcMappingFileProvider.PROVIDER); - assertThat(baseArtifactNames(mappingFileProvider.getHeaderMappingFiles())) + StructImpl j2ObjcMappingFileInfo = getJ2ObjcMappingFileInfoFromTarget(target); + ImmutableList headerMappingFilesList = + getArtifacts(j2ObjcMappingFileInfo, "header_mapping_files"); + assertThat(baseArtifactNames(headerMappingFilesList)) .containsExactly("lib1.mapping.j2objc", "lib2.mapping.j2objc"); - Artifact mappingFile = getFirstArtifactEndingWith( - mappingFileProvider.getHeaderMappingFiles(), "lib1.mapping.j2objc"); + Artifact mappingFile = + getFirstArtifactEndingWith(headerMappingFilesList, "lib1.mapping.j2objc"); SpawnAction headerMappingAction = (SpawnAction) getGeneratingAction(mappingFile); String execPath = getRuleContext(target).getBinFragment() + "/"; assertThat(baseArtifactNames(headerMappingAction.getInputs())) diff --git a/src/test/java/com/google/devtools/build/lib/rules/objc/ObjcLibraryTest.java 
b/src/test/java/com/google/devtools/build/lib/rules/objc/ObjcLibraryTest.java index 33a237c2fb6b2b..4c7e4d3f552c04 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/objc/ObjcLibraryTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/objc/ObjcLibraryTest.java @@ -49,10 +49,14 @@ import com.google.devtools.build.lib.analysis.test.InstrumentedFilesInfo; import com.google.devtools.build.lib.analysis.util.AnalysisMock; import com.google.devtools.build.lib.analysis.util.ScratchAttributeWriter; +import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.LabelSyntaxException; import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.packages.NoSuchTargetException; +import com.google.devtools.build.lib.packages.Provider; +import com.google.devtools.build.lib.packages.StarlarkProvider; +import com.google.devtools.build.lib.packages.StructImpl; import com.google.devtools.build.lib.packages.util.MockObjcSupport; import com.google.devtools.build.lib.rules.apple.AppleToolchain; import com.google.devtools.build.lib.rules.cpp.CcCompilationContext; @@ -1586,11 +1590,21 @@ public void testApplePlatformEnvForCcLibraryDep() throws Exception { assertAppleSdkVersionEnv(action.getIncompleteEnvironmentForTesting()); } + private StructImpl getJ2ObjcInfoFromTarget(ConfiguredTarget configuredTarget, String providerName) + throws Exception { + Provider.Key key = + new StarlarkProvider.Key( + Label.parseCanonical("@_builtins//:common/objc/providers.bzl"), providerName); + return (StructImpl) configuredTarget.get(key); + } + @Test public void testExportsJ2ObjcProviders() throws Exception { ConfiguredTarget lib = createLibraryTargetWriter("//a:lib").write(); - assertThat(lib.get(J2ObjcEntryClassProvider.PROVIDER)).isNotNull(); - assertThat(lib.get(J2ObjcMappingFileProvider.PROVIDER)).isNotNull(); + StructImpl 
j2ObjcEntryClassInfo = getJ2ObjcInfoFromTarget(lib, "J2ObjcEntryClassInfo"); + StructImpl j2ObjcMappingFileInfo = getJ2ObjcInfoFromTarget(lib, "J2ObjcMappingFileInfo"); + assertThat(j2ObjcEntryClassInfo).isNotNull(); + assertThat(j2ObjcMappingFileInfo).isNotNull(); } @Test From ba5c74038ed3ae633a9bf8346c88b01e1931992e Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 09:18:24 -0700 Subject: [PATCH 33/68] Link unused targets listed in cc_shared_library.dynamic_deps It used to be an invariant in the cc_shared_library design that the rule would only link the dynamic_deps (other cc_shared_libraries) that were exporting cc_library targets coming from the cc_library graph (in other words targets reachable from cc_shared_library.deps, formerly cc_shared_library.roots). However, these were silently not being linked, without giving an error. It also turns out that it's a valid use case not to require the library in the cc_library graph when, for example, owners of the cc_shared_library target want users to only depend on the dynamic library and make their cc_library private so that it's never linked statically. cc_binary already allowed this and linked the unused direct dynamic_deps.
RELNOTES:none PiperOrigin-RevId: 544076791 Change-Id: I78668c6cc26676922cd1478e290019ca4fccd675 --- .../builtins_bzl/common/cc/cc_binary.bzl | 40 ++------------ .../common/cc/cc_shared_library.bzl | 53 +++++++++++++++++-- .../test_cc_shared_library/BUILD.builtin_test | 41 +++++++------- .../failing_targets/BUILD.builtin_test | 2 - .../test_cc_shared_library/starlark_tests.bzl | 6 +++ 5 files changed, 83 insertions(+), 59 deletions(-) diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl index b6724ecbf1f725..945a21f53bf548 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_binary.bzl @@ -15,7 +15,7 @@ """cc_binary Starlark implementation replacing native""" load(":common/cc/semantics.bzl", "semantics") -load(":common/cc/cc_shared_library.bzl", "CcSharedLibraryInfo", "GraphNodeInfo", "build_exports_map_from_only_dynamic_deps", "build_link_once_static_libs_map", "merge_cc_shared_library_infos", "separate_static_and_dynamic_link_libraries", "sort_linker_inputs", "throw_linked_but_not_exported_errors") +load(":common/cc/cc_shared_library.bzl", "GraphNodeInfo", "add_unused_dynamic_deps", "build_exports_map_from_only_dynamic_deps", "build_link_once_static_libs_map", "merge_cc_shared_library_infos", "separate_static_and_dynamic_link_libraries", "sort_linker_inputs", "throw_linked_but_not_exported_errors") load(":common/cc/cc_helper.bzl", "cc_helper", "linker_mode") load(":common/cc/cc_info.bzl", "CcInfo") load(":common/cc/cc_common.bzl", "cc_common") @@ -331,22 +331,6 @@ def _collect_transitive_dwo_artifacts(cc_compilation_outputs, cc_debug_context, transitive_dwo_files = cc_debug_context.files return depset(dwo_files, transitive = [transitive_dwo_files]) -def _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(ctx): - all_dynamic_dep_linker_inputs = {} - direct_dynamic_dep_to_transitive_dynamic_deps = {} - for dep in 
ctx.attr.dynamic_deps: - owner = dep[CcSharedLibraryInfo].linker_input.owner - all_dynamic_dep_linker_inputs[owner] = dep[CcSharedLibraryInfo].linker_input - transitive_dynamic_dep_labels = [] - for dynamic_dep in dep[CcSharedLibraryInfo].dynamic_deps.to_list(): - all_dynamic_dep_linker_inputs[dynamic_dep[1].owner] = dynamic_dep[1] - transitive_dynamic_dep_labels.append(dynamic_dep[1].owner) - transitive_dynamic_dep_labels_set = depset(transitive_dynamic_dep_labels, order = "topological") - for export in dep[CcSharedLibraryInfo].exports: - direct_dynamic_dep_to_transitive_dynamic_deps[export] = transitive_dynamic_dep_labels_set - - return direct_dynamic_dep_to_transitive_dynamic_deps, all_dynamic_dep_linker_inputs - def _filter_libraries_that_are_linked_dynamically(ctx, feature_configuration, cc_linking_context): merged_cc_shared_library_infos = merge_cc_shared_library_infos(ctx) link_once_static_libs_map = build_link_once_static_libs_map(merged_cc_shared_library_infos) @@ -365,19 +349,15 @@ def _filter_libraries_that_are_linked_dynamically(ctx, feature_configuration, cc # Entries in unused_dynamic_linker_inputs will be marked None if they are # used - ( - transitive_dynamic_dep_labels, - unused_dynamic_linker_inputs, - ) = _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(ctx) - ( targets_to_be_linked_statically_map, targets_to_be_linked_dynamically_set, topologically_sorted_labels, + unused_dynamic_linker_inputs, ) = separate_static_and_dynamic_link_libraries( + ctx, graph_structure_aspect_nodes, can_be_linked_dynamically, - transitive_dynamic_dep_labels, ) topologically_sorted_labels = [ctx.label] + topologically_sorted_labels @@ -411,19 +391,7 @@ def _filter_libraries_that_are_linked_dynamically(ctx, feature_configuration, cc # main binary, even indirect ones that are dependencies of direct # dynamic dependencies of this binary. 
link_indirect_deps = cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "targets_windows") - direct_dynamic_dep_labels = {dep[CcSharedLibraryInfo].linker_input.owner: True for dep in ctx.attr.dynamic_deps} - topologically_sorted_labels_set = {label: True for label in topologically_sorted_labels} - for dynamic_linker_input_owner, unused_linker_input in unused_dynamic_linker_inputs.items(): - should_link_input = (unused_linker_input and - (link_indirect_deps or dynamic_linker_input_owner in direct_dynamic_dep_labels)) - if should_link_input: - _add_linker_input_to_dict( - dynamic_linker_input_owner, - unused_dynamic_linker_inputs[dynamic_linker_input_owner], - ) - linker_inputs_count += 1 - if dynamic_linker_input_owner not in topologically_sorted_labels_set: - topologically_sorted_labels.append(dynamic_linker_input_owner) + linker_inputs_count += add_unused_dynamic_deps(ctx, unused_dynamic_linker_inputs, _add_linker_input_to_dict, topologically_sorted_labels, link_indirect_deps) throw_linked_but_not_exported_errors(linked_statically_but_not_exported) diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl index e82b34b2cb995b..690e6466d6506f 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_shared_library.bzl @@ -83,9 +83,14 @@ def _sort_linker_inputs(topologically_sorted_labels, label_to_linker_inputs, lin # dynamically. The transitive_dynamic_dep_labels parameter is only needed for # binaries because they link all dynamic_deps (cc_binary|cc_test). 
def _separate_static_and_dynamic_link_libraries( + ctx, direct_children, - can_be_linked_dynamically, - transitive_dynamic_dep_labels = {}): + can_be_linked_dynamically): + ( + transitive_dynamic_dep_labels, + all_dynamic_dep_linker_inputs, + ) = _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(ctx) + node = None all_children = reversed(direct_children) targets_to_be_linked_statically_map = {} @@ -209,7 +214,7 @@ def _separate_static_and_dynamic_link_libraries( transitive.append(first_owner_to_depset[child.owners[0]]) topologically_sorted_labels = depset(transitive = transitive, order = "topological").to_list() - return (targets_to_be_linked_statically_map, targets_to_be_linked_dynamically_set, topologically_sorted_labels) + return (targets_to_be_linked_statically_map, targets_to_be_linked_dynamically_set, topologically_sorted_labels, all_dynamic_dep_linker_inputs) def _create_linker_context(ctx, linker_inputs): return cc_common.create_linking_context( @@ -389,11 +394,15 @@ def _filter_inputs( # The targets_to_be_linked_statically_map points to whether the target to # be linked statically can be linked more than once. 
+ # Entries in unused_dynamic_linker_inputs will be marked None if they are + # used ( targets_to_be_linked_statically_map, targets_to_be_linked_dynamically_set, topologically_sorted_labels, + unused_dynamic_linker_inputs, ) = _separate_static_and_dynamic_link_libraries( + ctx, graph_structure_aspect_nodes, can_be_linked_dynamically, ) @@ -437,6 +446,8 @@ def _filter_inputs( linker_inputs_seen[stringified_linker_input] = True owner = str(linker_input.owner) if owner in targets_to_be_linked_dynamically_set: + unused_dynamic_linker_inputs[transitive_exports[owner].owner] = None + # Link the library in this iteration dynamically, # transitive_exports contains the artifacts produced by a # cc_shared_library @@ -502,6 +513,8 @@ def _filter_inputs( message += dynamic_only_root + "\n" fail(message) + linker_inputs_count += _add_unused_dynamic_deps(ctx, unused_dynamic_linker_inputs, _add_linker_input_to_dict, topologically_sorted_labels, link_indirect_deps = False) + if ctx.attr.experimental_disable_topo_sort_do_not_use_remove_before_7_0: linker_inputs = experimental_remove_before_7_0_linker_inputs else: @@ -567,6 +580,39 @@ def _get_deps(ctx): return deps +def _build_map_direct_dynamic_dep_to_transitive_dynamic_deps(ctx): + all_dynamic_dep_linker_inputs = {} + direct_dynamic_dep_to_transitive_dynamic_deps = {} + for dep in ctx.attr.dynamic_deps: + owner = dep[CcSharedLibraryInfo].linker_input.owner + all_dynamic_dep_linker_inputs[owner] = dep[CcSharedLibraryInfo].linker_input + transitive_dynamic_dep_labels = [] + for dynamic_dep in dep[CcSharedLibraryInfo].dynamic_deps.to_list(): + all_dynamic_dep_linker_inputs[dynamic_dep[1].owner] = dynamic_dep[1] + transitive_dynamic_dep_labels.append(dynamic_dep[1].owner) + transitive_dynamic_dep_labels_set = depset(transitive_dynamic_dep_labels, order = "topological") + for export in dep[CcSharedLibraryInfo].exports: + direct_dynamic_dep_to_transitive_dynamic_deps[export] = transitive_dynamic_dep_labels_set + + return 
direct_dynamic_dep_to_transitive_dynamic_deps, all_dynamic_dep_linker_inputs + +def _add_unused_dynamic_deps(ctx, unused_dynamic_linker_inputs, add_linker_inputs_lambda, topologically_sorted_labels, link_indirect_deps): + linker_inputs_count = 0 + direct_dynamic_dep_labels = {dep[CcSharedLibraryInfo].linker_input.owner: True for dep in ctx.attr.dynamic_deps} + topologically_sorted_labels_set = {label: True for label in topologically_sorted_labels} + for dynamic_linker_input_owner, unused_linker_input in unused_dynamic_linker_inputs.items(): + should_link_input = (unused_linker_input and + (link_indirect_deps or dynamic_linker_input_owner in direct_dynamic_dep_labels)) + if should_link_input: + add_linker_inputs_lambda( + dynamic_linker_input_owner, + unused_dynamic_linker_inputs[dynamic_linker_input_owner], + ) + linker_inputs_count += 1 + if dynamic_linker_input_owner not in topologically_sorted_labels_set: + topologically_sorted_labels.append(dynamic_linker_input_owner) + return linker_inputs_count + def _cc_shared_library_impl(ctx): if not cc_common.check_experimental_cc_shared_library(): if len(ctx.attr.static_deps): @@ -806,3 +852,4 @@ build_exports_map_from_only_dynamic_deps = _build_exports_map_from_only_dynamic_ throw_linked_but_not_exported_errors = _throw_linked_but_not_exported_errors separate_static_and_dynamic_link_libraries = _separate_static_and_dynamic_link_libraries sort_linker_inputs = _sort_linker_inputs +add_unused_dynamic_deps = _add_unused_dynamic_deps diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test index 11203e74e90a03..c3399ee3140806 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/BUILD.builtin_test @@ -61,7 +61,7 @@ cc_binary( 
"foo_so", "bar_so", ], - deps = ["foo", "bar"], + deps = ["foo"], ) cc_shared_library( @@ -77,32 +77,18 @@ cc_shared_library( deps = [":a_suffix"], ) -cc_library( - name = "diamond_lib1", - deps = [ - ":a_suffix", - ], -) - -cc_library( - name = "diamond_lib2", - deps = [ - ":a_suffix", - ], -) - cc_shared_library( name = "diamond_so", dynamic_deps = [":a_so"], features = ["windows_export_all_symbols"], - deps = [":qux", "diamond_lib1"], + deps = [":qux"], ) cc_shared_library( name = "diamond2_so", dynamic_deps = [":a_so"], features = ["windows_export_all_symbols"], - deps = [":bar", "diamond_lib2"], + deps = [":bar"], ) cc_binary( @@ -129,7 +115,8 @@ cc_shared_library( }), dynamic_deps = [ "bar_so", - "//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3:diff_pkg_so" + "//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library3:diff_pkg_so", + "private_lib_so", ], features = ["windows_export_all_symbols"], exports_filter = [ @@ -445,6 +432,24 @@ cc_library( ], ) +cc_shared_library( + name = "private_lib_so", + deps = [ + ":private_lib", + ], +) + +genrule( + name = "private_cc_lib_source", + outs = ["private_cc_library.cc"], + cmd = "touch $@", +) + +cc_library( + name = "private_lib", + srcs = [":private_cc_library.cc"] +) + build_failure_test( name = "two_dynamic_deps_same_export_in_so_test", message = "Two shared libraries in dependencies export the same symbols", diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/failing_targets/BUILD.builtin_test b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/failing_targets/BUILD.builtin_test index 1e4a63657e4703..5b0b495c8ed70f 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/failing_targets/BUILD.builtin_test +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/failing_targets/BUILD.builtin_test @@ -12,7 +12,6 @@ 
cc_binary( dynamic_deps = ["//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library:bar_so"], tags = TAGS, deps = [ - "//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library:bar", "//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library:barX", ], ) @@ -29,7 +28,6 @@ cc_shared_library( cc_library( name = "intermediate", deps = [ - "//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library:bar", "//src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library:barX", ], ) diff --git a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl index 27913fa891e73c..a74d65247cd9fd 100644 --- a/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl +++ b/src/main/starlark/tests/builtins_bzl/cc/cc_shared_library/test_cc_shared_library/starlark_tests.bzl @@ -58,6 +58,11 @@ def _linking_order_test_impl(env, target): matching.contains("foo.pic.o"), matching.contains("baz.pic.o"), ]).in_order() + + env.expect.that_collection(args).contains_at_least([ + "-lprivate_lib_so", + ]) + env.expect.where( detail = "liba_suffix.pic.o should be the last user library linked", ).that_str(user_libs[-1]).equals("a_suffix.pic.o") @@ -181,6 +186,7 @@ def _runfiles_test_impl(env, target): "libfoo_so.so", "libbar_so.so", "libdiff_pkg_so.so", + "libprivate_lib_so.so", "Smain_Sstarlark_Stests_Sbuiltins_Ubzl_Scc_Scc_Ushared_Ulibrary_Stest_Ucc_Ushared_Ulibrary_Slibfoo_Uso.so", "Smain_Sstarlark_Stests_Sbuiltins_Ubzl_Scc_Scc_Ushared_Ulibrary_Stest_Ucc_Ushared_Ulibrary_Slibbar_Uso.so", "Smain_Sstarlark_Stests_Sbuiltins_Ubzl_Scc_Scc_Ushared_Ulibrary_Stest_Ucc_Ushared_Ulibrary3_Slibdiff_Upkg_Uso.so", From 62afb4ba663d9cbd7c4d210270afeb909a38ad2d Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 
28 Jun 2023 10:05:39 -0700 Subject: [PATCH 34/68] Avoid creating an identical `ImmutableMap` when the input to `Dict#copyOf` is already an `ImmutableMap`. Also use `forEach` for iteration to avoid creating entry set views. PiperOrigin-RevId: 544088895 Change-Id: Iac12fe7754b5c95f5939e4f2124935abc26d955d --- .../java/net/starlark/java/eval/Dict.java | 36 ++++++++++--------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/src/main/java/net/starlark/java/eval/Dict.java b/src/main/java/net/starlark/java/eval/Dict.java index 649ec3520943fb..6ff9a7eb66f474 100644 --- a/src/main/java/net/starlark/java/eval/Dict.java +++ b/src/main/java/net/starlark/java/eval/Dict.java @@ -441,31 +441,35 @@ public static Dict copyOf(@Nullable Mutability mu, Map dict = (Dict) m; // safe - return dict; - } - if (mu == Mutability.IMMUTABLE) { if (m.isEmpty()) { return empty(); } + + if (m instanceof ImmutableMap) { + m.forEach( + (k, v) -> { + Starlark.checkValid(k); + Starlark.checkValid(v); + }); + @SuppressWarnings("unchecked") + var immutableMap = (ImmutableMap) m; + return new Dict<>(immutableMap); + } + + if (m instanceof Dict && ((Dict) m).isImmutable()) { + @SuppressWarnings("unchecked") + var dict = (Dict) m; + return dict; + } + ImmutableMap.Builder immutableMapBuilder = ImmutableMap.builderWithExpectedSize(m.size()); - for (Map.Entry e : m.entrySet()) { - immutableMapBuilder.put( - Starlark.checkValid(e.getKey()), // - Starlark.checkValid(e.getValue())); - } + m.forEach((k, v) -> immutableMapBuilder.put(Starlark.checkValid(k), Starlark.checkValid(v))); return new Dict<>(immutableMapBuilder.buildOrThrow()); } else { LinkedHashMap linkedHashMap = Maps.newLinkedHashMapWithExpectedSize(m.size()); - for (Map.Entry e : m.entrySet()) { - linkedHashMap.put( - Starlark.checkValid(e.getKey()), // - Starlark.checkValid(e.getValue())); - } + m.forEach((k, v) -> linkedHashMap.put(Starlark.checkValid(k), Starlark.checkValid(v))); return new Dict<>(mu, linkedHashMap); } } From 
d00e34ac6d2cef0c9f50dd1b959996b8311cb91f Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 10:52:29 -0700 Subject: [PATCH 35/68] Add INFO log lines to all call paths to `#handleAnalysisInvalidatingChange`. This way we have post-facto debugging information for why we dropped the analysis cache. In particular, this is useful for when the cause is a different build configuration; Blaze already prints a helpful configuration diff to stderr but the user might not have saved that or noticed it (and even comparing the full set of options we already log to INFO is a bit tedious). PiperOrigin-RevId: 544102574 Change-Id: Ia97ea099dea20ec97e38259db609867192fc65ff --- .../devtools/build/lib/skyframe/SkyframeBuildView.java | 6 ++++++ .../devtools/build/lib/skyframe/SkyframeExecutor.java | 1 + 2 files changed, 7 insertions(+) diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeBuildView.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeBuildView.java index 050cec9f14e21b..6c00d2bcc18871 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeBuildView.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeBuildView.java @@ -29,6 +29,7 @@ import com.google.common.collect.Sets; import com.google.common.collect.Streams; import com.google.common.eventbus.EventBus; +import com.google.common.flogger.GoogleLogger; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.ActionKeyContext; import com.google.devtools.build.lib.actions.ActionLookupKey; @@ -127,6 +128,8 @@ *

      Covers enough functionality to work as a substitute for {@code BuildView#configureTargets}. */ public final class SkyframeBuildView { + private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); + private final ConfiguredTargetFactory factory; private final ArtifactFactory artifactFactory; private final SkyframeExecutor skyframeExecutor; @@ -282,6 +285,7 @@ public void setConfiguration( Event.warn( "--discard_analysis_cache was used in the previous build, " + "discarding analysis cache.")); + logger.atInfo().log("Discarding analysis cache because the previous invocation told us to"); skyframeExecutor.handleAnalysisInvalidatingChange(); } else { String diff = describeConfigurationDifference(configuration, maxDifferencesToShow); @@ -291,6 +295,8 @@ public void setConfiguration( diff + ", discarding analysis cache (this can be expensive, see" + " https://bazel.build/advanced/performance/iteration-speed).")); + logger.atInfo().log( + "Discarding analysis cache because the build configuration changed: %s", diff); // Note that clearing the analysis cache is currently required for correctness. It is also // helpful to save memory. 
// diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java index 656117e4c225c4..8d15d1a95a43bb 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java @@ -2574,6 +2574,7 @@ public WorkspaceInfoFromDiff sync( syncPackageLoading(pathPackageLocator, commandId, clientEnv, tsgm, executors, options); if (lastAnalysisDiscarded) { + logger.atInfo().log("Discarding analysis cache because the previous invocation told us to"); dropConfiguredTargetsNow(eventHandler); lastAnalysisDiscarded = false; } From 9092303c358b87c1e027abafe135c44a27531b36 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 12:50:11 -0700 Subject: [PATCH 36/68] Group synthetic classes with their context classes in DexFileSplitter so that synthetic classes don't end up without their context classes in dex shards, which will cause merging to fail in DexFileMerger. Fixes #16368.
RELNOTES: None PiperOrigin-RevId: 544134712 Change-Id: Ib29f6659f18dd71be96a7985bc25cfb44e719ae5 --- .../android/dexer/DexLimitTrackerTest.java | 45 +++-- src/test/shell/bazel/android/BUILD | 14 ++ .../DexFileSplitter_synthetic_classes_test.sh | 142 +++++++++++++++ .../android/dexer/DexFileAggregator.java | 3 +- .../build/android/dexer/DexFileSplitter.java | 163 ++++++++++++++---- .../build/android/dexer/DexLimitTracker.java | 12 +- 6 files changed, 325 insertions(+), 54 deletions(-) create mode 100755 src/test/shell/bazel/android/DexFileSplitter_synthetic_classes_test.sh diff --git a/src/test/java/com/google/devtools/build/android/dexer/DexLimitTrackerTest.java b/src/test/java/com/google/devtools/build/android/dexer/DexLimitTrackerTest.java index f88ca36ab64709..9e93a0d9a67591 100644 --- a/src/test/java/com/google/devtools/build/android/dexer/DexLimitTrackerTest.java +++ b/src/test/java/com/google/devtools/build/android/dexer/DexLimitTrackerTest.java @@ -42,46 +42,61 @@ public void setUp() throws Exception { public void testUnderLimit() { DexLimitTracker tracker = new DexLimitTracker(Math.max(dex.methodIds().size(), dex.fieldIds().size())); - assertThat(tracker.track(dex)).isFalse(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); } @Test public void testOverLimit() throws Exception { DexLimitTracker tracker = new DexLimitTracker(Math.max(dex.methodIds().size(), dex.fieldIds().size()) - 1); - assertThat(tracker.track(dex)).isTrue(); - assertThat(tracker.track(dex)).isTrue(); - assertThat(tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class)))).isTrue(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isTrue(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isTrue(); + tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class))); + assertThat(tracker.outsideLimits()).isTrue(); } @Test public void testRepeatedReferencesDeduped() throws Exception { DexLimitTracker tracker = new 
DexLimitTracker(Math.max(dex.methodIds().size(), dex.fieldIds().size())); - assertThat(tracker.track(dex)).isFalse(); - assertThat(tracker.track(dex)).isFalse(); - assertThat(tracker.track(dex)).isFalse(); - assertThat(tracker.track(dex)).isFalse(); - assertThat(tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class)))).isTrue(); - assertThat(tracker.track(dex)).isTrue(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); + tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class))); + assertThat(tracker.outsideLimits()).isTrue(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isTrue(); } @Test public void testGoOverLimit() throws Exception { DexLimitTracker tracker = new DexLimitTracker(Math.max(dex.methodIds().size(), dex.fieldIds().size())); - assertThat(tracker.track(dex)).isFalse(); - assertThat(tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class)))).isTrue(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); + tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class))); + assertThat(tracker.outsideLimits()).isTrue(); } @Test public void testClear() throws Exception { DexLimitTracker tracker = new DexLimitTracker(Math.max(dex.methodIds().size(), dex.fieldIds().size())); - assertThat(tracker.track(dex)).isFalse(); - assertThat(tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class)))).isTrue(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); + tracker.track(DexFiles.toDex(convertClass(DexLimitTracker.class))); + assertThat(tracker.outsideLimits()).isTrue(); tracker.clear(); - assertThat(tracker.track(dex)).isFalse(); + tracker.track(dex); + assertThat(tracker.outsideLimits()).isFalse(); } private static DexFile convertClass(Class 
clazz) throws Exception { diff --git a/src/test/shell/bazel/android/BUILD b/src/test/shell/bazel/android/BUILD index c6674fa6f65156..db8ee37d3105df 100644 --- a/src/test/shell/bazel/android/BUILD +++ b/src/test/shell/bazel/android/BUILD @@ -184,3 +184,17 @@ android_sh_test( "//src/test/shell/bazel:test-deps", ], ) + +android_sh_test( + name = "DexFileSplitter_synthetic_classes_test", + size = "medium", + srcs = ["DexFileSplitter_synthetic_classes_test.sh"], + data = [ + ":android_helper", + "//external:android_sdk_for_testing", + "//src/test/shell/bazel:test-deps", + ], + tags = [ + "no_windows", + ], +) diff --git a/src/test/shell/bazel/android/DexFileSplitter_synthetic_classes_test.sh b/src/test/shell/bazel/android/DexFileSplitter_synthetic_classes_test.sh new file mode 100755 index 00000000000000..bd70e9f4533437 --- /dev/null +++ b/src/test/shell/bazel/android/DexFileSplitter_synthetic_classes_test.sh @@ -0,0 +1,142 @@ +#!/bin/bash +# +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# For these tests to run do the following: +# +# 1. Install an Android SDK from https://developer.android.com +# 2. Set the $ANDROID_HOME environment variable +# 3. Uncomment the line in WORKSPACE containing android_sdk_repository +# +# Note that if the environment is not set up as above android_integration_test +# will silently be ignored and will be shown as passing. 
+ +# Load the test setup defined in the parent directory +CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +source "${CURRENT_DIR}/android_helper.sh" \ + || { echo "android_helper.sh not found!" >&2; exit 1; } +fail_if_no_android_sdk + +source "${CURRENT_DIR}/../../integration_test_setup.sh" \ + || { echo "integration_test_setup.sh not found!" >&2; exit 1; } + +resolve_android_toolchains "$1" + +function test_DexFileSplitter_synthetic_classes_crossing_dexfiles() { + create_new_workspace + setup_android_sdk_support + + mkdir -p java/com/testapp + + cat > java/com/testapp/AndroidManifest.xml < + + + + + + + + + + + + +EOF + + cat > java/com/testapp/MainActivity.java < java/com/testapp/BigLib.java + + cat > java/com/testapp/BUILD < $i," + done + + echo " };" + echo " }" + echo " }" + echo "}" +} + +run_suite "Tests for DexFileSplitter with synthetic classes crossing dexfiles" \ No newline at end of file diff --git a/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileAggregator.java b/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileAggregator.java index e3fed0a1d1e3e9..c6f88b73d79d3f 100644 --- a/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileAggregator.java +++ b/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileAggregator.java @@ -82,7 +82,8 @@ public DexFileAggregator add(Dex dexFile) { if (multidex.isMultidexAllowed()) { // To determine whether currentShard is "full" we track unique field and method signatures, // which predicts precisely the number of field and method indices. - if (tracker.track(dexFile) && !currentShard.isEmpty()) { + tracker.track(dexFile); + if (tracker.outsideLimits() && !currentShard.isEmpty()) { // For simplicity just start a new shard to fit the given file. // Don't bother with waiting for a later file that might fit the old shard as in the extreme // we'd have to wait until the end to write all shards. 
diff --git a/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileSplitter.java b/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileSplitter.java index 4e684c87543b3d..3dd844d30ba485 100644 --- a/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileSplitter.java +++ b/src/tools/android/java/com/google/devtools/build/android/dexer/DexFileSplitter.java @@ -14,6 +14,7 @@ package com.google.devtools.build.android.dexer; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static java.nio.charset.StandardCharsets.UTF_8; @@ -21,8 +22,8 @@ import com.android.dex.DexFormat; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Predicates; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.TreeMultimap; import com.google.common.io.ByteStreams; import com.google.common.io.Closer; import com.google.devtools.build.android.Converters.ExistingPathConverter; @@ -40,13 +41,17 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import java.util.Comparator; -import java.util.LinkedHashMap; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Scanner; +import java.util.Set; +import java.util.TreeMap; import java.util.function.Predicate; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; +import javax.annotation.Nullable; /** * Shuffles .class.dex files from input archives into 1 or more archives each to be merged into a @@ -159,32 +164,43 @@ static void splitIntoShards(Options options) throws IOException { try (Closer closer = Closer.create(); DexFileSplitter out = new DexFileSplitter(options.outputDirectory, options.maxNumberOfIdxPerDex)) { + // 1. 
Scan inputs in order and keep first occurrence of each class, keeping all zips open. // We don't process anything yet so we can shard in sorted order, which is what dx would do // if presented with a single jar containing all the given inputs. // TODO(kmb): Abandon alphabetic sorting to process each input fully before moving on (still // requires scanning inputs twice for main dex list). + Predicate inclusionFilter = ZipEntryPredicates.suffixes(".dex", ".class"); if (expected != null) { inclusionFilter = inclusionFilter.and(e -> expected.contains(e.getName())); } - LinkedHashMap deduped = new LinkedHashMap<>(); + + // Maps a dex file name to the zip file containing that dex file. + TreeMap dexFilesAndContainingZip = + new TreeMap<>(ZipEntryComparator::compareClassNames); + // Maps a class to its synthetic classes, if any. + TreeMultimap contextClassesToSyntheticClasses = TreeMultimap.create(); + for (Path inputArchive : options.inputArchives) { ZipFile zip = closer.register(new ZipFile(inputArchive.toFile())); + + // synthetic-contexts.map is generated by CompatDexBuilder. + ZipEntry syntheticContextsZipEntry = zip.getEntry("META-INF/synthetic-contexts.map"); + if (syntheticContextsZipEntry != null) { + parseSyntheticContextsMap( + zip.getInputStream(syntheticContextsZipEntry), contextClassesToSyntheticClasses); + } + zip.stream() .filter(inclusionFilter) - .forEach(e -> deduped.putIfAbsent(e.getName(), zip)); + .forEach(e -> dexFilesAndContainingZip.putIfAbsent(e.getName(), zip)); } - ImmutableList> files = - deduped - .entrySet() - .stream() - .sorted(Comparator.comparing(e -> e.getKey(), ZipEntryComparator::compareClassNames)) - .collect(ImmutableList.toImmutableList()); // 2. Process each class in desired order, rolling from shard to shard as needed. 
if (classesInMainDex == null || classesInMainDex.isEmpty()) { - out.processDexFiles(files, Predicates.alwaysTrue()); + out.processDexes( + dexFilesAndContainingZip, contextClassesToSyntheticClasses, Predicates.alwaysTrue()); } else { checkArgument(classesInMainDex.stream().noneMatch(s -> s.startsWith("j$/")), "%s lists classes in package 'j$', which can't be included in classes.dex and can " @@ -194,14 +210,15 @@ static void splitIntoShards(Options options) throws IOException { // 1. process only the classes listed in the given file // 2. process the remaining files Predicate mainDexFilter = ZipEntryPredicates.classFileNameFilter(classesInMainDex); - out.processDexFiles(files, mainDexFilter); + out.processDexes(dexFilesAndContainingZip, contextClassesToSyntheticClasses, mainDexFilter); // Fail if main_dex_list is too big, following dx's example checkState(out.shardsWritten() == 0, "Too many classes listed in main dex list file " + "%s, main dex capacity exceeded", options.mainDexListFile); if (options.minimalMainDex) { out.nextShard(); // Start new .dex file if requested } - out.processDexFiles(files, mainDexFilter.negate()); + out.processDexes( + dexFilesAndContainingZip, contextClassesToSyntheticClasses, mainDexFilter.negate()); } } } @@ -215,6 +232,23 @@ private static ImmutableSet expectedEntries(Path filterJar) throws IOExc } } + private static void parseSyntheticContextsMap( + InputStream inputStream, TreeMultimap syntheticClassContexts) { + Scanner scanner = new Scanner(inputStream, UTF_8); + scanner.useDelimiter("[;\n]"); + while (scanner.hasNext()) { + String syntheticClass = scanner.next(); + String context = scanner.next(); + // DexFileSplitter mostly expects filenames which all end in .class.dex, while the synthetic + // context map has class names, so add the extension here to make this easier to work with in + // the rest of the code. 
+ syntheticClassContexts.put( + context + CLASS_DEX_EXTENSION, syntheticClass + CLASS_DEX_EXTENSION); + } + } + + private static final String CLASS_DEX_EXTENSION = ".class.dex"; + private final int maxNumberOfIdxPerDex; private final Path outputDirectory; /** Collect written zip files so we can conveniently wait for all of them to close when done. */ @@ -269,23 +303,31 @@ public void close() throws IOException { closer.close(); } - private void processDexFiles( - ImmutableList> filesToProcess, Predicate filter) + private void processDexes( + Map dexFilesAndContainingZip, + TreeMultimap contextClassesToSyntheticClasses, + Predicate filter) throws IOException { - for (Map.Entry entry : filesToProcess) { + + Set syntheticClasses = new HashSet<>(contextClassesToSyntheticClasses.values()); + for (Map.Entry entry : dexFilesAndContainingZip.entrySet()) { String filename = entry.getKey(); if (filter.test(filename)) { - ZipFile zipFile = entry.getValue(); - processDexEntry(zipFile, zipFile.getEntry(filename)); + // Synthetic classes will be gathered with their context classes and added to the dex file + // all together as a unit, so skip them here. + if (!syntheticClasses.contains(filename)) { + ZipFile zipFile = entry.getValue(); + processDex(zipFile, filename, contextClassesToSyntheticClasses.get(filename)); + } } } } - private void processDexEntry(ZipFile zip, ZipEntry entry) throws IOException { - String filename = entry.getName(); - checkState(filename.endsWith(".class.dex"), - "%s isn't a dex archive: %s", zip.getName(), filename); - checkState(entry.getMethod() == ZipEntry.STORED, "Expect to process STORED: %s", filename); + private void processDex(ZipFile zip, String filename, Set syntheticClasses) + throws IOException { + + // Synthetic classes base their names on their context classes, so this check only needs to be + // done for the context class. 
if (inCoreLib == null) { inCoreLib = filename.startsWith("j$/"); } else if (inCoreLib != filename.startsWith("j$/")) { @@ -299,6 +341,53 @@ private void processDexEntry(ZipFile zip, ZipEntry entry) throws IOException { filename); } + List zipEntryDexAndContents = new ArrayList<>(); + ZipEntryDexAndContent contextZdc = processDex(zip, filename); + checkNotNull(contextZdc, "Context class %s expected to be in %s", filename, zip.getName()); + zipEntryDexAndContents.add(contextZdc); + + for (String syntheticClass : syntheticClasses) { + ZipEntryDexAndContent syntheticClassZdc = processDex(zip, syntheticClass); + // Some synthetic classes are contained within the same dex as their enclosing class, + // so they won't be standalone dexes in the zip file, and some synthetic classes are present + // in synthetic-contexts.map but aren't standalone dexes in the zip nor are they in the + // dex with their enclosing class, so just skip these. + if (syntheticClassZdc != null) { + zipEntryDexAndContents.add(syntheticClassZdc); + } + } + + if (tracker.outsideLimits()) { + nextShard(); + for (ZipEntryDexAndContent zdc : zipEntryDexAndContents) { + tracker.track(zdc.dex); + } + checkState( + !tracker.outsideLimits(), + "Impossible to fit %s and all of its synthetic classes (count: %s) in a single shard", + filename, + syntheticClasses.size()); + } + + for (ZipEntryDexAndContent zdc : zipEntryDexAndContents) { + curOut.writeAsync(zdc.zipEntry, zdc.content); + } + } + + @Nullable + private ZipEntryDexAndContent processDex(ZipFile zip, String filename) throws IOException { + ZipEntry entry = zip.getEntry(filename); + if (entry == null) { + return null; + } + + checkState( + filename.endsWith(CLASS_DEX_EXTENSION), + "%s isn't a dex archive: %s", + zip.getName(), + filename); + checkState(entry.getMethod() == ZipEntry.STORED, "Expect to process STORED: %s", filename); + try (InputStream entryStream = zip.getInputStream(entry)) { // We don't want to use the Dex(InputStream) constructor 
because it closes the stream, // which will break the for loop, and it has its own bespoke way of reading the file into @@ -306,15 +395,27 @@ private void processDexEntry(ZipFile zip, ZipEntry entry) throws IOException { // TODO(kmb) since entry is stored, mmap content and give to Dex(ByteBuffer) and output zip byte[] content = new byte[(int) entry.getSize()]; ByteStreams.readFully(entryStream, content); // throws if file is smaller than expected - checkState(entryStream.read() == -1, - "Too many bytes in jar entry %s, expected %s", entry, entry.getSize()); + checkState( + entryStream.read() == -1, + "Too many bytes in jar entry %s, expected %s", + entry, + entry.getSize()); Dex dexFile = new Dex(content); - if (tracker.track(dexFile)) { - nextShard(); - tracker.track(dexFile); - } - curOut.writeAsync(entry, content); + tracker.track(dexFile); + return new ZipEntryDexAndContent(entry, content, dexFile); + } + } + + private static final class ZipEntryDexAndContent { + final ZipEntry zipEntry; + final byte[] content; + final Dex dex; + + ZipEntryDexAndContent(ZipEntry zipEntry, byte[] content, Dex dex) { + this.zipEntry = zipEntry; + this.content = content; + this.dex = dex; } } } diff --git a/src/tools/android/java/com/google/devtools/build/android/dexer/DexLimitTracker.java b/src/tools/android/java/com/google/devtools/build/android/dexer/DexLimitTracker.java index a0bfb51dfd054e..c8383e367d697e 100644 --- a/src/tools/android/java/com/google/devtools/build/android/dexer/DexLimitTracker.java +++ b/src/tools/android/java/com/google/devtools/build/android/dexer/DexLimitTracker.java @@ -38,14 +38,12 @@ public DexLimitTracker(int maxNumberOfIdxPerDex) { } /** - * Tracks the field and method references in the given file and returns whether we're within - * limits. + * Returns whether we're within limits. * - * @return {@code true} if method or field references are outside limits, {@code false} both - * are within limits. 
+ * @return {@code true} if method or field references are outside limits, {@code false} both are + * within limits. */ - public boolean track(Dex dexFile) { - trackFieldsAndMethods(dexFile); + public boolean outsideLimits() { return fieldsSeen.size() > maxNumberOfIdxPerDex || methodsSeen.size() > maxNumberOfIdxPerDex; } @@ -55,7 +53,7 @@ public void clear() { methodsSeen.clear(); } - private void trackFieldsAndMethods(Dex dexFile) { + public void track(Dex dexFile) { int fieldCount = dexFile.fieldIds().size(); for (int fieldIndex = 0; fieldIndex < fieldCount; ++fieldIndex) { fieldsSeen.add(FieldDescriptor.fromDex(dexFile, fieldIndex)); From db347988dfd1b4b8cd5685de63d937f2ffa76b25 Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 13:26:34 -0700 Subject: [PATCH 37/68] Add thinlto support for tree artifacts Add a new feature to bazel: use_lto_native_object_directory. When this feature is enabled, the thinlto objects are stored in a different directory as the .imports and .thinlto.bc file outputs from the indexing action, and thinlto can be used in the presence of tree artifacts. Create LtoBackendActionTemplate.java to generate lto backend actions to tree artifact. Minor: move constants to rules/cpp/CppFileTypes.java and rules/cpp/CppHelper.java for consistency. Limitations: There is still no support for .dwo files for tree artifacts, i.e. compiling using fission. TreeArtifacts do not generate minimized bitcode files, only full bitcode files, because of lack of support at the indexing stage. 
PiperOrigin-RevId: 544144581 Change-Id: Idc6a638a6fccef6912b79948ea167dd6e651f156 --- .bazelci/postsubmit.yml | 3 + .bazelci/presubmit.yml | 3 + .../lib/analysis/actions/SpawnAction.java | 22 +- .../build/lib/rules/cpp/BitcodeFiles.java | 10 + .../lib/rules/cpp/CcCompilationHelper.java | 35 +- .../build/lib/rules/cpp/CcModule.java | 13 +- .../rules/cpp/CppCompileActionTemplate.java | 40 +- .../build/lib/rules/cpp/CppFileTypes.java | 7 + .../build/lib/rules/cpp/CppHelper.java | 21 + .../build/lib/rules/cpp/CppLinkAction.java | 55 +- .../lib/rules/cpp/CppLinkActionBuilder.java | 110 +- .../build/lib/rules/cpp/CppRuleClasses.java | 3 + .../rules/cpp/LibrariesToLinkCollector.java | 4 +- .../lib/rules/cpp/LinkBuildVariables.java | 25 +- .../build/lib/rules/cpp/LtoBackendAction.java | 95 +- .../rules/cpp/LtoBackendActionTemplate.java | 387 +++ .../lib/rules/cpp/LtoBackendArtifacts.java | 446 ++-- .../rules/nativedeps/NativeDepsHelper.java | 24 +- .../lib/starlarkbuildapi/cpp/CcModuleApi.java | 2 + .../builtins_bzl/common/cc/cc_common.bzl | 2 + .../lib/analysis/mock/cc_toolchain_config.bzl | 5 + .../google/devtools/build/lib/rules/cpp/BUILD | 22 + .../rules/cpp/CcBinaryThinLtoObjDirTest.java | 2100 +++++++++++++++++ .../lib/rules/cpp/StarlarkCcCommonTest.java | 3 +- src/test/shell/bazel/BUILD | 10 + .../shell/bazel/bazel_thinlto_obj_dir_test.sh | 105 + 26 files changed, 3259 insertions(+), 293 deletions(-) create mode 100644 src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendActionTemplate.java create mode 100644 src/test/java/com/google/devtools/build/lib/rules/cpp/CcBinaryThinLtoObjDirTest.java create mode 100755 src/test/shell/bazel/bazel_thinlto_obj_dir_test.sh diff --git a/.bazelci/postsubmit.yml b/.bazelci/postsubmit.yml index ae5c3a1c2e5043..047fd07308fc4d 100644 --- a/.bazelci/postsubmit.yml +++ b/.bazelci/postsubmit.yml @@ -87,6 +87,7 @@ tasks: - "--sandbox_writable_path=$HOME/bazeltest" - "--test_env=TEST_INSTALL_BASE=$HOME/bazeltest/install_base" - 
"--test_env=TEST_REPOSITORY_HOME=$OUTPUT_BASE/external" + - "--test_tag_filters=-no_1804" # Configure and enable tests that require access to the network. - "--test_env=REMOTE_NETWORK_ADDRESS=bazel.build:80" test_targets: @@ -138,6 +139,7 @@ tasks: - "--sandbox_writable_path=$HOME/bazeltest" - "--test_env=TEST_INSTALL_BASE=$HOME/bazeltest/install_base" - "--test_env=TEST_REPOSITORY_HOME=$OUTPUT_BASE/external" + - "--test_tag_filters=-no_1804" test_targets: - "//src/test/shell/bazel:cc_integration_test" include_json_profile: @@ -402,6 +404,7 @@ tasks: - "--experimental_remote_download_outputs=minimal" - "--experimental_inmemory_jdeps_files" - "--experimental_inmemory_dotd_files" + - "--test_tag_filters=-no_1804" test_targets: - "//scripts/..." - "//src/java_tools/..." diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml index b0f2f25b21430e..fe30ced7d83907 100644 --- a/.bazelci/presubmit.yml +++ b/.bazelci/presubmit.yml @@ -92,6 +92,7 @@ tasks: - "--sandbox_writable_path=$HOME/bazeltest" - "--test_env=TEST_INSTALL_BASE=$HOME/bazeltest/install_base" - "--test_env=TEST_REPOSITORY_HOME=$OUTPUT_BASE/external" + - "--test_tag_filters=-no_1804" # Configure and enable tests that require access to the network. - "--test_env=REMOTE_NETWORK_ADDRESS=bazel.build:80" test_targets: @@ -143,6 +144,7 @@ tasks: - "--sandbox_writable_path=$HOME/bazeltest" - "--test_env=TEST_INSTALL_BASE=$HOME/bazeltest/install_base" - "--test_env=TEST_REPOSITORY_HOME=$OUTPUT_BASE/external" + - "--test_tag_filters=-no_1804" test_targets: - "//src/test/shell/bazel:cc_integration_test" include_json_profile: @@ -471,6 +473,7 @@ tasks: - "--experimental_remote_cache_async" - "--experimental_remote_merkle_tree_cache" - "--remote_download_minimal" + - "--test_tag_filters=-no_1804" test_targets: - "//scripts/..." - "//src/java_tools/..." 
diff --git a/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java b/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java index 8a38f2b4c257c9..ae7e57f08ae564 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java @@ -600,7 +600,7 @@ public static class Builder { public Builder() {} /** Creates a builder that is a copy of another builder. */ - Builder(Builder other) { + public Builder(Builder other) { this.toolsBuilder.addTransitive(other.toolsBuilder.build()); this.inputsBuilder.addTransitive(other.inputsBuilder.build()); this.outputs.addAll(other.outputs); @@ -657,7 +657,7 @@ public SpawnAction build(ActionOwner owner, BuildConfigurationValue configuratio } @CheckReturnValue - SpawnAction buildForActionTemplate(ActionOwner owner) { + public SpawnAction buildForActionTemplate(ActionOwner owner) { CommandLines.Builder result = CommandLines.builder(); if (executableArg != null) { result.addSingleArgument(executableArg); @@ -683,12 +683,7 @@ private SpawnAction buildSpawnAction( ActionEnvironment env) { NestedSet tools = toolsBuilder.build(); - // Tools are by definition a subset of the inputs, so make sure they're present there, too. - NestedSet inputsAndTools = - NestedSetBuilder.stableOrder() - .addTransitive(inputsBuilder.build()) - .addTransitive(tools) - .build(); + NestedSet inputsAndTools = getInputsAndTools(); return createSpawnAction( owner, @@ -788,6 +783,17 @@ public Builder addInputs(Iterable artifacts) { return this; } + /** + * Returns the inputs that the spawn action will depend on. Tools are by definition a subset of + * the inputs, so they are also present. + */ + public NestedSet getInputsAndTools() { + return NestedSetBuilder.stableOrder() + .addTransitive(inputsBuilder.build()) + .addTransitive(toolsBuilder.build()) + .build(); + } + /** Adds transitive inputs to this action. 
*/ @CanIgnoreReturnValue public Builder addTransitiveInputs(NestedSet artifacts) { diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/BitcodeFiles.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/BitcodeFiles.java index 5691e50d46a5cc..ec82961bd8611b 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/BitcodeFiles.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/BitcodeFiles.java @@ -13,9 +13,13 @@ // limitations under the License. package com.google.devtools.build.lib.rules.cpp; +import static com.google.common.collect.ImmutableMap.toImmutableMap; + +import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.util.Fingerprint; +import com.google.devtools.build.lib.vfs.PathFragment; import javax.annotation.Nullable; /** Wrapper around a map of bitcode files for purposes of caching its fingerprint. 
*/ @@ -32,6 +36,12 @@ NestedSet getFiles() { return files; } + /** Helper function to get a map from path to artifact */ + ImmutableMap getFilesArtifactPathMap() { + return getFiles().toList().stream() + .collect(toImmutableMap(Artifact::getExecPath, artifact -> artifact)); + } + void addToFingerprint(Fingerprint fp) { if (fingerprint == null) { synchronized (this) { diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcCompilationHelper.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcCompilationHelper.java index ff4d097d3b9fe7..1403dbdc822350 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcCompilationHelper.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcCompilationHelper.java @@ -1398,10 +1398,12 @@ private CcCompilationOutputs createCcCompileActions(StarlarkThread thread) source, outputName, builder, + result, ImmutableList.of( ArtifactCategory.GENERATED_HEADER, ArtifactCategory.PROCESSED_HEADER), // If we generate pic actions, we prefer the header actions to use the pic mode. 
- generatePicAction); + generatePicAction, + bitcodeOutput); result.addHeaderTokenFile(headerTokenFile); break; case SOURCE: @@ -1412,8 +1414,10 @@ private CcCompilationOutputs createCcCompileActions(StarlarkThread thread) source, outputName, builder, + result, ImmutableList.of(ArtifactCategory.OBJECT_FILE), - /* usePic= */ false); + /* usePic= */ false, + featureConfiguration.isEnabled(CppRuleClasses.THIN_LTO)); result.addObjectFile(objectFile); } @@ -1424,8 +1428,10 @@ private CcCompilationOutputs createCcCompileActions(StarlarkThread thread) source, outputName, builder, + result, ImmutableList.of(ArtifactCategory.PIC_OBJECT_FILE), - /* usePic= */ true); + /* usePic= */ true, + featureConfiguration.isEnabled(CppRuleClasses.THIN_LTO)); result.addPicObjectFile(picObjectFile); } break; @@ -1463,8 +1469,10 @@ private Artifact createCompileActionTemplate( CppSource source, String outputName, CppCompileActionBuilder builder, + CcCompilationOutputs.Builder result, ImmutableList outputCategories, - boolean usePic) + boolean usePic, + boolean bitcodeOutput) throws RuleErrorException, InterruptedException { if (usePic) { builder = new CppCompileActionBuilder(builder).setPicMode(true); @@ -1504,6 +1512,24 @@ private Artifact createCompileActionTemplate( actionConstructionContext, label, sourceArtifact, outputName, usePic); } + // Currently we do not generate minimized bitcode files for tree artifacts because of issues + // with the indexing step. + // If ltoIndexTreeArtifact is set to a tree artifact, the minimized bitcode files will be + // properly generated and will be an input to the indexing step. However, the lto indexing step + // fails. The indexing step finds the full bitcode file by replacing the suffix of the + // minimized bitcode file, therefore they have to be in the same directory. 
+ // Since the files are in the same directory, the command line artifact expander expands the + // tree artifact to both the minimized bitcode files and the full bitcode files, causing an + // error that functions are defined twice. + // TODO(b/289071777): support for minimized bitcode files. + SpecialArtifact ltoIndexTreeArtifact = null; + + if (bitcodeOutput) { + Label sourceLabel = source.getLabel(); + result.addLtoBitcodeFile( + outputFiles, ltoIndexTreeArtifact, getCopts(sourceArtifact, sourceLabel)); + } + ActionOwner actionOwner = null; if (actionConstructionContext instanceof RuleContext && ((RuleContext) actionConstructionContext).useAutoExecGroups()) { @@ -1515,6 +1541,7 @@ private Artifact createCompileActionTemplate( outputFiles, dotdTreeArtifact, diagnosticsTreeArtifact, + ltoIndexTreeArtifact, builder, ccToolchain, outputCategories, diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java index 8ab8fbb8143e9f..13bfb6738a6c0f 100755 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CcModule.java @@ -429,6 +429,7 @@ public CcToolchainVariables getLinkBuildVariables( /* interfaceLibraryBuilder= */ null, /* interfaceLibraryOutput= */ null, /* ltoOutputRootPrefix= */ null, + /* ltoObjRootPrefix= */ null, convertFromNoneable(defFile, /* defaultValue= */ null), /* fdoContext= */ null, Depset.noneableCast( @@ -941,10 +942,17 @@ public CppModuleMap createCppModuleMap( } } + /** + * Create an LTO backend that does not perform any cross-module optimization because Starlark does + * not hava support for LTO indexing actions yet. + * + *

      TODO(b/128341904): Do cross module optimization once there is Starlark support. + */ @Override public LtoBackendArtifacts createLtoBackendArtifacts( StarlarkRuleContext starlarkRuleContext, String ltoOutputRootPrefixString, + String ltoObjRootPrefixString, Artifact bitcodeFile, FeatureConfigurationForStarlark featureConfigurationForStarlark, CcToolchainProvider ccToolchain, @@ -957,6 +965,7 @@ public LtoBackendArtifacts createLtoBackendArtifacts( isCalledFromStarlarkCcCommon(thread); RuleContext ruleContext = starlarkRuleContext.getRuleContext(); PathFragment ltoOutputRootPrefix = PathFragment.create(ltoOutputRootPrefixString); + PathFragment ltoObjRootPrefix = PathFragment.create(ltoObjRootPrefixString); LtoBackendArtifacts ltoBackendArtifacts; try { ltoBackendArtifacts = @@ -966,7 +975,9 @@ public LtoBackendArtifacts createLtoBackendArtifacts( ruleContext.getConfiguration().getOptions(), ruleContext.getConfiguration().getFragment(CppConfiguration.class), ltoOutputRootPrefix, + ltoObjRootPrefix, bitcodeFile, + /* allBitcodeFiles= */ null, starlarkRuleContext.actions().getActionConstructionContext(), ruleContext.getRepository(), ruleContext.getConfiguration(), @@ -2802,7 +2813,7 @@ public CcLinkingOutputs link( helper.addVariableExtension(new UserVariablesExtension(asDict(variablesExtension))); } if (convertFromNoneable(useShareableArtifactFactory, false)) { - helper.setLinkArtifactFactory(CppLinkActionBuilder.SHAREABLE_LINK_ARTIFACT_FACTORY); + helper.setLinkArtifactFactory(CppLinkAction.SHAREABLE_LINK_ARTIFACT_FACTORY); } CcCompilationOutputs compilationOutputs = convertFromNoneable(compilationOutputsObject, /* defaultValue= */ null); diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileActionTemplate.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileActionTemplate.java index 839a8f6312a623..64c7b12835cb5a 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileActionTemplate.java +++ 
b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileActionTemplate.java @@ -17,6 +17,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionExecutionException; @@ -38,6 +39,7 @@ import com.google.devtools.build.lib.util.DetailedExitCode; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.vfs.FileSystemUtils; +import com.google.devtools.build.lib.vfs.PathFragment; import javax.annotation.Nullable; /** An {@link ActionTemplate} that expands into {@link CppCompileAction}s at execution time. */ @@ -48,6 +50,7 @@ public final class CppCompileActionTemplate extends ActionKeyCacher private final SpecialArtifact outputTreeArtifact; private final SpecialArtifact dotdTreeArtifact; private final SpecialArtifact diagnosticsTreeArtifact; + private final SpecialArtifact ltoIndexTreeArtifact; private final CcToolchainProvider toolchain; private final ImmutableList categories; private final ActionOwner actionOwner; @@ -61,6 +64,7 @@ public final class CppCompileActionTemplate extends ActionKeyCacher * @param outputTreeArtifact the TreeArtifact that contains compilation outputs. * @param dotdTreeArtifact the TreeArtifact that contains dotd files. * @param diagnosticsTreeArtifact the TreeArtifact that contains serialized diagnostics files. + * @param ltoIndexTreeArtifact the TreeArtifact that contains lto index files (minimized bitcode). * @param cppCompileActionBuilder An almost completely configured {@link CppCompileActionBuilder} * without the input and output files set. It is used as a template to instantiate expanded * {CppCompileAction}s. 
@@ -74,6 +78,7 @@ public final class CppCompileActionTemplate extends ActionKeyCacher SpecialArtifact outputTreeArtifact, SpecialArtifact dotdTreeArtifact, SpecialArtifact diagnosticsTreeArtifact, + SpecialArtifact ltoIndexTreeArtifact, CppCompileActionBuilder cppCompileActionBuilder, CcToolchainProvider toolchain, ImmutableList categories, @@ -82,6 +87,7 @@ public final class CppCompileActionTemplate extends ActionKeyCacher this.sourceTreeArtifact = sourceTreeArtifact; this.outputTreeArtifact = outputTreeArtifact; this.dotdTreeArtifact = dotdTreeArtifact; + this.ltoIndexTreeArtifact = ltoIndexTreeArtifact; this.diagnosticsTreeArtifact = diagnosticsTreeArtifact; this.toolchain = toolchain; this.categories = categories; @@ -146,12 +152,25 @@ public ImmutableList generateActionsForInputArtifacts( TreeFileArtifact.createTemplateExpansionOutput( diagnosticsTreeArtifact, outputName + ".dia", artifactOwner); } + + TreeFileArtifact ltoIndexFileArtifact = null; + if (ltoIndexTreeArtifact != null) { + PathFragment outputFilePathFragment = PathFragment.create(outputName); + PathFragment thinltofile = + FileSystemUtils.replaceExtension( + outputFilePathFragment, + Iterables.getOnlyElement(CppFileTypes.LTO_INDEXING_OBJECT_FILE.getExtensions())); + ltoIndexFileArtifact = + TreeFileArtifact.createTemplateExpansionOutput( + ltoIndexTreeArtifact, thinltofile, artifactOwner); + } expandedActions.add( createAction( inputTreeFileArtifact, outputTreeFileArtifact, dotdFileArtifact, diagnosticsFileArtifact, + ltoIndexFileArtifact, privateHeaders)); } @@ -201,13 +220,15 @@ private CppCompileAction createAction( TreeFileArtifact outputTreeFileArtifact, @Nullable Artifact dotdFileArtifact, @Nullable Artifact diagnosticsFileArtifact, + @Nullable Artifact ltoIndexFileArtifact, NestedSet privateHeaders) throws ActionExecutionException { CppCompileActionBuilder builder = new CppCompileActionBuilder(cppCompileActionBuilder) .setAdditionalPrunableHeaders(privateHeaders) 
.setSourceFile(sourceTreeFileArtifact) - .setOutputs(outputTreeFileArtifact, dotdFileArtifact, diagnosticsFileArtifact); + .setOutputs(outputTreeFileArtifact, dotdFileArtifact, diagnosticsFileArtifact) + .setLtoIndexingFile(ltoIndexFileArtifact); CcToolchainVariables.Builder buildVariables = CcToolchainVariables.builder(cppCompileActionBuilder.getVariables()); @@ -228,6 +249,12 @@ private CppCompileAction createAction( diagnosticsFileArtifact.getExecPathString()); } + if (ltoIndexFileArtifact != null) { + buildVariables.overrideStringVariable( + CompileBuildVariables.LTO_INDEXING_BITCODE_FILE.getVariableName(), + ltoIndexFileArtifact.getExecPathString()); + } + builder.setVariables(buildVariables.build()); try { @@ -316,10 +343,15 @@ public NestedSet getSchedulingDependencies() { @Override public ImmutableSet getOutputs() { - if (dotdTreeArtifact == null) { - return ImmutableSet.of(outputTreeArtifact); + ImmutableSet.Builder builder = ImmutableSet.builder(); + builder.add(outputTreeArtifact); + if (dotdTreeArtifact != null) { + builder.add(dotdTreeArtifact); + } + if (ltoIndexTreeArtifact != null) { + builder.add(ltoIndexTreeArtifact); } - return ImmutableSet.of(outputTreeArtifact, dotdTreeArtifact); + return builder.build(); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppFileTypes.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppFileTypes.java index c55cf90028708f..ea8ee704338e67 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppFileTypes.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppFileTypes.java @@ -239,6 +239,13 @@ public ImmutableList getExtensions() { // Minimized bitcode file emitted by the ThinLTO compile step and used just for LTO indexing. public static final FileType LTO_INDEXING_OBJECT_FILE = FileType.of(".indexing.o"); + // Imports file emitted by the ThinLTO indexing step and used for LTO backend action. 
+ public static final FileType LTO_IMPORTS_FILE = FileType.of(".imports"); + + // Indexing analysis result file emitted by the ThinLTO indexing step and used for LTO backend + // action. + public static final FileType LTO_INDEXING_ANALYSIS_FILE = FileType.of(".thinlto.bc"); + // TODO(bazel-team): File types should not be read from this hard-coded list but should come from // the toolchain instead. See https://github.com/bazelbuild/bazel/issues/17117 public static final FileType SHARED_LIBRARY = diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppHelper.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppHelper.java index 7ce7419c27d4e4..de4f753f66770e 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppHelper.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppHelper.java @@ -51,6 +51,7 @@ import com.google.devtools.build.lib.rules.cpp.Link.LinkTargetType; import com.google.devtools.build.lib.server.FailureDetails.FailAction.Code; import com.google.devtools.build.lib.util.FileTypeSet; +import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -73,6 +74,9 @@ public class CppHelper { static final PathFragment DIA_FILES = PathFragment.create("_dia"); static final PathFragment PIC_DIA_FILES = PathFragment.create("_pic_dia"); + public static final PathFragment SHARED_NONLTO_BACKEND_ROOT_PREFIX = + PathFragment.create("shared.nonlto"); + // TODO(bazel-team): should this use Link.SHARED_LIBRARY_FILETYPES? public static final FileTypeSet SHARED_LIBRARY_FILETYPES = FileTypeSet.of(CppFileTypes.SHARED_LIBRARY, CppFileTypes.VERSIONED_SHARED_LIBRARY); @@ -264,6 +268,23 @@ private static PathFragment getDiagnosticsDirectory( ruleLabel, usePic ? 
PIC_DIA_FILES : DIA_FILES, siblingRepositoryLayout); } + /** + * Given the output file path, returns the directory where the results of thinlto indexing will be + * created: output_file.lto/ + */ + public static PathFragment getLtoOutputRootPrefix(PathFragment outputRootRelativePath) { + return FileSystemUtils.appendExtension(outputRootRelativePath, ".lto"); + } + + /** + * Given the lto output root directory path, returns the directory where thinlto native object + * files are created: output_file.lto-obj/ + */ + public static PathFragment getThinLtoNativeObjectDirectoryFromLtoOutputRoot( + PathFragment ltoOutputRootRelativePath) { + return FileSystemUtils.appendExtension(ltoOutputRootRelativePath, "-obj"); + } + public static Artifact getLinkedArtifact( Label label, ActionConstructionContext actionConstructionContext, diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java index 6b22b982e0fe12..9c4a34372341d4 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java @@ -31,6 +31,7 @@ import com.google.devtools.build.lib.actions.ActionResult; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander; +import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact; import com.google.devtools.build.lib.actions.CommandAction; import com.google.devtools.build.lib.actions.CommandLine; import com.google.devtools.build.lib.actions.CommandLineExpansionException; @@ -88,6 +89,13 @@ Artifact create( RepositoryName repositoryName, BuildConfigurationValue configuration, PathFragment rootRelativePath); + + /** Create a tree artifact at the specified root-relative path in the bin directory. 
*/ + SpecialArtifact createTreeArtifact( + ActionConstructionContext actionConstructionContext, + RepositoryName repositoryName, + BuildConfigurationValue configuration, + PathFragment rootRelativePath); } /** @@ -105,6 +113,49 @@ public Artifact create( return actionConstructionContext.getDerivedArtifact( rootRelativePath, configuration.getBinDirectory(repositoryName)); } + + @Override + public SpecialArtifact createTreeArtifact( + ActionConstructionContext actionConstructionContext, + RepositoryName repositoryName, + BuildConfigurationValue configuration, + PathFragment rootRelativePath) { + return actionConstructionContext.getTreeArtifact( + rootRelativePath, configuration.getBinDirectory(repositoryName)); + } + }; + + /** + * An implementation of {@link LinkArtifactFactory} that can create artifacts anywhere. + * + *

      Necessary when the LTO backend actions of libraries should be shareable, and thus cannot be + * under the package directory. + * + *

      Necessary because the actions of nativedeps libraries should be shareable, and thus cannot + * be under the package directory. + */ + public static final LinkArtifactFactory SHAREABLE_LINK_ARTIFACT_FACTORY = + new LinkArtifactFactory() { + @Override + public Artifact create( + ActionConstructionContext actionConstructionContext, + RepositoryName repositoryName, + BuildConfigurationValue configuration, + PathFragment rootRelativePath) { + return actionConstructionContext.getShareableArtifact( + rootRelativePath, configuration.getBinDirectory(repositoryName)); + } + + @Override + public SpecialArtifact createTreeArtifact( + ActionConstructionContext actionConstructionContext, + RepositoryName repositoryName, + BuildConfigurationValue configuration, + PathFragment rootRelativePath) { + return actionConstructionContext + .getAnalysisEnvironment() + .getTreeArtifact(rootRelativePath, configuration.getBinDirectory(repositoryName)); + } }; private static final String LINK_GUID = "58ec78bd-1176-4e36-8143-439f656b181d"; @@ -292,9 +343,11 @@ public ActionResult execute(ActionExecutionContext actionExecutionContext) private Spawn createSpawn(ActionExecutionContext actionExecutionContext) throws ActionExecutionException { try { + ArtifactExpander actionContextExpander = actionExecutionContext.getArtifactExpander(); + ArtifactExpander expander = actionContextExpander; return new SimpleSpawn( this, - ImmutableList.copyOf(getCommandLine(actionExecutionContext.getArtifactExpander())), + ImmutableList.copyOf(getCommandLine(expander)), getEffectiveEnvironment(actionExecutionContext.getClientEnv()), getExecutionInfo(), getInputs(), diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java index 0140fc1fa8c2ee..b5c3816840042e 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java +++ 
b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkActionBuilder.java @@ -68,27 +68,6 @@ /** Builder class to construct {@link CppLinkAction}s. */ public class CppLinkActionBuilder { - /** - * An implementation of {@link LinkArtifactFactory} that can create artifacts anywhere. - * - *

      Necessary when the LTO backend actions of libraries should be shareable, and thus cannot be - * under the package directory. - */ - static final LinkArtifactFactory SHAREABLE_LINK_ARTIFACT_FACTORY = - new LinkArtifactFactory() { - @Override - public Artifact create( - ActionConstructionContext actionConstructionContext, - RepositoryName repositoryName, - BuildConfigurationValue configuration, - PathFragment rootRelativePath) { - return actionConstructionContext.getShareableArtifact( - rootRelativePath, configuration.getBinDirectory(repositoryName)); - } - }; - - public static final String SHARED_NONLTO_BACKEND_ROOT_PREFIX = "shared.nonlto"; - private final Artifact output; private final CppSemantics cppSemantics; @Nullable private String mnemonic; @@ -334,6 +313,7 @@ private LtoBackendArtifacts createLtoArtifact( Artifact bitcodeFile, @Nullable BitcodeFiles allBitcode, PathFragment ltoOutputRootPrefix, + PathFragment ltoObjRootPrefix, boolean createSharedNonLto, List argv) throws RuleErrorException, InterruptedException { @@ -341,45 +321,28 @@ private LtoBackendArtifacts createLtoArtifact( // that will be fed the results of the indexing step, or a dummy LTO backend // that simply compiles the bitcode into native code without any index-based // cross module optimization. - Preconditions.checkArgument(actionConstructionContext instanceof RuleContext); - LtoBackendArtifacts ltoArtifact = - createSharedNonLto - ? 
new LtoBackendArtifacts( - ((RuleContext) actionConstructionContext).getStarlarkThread(), - ruleErrorConsumer, - configuration.getOptions(), - cppConfiguration, - ltoOutputRootPrefix, - bitcodeFile, - actionConstructionContext, - repositoryName, - configuration, - SHAREABLE_LINK_ARTIFACT_FACTORY, - featureConfiguration, - toolchain, - fdoContext, - usePicForLtoBackendActions, - toolchain.shouldCreatePerObjectDebugInfo(featureConfiguration, cppConfiguration), - argv) - : new LtoBackendArtifacts( - ((RuleContext) actionConstructionContext).getStarlarkThread(), - ruleErrorConsumer, - configuration.getOptions(), - cppConfiguration, - ltoOutputRootPrefix, - bitcodeFile, - allBitcode, - actionConstructionContext, - repositoryName, - configuration, - linkArtifactFactory, - featureConfiguration, - toolchain, - fdoContext, - usePicForLtoBackendActions, - toolchain.shouldCreatePerObjectDebugInfo(featureConfiguration, cppConfiguration), - argv); - return ltoArtifact; + LinkArtifactFactory linkFactory = + createSharedNonLto ? CppLinkAction.SHAREABLE_LINK_ARTIFACT_FACTORY : linkArtifactFactory; + BitcodeFiles bitcodeFiles = createSharedNonLto ? 
null : allBitcode; + return new LtoBackendArtifacts( + ((RuleContext) actionConstructionContext).getStarlarkThread(), + ruleErrorConsumer, + configuration.getOptions(), + cppConfiguration, + ltoOutputRootPrefix, + ltoObjRootPrefix, + bitcodeFile, + bitcodeFiles, + actionConstructionContext, + repositoryName, + configuration, + linkFactory, + featureConfiguration, + toolchain, + fdoContext, + usePicForLtoBackendActions, + toolchain.shouldCreatePerObjectDebugInfo(featureConfiguration, cppConfiguration), + argv); } private ImmutableList collectPerFileLtoBackendOpts(Artifact objectFile) { @@ -402,6 +365,7 @@ private List getLtoBackendUserCompileFlags( private Iterable createLtoArtifacts( PathFragment ltoOutputRootPrefix, + PathFragment ltoObjRootPrefix, NestedSet uniqueLibraries, boolean allowLtoIndexing, boolean includeLinkStaticInLtoIndexing) @@ -435,7 +399,11 @@ private Iterable createLtoArtifacts( } } BitcodeFiles bitcodeFiles = new BitcodeFiles(allBitcode.build()); - + if (bitcodeFiles.getFiles().toList().stream().anyMatch(Artifact::isTreeArtifact) + && ltoOutputRootPrefix.equals(ltoObjRootPrefix)) { + throw new RuleErrorException( + "Thinlto with tree artifacts requires feature use_lto_native_object_directory."); + } ImmutableList.Builder ltoOutputs = ImmutableList.builder(); for (LinkerInputs.LibraryToLink lib : uniqueLibraries.toList()) { if (!lib.containsObjectFiles()) { @@ -454,6 +422,7 @@ private Iterable createLtoArtifacts( objectFile, bitcodeFiles, ltoOutputRootPrefix, + ltoObjRootPrefix, /* createSharedNonLto= */ false, backendUserCompileFlags); ltoOutputs.add(ltoArtifacts); @@ -478,6 +447,7 @@ private Iterable createLtoArtifacts( input.getArtifact(), bitcodeFiles, ltoOutputRootPrefix, + ltoObjRootPrefix, !allowLtoIndexing, backendUserCompileFlags); ltoOutputs.add(ltoArtifacts); @@ -496,7 +466,11 @@ private ImmutableMap createSharedNonLtoArtifacts( return ImmutableMap.of(); } - PathFragment ltoOutputRootPrefix = 
PathFragment.create(SHARED_NONLTO_BACKEND_ROOT_PREFIX); + PathFragment ltoOutputRootPrefix = CppHelper.SHARED_NONLTO_BACKEND_ROOT_PREFIX; + PathFragment ltoObjRootPrefix = + featureConfiguration.isEnabled(CppRuleClasses.USE_LTO_NATIVE_OBJECT_DIRECTORY) + ? CppHelper.getThinLtoNativeObjectDirectoryFromLtoOutputRoot(ltoOutputRootPrefix) + : ltoOutputRootPrefix; ImmutableMap.Builder sharedNonLtoBackends = ImmutableMap.builder(); @@ -511,6 +485,7 @@ private ImmutableMap createSharedNonLtoArtifacts( input.getArtifact(), /* allBitcode= */ null, ltoOutputRootPrefix, + ltoObjRootPrefix, /* createSharedNonLto= */ true, backendUserCompileFlags); sharedNonLtoBackends.put(input.getArtifact(), ltoArtifacts); @@ -615,12 +590,17 @@ public CppLinkAction build() throws InterruptedException, RuleErrorException { || (linkingMode == Link.LinkingMode.DYNAMIC && !ltoCompilationContext.isEmpty()); PathFragment ltoOutputRootPrefix = null; + PathFragment ltoObjRootPrefix = null; if (isLtoIndexing) { Preconditions.checkState(allLtoArtifacts == null); ltoOutputRootPrefix = allowLtoIndexing - ? FileSystemUtils.appendExtension(output.getRootRelativePath(), ".lto") - : PathFragment.create(SHARED_NONLTO_BACKEND_ROOT_PREFIX); + ? CppHelper.getLtoOutputRootPrefix(output.getRootRelativePath()) + : CppHelper.SHARED_NONLTO_BACKEND_ROOT_PREFIX; + ltoObjRootPrefix = + featureConfiguration.isEnabled(CppRuleClasses.USE_LTO_NATIVE_OBJECT_DIRECTORY) + ? 
CppHelper.getThinLtoNativeObjectDirectoryFromLtoOutputRoot(ltoOutputRootPrefix) + : ltoOutputRootPrefix; // Use the originalUniqueLibraries which contains the full bitcode files // needed by the LTO backends (as opposed to the minimized bitcode files // containing just the summaries and symbol information that can be used by @@ -628,6 +608,7 @@ public CppLinkAction build() throws InterruptedException, RuleErrorException { allLtoArtifacts = createLtoArtifacts( ltoOutputRootPrefix, + ltoObjRootPrefix, originalUniqueLibraries, allowLtoIndexing, includeLinkStaticInLtoIndexing); @@ -861,6 +842,7 @@ public CppLinkAction build() throws InterruptedException, RuleErrorException { toolchain.getInterfaceSoBuilder().getExecPathString(), interfaceOutput != null ? interfaceOutput.getExecPathString() : null, ltoOutputRootPrefix, + ltoObjRootPrefix, defFile != null ? defFile.getExecPathString() : null, fdoContext, collectedLibrariesToLink.getRuntimeLibrarySearchDirectories(), diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java index 89289d652c4dca..178048f3b553f1 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppRuleClasses.java @@ -287,6 +287,9 @@ public static ToolchainTypeRequirement ccToolchainTypeRequirement(RuleDefinition /** A string constant for the LTO indexing bitcode feature. */ public static final String NO_USE_LTO_INDEXING_BITCODE_FILE = "no_use_lto_indexing_bitcode_file"; + /** A string constant for the LTO separate native object directory feature. */ + public static final String USE_LTO_NATIVE_OBJECT_DIRECTORY = "use_lto_native_object_directory"; + /* * A string constant for allowing implicit ThinLTO enablement for AFDO. 
*/ diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java index 0704dde0238ed9..5fce318f621219 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/LibrariesToLinkCollector.java @@ -778,9 +778,7 @@ private static boolean handledByLtoIndexing(Artifact a, boolean allowLtoIndexing // Otherwise, this may be from a linkstatic library that we decided not to include in // LTO indexing because we are linking a test, to improve scalability when linking many tests. return allowLtoIndexing - && !a.getRootRelativePath() - .startsWith( - PathFragment.create(CppLinkActionBuilder.SHARED_NONLTO_BACKEND_ROOT_PREFIX)); + && !a.getRootRelativePath().startsWith(CppHelper.SHARED_NONLTO_BACKEND_ROOT_PREFIX); } @Nullable diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/LinkBuildVariables.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/LinkBuildVariables.java index 8e5abc61ede9b5..2f2c1a17ebe3ac 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/LinkBuildVariables.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/LinkBuildVariables.java @@ -118,6 +118,7 @@ public static CcToolchainVariables setupVariables( String interfaceLibraryBuilder, String interfaceLibraryOutput, PathFragment ltoOutputRootPrefix, + PathFragment ltoObjRootPrefix, String defFile, FdoContext fdoContext, NestedSet runtimeLibrarySearchDirectories, @@ -185,13 +186,27 @@ public static CcToolchainVariables setupVariables( // TODO(b/33846234): Remove once all the relevant crosstools don't depend on the variable. buildVariables.addStringVariable("thinlto_optional_params_file", ""); } - // Given "fullbitcode_prefix;thinlto_index_prefix", replaces fullbitcode_prefix with - // thinlto_index_prefix to generate the index and imports files. 
+ // Given "fullbitcode_prefix;thinlto_index_prefix;native_object_prefix", replaces + // fullbitcode_prefix with thinlto_index_prefix to generate the index and imports files. // fullbitcode_prefix is the empty string because we are appending a prefix to the fullbitcode // instead of replacing it. This argument is passed to the linker. - buildVariables.addStringVariable( - THINLTO_PREFIX_REPLACE.getVariableName(), - ";" + binDirectoryPath.getRelative(ltoOutputRootPrefix) + '/'); + // The native objects generated after the LTOBackend action are stored in a directory by + // replacing the prefix "fullbitcode_prefix" with "native_object_prefix", and this is used + // when generating the param file in the indexing step, which will be used during the final + // link step. + if (!ltoOutputRootPrefix.equals(ltoObjRootPrefix)) { + buildVariables.addStringVariable( + THINLTO_PREFIX_REPLACE.getVariableName(), + ";" + + binDirectoryPath.getRelative(ltoOutputRootPrefix) + + "/;" + + binDirectoryPath.getRelative(ltoObjRootPrefix) + + "/"); + } else { + buildVariables.addStringVariable( + THINLTO_PREFIX_REPLACE.getVariableName(), + ";" + binDirectoryPath.getRelative(ltoOutputRootPrefix) + "/"); + } String objectFileExtension = ccToolchainProvider .getFeatures() diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendAction.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendAction.java index 86d6decae305af..840924be8cee36 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendAction.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendAction.java @@ -14,14 +14,12 @@ package com.google.devtools.build.lib.rules.cpp; -import static com.google.common.collect.ImmutableSet.toImmutableSet; import static java.util.stream.Collectors.joining; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.ActionEnvironment; import com.google.devtools.build.lib.actions.ActionExecutionContext; @@ -44,11 +42,13 @@ import com.google.devtools.build.lib.util.DetailedExitCode; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.vfs.FileSystemUtils; +import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.util.HashSet; import java.util.Map; +import java.util.Optional; import java.util.Set; import javax.annotation.Nullable; @@ -122,13 +122,60 @@ protected void setInputsDiscovered(boolean inputsDiscovered) { this.inputsDiscovered = inputsDiscovered; } - private NestedSet computeBitcodeInputs(HashSet inputPaths) { + /** + * Given a map of path to artifact, and a path, returns the artifact whose key is in the map, or + * if none, an artifact whose key matches a prefix of the path. Assumes that artifacts whose paths + * are directories are tree artifacts. Assumes that no artifact key is a sub directory of another + * artifact key. For example, "path/file1" may return the artifact whose path is "path/file1" or + * whose path is "path/". Returns empty if there are no matches. 
+ */ + private Optional getArtifactOrTreeArtifact( + PathFragment path, Map pathToArtifact) { + PathFragment currentPath = path; + while (!currentPath.isEmpty()) { + if (pathToArtifact.containsKey(currentPath)) { + return Optional.of(pathToArtifact.get(currentPath)); + } else { + currentPath = currentPath.getParentDirectory(); + } + } + return Optional.empty(); + } + + /** + * Throws an error if any of the input paths is not in the bitcodeFiles or in a subdirecorty of a + * file in bitcodeFiles + */ + private NestedSet computeBitcodeInputs( + HashSet inputPaths, ActionExecutionContext actionExecutionContext) + throws ActionExecutionException { NestedSetBuilder bitcodeInputs = NestedSetBuilder.stableOrder(); - for (Artifact inputArtifact : bitcodeFiles.getFiles().toList()) { - if (inputPaths.contains(inputArtifact.getExecPath())) { - bitcodeInputs.add(inputArtifact); + ImmutableMap execPathToArtifact = + bitcodeFiles.getFilesArtifactPathMap(); + Set missingInputs = new HashSet<>(); + for (PathFragment inputPath : inputPaths) { + Optional maybeArtifact = getArtifactOrTreeArtifact(inputPath, execPathToArtifact); + if (maybeArtifact.isPresent()) { + bitcodeInputs.add(maybeArtifact.get()); + } else { + // One of the inputs is not present. We add it to missingInputs and will fail. + missingInputs.add(inputPath); } } + if (!missingInputs.isEmpty()) { + String message = + String.format( + "error computing inputs from imports file: %s, missing bitcode files (first 10): %s", + actionExecutionContext.getInputPath(imports), + // Limit the reported count to protect against a large error message. 
+ missingInputs.stream() + .map(Object::toString) + .sorted() + .limit(10) + .collect(joining(", "))); + DetailedExitCode code = createDetailedExitCode(message, Code.MISSING_BITCODE_FILES); + throw new ActionExecutionException(message, this, false, code); + } return bitcodeInputs.build(); } @@ -136,9 +183,10 @@ private NestedSet computeBitcodeInputs(HashSet inputPath @Override public NestedSet discoverInputs(ActionExecutionContext actionExecutionContext) throws ActionExecutionException { + Path importsFilePath = actionExecutionContext.getInputPath(imports); ImmutableList lines; try { - lines = FileSystemUtils.readLinesAsLatin1(actionExecutionContext.getInputPath(imports)); + lines = FileSystemUtils.readLinesAsLatin1(importsFilePath); } catch (IOException e) { String message = String.format( @@ -168,27 +216,8 @@ public NestedSet discoverInputs(ActionExecutionContext actionExecution } // Convert the import set of paths to the set of bitcode file artifacts. - NestedSet bitcodeInputSet = computeBitcodeInputs(importSet); - if (bitcodeInputSet.memoizedFlattenAndGetSize() != importSet.size()) { - Set missingInputs = - Sets.difference( - importSet, - bitcodeInputSet.toList().stream() - .map(Artifact::getExecPath) - .collect(toImmutableSet())); - String message = - String.format( - "error computing inputs from imports file: %s, missing bitcode files (first 10): %s", - actionExecutionContext.getInputPath(imports), - // Limit the reported count to protect against a large error message. 
- missingInputs.stream() - .map(Object::toString) - .sorted() - .limit(10) - .collect(joining(", "))); - DetailedExitCode code = createDetailedExitCode(message, Code.MISSING_BITCODE_FILES); - throw new ActionExecutionException(message, this, false, code); - } + // Throws an error if there is any path in the importset that is not pat of any artifact + NestedSet bitcodeInputSet = computeBitcodeInputs(importSet, actionExecutionContext); updateInputs( NestedSetBuilder.fromNestedSet(bitcodeInputSet).addTransitive(mandatoryInputs).build()); return bitcodeInputSet; @@ -251,6 +280,16 @@ public static class Builder extends SpawnAction.Builder { private BitcodeFiles bitcodeFiles; private Artifact imports; + public Builder() { + super(); + } + + public Builder(Builder other) { + super(other); + bitcodeFiles = other.bitcodeFiles; + imports = other.imports; + } + @CanIgnoreReturnValue public Builder addImportsInfo(BitcodeFiles allBitcodeFiles, Artifact importsFile) { this.bitcodeFiles = allBitcodeFiles; diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendActionTemplate.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendActionTemplate.java new file mode 100644 index 00000000000000..0160101bc3595f --- /dev/null +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendActionTemplate.java @@ -0,0 +1,387 @@ +// Copyright 2017 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package com.google.devtools.build.lib.rules.cpp; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; +import com.google.devtools.build.lib.actions.ActionExecutionContext; +import com.google.devtools.build.lib.actions.ActionExecutionException; +import com.google.devtools.build.lib.actions.ActionKeyCacher; +import com.google.devtools.build.lib.actions.ActionKeyContext; +import com.google.devtools.build.lib.actions.ActionLookupKey; +import com.google.devtools.build.lib.actions.ActionOwner; +import com.google.devtools.build.lib.actions.ActionTemplate; +import com.google.devtools.build.lib.actions.Artifact; +import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact; +import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact; +import com.google.devtools.build.lib.actions.CommandLineExpansionException; +import com.google.devtools.build.lib.actions.MiddlemanType; +import com.google.devtools.build.lib.collect.nestedset.NestedSet; +import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; +import com.google.devtools.build.lib.collect.nestedset.Order; +import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration; +import com.google.devtools.build.lib.server.FailureDetails; +import com.google.devtools.build.lib.util.DetailedExitCode; +import com.google.devtools.build.lib.util.FileType; +import com.google.devtools.build.lib.util.Fingerprint; +import com.google.devtools.build.lib.vfs.FileSystemUtils; +import com.google.devtools.build.lib.vfs.PathFragment; +import javax.annotation.Nullable; + +/** + * An {@link ActionTemplate} that expands into {@link LtoBackendAction}s at execution time. Is is + * similar to {@link com.google.devtools.build.lib.analysis.actions.SpawnActionTemplate}. 
+ */ +public final class LtoBackendActionTemplate extends ActionKeyCacher + implements ActionTemplate { + private final LtoBackendAction.Builder ltoBackendActionbuilder; + private final CcToolchainVariables buildVariables; + + // An input tree artifact containing the full bitcode. It is never null. + private final SpecialArtifact fullBitcodeTreeArtifact; + + // An input tree artifact containing ".thinlto.bc" and ".imports" files, generated together with + // It will be null when this is a shared non-lto backend. + @Nullable private final SpecialArtifact indexAndImportsTreeArtifact; + + // An output tree artifact that will contain the native objects. In a sibling directory to + // indexTreeArtifact. The objects will be generated in the same location as defined in the .param + // file created during the lto indexing step. + private final SpecialArtifact objectFileTreeArtifact; + + // The corresponding dwoFile if fission is used. + private final SpecialArtifact dwoFileTreeArtifact; + + private final FeatureConfiguration featureConfiguration; + + private final boolean usePic; + + private final BitcodeFiles bitcodeFiles; + + private final ActionOwner actionOwner; + private final NestedSet mandatoryInputs; + private final NestedSet allInputs; + + /** + * Creates an LtoBackendActionTemplate. + * + * @param indexAndImportsTreeArtifact the TreeArtifact that contains .thinlto.bc. and .imports + * files. + * @param fullBitcodeTreeArtifact the TreeArtifact that contains .pic.o files. + * @param objectFileTreeArtifact the TreeArtifact that contains .pic.o files. + * @param dwoFileTreeArtifact the TreeArtifact that contains .dwo files. + * @param featureConfiguration the feature configuration. + * @param ltoBackendActionbuilder An almost completely configured {@link LtoBackendAction.Builder} + * without the input and output files set. It is used as a template to instantiate expanded + * {@link LtoBackendAction}s. + * @param buildVariables the building variables. 
+ * @param usePic whether to use PIC or not. + * @param actionOwner the owner of this {@link ActionTemplate}. + */ + LtoBackendActionTemplate( + SpecialArtifact indexAndImportsTreeArtifact, + SpecialArtifact fullBitcodeTreeArtifact, + SpecialArtifact objectFileTreeArtifact, + SpecialArtifact dwoFileTreeArtifact, + FeatureConfiguration featureConfiguration, + LtoBackendAction.Builder ltoBackendActionbuilder, + CcToolchainVariables buildVariables, + boolean usePic, + BitcodeFiles bitcodeFiles, + ActionOwner actionOwner) { + this.ltoBackendActionbuilder = ltoBackendActionbuilder; + this.buildVariables = buildVariables; + this.indexAndImportsTreeArtifact = indexAndImportsTreeArtifact; + this.fullBitcodeTreeArtifact = fullBitcodeTreeArtifact; + this.objectFileTreeArtifact = objectFileTreeArtifact; + this.dwoFileTreeArtifact = dwoFileTreeArtifact; + this.actionOwner = checkNotNull(actionOwner, objectFileTreeArtifact); + this.featureConfiguration = featureConfiguration; + this.usePic = usePic; + this.bitcodeFiles = bitcodeFiles; + + NestedSetBuilder mandatoryInputsBuilder = + NestedSetBuilder.compileOrder() + .add(fullBitcodeTreeArtifact) + .addTransitive(ltoBackendActionbuilder.getInputsAndTools()); + if (indexAndImportsTreeArtifact != null) { + mandatoryInputsBuilder.add(indexAndImportsTreeArtifact); + } + this.mandatoryInputs = mandatoryInputsBuilder.build(); + this.allInputs = mandatoryInputs; + } + + /** Helper functions for generateActionsForInputArtifacts */ + private String pathFragmentToRelativePath(PathFragment parentPath, PathFragment path) { + return path.relativeTo(parentPath).getSafePathString(); + } + + private String removeImportsExtension(String path) { + return FileSystemUtils.removeExtension(path); + } + + private String removeThinltoBcExtension(String path) { + return FileSystemUtils.removeExtension(FileSystemUtils.removeExtension(path)); + } + + /** + * Given all the files inside indexAndImportsTreeArtifact, we find the corresponding index and + * 
imports files. Then we use their path together with the fullBitcodeTreeArtifact path to derive + * the path of the original full bitcode file. Then for each imports file, we create an lto + * backend action that depends on that import file, on the corresponding index file, and on the + * whole fullBitcodeTreeArtifact, which it uses to find the full bitcode file. TODO(antunesi): + * make the generated action depend only on the corresponding full bitcode file rather than depend + * on the whole tree artifact that contains the full bitcode file. + */ + @Override + public ImmutableList generateActionsForInputArtifacts( + ImmutableSet inputTreeFileArtifacts, ActionLookupKey artifactOwner) + throws ActionExecutionException { + ImmutableList.Builder expandedActions = new ImmutableList.Builder<>(); + + final FileType thinltoBcSourceType = CppFileTypes.LTO_INDEXING_ANALYSIS_FILE; + final FileType importsType = CppFileTypes.LTO_IMPORTS_FILE; + + ImmutableList.Builder importsBuilder = ImmutableList.builder(); + ImmutableMap.Builder nameToThinLtoBuilder = + new ImmutableMap.Builder<>(); + + PathFragment indexAndImportParentPath = indexAndImportsTreeArtifact.getExecPath(); + + for (TreeFileArtifact inputTreeFileArtifact : inputTreeFileArtifacts) { + PathFragment path = inputTreeFileArtifact.getExecPath(); + boolean isThinLto = thinltoBcSourceType.matches(path); + boolean isImport = importsType.matches(path); + + if (isThinLto) { + String thinLtoNoExtension = + removeThinltoBcExtension(pathFragmentToRelativePath(indexAndImportParentPath, path)); + nameToThinLtoBuilder.put(thinLtoNoExtension, inputTreeFileArtifact); + } else if (isImport) { + importsBuilder.add(inputTreeFileArtifact); + } else { + String message = + String.format( + "Artifact '%s' expanded from the directory artifact '%s' is neither imports nor" + + " thinlto .", + inputTreeFileArtifact.getExecPathString(), + fullBitcodeTreeArtifact.getExecPathString()); // kinda wrong, should be index + throw new 
ActionExecutionException( + message, this, /* catastrophe= */ false, makeDetailedExitCode(message)); + } + } + + // Maps each imports to a .bc file + ImmutableList imports = importsBuilder.build(); + ImmutableMap nameToThinLto = nameToThinLtoBuilder.buildOrThrow(); + if (imports.size() != nameToThinLto.size()) { + String message = + String.format( + "Either both or neither bitcodeFiles and imports files should be null. %s %s" + ".", + inputTreeFileArtifacts, + fullBitcodeTreeArtifact.getExecPathString()); // kinda wrong, should be index + throw new ActionExecutionException( + message, this, /* catastrophe= */ false, makeDetailedExitCode(message)); + } + + for (TreeFileArtifact importFile : imports) { + PathFragment path = importFile.getExecPath(); + String relativePathNoExtension = + removeImportsExtension(pathFragmentToRelativePath(indexAndImportParentPath, path)); + TreeFileArtifact thinLtoFile = nameToThinLto.get(relativePathNoExtension); + PathFragment fullBitcodePath = + fullBitcodeTreeArtifact.getExecPath().getRelative(relativePathNoExtension); + String outputName = relativePathNoExtension; + TreeFileArtifact objTreeFileArtifact = + TreeFileArtifact.createTemplateExpansionOutput( + objectFileTreeArtifact, outputName, artifactOwner); + TreeFileArtifact dwoFileArtifact = null; + if (dwoFileTreeArtifact != null) { + dwoFileArtifact = + TreeFileArtifact.createTemplateExpansionOutput( + dwoFileTreeArtifact, + FileSystemUtils.replaceExtension( + PathFragment.create(relativePathNoExtension), ".dwo"), + artifactOwner); + } + LtoBackendAction.Builder builderCopy = new LtoBackendAction.Builder(ltoBackendActionbuilder); + + LtoBackendArtifacts.addArtifactsLtoBackendAction( + builderCopy, + buildVariables, + featureConfiguration, + thinLtoFile, + importFile, + fullBitcodeTreeArtifact, + objTreeFileArtifact, + bitcodeFiles, + dwoFileArtifact, + usePic, + fullBitcodePath.toString(), + /* isDummyAction= */ false); + expandedActions.add((LtoBackendAction) 
builderCopy.buildForActionTemplate(actionOwner)); + } + + return expandedActions.build(); + } + + @Override + protected void computeKey( + ActionKeyContext actionKeyContext, + @Nullable Artifact.ArtifactExpander artifactExpander, + Fingerprint fp) + throws CommandLineExpansionException, InterruptedException { + + LtoBackendAction dummyAction = getDummyAction(); + dummyAction.computeKey(actionKeyContext, artifactExpander, fp); + } + + /** + * This is an action that is not valid, because its input bitcode file is a TreeArtifact rather + * than a specific file. It is useful for calculating keys and inputs of the Action Template by + * reusing functionality from LtoBackendAction. + */ + private LtoBackendAction getDummyAction() { + LtoBackendAction.Builder builderCopy = new LtoBackendAction.Builder(ltoBackendActionbuilder); + // This is a dummy action that would not work, because the bitcode file path is a directory + // rather than a file. + LtoBackendArtifacts.addArtifactsLtoBackendAction( + builderCopy, + buildVariables, + featureConfiguration, + indexAndImportsTreeArtifact, + indexAndImportsTreeArtifact, + fullBitcodeTreeArtifact, + objectFileTreeArtifact, + bitcodeFiles, + dwoFileTreeArtifact, + usePic, + null, + /* isDummyAction= */ true); + + return (LtoBackendAction) builderCopy.buildForActionTemplate(actionOwner); + } + + @Override + public SpecialArtifact getInputTreeArtifact() { + return indexAndImportsTreeArtifact; + } + + @Override + public SpecialArtifact getOutputTreeArtifact() { + return objectFileTreeArtifact; + } + + @Override + public ActionOwner getOwner() { + return actionOwner; + } + + @Override + public boolean isShareable() { + return false; + } + + @Override + public String getMnemonic() { + return "LtoBackendActionTemplate"; + } + + @Override + public NestedSet getMandatoryInputs() { + return mandatoryInputs; + } + + @Override + public NestedSet getInputFilesForExtraAction( + ActionExecutionContext actionExecutionContext) { + return 
NestedSetBuilder.emptySet(Order.STABLE_ORDER); + } + + @Override + public ImmutableSet getMandatoryOutputs() { + return ImmutableSet.of(); + } + + @Override + public NestedSet getTools() { + return NestedSetBuilder.emptySet(Order.STABLE_ORDER); + } + + @Override + public NestedSet getInputs() { + return allInputs; + } + + @Override + public ImmutableSet getOutputs() { + ImmutableSet.Builder builder = ImmutableSet.builder(); + builder.add(objectFileTreeArtifact); + if (dwoFileTreeArtifact != null) { + builder.add(dwoFileTreeArtifact); + } + return builder.build(); + } + + @Override + public ImmutableList getClientEnvironmentVariables() { + return ImmutableList.of(); + } + + @Override + public NestedSet getSchedulingDependencies() { + return NestedSetBuilder.emptySet(Order.STABLE_ORDER); + } + + @Override + public boolean shouldReportPathPrefixConflict(ActionAnalysisMetadata action) { + return this != action; + } + + @Override + public MiddlemanType getActionType() { + return MiddlemanType.NORMAL; + } + + @Override + public String prettyPrint() { + return "LtoBackendActionTemplate compiling " + fullBitcodeTreeArtifact.getExecPathString(); + } + + @Override + public String describe() { + return "Lto backend compiling all C++ files in " + fullBitcodeTreeArtifact.prettyPrint(); + } + + @Override + public String toString() { + return prettyPrint(); + } + + private static DetailedExitCode makeDetailedExitCode(String message) { + return DetailedExitCode.of( + FailureDetails.FailureDetail.newBuilder() + .setMessage(message) + .setExecution( + FailureDetails.Execution.newBuilder() + .setCode( + FailureDetails.Execution.Code + .PERSISTENT_ACTION_OUTPUT_DIRECTORY_CREATION_FAILURE)) + .build()); + } +} diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendArtifacts.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendArtifacts.java index b602ab280ed945..337f9cdf1925d3 100644 --- 
a/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendArtifacts.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/LtoBackendArtifacts.java @@ -14,12 +14,13 @@ package com.google.devtools.build.lib.rules.cpp; - import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander; +import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact; import com.google.devtools.build.lib.actions.CommandLine; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.analysis.RuleErrorConsumer; @@ -82,14 +83,19 @@ public final class LtoBackendArtifacts implements LtoBackendArtifactsApi userCompileFlags) throws RuleErrorException, InterruptedException { + boolean createSharedNonLto = allBitcodeFiles == null; this.bitcodeFile = bitcodeFile; - PathFragment obj = ltoOutputRootPrefix.getRelative(bitcodeFile.getExecPath()); - - objectFile = - linkArtifactFactory.create(actionConstructionContext, repositoryName, configuration, obj); - imports = - linkArtifactFactory.create( - actionConstructionContext, - repositoryName, - configuration, - FileSystemUtils.appendExtension(obj, ".imports")); - index = - linkArtifactFactory.create( - actionConstructionContext, - repositoryName, - configuration, - FileSystemUtils.appendExtension(obj, ".thinlto.bc")); - - scheduleLtoBackendAction( - thread, - ruleErrorConsumer, - buildOptions, - cppConfiguration, - actionConstructionContext, - repositoryName, - featureConfiguration, - ccToolchain, - fdoContext, - usePic, - generateDwo, - configuration, - linkArtifactFactory, - userCompileFlags, - allBitcodeFiles); - } + PathFragment obj = ltoObjRootPrefix.getRelative(bitcodeFile.getExecPath()); + // indexObj is an object that does 
not exist but helps us find where to store the index and + // imports files + PathFragment indexObj = ltoOutputRootPrefix.getRelative(bitcodeFile.getExecPath()); - // Interface to create an LTO backend that does not perform any cross-module optimization. - public LtoBackendArtifacts( - StarlarkThread thread, - RuleErrorConsumer ruleErrorConsumer, - BuildOptions buildOptions, - CppConfiguration cppConfiguration, - PathFragment ltoOutputRootPrefix, - Artifact bitcodeFile, - ActionConstructionContext actionConstructionContext, - RepositoryName repositoryName, - BuildConfigurationValue configuration, - LinkArtifactFactory linkArtifactFactory, - FeatureConfiguration featureConfiguration, - CcToolchainProvider ccToolchain, - FdoContext fdoContext, - boolean usePic, - boolean generateDwo, - List userCompileFlags) - throws RuleErrorException, InterruptedException { - this.bitcodeFile = bitcodeFile; + LtoBackendAction.Builder builder = new LtoBackendAction.Builder(); - PathFragment obj = ltoOutputRootPrefix.getRelative(bitcodeFile.getExecPath()); - objectFile = - linkArtifactFactory.create(actionConstructionContext, repositoryName, configuration, obj); - imports = null; - index = null; + CcToolchainVariables ccToolchainVariables; - scheduleLtoBackendAction( - thread, - ruleErrorConsumer, - buildOptions, - cppConfiguration, - actionConstructionContext, - repositoryName, - featureConfiguration, + try { + ccToolchainVariables = ccToolchain.getBuildVariables(thread, buildOptions, cppConfiguration); + } catch (EvalException e) { + throw new RuleErrorException(e.getMessage()); + } + + CcToolchainVariables.Builder buildVariablesBuilder = + CcToolchainVariables.builder(ccToolchainVariables); + + initializeLtoBackendBuilder( + builder, + buildVariablesBuilder, ccToolchain, + cppConfiguration, fdoContext, - usePic, - generateDwo, - configuration, - linkArtifactFactory, + featureConfiguration, userCompileFlags, - /* bitcodeFiles= */ null); + ruleErrorConsumer); + CcToolchainVariables 
buildVariables = buildVariablesBuilder.build(); + if (bitcodeFile.isTreeArtifact()) { + objectFile = + linkArtifactFactory.createTreeArtifact( + actionConstructionContext, repositoryName, configuration, obj); + if (createSharedNonLto) { + imports = null; + index = null; + } else { + imports = + linkArtifactFactory.createTreeArtifact( + actionConstructionContext, repositoryName, configuration, indexObj); + index = imports; + } + if (generateDwo) { + // No support for dwo files for tree artifacts at the moment. This should not throw an + // irrecoverable exception because we can still generate dwo files for the other artifacts. + // TODO(b/289089713): Add support for dwo files for tree artifacts. + dwoFile = null; + } + createLtoBackendActionTemplate( + actionConstructionContext, + featureConfiguration, + builder, + buildVariables, + usePic, + allBitcodeFiles); + } else { + objectFile = + linkArtifactFactory.create(actionConstructionContext, repositoryName, configuration, obj); + if (createSharedNonLto) { + imports = null; + index = null; + } else { + String importsExt = Iterables.getOnlyElement(CppFileTypes.LTO_IMPORTS_FILE.getExtensions()); + String indexExt = + Iterables.getOnlyElement(CppFileTypes.LTO_INDEXING_ANALYSIS_FILE.getExtensions()); + imports = + linkArtifactFactory.create( + actionConstructionContext, + repositoryName, + configuration, + FileSystemUtils.appendExtension(indexObj, importsExt)); + index = + linkArtifactFactory.create( + actionConstructionContext, + repositoryName, + configuration, + FileSystemUtils.appendExtension(indexObj, indexExt)); + } + if (generateDwo) { + dwoFile = + linkArtifactFactory.create( + actionConstructionContext, + repositoryName, + configuration, + FileSystemUtils.replaceExtension( + objectFile.getOutputDirRelativePath(configuration.isSiblingRepositoryLayout()), + ".dwo")); + } + scheduleLtoBackendAction( + builder, + buildVariables, + actionConstructionContext, + featureConfiguration, + usePic, + allBitcodeFiles); + } } 
public Artifact getObjectFile() { @@ -217,69 +241,22 @@ void addIndexingOutputs(ImmutableSet.Builder builder) { builder.add(index); } - private void scheduleLtoBackendAction( - StarlarkThread thread, - RuleErrorConsumer ruleErrorConsumer, - BuildOptions buildOptions, - CppConfiguration cppConfiguration, - ActionConstructionContext actionConstructionContext, - RepositoryName repositoryName, - FeatureConfiguration featureConfiguration, + /** + * Populate buildVariablesBuilder, and builder with data that is independent of what file is the + * input to the action. + */ + private static void initializeLtoBackendBuilder( + LtoBackendAction.Builder builder, + CcToolchainVariables.Builder buildVariablesBuilder, CcToolchainProvider ccToolchain, + CppConfiguration cppConfiguration, FdoContext fdoContext, - boolean usePic, - boolean generateDwo, - BuildConfigurationValue configuration, - LinkArtifactFactory linkArtifactFactory, + FeatureConfiguration featureConfiguration, List userCompileFlags, - @Nullable BitcodeFiles bitcodeFiles) - throws RuleErrorException, InterruptedException { - LtoBackendAction.Builder builder = new LtoBackendAction.Builder(); - - builder.addInput(bitcodeFile); - - Preconditions.checkState( - (index == null) == (imports == null), - "Either both or neither index and imports files should be null"); - if (imports != null) { - builder.addImportsInfo(bitcodeFiles, imports); - // Although the imports file is not used by the LTOBackendAction while the action is - // executing, it is needed during the input discovery phase, and we must list it as an input - // to the action in order for it to be preserved under --discard_orphaned_artifacts. 
- builder.addInput(imports); - } - if (index != null) { - builder.addInput(index); - } + RuleErrorConsumer ruleErrorConsumer) + throws RuleErrorException { builder.addTransitiveInputs(ccToolchain.getCompilerFiles()); - - builder.addOutput(objectFile); - - builder.setProgressMessage("LTO Backend Compile %s", objectFile.getExecPath()); builder.setMnemonic("CcLtoBackendCompile"); - - CcToolchainVariables ccToolchainVariables; - - try { - ccToolchainVariables = ccToolchain.getBuildVariables(thread, buildOptions, cppConfiguration); - } catch (EvalException e) { - throw new RuleErrorException(e.getMessage()); - } - - CcToolchainVariables.Builder buildVariablesBuilder = - CcToolchainVariables.builder(ccToolchainVariables); - if (index != null) { - buildVariablesBuilder.addStringVariable("thinlto_index", index.getExecPath().toString()); - } else { - // An empty input indicates not to perform cross-module optimization. - buildVariablesBuilder.addStringVariable("thinlto_index", "/dev/null"); - } - // The output from the LTO backend step is a native object file. - buildVariablesBuilder.addStringVariable( - "thinlto_output_object_file", objectFile.getExecPath().toString()); - // The input to the LTO backend step is the bitcode file. - buildVariablesBuilder.addStringVariable( - "thinlto_input_bitcode_file", bitcodeFile.getExecPath().toString()); addProfileForLtoBackend(builder, fdoContext, featureConfiguration, buildVariablesBuilder); // Add the context sensitive instrument path to the backend. 
if (featureConfiguration.isEnabled(CppRuleClasses.CS_FDO_INSTRUMENT)) { @@ -287,28 +264,9 @@ private void scheduleLtoBackendAction( CompileBuildVariables.CS_FDO_INSTRUMENT_PATH.getVariableName(), ccToolchain.getCSFdoInstrument()); } - - if (generateDwo) { - dwoFile = - linkArtifactFactory.create( - actionConstructionContext, - repositoryName, - configuration, - FileSystemUtils.replaceExtension( - objectFile.getOutputDirRelativePath(configuration.isSiblingRepositoryLayout()), - ".dwo")); - builder.addOutput(dwoFile); - buildVariablesBuilder.addStringVariable( - CompileBuildVariables.PER_OBJECT_DEBUG_INFO_FILE.getVariableName(), - dwoFile.getExecPathString()); - buildVariablesBuilder.addStringVariable( - CompileBuildVariables.IS_USING_FISSION.getVariableName(), ""); - } buildVariablesBuilder.addStringSequenceVariable( CompileBuildVariables.USER_COMPILE_FLAGS.getVariableName(), userCompileFlags); - CcToolchainVariables buildVariables = buildVariablesBuilder.build(); - if (cppConfiguration.useStandaloneLtoIndexingCommandLines()) { if (!featureConfiguration.actionIsConfigured(CppActionNames.LTO_BACKEND)) { throw ruleErrorConsumer.throwWithRuleError( @@ -323,7 +281,75 @@ private void scheduleLtoBackendAction( PathFragment compiler = ccToolchain.getToolPathFragment(Tool.GCC, ruleErrorConsumer); builder.setExecutable(compiler); } + } + + private static void addPathsToBuildVariablesBuilder( + CcToolchainVariables.Builder buildVariablesBuilder, + String indexPath, + String objectFilePath, + String dwoFilePath, + String bitcodeFilePath) { + // Ideally, those strings would come directly from the execPath of the Artifacts of + // the LtoBackendAction.Builder; however, in order to support tree artifacts, we need + // the bitcodeFilePath to be different from the bitcodeTreeArtifact execPath. + // The former is a file path and the latter is the directory path. + // Therefore we accept strings as inputs rather than artifacts. 
+ if (indexPath != null) { + buildVariablesBuilder.addStringVariable("thinlto_index", indexPath); + } else { + // An empty input indicates not to perform cross-module optimization. + buildVariablesBuilder.addStringVariable("thinlto_index", "/dev/null"); + } + // The output from the LTO backend step is a native object file. + buildVariablesBuilder.addStringVariable("thinlto_output_object_file", objectFilePath); + // The input to the LTO backend step is the bitcode file. + buildVariablesBuilder.addStringVariable("thinlto_input_bitcode_file", bitcodeFilePath); + // Add the context sensitive instrument path to the backend. + + if (dwoFilePath != null) { + buildVariablesBuilder.addStringVariable( + CompileBuildVariables.PER_OBJECT_DEBUG_INFO_FILE.getVariableName(), dwoFilePath); + buildVariablesBuilder.addStringVariable( + CompileBuildVariables.IS_USING_FISSION.getVariableName(), ""); + } + } + private static void addInputsToLtoBackendActionBuilder( + LtoBackendAction.Builder builder, + @Nullable Artifact index, + @Nullable Artifact imports, + Artifact bitcodeFile, + @Nullable BitcodeFiles bitcodeFiles) { + builder.addInput(bitcodeFile); + Preconditions.checkState( + (index == null) == (imports == null) && (imports == null) == (bitcodeFiles == null), + "Either all or none of index, imports and bitcodeFiles should be null"); + if (imports != null) { + builder.addImportsInfo(bitcodeFiles, imports); + // Although the imports file is not used by the LTOBackendAction while the action is + // executing, it is needed during the input discovery phase, and we must list it as an input + // to the action in order for it to be preserved under --discard_orphaned_artifacts. 
+ builder.addInput(imports); + } + if (index != null) { + builder.addInput(index); + } + } + + private static void addOutputsToLtoBackendActionBuilder( + LtoBackendAction.Builder builder, Artifact objectFile, Artifact dwoFile) { + builder.addOutput(objectFile); + // Add the context sensitive instrument path to the backend. + if (dwoFile != null) { + builder.addOutput(dwoFile); + } + } + + private static void addCommandLineToLtoBackendActionBuilder( + LtoBackendAction.Builder builder, + FeatureConfiguration featureConfiguration, + CcToolchainVariables buildVariables, + boolean usePic) { CommandLine ltoCommandLine = new CommandLine() { @@ -354,6 +380,124 @@ public Iterable arguments(ArtifactExpander artifactExpander) } }; builder.addCommandLine(ltoCommandLine); + } + + /** + * Adds artifact to builder. The resulting builder can be built into a valid ltoBackendAction. + * + *

      Assumes that build and builderVariableBuilder have been initialized by calling {@link + * initializeLtoBackendBuilder}. If this is not true, the action will be wrong. + * + * @param builder the builder to add the artifacts to, initialized by initializeLtoBackendBuilder. + * @param buildVariables CcToolchainVariables initialized by initializeLtoBackendBuilder + * @param featureConfiguration the feature configuration to get the command line for the builder. + * @param index the index artifact to add. Can be a TreeFileArtifact but cannot be a Tree + * Artifact. + * @param imports the imports artifact to add. Can be a TreeFileArtifact but cannot be a Tree + * Artifact. + * @param bitcodeArtifact the bitcode artifact to add. If it is a Tree Artifact, bitcodeFilePath + * must be set. + * @param objectFile the object file to add. Can be a TreeFileArtifact but cannot be a Tree + * Artifact. + * @param bitcodeFiles the bitcode files to add. + * @param dwoFile the dwo file to add. + * @param usePic whether to add the PIC option to the command line. + * @param bitcodeFilePath the path of the bitcode object we are compiling. Only used if + * bitcodeArtifact is a tree artifact. + * @param isDummyAction if true then ignores the preconditions, because it is generating a dummy + * action, not a valid action. 
+ */ + public static void addArtifactsLtoBackendAction( + LtoBackendAction.Builder builder, + CcToolchainVariables buildVariables, + FeatureConfiguration featureConfiguration, + @Nullable Artifact index, + @Nullable Artifact imports, + Artifact bitcodeArtifact, + Artifact objectFile, + @Nullable BitcodeFiles bitcodeFiles, + @Nullable Artifact dwoFile, + boolean usePic, + @Nullable String bitcodeFilePath, + boolean isDummyAction) { + Preconditions.checkState( + isDummyAction + || ((index == null || !index.isTreeArtifact()) + && (imports == null || !imports.isTreeArtifact()) + && (dwoFile == null || !dwoFile.isTreeArtifact()) + && !objectFile.isTreeArtifact()), + "index, imports, object and dwo files cannot be TreeArtifacts. We need to know their exact" + + " path not just directory path."); + Preconditions.checkState( + isDummyAction || (bitcodeArtifact.isTreeArtifact() ^ bitcodeFilePath == null), + "If bitcode file is a tree artifact, the bitcode file path must contain the path. If it is" + + " not a tree artifact, then bitcode file path should be null to not override the" + + " path."); + CcToolchainVariables.Builder buildVariablesBuilder = + CcToolchainVariables.builder(buildVariables); + addInputsToLtoBackendActionBuilder(builder, index, imports, bitcodeArtifact, bitcodeFiles); + addOutputsToLtoBackendActionBuilder(builder, objectFile, dwoFile); + builder.setProgressMessage("LTO Backend Compile %{output}"); + + String indexPath = index == null ? null : index.getExecPathString(); + String dwoFilePath = dwoFile == null ? null : dwoFile.getExecPathString(); + addPathsToBuildVariablesBuilder( + buildVariablesBuilder, + indexPath, + objectFile.getExecPathString(), + dwoFilePath, + bitcodeFilePath != null ? 
bitcodeFilePath : bitcodeArtifact.getExecPathString()); + CcToolchainVariables buildVariablesWithFiles = buildVariablesBuilder.build(); + addCommandLineToLtoBackendActionBuilder( + builder, featureConfiguration, buildVariablesWithFiles, usePic); + } + + private void createLtoBackendActionTemplate( + ActionConstructionContext actionConstructionContext, + FeatureConfiguration featureConfiguration, + LtoBackendAction.Builder ltoBackendActionbuilder, + CcToolchainVariables buildVariables, + boolean usePic, + BitcodeFiles bitcodeFiles) { + Preconditions.checkState( + (index == null && imports == null) || index.equals(imports), + "index and imports tree artifact must be the same"); + LtoBackendActionTemplate actionTemplate = + new LtoBackendActionTemplate( + (SpecialArtifact) index, + (SpecialArtifact) bitcodeFile, + (SpecialArtifact) objectFile, + (SpecialArtifact) dwoFile, + featureConfiguration, + ltoBackendActionbuilder, + buildVariables, + usePic, + bitcodeFiles, + actionConstructionContext.getActionOwner()); + actionConstructionContext.registerAction(actionTemplate); + } + + private void scheduleLtoBackendAction( + LtoBackendAction.Builder builder, + CcToolchainVariables buildVariables, + ActionConstructionContext actionConstructionContext, + FeatureConfiguration featureConfiguration, + boolean usePic, + @Nullable BitcodeFiles bitcodeFiles) { + + addArtifactsLtoBackendAction( + builder, + buildVariables, + featureConfiguration, + index, + imports, + bitcodeFile, + objectFile, + bitcodeFiles, + dwoFile, + usePic, + /* bitcodeFilePath= */ null, + /* isDummyAction= */ false); actionConstructionContext.registerAction(builder.build(actionConstructionContext)); } diff --git a/src/main/java/com/google/devtools/build/lib/rules/nativedeps/NativeDepsHelper.java b/src/main/java/com/google/devtools/build/lib/rules/nativedeps/NativeDepsHelper.java index e9f786e31a9e02..f267c2ddc21923 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/nativedeps/NativeDepsHelper.java 
+++ b/src/main/java/com/google/devtools/build/lib/rules/nativedeps/NativeDepsHelper.java @@ -27,10 +27,8 @@ import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.analysis.AnalysisUtils; import com.google.devtools.build.lib.analysis.RuleContext; -import com.google.devtools.build.lib.analysis.actions.ActionConstructionContext; import com.google.devtools.build.lib.analysis.actions.SymlinkAction; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; -import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; import com.google.devtools.build.lib.packages.TargetUtils; @@ -71,26 +69,6 @@ * that some rules are implicitly neverlink. */ public abstract class NativeDepsHelper { - /** - * An implementation of {@link - * com.google.devtools.build.lib.rules.cpp.CppLinkAction.LinkArtifactFactory} that can create - * artifacts anywhere. - * - *

      Necessary because the actions of nativedeps libraries should be shareable, and thus cannot - * be under the package directory. - */ - private static final CppLinkAction.LinkArtifactFactory SHAREABLE_LINK_ARTIFACT_FACTORY = - new CppLinkAction.LinkArtifactFactory() { - @Override - public Artifact create( - ActionConstructionContext actionConstructionContext, - RepositoryName repositoryName, - BuildConfigurationValue configuration, - PathFragment rootRelativePath) { - return actionConstructionContext.getShareableArtifact( - rootRelativePath, configuration.getBinDirectory(repositoryName)); - } - }; private NativeDepsHelper() {} @@ -288,7 +266,7 @@ public static NativeDepsRunfiles createNativeDepsAction( .setNeverLink(true) .setShouldCreateStaticLibraries(false) .addCcLinkingContexts(ImmutableList.of(ccLinkingContext)) - .setLinkArtifactFactory(SHAREABLE_LINK_ARTIFACT_FACTORY) + .setLinkArtifactFactory(CppLinkAction.SHAREABLE_LINK_ARTIFACT_FACTORY) .setDynamicLinkType(LinkTargetType.DYNAMIC_LIBRARY) .link(CcCompilationOutputs.EMPTY); diff --git a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java index a1699f1b1b03b3..3b593472395cc2 100755 --- a/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java +++ b/src/main/java/com/google/devtools/build/lib/starlarkbuildapi/cpp/CcModuleApi.java @@ -1688,6 +1688,7 @@ DebugInfoT mergeCcDebugInfoFromStarlark( positional = false, named = true, documented = false), + @Param(name = "lto_obj_root_prefix", positional = false, named = true, documented = false), @Param(name = "bitcode_file", positional = false, named = true, documented = false), @Param( name = "feature_configuration", @@ -1707,6 +1708,7 @@ DebugInfoT mergeCcDebugInfoFromStarlark( LtoBackendArtifactsT createLtoBackendArtifacts( StarlarkRuleContextT starlarkRuleContext, String ltoOutputRootPrefixString, + String 
ltoObjRootPrefixString, FileT bitcodeFile, FeatureConfigurationT featureConfigurationForStarlark, CcToolchainProviderT ccToolchain, diff --git a/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl b/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl index 1d01ce08d1dc91..b2375707d18443 100644 --- a/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl +++ b/src/main/starlark/builtins_bzl/common/cc/cc_common.bzl @@ -741,6 +741,7 @@ def _create_lto_backend_artifacts( *, ctx, lto_output_root_prefix, + lto_obj_root_prefix, bitcode_file, feature_configuration, cc_toolchain, @@ -753,6 +754,7 @@ def _create_lto_backend_artifacts( ctx = ctx, bitcode_file = bitcode_file, lto_output_root_prefix = lto_output_root_prefix, + lto_obj_root_prefix = lto_obj_root_prefix, feature_configuration = feature_configuration, cc_toolchain = cc_toolchain, fdo_context = fdo_context, diff --git a/src/test/java/com/google/devtools/build/lib/analysis/mock/cc_toolchain_config.bzl b/src/test/java/com/google/devtools/build/lib/analysis/mock/cc_toolchain_config.bzl index c7be375090d0f9..0d25ecab4cd7ca 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/mock/cc_toolchain_config.bzl +++ b/src/test/java/com/google/devtools/build/lib/analysis/mock/cc_toolchain_config.bzl @@ -54,6 +54,7 @@ _FEATURE_NAMES = struct( user_compile_flags = "user_compile_flags", thin_lto = "thin_lto", no_use_lto_indexing_bitcode_file = "no_use_lto_indexing_bitcode_file", + use_lto_native_object_directory = "use_lto_native_object_directory", thin_lto_linkstatic_tests_use_shared_nonlto_backends = "thin_lto_linkstatic_tests_use_shared_nonlto_backends", thin_lto_all_linkstatic_use_shared_nonlto_backends = "thin_lto_all_linkstatic_use_shared_nonlto_backends", enable_afdo_thinlto = "enable_afdo_thinlto", @@ -475,6 +476,9 @@ _no_use_lto_indexing_bitcode_file_feature = feature( name = _FEATURE_NAMES.no_use_lto_indexing_bitcode_file, ) +_use_lto_native_object_directory_feature = feature( + name = 
_FEATURE_NAMES.use_lto_native_object_directory, +) _thin_lto_feature = feature( name = _FEATURE_NAMES.thin_lto, flag_sets = [ @@ -1322,6 +1326,7 @@ _feature_name_to_feature = { _FEATURE_NAMES.user_compile_flags: _user_compile_flags_feature, _FEATURE_NAMES.thin_lto: _thin_lto_feature, _FEATURE_NAMES.no_use_lto_indexing_bitcode_file: _no_use_lto_indexing_bitcode_file_feature, + _FEATURE_NAMES.use_lto_native_object_directory: _use_lto_native_object_directory_feature, _FEATURE_NAMES.thin_lto_linkstatic_tests_use_shared_nonlto_backends: _thin_lto_linkstatic_tests_use_shared_nonlto_backends_feature, _FEATURE_NAMES.thin_lto_all_linkstatic_use_shared_nonlto_backends: _thin_lto_all_linkstatic_use_shared_nonlto_backends_feature, _FEATURE_NAMES.enable_afdo_thinlto: _enable_afdo_thinlto_feature, diff --git a/src/test/java/com/google/devtools/build/lib/rules/cpp/BUILD b/src/test/java/com/google/devtools/build/lib/rules/cpp/BUILD index 5bf38f2ea3ed7c..7fca39b50fae6f 100644 --- a/src/test/java/com/google/devtools/build/lib/rules/cpp/BUILD +++ b/src/test/java/com/google/devtools/build/lib/rules/cpp/BUILD @@ -110,6 +110,28 @@ java_test( ], ) +java_test( + name = "CcBinaryThinLtoObjDirTest", + srcs = ["CcBinaryThinLtoObjDirTest.java"], + deps = [ + "//src/main/java/com/google/devtools/build/lib/actions", + "//src/main/java/com/google/devtools/build/lib/actions:artifacts", + "//src/main/java/com/google/devtools/build/lib/analysis:analysis_cluster", + "//src/main/java/com/google/devtools/build/lib/analysis:configured_target", + "//src/main/java/com/google/devtools/build/lib/cmdline", + "//src/main/java/com/google/devtools/build/lib/rules/cpp", + "//src/main/java/com/google/devtools/build/lib/skyframe:configured_target_key", + "//src/main/java/com/google/devtools/build/lib/skyframe:rule_configured_target_value", + "//src/main/java/com/google/devtools/build/lib/vfs:pathfragment", + "//src/test/java/com/google/devtools/build/lib/actions/util", + 
"//src/test/java/com/google/devtools/build/lib/analysis/util", + "//src/test/java/com/google/devtools/build/lib/packages:testutil", + "//third_party:guava", + "//third_party:junit4", + "//third_party:truth", + ], +) + java_test( name = "CcBinarySplitFunctionsTest", srcs = ["CcBinarySplitFunctionsTest.java"], diff --git a/src/test/java/com/google/devtools/build/lib/rules/cpp/CcBinaryThinLtoObjDirTest.java b/src/test/java/com/google/devtools/build/lib/rules/cpp/CcBinaryThinLtoObjDirTest.java new file mode 100644 index 00000000000000..048cb049ac1bc6 --- /dev/null +++ b/src/test/java/com/google/devtools/build/lib/rules/cpp/CcBinaryThinLtoObjDirTest.java @@ -0,0 +1,2100 @@ +// Copyright 2020 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package com.google.devtools.build.lib.rules.cpp; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.stream; + +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; +import com.google.devtools.build.lib.actions.Action; +import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; +import com.google.devtools.build.lib.actions.Artifact; +import com.google.devtools.build.lib.actions.util.ActionsTestUtil; +import com.google.devtools.build.lib.analysis.ConfiguredTarget; +import com.google.devtools.build.lib.analysis.RuleContext; +import com.google.devtools.build.lib.analysis.actions.SpawnAction; +import com.google.devtools.build.lib.analysis.util.AnalysisMock; +import com.google.devtools.build.lib.analysis.util.BuildViewTestCase; +import com.google.devtools.build.lib.cmdline.RepositoryName; +import com.google.devtools.build.lib.packages.util.Crosstool.CcToolchainConfig; +import com.google.devtools.build.lib.packages.util.MockCcSupport; +import com.google.devtools.build.lib.rules.cpp.CppConfiguration.Tool; +import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; +import com.google.devtools.build.lib.skyframe.RuleConfiguredTargetValue; +import com.google.devtools.build.lib.vfs.PathFragment; +import java.io.IOException; +import java.util.List; +import java.util.stream.Stream; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** Tests for cc_binary with treeArtifacts, ThinLTO and separate obj dir for thinlto. 
*/ +@RunWith(JUnit4.class) +public class CcBinaryThinLtoObjDirTest extends BuildViewTestCase { + + private String targetName = "bin"; + + private ConfiguredTarget getCurrentTarget() throws Exception { + return getConfiguredTarget("//pkg:" + targetName); + } + + private CppLinkAction getLinkAction() throws Exception { + ConfiguredTarget pkg = getCurrentTarget(); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + return linkAction; + } + + private LtoBackendAction getBackendAction(String path) throws Exception { + return (LtoBackendAction) getPredecessorByInputName(getLinkAction(), path); + } + + private String getRootExecPath() throws Exception { + ConfiguredTarget pkg = getCurrentTarget(); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + return pkgArtifact.getRoot().getExecPathString(); + } + + private CppLinkAction getIndexAction(LtoBackendAction backendAction) throws Exception { + return (CppLinkAction) + getPredecessorByInputName( + backendAction, + (backendAction.getPrimaryOutput().getExecPathString() + ".thinlto.bc") + .replaceFirst(".lto-obj/", ".lto/")); + } + + @Before + public void createBasePkg() throws IOException { + scratch.overwriteFile( + "base/BUILD", "cc_library(name = 'system_malloc', visibility = ['//visibility:public'])"); + } + + public void createBuildFiles(String... 
extraCcBinaryParameters) throws Exception { + scratch.file( + "pkg/BUILD", + "load(':do_gen.bzl', 'test_generation', 'test_generation_2', 'test_generation_empty')", + "package(features = ['thin_lto', 'use_lto_native_object_directory'])", + "", + "test_generation(", + " name = 'tree',", + ")", + "test_generation_2(", + " name = 'tree_2',", + ")", + "test_generation_empty(", + " name = 'tree_empty',", + ")", + "cc_binary(name = '" + targetName + "',", + " srcs = ['binfile.cc', ],", + " deps = [ ':lib', ':tree', ':tree_2', 'tree_empty'], ", + String.join("", extraCcBinaryParameters), + " malloc = '//base:system_malloc')", + "cc_library(name = 'lib',", + " srcs = ['libfile.cc'],", + " hdrs = ['libfile.h'],", + " linkstamp = 'linkstamp.cc',", + " )"); + scratch.file( + "pkg/do_gen.bzl", + "def _create_cc_impl(ctx):", + " directory = ctx.actions.declare_directory(ctx.label.name + \"_gen_cc\")", + " ctx.actions.run_shell(", + " command = \"echo -e '#include \\\"pkg/treelib.h\\\"\\n" + + "Foo::~Foo() { }' > %s/file1.cc\" % directory.path,", + " outputs=[directory]", + " )", + " return DefaultInfo(files=depset([directory]))", + "", + "_create_cc = rule(implementation=_create_cc_impl)", + "def test_generation(name):", + " _create_cc(name=name + \"_ccgen\")", + "", + " native.cc_library(", + " name = name,", + " hdrs = [\"treelib.h\",],", + " srcs = [\":\" + name + \"_ccgen\",]", + ")", + "", + "def _create_cc_impl_2(ctx):", + " directory = ctx.actions.declare_directory(ctx.label.name + \"_gen_cc_2\")", + " ctx.actions.run_shell(", + " command = \"echo -e '#include \\\"pkg/treelib_2.h\\\"\\n" + + "int two() { return 2; }' > %s/file1.cc\" % directory.path +" + + " \"echo -e '#include \\\"pkg/treelib_2.h\\\"\\n" + + "int three() { return 3; }' > %s/file2.cc\" % directory.path,", + " outputs=[directory]", + " )", + " return DefaultInfo(files=depset([directory]))", + "", + "_create_cc_2 = rule(implementation=_create_cc_impl_2)", + "def test_generation_2(name):", + " 
_create_cc_2(name=name + \"_ccgen_2\")", + "", + " native.cc_library(", + " name = name,", + " hdrs = [\"treelib_2.h\",],", + " srcs = [\":\" + name + \"_ccgen_2\",]", + ")", + "", + "def _create_cc_impl_empty(ctx):", + " directory = ctx.actions.declare_directory(ctx.label.name + \"_gen_cc_empty\")", + " ctx.actions.run_shell(", + " command = \"echo 'empty'\",", + " outputs=[directory]", + " )", + " return DefaultInfo(files=depset([directory]))", + "", + "_create_cc_empty = rule(implementation=_create_cc_impl_empty)", + "def test_generation_empty(name):", + " _create_cc_empty(name=name + \"_ccgen_empty\")", + "", + " native.cc_library(", + " name = name,", + " srcs = [\":\" + name + \"_ccgen_empty\",]", + ")"); + + scratch.file("pkg/treelib.h", "class Foo{ public: ~Foo(); };"); + scratch.file("pkg/treelib_2.h", "int two(); int three();"); + + scratch.file( + "pkg/binfile.cc", + "#include \"pkg/libfile.h\"", + "#include \"pkg/treelib.h\"", + "#include \"pkg/treelib_2.h\"", + "int main() {", + " Foo foo;", + " return pkg() + two() + three(); }"); + scratch.file("pkg/libfile.cc", "int pkg() { return 42; }"); + scratch.file("pkg/libfile.h", "int pkg();"); + scratch.file("pkg/linkstamp.cc"); + } + + public void createTestFiles(String extraTestParameters, String extraLibraryParameters) + throws Exception { + scratch.file( + "pkg/BUILD", + "load(':do_gen.bzl', 'test_generation')", + "package(features = ['thin_lto', 'use_lto_native_object_directory'])", + "", + "test_generation(", + " name = 'tree',", + ")", + "cc_test(", + " name = 'bin_test',", + " srcs = ['bin_test.cc', ],", + " deps = [ ':lib', ':tree', ], ", + extraTestParameters, + " malloc = '//base:system_malloc'", + ")", + "cc_test(", + " name = 'bin_test2',", + " srcs = ['bin_test2.cc', ],", + " deps = [ ':lib', ':tree', ], ", + extraTestParameters, + " malloc = '//base:system_malloc'", + ")", + "cc_library(", + " name = 'lib',", + " srcs = ['libfile.cc'],", + " hdrs = ['libfile.h'],", + extraLibraryParameters, + 
" linkstamp = 'linkstamp.cc',", + ")"); + scratch.file( + "pkg/do_gen.bzl", + "def _create_cc_impl(ctx):", + " directory = ctx.actions.declare_directory(ctx.label.name + \"_gen_cc\")", + " ctx.actions.run_shell(", + " command = \"echo -e '#include \\\"pkg/treelib.h\\\"\\n" + + "Foo::~Foo() { }' > %s/file.cc\" % directory.path,", + " outputs=[directory]", + " )", + " return DefaultInfo(files=depset([directory]))", + "", + "_create_cc = rule(implementation=_create_cc_impl)", + "def test_generation(name):", + " _create_cc(name=name + \"_ccgen\")", + "", + " native.cc_library(", + " name = name,", + " hdrs = [\"treelib.h\",],", + " srcs = [\":\" + name + \"_ccgen\",]", + ")"); + scratch.file("pkg/treelib.h", "class Foo{ public: ~Foo(); };"); + scratch.file( + "pkg/bin_test.cc", + "#include \"pkg/libfile.h\"", + "#include \"pkg/treelib.h\"", + "int main() { Foo foo; return pkg(); }"); + scratch.file( + "pkg/bin_test2.cc", + "#include \"pkg/libfile.h\"", + "#include \"pkg/treelib.h\"", + "int main() { Foo foo; return pkg(); }"); + scratch.file("pkg/libfile.cc", "int pkg() { return 42; }"); + scratch.file("pkg/libfile.h", "int pkg();"); + scratch.file("pkg/linkstamp.cc"); + } + + @Test + public void testActionGraph() throws Exception { + createBuildFiles(); + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration("--noincompatible_make_thinlto_command_lines_standalone"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + {.o.thinlto.bc,.o.imports} <=[LTOIndexing]= + .o <= [CppCompile] .cc + */ + ConfiguredTarget pkg = getCurrentTarget(); + CppLinkAction linkAction = getLinkAction(); + String rootExecPath = getRootExecPath(); + + assertThat(ActionsTestUtil.getFirstArtifactEndingWith(linkAction.getInputs(), "linkstamp.o")) + .isNotNull(); + + List commandLine = linkAction.getLinkCommandLineForTesting().getRawLinkArgv(); + String prefix = getTargetConfiguration().getOutputDirectory(RepositoryName.MAIN) + .getExecPathString(); + assertThat(commandLine) + .containsAtLeast( + prefix + "/bin/pkg/bin.lto.merged.o", + "thinlto_param_file=" + prefix + "/bin/pkg/bin-lto-final.params") + .inOrder(); + + // We have no bitcode files: all files have pkg/bin.lto/ + for (String arg : commandLine) { + if (arg.contains("_objs") && !arg.contains("linkstamp.o")) { + assertThat(arg).contains("pkg/bin.lto"); + } + } + + assertThat(artifactsToStrings(linkAction.getInputs())) + .containsAtLeast( + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o", + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o", + "bin pkg/bin-2.params", + "bin pkg/bin-lto-final.params"); + + LtoBackendAction backendAction = + getBackendAction("pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + + assertThat(artifactsToStrings(backendAction.getInputs())) + .containsAtLeast( + "bin pkg/bin.lto/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o.thinlto.bc", + "bin pkg/bin.lto/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o.imports"); + + assertThat(backendAction.getArguments()) + .containsAtLeast( + "thinlto_index=" + + prefix + + "/bin/pkg/bin.lto/" + + rootExecPath + + "/pkg/_objs/bin/binfile.pic.o.thinlto.bc", + "thinlto_output_object_file=" + + prefix + + "/bin/pkg/bin.lto-obj/" + + rootExecPath + + "/pkg/_objs/bin/binfile.pic.o", + 
"thinlto_input_bitcode_file=" + prefix + "/bin/pkg/_objs/bin/binfile.pic.o"); + + CppLinkAction indexAction = getIndexAction(backendAction); + + RuleConfiguredTargetValue configuredTargetValue = + (RuleConfiguredTargetValue) + getSkyframeExecutor() + .getEvaluator() + .getExistingEntryAtCurrentlyEvaluatingVersion( + ConfiguredTargetKey.builder() + .setLabel(pkg.getLabel()) + .setConfiguration(getConfiguration(pkg)) + .build() + .toKey()) + .getValue(); + ImmutableList linkstampCompileActions = + configuredTargetValue.getActions().stream() + .filter(a -> a.getMnemonic().equals("CppLinkstampCompile")) + .collect(toImmutableList()); + assertThat(linkstampCompileActions).hasSize(1); + ActionAnalysisMetadata linkstampCompileAction = linkstampCompileActions.get(0); + assertThat(indexAction.getInputs().toList()) + .containsNoneIn(linkstampCompileAction.getOutputs()); + + assertThat(indexAction.getArguments()) + .containsAtLeast( + "param_file=" + prefix + "/bin/pkg/bin-lto-final.params", + "prefix_replace=" + + ";" + + prefix + + "/bin/pkg/bin.lto/" + + ";" + + prefix + + "/bin/pkg/bin.lto-obj/", + "thinlto_merged_object_file=" + prefix + "/bin/pkg/bin.lto.merged.o", + "object_suffix_replace=.indexing.o;.o"); + assertThat(indexAction.getArguments()) + .doesNotContain("thinlto_param_file=" + prefix + "/bin/pkg/bin-lto-final.params"); + + assertThat(artifactsToStrings(indexAction.getOutputs())) + .containsAtLeast( + "bin pkg/bin.lto/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o.imports", + "bin pkg/bin.lto/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o.thinlto.bc", + "bin pkg/bin.lto/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o.imports", + "bin pkg/bin.lto/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o.thinlto.bc", + "bin pkg/bin-lto-final.params"); + + assertThat(indexAction.getMnemonic()).isEqualTo("CppLTOIndexing"); + + assertThat(artifactsToStrings(indexAction.getInputs())) + .containsAtLeast( + "bin pkg/_objs/bin/binfile.pic.indexing.o", "bin 
pkg/_objs/lib/libfile.pic.indexing.o"); + + CppCompileAction bitcodeAction = + (CppCompileAction) + getPredecessorByInputName(indexAction, "pkg/_objs/bin/binfile.pic.indexing.o"); + assertThat(bitcodeAction.getMnemonic()).isEqualTo("CppCompile"); + assertThat(bitcodeAction.getArguments()) + .contains("lto_indexing_bitcode=" + prefix + "/bin/pkg/_objs/bin/binfile.pic.indexing.o"); + } + + @Test + public void testLinkshared() throws Exception { + targetName = "bin.so"; + createBuildFiles("linkshared = 1,"); + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration(); + + CppLinkAction linkAction = getLinkAction(); + String rootExecPath = getRootExecPath(); + + Action backendAction = + getPredecessorByInputName( + linkAction, "pkg/bin.so.lto-obj/" + rootExecPath + "/pkg/_objs/bin.so/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + } + + @Test + public void testNoLinkstatic() throws Exception { + createBuildFiles("linkstatic = 0,"); + setupThinLTOCrosstool( + CppRuleClasses.SUPPORTS_DYNAMIC_LINKER, + CppRuleClasses.SUPPORTS_PIC, + CppRuleClasses.SUPPORTS_INTERFACE_SHARED_LIBRARIES); + useConfiguration("--noincompatible_make_thinlto_command_lines_standalone"); + + /* + We follow the chain from the final product backwards to verify intermediate actions. 
+ + binary <=[Link]= + .ifso <=[SolibSymlink]= + _S...ifso <=[SolibSymlink]= + .ifso <=[Link]= + .lto-obj/...o <=[LTOBackend]= + {.o.thinlto.bc,.o.imports} <=[LTOIndexing]= + .o <= [CppCompile] .cc + */ + CppLinkAction linkAction = getLinkAction(); + String rootExecPath = getRootExecPath(); + + List commandLine = linkAction.getLinkCommandLineForTesting().getRawLinkArgv(); + String prefix = getTargetConfiguration().getOutputDirectory(RepositoryName.MAIN) + .getExecPathString(); + + assertThat(commandLine).contains("-Wl,@" + prefix + "/bin/pkg/bin-lto-final.params"); + + // We have no bitcode files: all files have pkg/bin.lto/ + for (String arg : commandLine) { + if (arg.contains("_objs") && !arg.contains("linkstamp.o")) { + assertThat(arg).contains("pkg/bin.lto"); + } + } + + assertThat(artifactsToStrings(linkAction.getInputs())) + .containsAtLeast( + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o", + "bin _solib_k8/libpkg_Sliblib.ifso", + "bin pkg/bin-2.params", + "bin pkg/bin-lto-final.params"); + + SolibSymlinkAction solibSymlinkAction = + (SolibSymlinkAction) getPredecessorByInputName(linkAction, "_solib_k8/libpkg_Sliblib.ifso"); + assertThat(solibSymlinkAction.getMnemonic()).isEqualTo("SolibSymlink"); + + CppLinkAction libLinkAction = + (CppLinkAction) getPredecessorByInputName(solibSymlinkAction, "bin/pkg/liblib.ifso"); + assertThat(libLinkAction.getMnemonic()).isEqualTo("CppLink"); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + libLinkAction, + "pkg/liblib.so.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + + assertThat(artifactsToStrings(backendAction.getInputs())) + .contains( + "bin pkg/liblib.so.lto/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o.thinlto.bc"); + + assertThat(backendAction.getArguments()) + .containsAtLeast( + "thinlto_index=" + + prefix + + "/bin/pkg/liblib.so.lto/" + + rootExecPath + + 
"/pkg/_objs/lib/libfile.pic.o.thinlto.bc", + "thinlto_output_object_file=" + + prefix + + "/bin/pkg/liblib.so.lto-obj/" + + rootExecPath + + "/pkg/_objs/lib/libfile.pic.o", + "thinlto_input_bitcode_file=" + prefix + "/bin/pkg/_objs/lib/libfile.pic.o"); + + CppLinkAction indexAction = + (CppLinkAction) + getPredecessorByInputName( + backendAction, + "pkg/liblib.so.lto/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o.thinlto.bc"); + + assertThat(indexAction.getArguments()) + .containsAtLeast( + "param_file=" + prefix + "/bin/pkg/liblib.so-lto-final.params", + "prefix_replace=" + + ";" + + prefix + + "/bin/pkg/liblib.so.lto/" + + ";" + + prefix + + "/bin/pkg/liblib.so.lto-obj/", + "object_suffix_replace=.indexing.o;.o"); + + assertThat(artifactsToStrings(indexAction.getOutputs())) + .containsAtLeast( + "bin pkg/liblib.so.lto/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o.imports", + "bin pkg/liblib.so.lto/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o.thinlto.bc", + "bin pkg/liblib.so-lto-final.params"); + + assertThat(indexAction.getMnemonic()).isEqualTo("CppLTOIndexing"); + + assertThat(artifactsToStrings(indexAction.getInputs())) + .contains("bin pkg/_objs/lib/libfile.pic.indexing.o"); + + CppCompileAction bitcodeAction = + (CppCompileAction) + getPredecessorByInputName(indexAction, "pkg/_objs/lib/libfile.pic.indexing.o"); + assertThat(bitcodeAction.getMnemonic()).isEqualTo("CppCompile"); + assertThat(bitcodeAction.getArguments()) + .contains("lto_indexing_bitcode=" + prefix + "/bin/pkg/_objs/lib/libfile.pic.indexing.o"); + } + + /** Helper method to get the root prefix from the given dwpFile. */ + private static PathFragment dwpRootPrefix(Artifact dwpFile) throws Exception { + return dwpFile + .getExecPath() + .subFragment( + 0, dwpFile.getExecPath().segmentCount() - dwpFile.getRootRelativePath().segmentCount()); + } + + /** Helper method that checks that a .dwp has the expected generating action structure. 
*/ + private void validateDwp( + RuleContext ruleContext, + Artifact dwpFile, + CcToolchainProvider toolchain, + List expectedInputs) + throws Exception { + SpawnAction dwpAction = (SpawnAction) getGeneratingAction(dwpFile); + String dwpToolPath = toolchain.getToolPathFragment(Tool.DWP, ruleContext).getPathString(); + assertThat(dwpAction.getMnemonic()).isEqualTo("CcGenerateDwp"); + assertThat(dwpToolPath).isEqualTo(dwpAction.getCommandFilename()); + List commandArgs = dwpAction.getArguments(); + // The first argument should be the command being executed. + assertThat(dwpToolPath).isEqualTo(commandArgs.get(0)); + // The final two arguments should be "-o dwpOutputFile". + assertThat(commandArgs.subList(commandArgs.size() - 2, commandArgs.size())) + .containsExactly("-o", dwpFile.getExecPathString()) + .inOrder(); + // The remaining arguments should be the set of .dwo inputs (in any order). + assertThat(commandArgs.subList(1, commandArgs.size() - 2)) + .containsExactlyElementsIn(expectedInputs); + } + + @Test + public void testFission() throws Exception { + createBuildFiles(); + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.PER_OBJECT_DEBUG_INFO); + useConfiguration("--fission=yes", "--copt=-g0"); + + String rootExecPath = getRootExecPath(); + LtoBackendAction backendAction = + getBackendAction("pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(artifactsToStrings(backendAction.getOutputs())) + .containsExactly( + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o", + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.dwo"); + + assertThat(backendAction.getArguments()).containsAtLeast("-g0", "per_object_debug_info_option"); + + backendAction = + getBackendAction("pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + 
assertThat(artifactsToStrings(backendAction.getOutputs())) + .containsExactly( + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o", + "bin pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.dwo"); + + assertThat(backendAction.getArguments()).contains("per_object_debug_info_option"); + + // Now check the dwp action. + ConfiguredTarget pkg = getCurrentTarget(); + Artifact dwpFile = getFileConfiguredTarget(pkg.getLabel() + ".dwp").getArtifact(); + PathFragment rootPrefix = dwpRootPrefix(dwpFile); + RuleContext ruleContext = getRuleContext(pkg); + CcToolchainProvider toolchain = + CppHelper.getToolchainUsingDefaultCcToolchainAttribute(ruleContext); + validateDwp( + ruleContext, + dwpFile, + toolchain, + ImmutableList.of( + rootPrefix + "/pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.dwo", + rootPrefix + "/pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.dwo")); + } + + @Test + public void testNoLinkstaticFission() throws Exception { + createBuildFiles("linkstatic = 0,"); + setupThinLTOCrosstool( + CppRuleClasses.SUPPORTS_PIC, + CppRuleClasses.SUPPORTS_INTERFACE_SHARED_LIBRARIES, + CppRuleClasses.SUPPORTS_DYNAMIC_LINKER, + CppRuleClasses.PER_OBJECT_DEBUG_INFO); + useConfiguration("--fission=yes"); + + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + SolibSymlinkAction solibSymlinkAction = + (SolibSymlinkAction) getPredecessorByInputName(linkAction, "_solib_k8/libpkg_Sliblib.ifso"); + assertThat(solibSymlinkAction.getMnemonic()).isEqualTo("SolibSymlink"); + + CppLinkAction libLinkAction = + (CppLinkAction) getPredecessorByInputName(solibSymlinkAction, "bin/pkg/liblib.ifso"); + assertThat(libLinkAction.getMnemonic()).isEqualTo("CppLink"); + + LtoBackendAction backendAction = + 
(LtoBackendAction) + getPredecessorByInputName( + libLinkAction, + "pkg/liblib.so.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(artifactsToStrings(backendAction.getOutputs())) + .containsExactly( + "bin pkg/liblib.so.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o", + "bin pkg/liblib.so.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.dwo"); + + assertThat(backendAction.getArguments()).contains("per_object_debug_info_option"); + + // Check the dwp action. + Artifact dwpFile = getFileConfiguredTarget(pkg.getLabel() + ".dwp").getArtifact(); + PathFragment rootPrefix = dwpRootPrefix(dwpFile); + RuleContext ruleContext = getRuleContext(pkg); + CcToolchainProvider toolchain = + CppHelper.getToolchainUsingDefaultCcToolchainAttribute(ruleContext); + validateDwp( + ruleContext, + dwpFile, + toolchain, + ImmutableList.of( + rootPrefix + "/pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.dwo")); + } + + @Test + public void testLinkstaticCcTestFission() throws Exception { + createTestFiles("linkstatic = 1,", ""); + + setupThinLTOCrosstool( + CppRuleClasses.SUPPORTS_PIC, + CppRuleClasses.THIN_LTO_LINKSTATIC_TESTS_USE_SHARED_NONLTO_BACKENDS, + CppRuleClasses.PER_OBJECT_DEBUG_INFO); + useConfiguration( + "--fission=yes", "--features=thin_lto_linkstatic_tests_use_shared_nonlto_backends"); + + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin_test"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + + // All backends should be shared non-LTO in this case + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, + "shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/bin_test/bin_test.pic.o"); + 
assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(artifactsToStrings(backendAction.getOutputs())) + .containsExactly( + "bin shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/bin_test/bin_test.pic.o", + "bin shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/bin_test/bin_test.pic.dwo"); + + assertThat(backendAction.getArguments()).contains("per_object_debug_info_option"); + + backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(backendAction.getArguments()).contains("-fPIC"); + assertThat(artifactsToStrings(backendAction.getOutputs())) + .containsExactly( + "bin shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o", + "bin shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.dwo"); + + assertThat(backendAction.getArguments()).contains("per_object_debug_info_option"); + + // Now check the dwp action. 
+ Artifact dwpFile = getFileConfiguredTarget(pkg.getLabel() + ".dwp").getArtifact(); + PathFragment rootPrefix = dwpRootPrefix(dwpFile); + RuleContext ruleContext = getRuleContext(pkg); + CcToolchainProvider toolchain = + CppHelper.getToolchainUsingDefaultCcToolchainAttribute(ruleContext); + validateDwp( + ruleContext, + dwpFile, + toolchain, + ImmutableList.of( + rootPrefix + "/shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.dwo", + rootPrefix + + "/shared.nonlto-obj/" + + rootExecPath + + "/pkg/_objs/bin_test/bin_test.pic.dwo")); + } + + @Test + public void testLinkstaticCcTest() throws Exception { + createTestFiles("linkstatic = 1,", ""); + + setupThinLTOCrosstool( + CppRuleClasses.SUPPORTS_PIC, + CppRuleClasses.THIN_LTO_LINKSTATIC_TESTS_USE_SHARED_NONLTO_BACKENDS, + CppRuleClasses.PER_OBJECT_DEBUG_INFO); + useConfiguration("--features=thin_lto_linkstatic_tests_use_shared_nonlto_backends"); + + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin_test"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + + ConfiguredTarget pkg2 = getConfiguredTarget("//pkg:bin_test2"); + Artifact pkgArtifact2 = getFilesToBuild(pkg2).getSingleton(); + CppLinkAction linkAction2 = (CppLinkAction) getGeneratingAction(pkgArtifact2); + + // All backends should be shared non-LTO in this case + String rootExecPath1 = pkgArtifact.getRoot().getExecPathString(); + String rootExecPath2 = pkgArtifact.getRoot().getExecPathString(); + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, + "shared.nonlto-obj/" + rootExecPath1 + "/pkg/_objs/bin_test/bin_test.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + + backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "shared.nonlto-obj/" + rootExecPath1 + "/pkg/_objs/lib/libfile.pic.o"); + 
assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(backendAction.getArguments()).contains("-fPIC"); + + LtoBackendAction backendAction2 = + (LtoBackendAction) + getPredecessorByInputName( + linkAction2, "shared.nonlto-obj/" + rootExecPath2 + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction2.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + + assertThat(backendAction).isEqualTo(backendAction2); + } + + @Test + public void testTestOnlyTarget() throws Exception { + createBuildFiles("testonly = 1,"); + + setupThinLTOCrosstool( + CppRuleClasses.SUPPORTS_PIC, + CppRuleClasses.THIN_LTO_LINKSTATIC_TESTS_USE_SHARED_NONLTO_BACKENDS); + useConfiguration("--features=thin_lto_linkstatic_tests_use_shared_nonlto_backends"); + + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "shared.nonlto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + } + + @Test + public void testUseSharedAllLinkstatic() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool( + CppRuleClasses.THIN_LTO_ALL_LINKSTATIC_USE_SHARED_NONLTO_BACKENDS, + CppRuleClasses.SUPPORTS_PIC); + useConfiguration("--features=thin_lto_all_linkstatic_use_shared_nonlto_backends"); + + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "shared.nonlto-obj/" + 
rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + } + + private Action getPredecessorByInputName(Action action, String str) { + for (Artifact a : action.getInputs().toList()) { + if (a.getExecPathString().contains(str)) { + return getGeneratingAction(a); + } + } + return null; + } + + @Test + public void testFdoInstrument() throws Exception { + scratch.file( + "pkg/BUILD", + "package(features = ['thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.FDO_INSTRUMENT); + useConfiguration("--fdo_instrument=profiles"); + + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + // If the LtoBackendAction incorrectly tries to add the fdo_instrument + // feature, we will fail with an "unknown variable 'fdo_instrument_path'" + // error. But let's also explicitly confirm that the fdo_instrument + // option didn't end up here. + assertThat(backendAction.getArguments()).doesNotContain("fdo_instrument_option"); + } + + @Test + public void testLtoIndexOpt() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration( + "--ltoindexopt=anltoindexopt", "--noincompatible_make_thinlto_command_lines_standalone"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + {.o.thinlto.bc,.o.imports} <=[LTOIndexing]= + .o <= [CppCompile] .cc + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + + CppLinkAction indexAction = + (CppLinkAction) + getPredecessorByInputName( + backendAction, + "pkg/bin.lto/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o.thinlto.bc"); + + assertThat(indexAction.getArguments()).contains("anltoindexopt"); + } + + @Test + public void testLtoStandaloneCommandLines() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration( + "--ltoindexopt=anltoindexopt", + "--incompatible_make_thinlto_command_lines_standalone", + "--features=thin_lto", + "--features=use_lto_native_object_directory"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + {.o.thinlto.bc,.o.imports} <=[LTOIndexing]= + .o <= [CppCompile] .cc + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + + CppLinkAction indexAction = + (CppLinkAction) + getPredecessorByInputName( + backendAction, + "pkg/bin.lto/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o.thinlto.bc"); + + assertThat(indexAction.getArguments()) + .contains("--i_come_from_standalone_lto_index=anltoindexopt"); + } + + @Test + public void testCopt() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration("--copt=acopt"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(backendAction.getArguments()).contains("acopt"); + } + + @Test + public void testPerFileCopt() throws Exception { + createBuildFiles(); + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration( + "--per_file_copt=binfile\\.cc@copt1", + "--per_file_copt=libfile\\.cc@copt2", + "--per_file_copt=.*\\.cc,-binfile\\.cc@copt2"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getArguments()).contains("copt1"); + assertThat(backendAction.getArguments()).doesNotContain("copt2"); + + backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction.getArguments()).doesNotContain("copt1"); + assertThat(backendAction.getArguments()).contains("copt2"); + } + + @Test + public void testCoptNoCoptAttributes() throws Exception { + createBuildFiles("copts = ['acopt', 'nocopt1'], nocopts = 'nocopt1|nocopt2',"); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration("--copt=nocopt2", "--noincompatible_disable_nocopts"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(backendAction.getArguments()).contains("acopt"); + // TODO(b/122303926): Remove when nocopts are removed, or uncomment and fix if not removing. + // assertThat(backendAction.getArguments()).doesNotContain("nocopt1"); + // assertThat(backendAction.getArguments()).doesNotContain("nocopt2"); + } + + @Test + public void testLtoBackendOpt() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, MockCcSupport.USER_COMPILE_FLAGS); + useConfiguration("--ltobackendopt=anltobackendopt"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(backendAction.getArguments()) + .containsAtLeast("--default-compile-flag", "anltobackendopt"); + } + + @Test + public void testPerFileLtoBackendOpt() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration( + "--per_file_ltobackendopt=binfile\\.pic\\.o@ltobackendopt1", + "--per_file_ltobackendopt=.*\\.o,-binfile\\.pic\\.o@ltobackendopt2"); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + */ + ConfiguredTarget pkg = getConfiguredTarget("//pkg:bin"); + Artifact pkgArtifact = getFilesToBuild(pkg).getSingleton(); + String rootExecPath = pkgArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(pkgArtifact); + assertThat(linkAction.getOutputs()).containsExactly(pkgArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getArguments()).contains("ltobackendopt1"); + assertThat(backendAction.getArguments()).doesNotContain("ltobackendopt2"); + + backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/lib/libfile.pic.o"); + assertThat(backendAction.getArguments()).doesNotContain("ltobackendopt1"); + assertThat(backendAction.getArguments()).contains("ltobackendopt2"); + } + + @Test + public void testNoUseLtoIndexingBitcodeFile() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool( + CppRuleClasses.NO_USE_LTO_INDEXING_BITCODE_FILE, CppRuleClasses.SUPPORTS_PIC); + useConfiguration( + "--features=no_use_lto_indexing_bitcode_file", + "--features=use_lto_native_object_directory"); + String rootExecPath = getRootExecPath(); + + /* + We follow the chain from the final product backwards. 
+ + binary <=[Link]= + .lto-obj/...o <=[LTOBackend]= + {.o.thinlto.bc,.o.imports} <=[LTOIndexing]= + .o <= [CppCompile] .cc + */ + CppLinkAction indexAction = + getIndexAction( + getBackendAction("pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o")); + + assertThat(indexAction.getArguments()).doesNotContain("object_suffix_replace"); + + assertThat(artifactsToStrings(indexAction.getInputs())) + .containsAtLeast("bin pkg/_objs/bin/binfile.pic.o", "bin pkg/_objs/lib/libfile.pic.o"); + + CppCompileAction bitcodeAction = + (CppCompileAction) getPredecessorByInputName(indexAction, "pkg/_objs/bin/binfile.pic.o"); + assertThat(bitcodeAction.getArguments()).doesNotContain("lto_indexing_bitcode="); + } + + @Test + public void testAutoFdo() throws Exception { + scratch.file( + "pkg/BUILD", + "package(features = ['thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.afdo", ""); + + setupThinLTOCrosstool(CppRuleClasses.AUTOFDO); + useConfiguration("--fdo_optimize=/pkg/profile.afdo", "--compilation_mode=opt"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + + // Checks that -fauto-profile is added to the LtoBackendAction. 
+ assertThat(Joiner.on(" ").join(backendAction.getArguments())).containsMatch( + "-fauto-profile=[^ ]*/profile.afdo"); + assertThat(ActionsTestUtil.baseArtifactNames(backendAction.getInputs())).contains( + "profile.afdo"); + } + + private void setupThinLTOCrosstool(String... extraFeatures) throws Exception { + String[] allFeatures = + Stream.concat( + Stream.of( + CppRuleClasses.THIN_LTO, + CppRuleClasses.USE_LTO_NATIVE_OBJECT_DIRECTORY, + CppRuleClasses.SUPPORTS_START_END_LIB, + MockCcSupport.HOST_AND_NONHOST_CONFIGURATION_FEATURES), + stream(extraFeatures)) + .toArray(String[]::new); + AnalysisMock.get() + .ccSupport() + .setupCcToolchainConfig( + mockToolsConfig, CcToolchainConfig.builder().withFeatures(allFeatures)); + } + + private void setupAutoFdoThinLtoCrosstool() throws Exception { + setupThinLTOCrosstool( + CppRuleClasses.AUTOFDO, + CppRuleClasses.ENABLE_AFDO_THINLTO, + CppRuleClasses.AUTOFDO_IMPLICIT_THINLTO); + } + + /** + * Tests that ThinLTO is not enabled for AFDO with LLVM without + * --features=autofdo_implicit_thinlto. 
+ */ + @Test + public void testAutoFdoNoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.afdo", ""); + + setupAutoFdoThinLtoCrosstool(); + useConfiguration("--fdo_optimize=/pkg/profile.afdo", "--compilation_mode=opt"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** Tests that --features=autofdo_implicit_thinlto enables ThinLTO for AFDO with LLVM. 
*/ + @Test + public void testAutoFdoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.afdo", ""); + + setupAutoFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.afdo", + "--compilation_mode=opt", + "--features=autofdo_implicit_thinlto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // For ThinLTO compilation we should have a non-null backend action + assertThat(backendAction).isNotNull(); + } + + /** + * Tests that --features=-thin_lto overrides --features=autofdo_implicit_thinlto and prevents + * enabling ThinLTO for AFDO with LLVM. 
+ */ + @Test + public void testAutoFdoImplicitThinLtoDisabledOption() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.afdo", ""); + + setupAutoFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.afdo", + "--compilation_mode=opt", + "--features=autofdo_implicit_thinlto", + "--features=-thin_lto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** + * Tests that features=[-thin_lto] in the build rule overrides --features=autofdo_implicit_thinlto + * and prevents enabling ThinLTO for AFDO with LLVM. 
+ */ + @Test + public void testAutoFdoImplicitThinLtoDisabledRule() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " features = ['-thin_lto', 'use_lto_native_object_directory'],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.afdo", ""); + + setupAutoFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.afdo", + "--compilation_mode=opt", + "--features=autofdo_implicit_thinlto"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** + * Tests that features=[-thin_lto] in the package overrides --features=autofdo_implicit_thinlto + * and prevents enabling ThinLTO for AFDO with LLVM. 
+ */ + @Test + public void testAutoFdoImplicitThinLtoDisabledPackage() throws Exception { + scratch.file( + "pkg/BUILD", + "package(features = ['-thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.afdo", ""); + + setupAutoFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.afdo", + "--compilation_mode=opt", + "--features=autofdo_implicit_thinlto"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + private void setupFdoThinLtoCrosstool() throws Exception { + setupThinLTOCrosstool( + CppRuleClasses.FDO_OPTIMIZE, + CppRuleClasses.ENABLE_FDO_THINLTO, + MockCcSupport.FDO_IMPLICIT_THINLTO); + } + + /** + * Tests that ThinLTO is not enabled for FDO with LLVM without --features=fdo_implicit_thinlto. 
+ */ + @Test + public void testFdoNoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.zip", ""); + + setupFdoThinLtoCrosstool(); + useConfiguration("--fdo_optimize=/pkg/profile.zip", "--compilation_mode=opt"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** Tests that --features=fdo_implicit_thinlto enables ThinLTO for FDO with LLVM. 
*/ + @Test + public void testFdoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.zip", ""); + + setupFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.zip", + "--compilation_mode=opt", + "--features=fdo_implicit_thinlto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // For ThinLTO compilation we should have a non-null backend action + assertThat(backendAction).isNotNull(); + } + + /** + * Tests that --features=-thin_lto overrides --features=fdo_implicit_thinlto and prevents enabling + * ThinLTO for FDO with LLVM. 
+ */ + @Test + public void testFdoImplicitThinLtoDisabledOption() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.zip", ""); + + setupFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.zip", + "--compilation_mode=opt", + "--features=fdo_implicit_thinlto", + "--features=-thin_lto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** + * Tests that features=[-thin_lto] in the build rule overrides --features=fdo_implicit_thinlto and + * prevents enabling ThinLTO for FDO with LLVM. 
+ */ + @Test + public void testFdoImplicitThinLtoDisabledRule() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " features = ['-thin_lto', 'use_lto_native_object_directory'],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.zip", ""); + + setupFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.zip", + "--compilation_mode=opt", + "--features=fdo_implicit_thinlto"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** + * Tests that features=[-thin_lto] in the package overrides --features=fdo_implicit_thinlto and + * prevents enabling ThinLTO for FDO with LLVM. 
+ */ + @Test + public void testFdoImplicitThinLtoDisabledPackage() throws Exception { + setupThinLTOCrosstool(); + scratch.file( + "pkg/BUILD", + "package(features = ['-thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + scratch.file("pkg/profile.zip", ""); + + setupFdoThinLtoCrosstool(); + useConfiguration( + "--fdo_optimize=/pkg/profile.zip", + "--compilation_mode=opt", + "--features=fdo_implicit_thinlto"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + private void setupXBinaryFdoThinLtoCrosstool() throws Exception { + setupThinLTOCrosstool( + CppRuleClasses.XBINARYFDO, + CppRuleClasses.ENABLE_XFDO_THINLTO, + MockCcSupport.XFDO_IMPLICIT_THINLTO); + } + + /** + * Tests that ThinLTO is not enabled for XFDO with LLVM without + * --features=xbinaryfdo_implicit_thinlto. 
+ */ + @Test + public void testXBinaryFdoNoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ])", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupXBinaryFdoThinLtoCrosstool(); + useConfiguration("--xbinary_fdo=//pkg:out.xfdo", "--compilation_mode=opt"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** Tests that --features=xbinaryfdo_implicit_thinlto enables ThinLTO for XFDO with LLVM. 
*/ + @Test + public void testXBinaryFdoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ])", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupXBinaryFdoThinLtoCrosstool(); + useConfiguration( + "--xbinary_fdo=//pkg:out.xfdo", + "--compilation_mode=opt", + "--features=xbinaryfdo_implicit_thinlto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // For ThinLTO compilation we should have a non-null backend action + assertThat(backendAction).isNotNull(); + } + + /** + * Tests that --features=-thin_lto overrides --features=xbinaryfdo_implicit_thinlto and prevents + * enabling ThinLTO for XFDO with LLVM. 
+ */ + @Test + public void testXBinaryFdoImplicitThinLtoDisabledOption() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ])", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupXBinaryFdoThinLtoCrosstool(); + useConfiguration( + "--xbinary_fdo=//pkg:out.xfdo", + "--compilation_mode=opt", + "--features=xbinaryfdo_implicit_thinlto", + "--features=-thin_lto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** + * Tests that features=[-thin_lto] in the build rule overrides + * --features=xbinaryfdo_implicit_thinlto and prevents enabling ThinLTO for XFDO with LLVM. 
+ */ + @Test + public void testXBinaryFdoImplicitThinLtoDisabledRule() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " features = ['-thin_lto', 'use_lto_native_object_directory'])", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupXBinaryFdoThinLtoCrosstool(); + useConfiguration( + "--xbinary_fdo=//pkg:out.xfdo", + "--compilation_mode=opt", + "--features=xbinaryfdo_implicit_thinlto"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + /** + * Tests that features=[-thin_lto] in the package overrides --features=fdo_implicit_thinlto and + * prevents enabling ThinLTO for XFDO with LLVM. 
+ */ + @Test + public void testXBinaryFdoImplicitThinLtoDisabledPackage() throws Exception { + scratch.file( + "pkg/BUILD", + "package(features = ['-thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ])", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupXBinaryFdoThinLtoCrosstool(); + useConfiguration( + "--xbinary_fdo=//pkg:out.xfdo", + "--compilation_mode=opt", + "--features=xbinaryfdo_implicit_thinlto"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + @Test + public void testXBinaryFdo() throws Exception { + scratch.file( + "pkg/BUILD", + "package(features = ['thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupThinLTOCrosstool(CppRuleClasses.XBINARYFDO); + useConfiguration("--xbinary_fdo=//pkg:out.xfdo", "--compilation_mode=opt"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + 
getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + + // Checks that -fauto-profile is added to the LtoBackendAction. + assertThat(Joiner.on(" ").join(backendAction.getArguments())) + .containsMatch("-fauto-profile=[^ ]*/profiles.xfdo"); + assertThat(ActionsTestUtil.baseArtifactNames(backendAction.getInputs())) + .contains("profiles.xfdo"); + } + + /** + * Tests that ThinLTO is not enabled for XBINARYFDO with --features=autofdo_implicit_thinlto and + * --features=fdo_implicit_thinlto. + */ + @Test + public void testXBinaryFdoNoAutoFdoOrFdoImplicitThinLto() throws Exception { + scratch.file( + "pkg/BUILD", + "", + "cc_binary(name = 'bin',", + " srcs = ['binfile.cc', ],", + " malloc = '//base:system_malloc')", + "fdo_profile(name='out.xfdo', profile='profiles.xfdo')"); + + scratch.file("pkg/binfile.cc", "int main() {}"); + + setupThinLTOCrosstool( + CppRuleClasses.ENABLE_FDO_THINLTO, + MockCcSupport.FDO_IMPLICIT_THINLTO, + CppRuleClasses.ENABLE_AFDO_THINLTO, + MockCcSupport.AUTOFDO_IMPLICIT_THINLTO, + CppRuleClasses.XBINARYFDO); + useConfiguration( + "--xbinary_fdo=//pkg:out.xfdo", + "--compilation_mode=opt", + "--features=autofdo_implicit_thinlto", + "--features=fdo_implicit_thinlto", + "--features=use_lto_native_object_directory"); + + Artifact binArtifact = getFilesToBuild(getConfiguredTarget("//pkg:bin")).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/pkg/binfile.o"); + // We should not have a ThinLTO backend action + assertThat(backendAction).isNull(); + } + + @Test + public void testPICBackendOrder() throws Exception { + createBuildFiles(); + + 
setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC); + useConfiguration("--copt=-fno-PIE"); + String rootExecPath = getRootExecPath(); + LtoBackendAction backendAction = + getBackendAction("pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.pic.o"); + assertThat(backendAction.getMnemonic()).isEqualTo("CcLtoBackendCompile"); + assertThat(backendAction.getArguments()).containsAtLeast("-fno-PIE", "-fPIC").inOrder(); + } + + @Test + public void testPropellerOptimizeAbsoluteOptions() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.AUTOFDO); + + useConfiguration( + "--propeller_optimize_absolute_cc_profile=/tmp/cc_profile.txt", + "--propeller_optimize_absolute_ld_profile=/tmp/ld_profile.txt", + "--compilation_mode=opt"); + Artifact binArtifact = getFilesToBuild(getCurrentTarget()).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + assertThat(ActionsTestUtil.baseArtifactNames(linkAction.getInputs())) + .contains("ld_profile.txt"); + + List commandLine = linkAction.getLinkCommandLineForTesting().getRawLinkArgv(); + assertThat(commandLine.toString()) + .containsMatch("-Wl,--symbol-ordering-file=.*/ld_profile.txt"); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + + String expectedCompilerFlag = "-fbasic-block-sections=list=.*/cc_profile.txt"; + assertThat(Joiner.on(" ").join(backendAction.getArguments())) + .containsMatch(expectedCompilerFlag); + String expectedBuildTypeFlag = "-DBUILD_PROPELLER_TYPE=\"full\""; + assertThat(Joiner.on(" ").join(backendAction.getArguments())) + .containsMatch(expectedBuildTypeFlag); + assertThat(ActionsTestUtil.baseArtifactNames(backendAction.getInputs())) + 
.contains("cc_profile.txt"); + + CppLinkAction indexAction = getIndexAction(backendAction); + assertThat(ActionsTestUtil.baseArtifactNames(indexAction.getInputs())) + .doesNotContain("ld_profile.txt"); + } + + @Test + public void testPropellerCcCompile() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.AUTOFDO); + + useConfiguration( + "--propeller_optimize_absolute_cc_profile=/tmp/cc_profile.txt", + "--propeller_optimize_absolute_ld_profile=/tmp/ld_profile.txt", + "--compilation_mode=opt"); + Artifact binArtifact = getFilesToBuild(getCurrentTarget()).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + CppLinkAction indexAction = getIndexAction(backendAction); + CppCompileAction bitcodeAction = + (CppCompileAction) + getPredecessorByInputName(indexAction, "pkg/_objs/bin/binfile.indexing.o"); + assertThat(ActionsTestUtil.baseArtifactNames(bitcodeAction.getInputs())) + .doesNotContain("cc_profile.txt"); + assertThat(Joiner.on(" ").join(bitcodeAction.getArguments())) + .doesNotContainMatch("-fbasic-block-sections="); + } + + /** + * Check that the temporary opt-out from disabling Propeller profiles for ThinLTO compile actions + * works. + * + *

      TODO(b/182804945): Remove after making sure that the rollout of the new Propeller profile + * passing logic didn't break anything. + */ + @Test + public void testPropellerCcCompileWithPropellerOptimizeThinLtoCompileActions() throws Exception { + createBuildFiles(); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.AUTOFDO); + + useConfiguration( + "--propeller_optimize_absolute_cc_profile=/tmp/cc_profile.txt", + "--propeller_optimize_absolute_ld_profile=/tmp/ld_profile.txt", + "--compilation_mode=opt", + "--features=propeller_optimize_thinlto_compile_actions", + "--features=use_lto_native_object_directory"); + Artifact binArtifact = getFilesToBuild(getCurrentTarget()).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + CppLinkAction indexAction = getIndexAction(backendAction); + assertThat(artifactsToStrings(indexAction.getInputs())) + .containsAtLeast( + "bin pkg/_objs/bin/binfile.indexing.o", "bin pkg/_objs/lib/libfile.indexing.o"); + + CppCompileAction bitcodeAction = + (CppCompileAction) + getPredecessorByInputName(indexAction, "pkg/_objs/bin/binfile.indexing.o"); + assertThat(ActionsTestUtil.baseArtifactNames(bitcodeAction.getInputs())) + .contains("cc_profile.txt"); + assertThat(Joiner.on(" ").join(bitcodeAction.getArguments())) + .containsMatch("-fbasic-block-sections=list=.*/cc_profile.txt"); + } + + @Test + public void testPropellerHostBuilds() throws Exception { + scratch.file( + "pkg/BUILD", + "package(features = ['thin_lto', 'use_lto_native_object_directory'])", + "", + "cc_binary(name = '" + targetName + "',", + " srcs = ['binfile.cc', ],", + " deps = [ ':lib' ], ", + " malloc = '//base:system_malloc')", + "cc_library(name = 'lib',", + " srcs = 
['libfile.cc'],", + " hdrs = ['libfile.h'])", + "cc_binary(name = 'gen_lib',", + " srcs = ['gen_lib.cc'])", + "genrule(name = 'lib_genrule',", + " srcs = [],", + " outs = ['libfile.cc'],", + " cmd = '$(location gen_lib) > \"$@\"',", + " tools = [':gen_lib'])"); + + scratch.file("pkg/binfile.cc", "#include \"pkg/libfile.h\"", "int main() { return pkg(); }"); + scratch.file( + "pkg/gen_lib.cc", + "#include ", + "int main() { puts(\"int pkg() { return 42; }\"); }"); + scratch.file("pkg/libfile.h", "int pkg();"); + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.AUTOFDO); + + useConfiguration( + "--propeller_optimize_absolute_cc_profile=/tmp/cc_profile.txt", + "--propeller_optimize_absolute_ld_profile=/tmp/ld_profile.txt", + "--compilation_mode=opt"); + Artifact binArtifact = getFilesToBuild(getCurrentTarget()).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + CppLinkAction indexAction = getIndexAction(backendAction); + assertThat(artifactsToStrings(indexAction.getInputs())) + .contains("bin pkg/_objs/lib/libfile.indexing.o"); + + CppCompileAction bitcodeAction = + (CppCompileAction) + getPredecessorByInputName(indexAction, "pkg/_objs/lib/libfile.indexing.o"); + + Action genruleAction = getPredecessorByInputName(bitcodeAction, "pkg/libfile.cc"); + + CppLinkAction hostLinkAction = + (CppLinkAction) getPredecessorByInputName(genruleAction, "pkg/gen_lib"); + assertThat(ActionsTestUtil.baseArtifactNames(hostLinkAction.getInputs())) + .doesNotContain("ld_profile.txt"); + assertThat(hostLinkAction.getLinkCommandLineForTesting().getRawLinkArgv().toString()) + .doesNotContainMatch("-Wl,--symbol-ordering-file=.*/ld_profile.txt"); + + // The hostLinkAction inputs has a different root 
from the backendAction. + // Here we confirm that the correct root is on the path + String hostrootExecPath = hostLinkAction.getPrimaryOutput().getRoot().getExecPathString(); + LtoBackendAction hostBackendAction = + (LtoBackendAction) + getPredecessorByInputName( + hostLinkAction, + "pkg/gen_lib.lto-obj/" + hostrootExecPath + "/pkg/_objs/gen_lib/gen_lib.o"); + assertThat(ActionsTestUtil.baseArtifactNames(hostBackendAction.getInputs())) + .doesNotContain("cc_profile.txt"); + assertThat(Joiner.on(" ").join(hostBackendAction.getArguments())) + .doesNotContainMatch("-fbasic-block-sections"); + + CppLinkAction hostIndexAction = getIndexAction(hostBackendAction); + assertThat(hostIndexAction).isNotNull(); + assertThat(ActionsTestUtil.baseArtifactNames(hostIndexAction.getInputs())) + .doesNotContain("ld_profile.txt"); + assertThat(hostIndexAction.getLinkCommandLineForTesting().getRawLinkArgv().toString()) + .doesNotContainMatch("-Wl,--symbol-ordering-file=.*/ld_profile.txt"); + + CppCompileAction hostBitcodeAction = + (CppCompileAction) + getPredecessorByInputName(hostIndexAction, "pkg/_objs/gen_lib/gen_lib.indexing.o"); + assertThat(ActionsTestUtil.baseArtifactNames(hostBitcodeAction.getInputs())) + .doesNotContain("cc_profile.txt"); + assertThat(Joiner.on(" ").join(hostBitcodeAction.getArguments())) + .doesNotContainMatch("-fbasic-block-sections="); + } + + private void testPropellerOptimizeOption(boolean label) throws Exception { + createBuildFiles(); + + if (label) { + scratch.file( + "fdo/BUILD", + "propeller_optimize(name='test_propeller_optimize', cc_profile=':cc_profile.txt'," + + " ld_profile=':ld_profile.txt')"); + } else { + scratch.file( + "fdo/BUILD", + "propeller_optimize(name='test_propeller_optimize'," + + "absolute_cc_profile='/tmp/cc_profile.txt'," + + "absolute_ld_profile='/tmp/ld_profile.txt')"); + } + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.AUTOFDO); + + useConfiguration( + 
"--propeller_optimize=//fdo:test_propeller_optimize", "--compilation_mode=opt"); + + Artifact binArtifact = getFilesToBuild(getCurrentTarget()).getSingleton(); + String rootExecPath = binArtifact.getRoot().getExecPathString(); + + CppLinkAction linkAction = (CppLinkAction) getGeneratingAction(binArtifact); + assertThat(linkAction.getOutputs()).containsExactly(binArtifact); + + List commandLine = linkAction.getLinkCommandLineForTesting().getRawLinkArgv(); + assertThat(commandLine.toString()) + .containsMatch("-Wl,--symbol-ordering-file=.*/ld_profile.txt"); + + LtoBackendAction backendAction = + (LtoBackendAction) + getPredecessorByInputName( + linkAction, "pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + + String expectedCompilerFlag = "-fbasic-block-sections=list=.*/cc_profile.txt"; + assertThat(Joiner.on(" ").join(backendAction.getArguments())) + .containsMatch(expectedCompilerFlag); + String expectedBuildTypeFlag = "-DBUILD_PROPELLER_TYPE=\"full\""; + assertThat(Joiner.on(" ").join(backendAction.getArguments())) + .containsMatch(expectedBuildTypeFlag); + assertThat(ActionsTestUtil.baseArtifactNames(backendAction.getInputs())) + .contains("cc_profile.txt"); + } + + @Test + public void testPropellerOptimizeOptionFromAbsolutePath() throws Exception { + testPropellerOptimizeOption(false); + } + + @Test + public void testPropellerOptimizeOptionFromLabel() throws Exception { + testPropellerOptimizeOption(true); + } + + private void testLLVMCachePrefetchBackendOption(String extraOption, boolean asLabel) + throws Exception { + createBuildFiles(); + if (asLabel) { + scratch.file( + "fdo/BUILD", "fdo_prefetch_hints(name='test_profile', profile=':prefetch.afdo')"); + } else { + scratch.file( + "fdo/BUILD", + "fdo_prefetch_hints(name='test_profile', absolute_path_profile='/tmp/prefetch.afdo')"); + } + + setupThinLTOCrosstool(CppRuleClasses.SUPPORTS_PIC, CppRuleClasses.AUTOFDO); + useConfiguration( + "--fdo_prefetch_hints=//fdo:test_profile", 
"--compilation_mode=opt", extraOption); + + String rootExecPath = getRootExecPath(); + LtoBackendAction backendAction = + getBackendAction("pkg/bin.lto-obj/" + rootExecPath + "/pkg/_objs/bin/binfile.o"); + + String expectedCompilerFlag = + "-prefetch-hints-file=" + + (asLabel ? ".*/prefetch.afdo" : "(blaze|bazel)-out/.*/fdo/.*/prefetch.afdo"); + assertThat(Joiner.on(" ").join(backendAction.getArguments())) + .containsMatch("-mllvm " + expectedCompilerFlag); + + assertThat(ActionsTestUtil.baseArtifactNames(backendAction.getInputs())) + .contains("prefetch.afdo"); + } + + @Test + public void testFdoCachePrefetchLLVMOptionsToBackendFromPath() throws Exception { + testLLVMCachePrefetchBackendOption("", false); + } + + @Test + public void testFdoCachePrefetchAndFdoLLVMOptionsToBackendFromPath() throws Exception { + testLLVMCachePrefetchBackendOption("--fdo_optimize=/profile.zip", false); + } + + @Test + public void testFdoCachePrefetchLLVMOptionsToBackendFromLabel() throws Exception { + testLLVMCachePrefetchBackendOption("", true); + } + + @Test + public void testFdoCachePrefetchAndFdoLLVMOptionsToBackendFromLabel() throws Exception { + testLLVMCachePrefetchBackendOption("--fdo_optimize=/profile.zip", true); + } +} diff --git a/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java b/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java index 686edb7f82f2de..693d51b30730d9 100755 --- a/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/cpp/StarlarkCcCommonTest.java @@ -7251,7 +7251,8 @@ public void testExpandedLtoAndFdoApiRaisesError() throws Exception { " fdo_context = toolchain.fdo_context()", " branch_fdo_profile = fdo_context.branch_fdo_profile()", " lto_backend_artifacts = cc_common.create_lto_backend_artifacts(ctx=ctx,", - " lto_output_root_prefix=ctx.label.package, bitcode_file=ctx.file.file,", + " 
lto_output_root_prefix=ctx.label.package, lto_obj_root_prefix=ctx.label.package,", + " bitcode_file=ctx.file.file,", " feature_configuration=feature_configuration, cc_toolchain=toolchain,", " fdo_context=fdo_context, use_pic=True,", " should_create_per_object_debug_info=False, argv=[])", diff --git a/src/test/shell/bazel/BUILD b/src/test/shell/bazel/BUILD index 9dd7a46db02047..b5bbc81f9c9592 100644 --- a/src/test/shell/bazel/BUILD +++ b/src/test/shell/bazel/BUILD @@ -604,6 +604,16 @@ sh_test( ], ) +sh_test( + name = "bazel_thinlto_obj_dir_test", + srcs = ["bazel_thinlto_obj_dir_test.sh"], + data = [":test-deps"], + tags = [ + "no_1804", + "no_windows", + ], +) + sh_test( name = "bazel_layering_check_test", srcs = ["bazel_layering_check_test.sh"], diff --git a/src/test/shell/bazel/bazel_thinlto_obj_dir_test.sh b/src/test/shell/bazel/bazel_thinlto_obj_dir_test.sh new file mode 100755 index 00000000000000..89d92685601e1f --- /dev/null +++ b/src/test/shell/bazel/bazel_thinlto_obj_dir_test.sh @@ -0,0 +1,105 @@ +#!/bin/bash +# +# Copyright 2019 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eu + +# Load the test setup defined in the parent directory +CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "${CURRENT_DIR}/../integration_test_setup.sh" \ + || { echo "integration_test_setup.sh not found!" 
>&2; exit 1; } + +function write_hello_world_files { + mkdir -p hello || fail "mkdir hello failed" + cat >hello/BUILD <hello/hello.h <hello/hellolib.cc < +#include "hello.h" +using namespace std; + +int hello1() { + return 42; +} + +void hello2() { + cout << "Hi" << endl; +} +EOF + + cat >hello/hello.cc < +#include "hello.h" +int main() { + hello2(); +} +EOF +} + +function test_bazel_thinlto() { + + if is_darwin; then + echo "This test doesn't run on Darwin. Skipping." + return + fi + + local -r clang_tool=$(which clang) + if [[ ! -x ${clang_tool:-/usr/bin/clang_tool} ]]; then + echo "clang not installed. Skipping test." + return + fi + + local major_version=$($clang_tool --version | \ + grep -oP 'version.*' | cut -d' ' -f 2 | cut -d '.' -f 1) + + if [[ $major_version < 6 ]]; then + echo "clang version is smaller than 6.0. Skipping test." + return + fi + + write_hello_world_files + + CC=$clang_tool bazel run \ + //hello:hello -c opt -s --features=thin_lto --features=use_lto_native_object_directory &>$TEST_log \ + || fail "Build with ThinLTO failed" + + grep -q "action 'LTO Backend Compile" $TEST_log \ + || fail "LTO Actions missing" + + # Find thinlto.bc files in subdirectories + if [[ -z $(find "bazel-bin/hello/hello.lto" -path "*hello/_objs/hello/hello.o.thinlto.bc" -print -quit) ]] \ + || [[ -z $(find "bazel-bin/hello/hello.lto" -path "*hello/_objs/hello_lib/hellolib.o.thinlto.bc" -print -quit) ]]; then + fail "bitcode files were not generated" + fi +} + +run_suite "test ThinLTO" From 536acae3fa9008dcd582fbe083c3b2f0f9e1947f Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 14:33:50 -0700 Subject: [PATCH 38/68] Update and document peak-heap-size info in user-manual page PiperOrigin-RevId: 544163119 Change-Id: I43daab85586586d5b0acfb9f349a7259e45ee6b6 --- .../devtools/build/lib/metrics/PostGCMemoryUseRecorder.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/src/main/java/com/google/devtools/build/lib/metrics/PostGCMemoryUseRecorder.java b/src/main/java/com/google/devtools/build/lib/metrics/PostGCMemoryUseRecorder.java index 6afac9bdfd8e03..9f8a8df7cafc99 100644 --- a/src/main/java/com/google/devtools/build/lib/metrics/PostGCMemoryUseRecorder.java +++ b/src/main/java/com/google/devtools/build/lib/metrics/PostGCMemoryUseRecorder.java @@ -254,8 +254,9 @@ private static final class PeakMemInfoItem extends InfoItem { PeakMemInfoItem() { super( "peak-heap-size", - "The peak amount of used memory in bytes after any call to System.gc().", - /*hidden=*/ true); + "The peak amount of used memory in bytes after any full GC during the most recent" + + " invocation.", + /* hidden= */ true); } @Override From 41f894f5adcb04f1860344793ea113d2308973aa Mon Sep 17 00:00:00 2001 From: Googler Date: Wed, 28 Jun 2023 15:55:00 -0700 Subject: [PATCH 39/68] Support R.txt files generated with aapt2 link --package-id ResourceProcessorBusyBox is being used in the Android platform build, which includes CTS tests that set `--package-id 0x80` when running aapt2 link. This produces R.txt files with ids that are out of range of java integers, for example `int color blue 0x80020000`. In an R.java file, javac interprets these as negative integers, but IntFieldInitializer throws a NumberFormatException. Make IntFieldInitializer act like javac by parsing the number as a Long and then narrowing it to an int. 
PiperOrigin-RevId: 544184014 Change-Id: Ida26d00a4c487830a8b35bbdf58dd9a5c5411adc --- .../build/android/resources/RClassGeneratorTest.java | 12 +++++++++--- .../build/android/resources/IntFieldInitializer.java | 6 +++++- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/test/java/com/google/devtools/build/android/resources/RClassGeneratorTest.java b/src/test/java/com/google/devtools/build/android/resources/RClassGeneratorTest.java index 21cabeab64393e..8e1bc9cc746871 100644 --- a/src/test/java/com/google/devtools/build/android/resources/RClassGeneratorTest.java +++ b/src/test/java/com/google/devtools/build/android/resources/RClassGeneratorTest.java @@ -165,12 +165,18 @@ private void checkSimpleInts(boolean finalFields) throws Exception { "int id someTextView 0x7f080000", "int integer maxNotifications 0x7f090000", "int string alphabet 0x7f100000", - "int string ok 0x7f100001"); + "int string ok 0x7f100001", + // aapt2 link --package-id 0x80 produces IDs that are out of range of a java integer. + "int string largePackageId 0x80001000"); // R.txt for the library, where the values are not the final ones (so ignore them). We only use // this to keep the # of inner classes small (exactly the set needed by the library). 
ResourceSymbols symbolsInLibrary = createSymbolFile( - "lib.R.txt", "int attr agility 0x1", "int id someTextView 0x1", "int string ok 0x1"); + "lib.R.txt", + "int attr agility 0x1", + "int id someTextView 0x1", + "int string ok 0x1", + "int string largePackageId 0x1"); Path out = temp.resolve("classes"); Files.createDirectories(out); RClassGenerator writer = RClassGenerator.with(out, symbolValues.asInitializers(), finalFields); @@ -198,7 +204,7 @@ private void checkSimpleInts(boolean finalFields) throws Exception { out, "com.bar.R$string", outerClass, - ImmutableMap.of("ok", 0x7f100001), + ImmutableMap.of("ok", 0x7f100001, "largePackageId", 0x80001000), ImmutableMap.>of(), finalFields); } diff --git a/src/tools/android/java/com/google/devtools/build/android/resources/IntFieldInitializer.java b/src/tools/android/java/com/google/devtools/build/android/resources/IntFieldInitializer.java index 66e310f0691bd4..b6a5fe3f5d1231 100644 --- a/src/tools/android/java/com/google/devtools/build/android/resources/IntFieldInitializer.java +++ b/src/tools/android/java/com/google/devtools/build/android/resources/IntFieldInitializer.java @@ -44,7 +44,11 @@ private IntFieldInitializer( public static FieldInitializer of( DependencyInfo dependencyInfo, Visibility visibility, String fieldName, String value) { - return of(dependencyInfo, visibility, fieldName, Integer.decode(value)); + // aapt2 --package-id 0x80 (or higher) will produce R.txt values that are outside the range of + // Integer.decode, e.g. 0x80001000. javac interprets them as negative integers, do the same + // here by decoding as a Long and then performing a narrowing primitive conversion to int. 
+ int intValue = Long.decode(value).intValue(); + return of(dependencyInfo, visibility, fieldName, intValue); } public static IntFieldInitializer of( From b9dcb0cfd1164766c1e934c3d547aeca833b46dc Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 02:08:47 -0700 Subject: [PATCH 40/68] Remove the ability to disable Starlark java_test in bazel PiperOrigin-RevId: 544290808 Change-Id: Ia2a13f92434fc3a26d735274c67e012314ff5c5a --- src/main/starlark/builtins_bzl/bazel/exports.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/starlark/builtins_bzl/bazel/exports.bzl b/src/main/starlark/builtins_bzl/bazel/exports.bzl index 0b2035db5b6fa3..0ad6f515684207 100644 --- a/src/main/starlark/builtins_bzl/bazel/exports.bzl +++ b/src/main/starlark/builtins_bzl/bazel/exports.bzl @@ -41,7 +41,7 @@ exported_rules = { "java_proto_library": java_proto_library, "+cc_proto_library": cc_proto_library, "+java_binary": java_binary, - "+java_test": java_test, + "java_test": java_test, "py_binary": py_binary, "py_test": py_test, "py_library": py_library, From 05dea888ffddb87121fcbb9c0e880c65d91523b9 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 02:08:59 -0700 Subject: [PATCH 41/68] Remove the ability to disable Starlark java_binary in bazel PiperOrigin-RevId: 544290837 Change-Id: I35edd9d21625a491bf83ae22fc90ab8392702cb4 --- src/main/starlark/builtins_bzl/bazel/exports.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/starlark/builtins_bzl/bazel/exports.bzl b/src/main/starlark/builtins_bzl/bazel/exports.bzl index 0ad6f515684207..0ef0d2dbc49825 100644 --- a/src/main/starlark/builtins_bzl/bazel/exports.bzl +++ b/src/main/starlark/builtins_bzl/bazel/exports.bzl @@ -40,7 +40,7 @@ exported_rules = { "+java_import": java_import, "java_proto_library": java_proto_library, "+cc_proto_library": cc_proto_library, - "+java_binary": java_binary, + "java_binary": java_binary, "java_test": java_test, "py_binary": py_binary, 
"py_test": py_test, From cafa3cb1f2754a92fc4085f95d1fd19324df0e38 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 02:28:28 -0700 Subject: [PATCH 42/68] Fix error prone ComparisonOutOfRange in PersistentStringIndexer PiperOrigin-RevId: 544294153 Change-Id: Ia89d02a025dfafa213d68ea2110a79e5adccf79c --- .../build/lib/actions/cache/PersistentStringIndexer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/google/devtools/build/lib/actions/cache/PersistentStringIndexer.java b/src/main/java/com/google/devtools/build/lib/actions/cache/PersistentStringIndexer.java index 8cdc4f818a2c47..e12826c025d655 100644 --- a/src/main/java/com/google/devtools/build/lib/actions/cache/PersistentStringIndexer.java +++ b/src/main/java/com/google/devtools/build/lib/actions/cache/PersistentStringIndexer.java @@ -65,7 +65,7 @@ public PersistentIndexMap(Path mapFile, Path journalFile, Clock clock) throws IO @Override protected boolean updateJournal() { long time = clock.nanoTime(); - if (SAVE_INTERVAL_NS == 0 || time > nextUpdate) { + if (SAVE_INTERVAL_NS == 0L || time > nextUpdate) { nextUpdate = time + SAVE_INTERVAL_NS; return true; } From 93508d7416cb678ba1146312aa0318ac7c605a03 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 03:33:49 -0700 Subject: [PATCH 43/68] Fix integration test failures on Windows. mockito's inline mock maker manipulates byte code of mocked methods and output new byte code in a temporary jarfile with pattern mockitobootXXXXXXX.jar. It then loads these jar files into JVM to make mock effective which means Bazel runtime still holds handles of these files making it impossible to delete on Windows. 
PiperOrigin-RevId: 544307226 Change-Id: Iad2931780f118a78759e9440bbaa4efaa78e3602 --- .../util/BuildIntegrationTestCase.java | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java b/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java index c353474f596b1a..db8f2db1609408 100644 --- a/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java +++ b/src/test/java/com/google/devtools/build/lib/buildtool/util/BuildIntegrationTestCase.java @@ -144,6 +144,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.function.Predicate; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import org.junit.After; @@ -369,9 +370,28 @@ public final void cleanUp() throws Exception { LoggingUtil.installRemoteLoggerForTesting(null); if (OS.getCurrent() == OS.WINDOWS) { - // Bazel runtime still holds the file handle of windows_jni.dll making it impossible to delete - // on Windows. Try to delete all other files (and directories). - bestEffortDeleteTreesBelow(testRoot, "windows_jni.dll"); + bestEffortDeleteTreesBelow( + testRoot, + filename -> { + // Bazel runtime still holds the file handle of windows_jni.dll making it impossible to + // delete on Windows. + if (filename.equals("windows_jni.dll")) { + return true; + } + + // mockito's inline mock maker manipulates byte code of mocked methods and output new + // byte code in a temporary jarfile with pattern mockitobootXXXXXXX.jar. It then loads + // these jar files into JVM to make mock effective which means Bazel runtime still holds + // handles of these files making it impossible to delete on Windows. 
+ // + // See https://github.com/mockito/mockito/issues/1379#issuecomment-466372914 and + // https://github.com/mockito/mockito/blob/91f18ea1648e389bea06289d818def7978e82288/src/main/java/org/mockito/internal/creation/bytebuddy/InlineDelegateByteBuddyMockMaker.java#L123C10-L123C10. + if (filename.startsWith("mockitoboot") && filename.endsWith(".jar")) { + return true; + } + + return false; + }); } else { testRoot.deleteTreesBelow(); // (comment out during debugging) } @@ -382,7 +402,8 @@ public final void cleanUp() throws Exception { Thread.interrupted(); // If there was a crash in test case, main thread was interrupted. } - private static void bestEffortDeleteTreesBelow(Path path, String canSkip) throws IOException { + private static void bestEffortDeleteTreesBelow(Path path, Predicate canSkip) + throws IOException { for (Dirent dirent : path.readdir(Symlinks.NOFOLLOW)) { Path child = path.getRelative(dirent.getName()); if (dirent.getType() == Dirent.Type.DIRECTORY) { @@ -396,7 +417,7 @@ private static void bestEffortDeleteTreesBelow(Path path, String canSkip) throws try { child.delete(); } catch (IOException e) { - if (!child.getBaseName().equals(canSkip)) { + if (!canSkip.test(child.getBaseName())) { throw e; } } From 1dc358cebdbee9ac371b703e619fd61981ba100d Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 05:06:44 -0700 Subject: [PATCH 44/68] Remove mention of `experimental_worker_multiplex` form flag help. 
Instead of this flag we should use worker_multiplex PiperOrigin-RevId: 544324118 Change-Id: I472b21cf97a6b915d5ef73c13186ba9e56f44ae8 --- .../com/google/devtools/build/lib/worker/WorkerOptions.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java b/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java index 24413867637e7c..dbb96c833c95f9 100644 --- a/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java +++ b/src/main/java/com/google/devtools/build/lib/worker/WorkerOptions.java @@ -101,7 +101,7 @@ public String getTypeDescription() { effectTags = {OptionEffectTag.EXECUTION, OptionEffectTag.HOST_MACHINE_RESOURCE_OPTIMIZATIONS}, help = "How many WorkRequests a multiplex worker process may receive in parallel if you use the" - + " 'worker' strategy with --experimental_worker_multiplex. May be specified as " + + " 'worker' strategy with --worker_multiplex. May be specified as " + "[name=value] to give a different value per mnemonic. 
The limit is based on worker " + "keys, which are differentiated based on mnemonic, but also on startup flags and " + "environment, so there can in some cases be more workers per mnemonic than this " From 785b91fbdda97b397cea42b8e76f0bf264d92d89 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 05:21:34 -0700 Subject: [PATCH 45/68] Update maven_install.json PiperOrigin-RevId: 544326726 Change-Id: I8a289542b92ff1269d9e3526032f3fdbed53a924 --- WORKSPACE | 8 +++++--- maven_install.json | 26 +++++++++++++------------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index c7ae1fa85dae7b..8ee3c553d655b1 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -479,8 +479,8 @@ maven_install( "javax.activation:javax.activation-api:1.2.0", "javax.annotation:javax.annotation-api:1.3.2", "javax.inject:javax.inject:1", - "net.bytebuddy:byte-buddy-agent:1.11.13", - "net.bytebuddy:byte-buddy:1.11.13", + "net.bytebuddy:byte-buddy-agent:1.14.5", + "net.bytebuddy:byte-buddy:1.14.5", "org.apache.commons:commons-compress:1.19", "org.apache.commons:commons-pool2:2.8.0", "org.apache.tomcat:tomcat-annotations-api:8.0.5", @@ -559,7 +559,8 @@ maven_install( "com.google.protobuf:protobuf-java", "com.google.protobuf:protobuf-javalite", ], - fail_if_repin_required = False, + # Don't forget to change this to back to True before submitting your change. + fail_if_repin_required = True, maven_install_json = "//:maven_install.json", repositories = [ "https://repo1.maven.org/maven2", @@ -583,6 +584,7 @@ maven_install( "com.android.tools:common:30.1.3", "com.android.tools:repository:30.1.3", ], + # Don't forget to change this to back to True before submitting your change. 
fail_if_repin_required = True, maven_install_json = "//src/tools/android:maven_android_install.json", repositories = [ diff --git a/maven_install.json b/maven_install.json index 222bb478867ac1..f848bd34a83940 100644 --- a/maven_install.json +++ b/maven_install.json @@ -1,7 +1,7 @@ { "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": "THERE_IS_NO_DATA_ONLY_ZUUL", - "__INPUT_ARTIFACTS_HASH": -482172833, - "__RESOLVED_ARTIFACTS_HASH": -552166642, + "__INPUT_ARTIFACTS_HASH": -466952660, + "__RESOLVED_ARTIFACTS_HASH": 276588240, "artifacts": { "aopalliance:aopalliance": { "shasums": { @@ -537,15 +537,15 @@ }, "net.bytebuddy:byte-buddy": { "shasums": { - "jar": "e29fa75b903432ac64d05c18c19d0e3b9026e74abda52bbd6f9065e55f4a29f5" + "jar": "e99761a526df0fefbbd3fe14436b0f953000cdfa5151dc63c0b18d37d9c46f1c" }, - "version": "1.11.13" + "version": "1.14.5" }, "net.bytebuddy:byte-buddy-agent": { "shasums": { - "jar": "49b43b0d10f8bb1d800d56137bdf0f44628412ebe1fbd804e45f363d495860fa" + "jar": "55f19862b870f5d85890ba5386b1b45e9bbc88d5fe1f819abe0c788b4929fa6b" }, - "version": "1.11.13" + "version": "1.14.5" }, "org.apache.commons:commons-compress": { "shasums": { @@ -603,15 +603,15 @@ }, "org.mockito:mockito-core": { "shasums": { - "jar": "75d4b14ba7aef836e92ba7b2d53ca7d6b215dd7db5625afbc39252f1358835fe" + "jar": "b1689b06617ea01fd777bfaedbdde512faf083d639a049f79b388d5a4e96d2e5" }, - "version": "3.12.4" + "version": "5.4.0" }, "org.objenesis:objenesis": { "shasums": { - "jar": "03d960bd5aef03c653eb000413ada15eb77cdd2b8e4448886edf5692805e35f3" + "jar": "02dfd0b0439a5591e35b708ed2f5474eb0948f53abf74637e959b8e4ef69bfeb" }, - "version": "3.2" + "version": "3.3" }, "org.ow2.asm:asm": { "shasums": { @@ -1802,11 +1802,13 @@ "net.bytebuddy.pool", "net.bytebuddy.utility", "net.bytebuddy.utility.dispatcher", + "net.bytebuddy.utility.nullability", "net.bytebuddy.utility.privilege", "net.bytebuddy.utility.visitor" ], "net.bytebuddy:byte-buddy-agent": [ - "net.bytebuddy.agent" + 
"net.bytebuddy.agent", + "net.bytebuddy.agent.utility.nullability" ], "org.apache.commons:commons-compress": [ "org.apache.commons.compress", @@ -2005,7 +2007,6 @@ "org.mockito.internal.invocation.finder", "org.mockito.internal.invocation.mockref", "org.mockito.internal.junit", - "org.mockito.internal.junit.util", "org.mockito.internal.listeners", "org.mockito.internal.matchers", "org.mockito.internal.matchers.apachecommons", @@ -2033,7 +2034,6 @@ "org.mockito.mock", "org.mockito.plugins", "org.mockito.quality", - "org.mockito.runners", "org.mockito.session", "org.mockito.stubbing", "org.mockito.verification" From 09188a906a8cb5237e6336f81eb5677c9d478621 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 05:25:31 -0700 Subject: [PATCH 46/68] Fix error prone ComparisonOutOfRange in ZipTester `filename` has type `byte[]`, so the range of each element is `[-128,127]` PiperOrigin-RevId: 544327350 Change-Id: I58b4829044fa5891157c4ef7dbc3e70fb458e33f --- .../com/google/devtools/build/singlejar/ZipTester.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java index f2e8b3a8445dd6..ac18f593e4dd4d 100644 --- a/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java +++ b/src/java_tools/singlejar/javatests/com/google/devtools/build/singlejar/ZipTester.java @@ -228,7 +228,7 @@ private void readEntry() throws IOException { String name = new String(filename, "UTF-8"); for (int i = 0; i < filename.length; i++) { - if ((filename[i] < ' ') || (filename[i] > 127)) { + if ((filename[i] < ' ')) { warn(entryDesc + ": file name has unexpected non-ascii characters"); } } From 5c4cf47a131c84506aad9ce0e014c6643c31a4ac Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 05:53:47 -0700 Subject: [PATCH 47/68] Fix non-declared symlink issue for local 
actions when BwoB. When prefetching non-declared symlink for local actions, we want to download the target artifact if they haven't been downloaded. Currently, the code use `path.getRelativePath(path.readSymbolicLink())` to mimic resolving relative symlink which is not correct. Replacing it with `path.readSymbolicLink()`. Fixes #18772. PiperOrigin-RevId: 544331900 Change-Id: Ie2a6bac298ab9f81e44d5f505f1b3d83519ba3ca --- .../remote/AbstractActionInputPrefetcher.java | 2 +- ...ildWithoutTheBytesIntegrationTestBase.java | 30 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java b/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java index d4533595848f76..f1700d6bd9be87 100644 --- a/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java +++ b/src/main/java/com/google/devtools/build/lib/remote/AbstractActionInputPrefetcher.java @@ -436,7 +436,7 @@ private Completable downloadFileNoCheckRx( Priority priority) { if (path.isSymbolicLink()) { try { - path = path.getRelative(path.readSymbolicLink()); + path = path.resolveSymbolicLinks(); } catch (IOException e) { return Completable.error(e); } diff --git a/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTestBase.java b/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTestBase.java index 7039d757d14bd7..8f6c0b3ad77610 100644 --- a/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTestBase.java +++ b/src/test/java/com/google/devtools/build/lib/remote/BuildWithoutTheBytesIntegrationTestBase.java @@ -1395,6 +1395,36 @@ public void remoteTreeFilesExpiredBetweenBuilds_rerunGeneratingActions() throws assertValidOutputFile("a/bar.out", "file-inside\nupdated bar" + lineSeparator()); } + @Test + public void nonDeclaredSymlinksFromLocalActions() throws Exception { 
+ write( + "BUILD", + "genrule(", + " name = 'foo',", + " srcs = [],", + " outs = ['foo.txt'],", + " cmd = 'echo foo > $@',", + ")", + "genrule(", + " name = 'foo-link',", + " srcs = [':foo'],", + " outs = ['foo.link'],", + " cmd = 'ln -s foo.txt $@',", + " local = True,", + ")", + "genrule(", + " name = 'foobar',", + " srcs = [':foo-link'],", + " outs = ['foobar.txt'],", + " cmd = 'cat $(location :foo-link) > $@ && echo bar >> $@',", + " local = True,", + ")"); + + buildTarget("//:foobar"); + + assertValidOutputFile("foobar.txt", "foo\nbar\n"); + } + protected void assertOutputsDoNotExist(String target) throws Exception { for (Artifact output : getArtifacts(target)) { assertWithMessage( From 8fc8debf2499c9ad25a631adc960117f98949947 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 07:07:17 -0700 Subject: [PATCH 48/68] Delete native j2objc_library PiperOrigin-RevId: 544345919 Change-Id: I3b3a07066b6afa9c229def37ccc8e21c808ec957 --- .../build/lib/bazel/rules/J2ObjcRules.java | 7 +- .../bazel/rules/objc/BazelJ2ObjcLibrary.java | 25 - .../rules/objc/BazelJ2ObjcLibraryRule.java | 3 +- .../build/lib/rules/objc/J2ObjcAspect.java | 962 ------------------ .../rules/objc/J2ObjcEntryClassProvider.java | 146 --- .../build/lib/rules/objc/J2ObjcLibrary.java | 130 --- .../lib/rules/objc/J2ObjcLibraryBaseRule.java | 9 +- .../rules/objc/J2ObjcMappingFileProvider.java | 156 --- .../build/lib/rules/objc/J2ObjcSource.java | 146 --- .../build/lib/rules/objc/ObjcCommon.java | 5 +- .../build/lib/rules/proto/ProtoCommon.java | 145 --- .../lib/rules/proto/ProtoConfiguration.java | 23 +- .../rules/BazelRuleClassProviderTest.java | 5 - .../rules/objc/BazelJ2ObjcLibraryTest.java | 3 +- .../lib/rules/objc/J2ObjcSourceTest.java | 96 -- 15 files changed, 29 insertions(+), 1832 deletions(-) delete mode 100644 src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibrary.java delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcAspect.java 
delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcEntryClassProvider.java delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibrary.java delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcMappingFileProvider.java delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcSource.java delete mode 100644 src/main/java/com/google/devtools/build/lib/rules/proto/ProtoCommon.java delete mode 100644 src/test/java/com/google/devtools/build/lib/rules/objc/J2ObjcSourceTest.java diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/J2ObjcRules.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/J2ObjcRules.java index e953f68772203b..23b46fbbed7052 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/J2ObjcRules.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/J2ObjcRules.java @@ -16,10 +16,8 @@ import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider.RuleSet; -import com.google.devtools.build.lib.bazel.rules.cpp.BazelCppSemantics; import com.google.devtools.build.lib.bazel.rules.objc.BazelJ2ObjcLibraryRule; import com.google.devtools.build.lib.rules.core.CoreRules; -import com.google.devtools.build.lib.rules.objc.J2ObjcAspect; import com.google.devtools.build.lib.rules.objc.J2ObjcLibraryBaseRule; /** @@ -34,10 +32,7 @@ private J2ObjcRules() { @Override public void init(ConfiguredRuleClassProvider.Builder builder) { - J2ObjcAspect j2ObjcAspect = new J2ObjcAspect(builder, BazelCppSemantics.OBJC); - - builder.addNativeAspectClass(j2ObjcAspect); - builder.addRuleDefinition(new J2ObjcLibraryBaseRule(j2ObjcAspect)); + builder.addRuleDefinition(new J2ObjcLibraryBaseRule()); builder.addRuleDefinition(new BazelJ2ObjcLibraryRule()); } diff --git 
a/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibrary.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibrary.java deleted file mode 100644 index d1b67993875699..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibrary.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2021 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.lib.bazel.rules.objc; - -import com.google.devtools.build.lib.bazel.rules.cpp.BazelCppSemantics; -import com.google.devtools.build.lib.rules.objc.J2ObjcLibrary; - -/** Factory class for the {@code j2objc_library} rule. 
*/ -public class BazelJ2ObjcLibrary extends J2ObjcLibrary { - public BazelJ2ObjcLibrary() { - super(BazelCppSemantics.OBJC); - } -} diff --git a/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibraryRule.java b/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibraryRule.java index 8f96ca75fd5a8e..01f0a73438f37b 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibraryRule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/objc/BazelJ2ObjcLibraryRule.java @@ -14,6 +14,7 @@ package com.google.devtools.build.lib.bazel.rules.objc; +import com.google.devtools.build.lib.analysis.BaseRuleClasses; import com.google.devtools.build.lib.analysis.RuleDefinition; import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment; import com.google.devtools.build.lib.packages.RuleClass; @@ -34,7 +35,7 @@ public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) public Metadata getMetadata() { return RuleDefinition.Metadata.builder() .name("j2objc_library") - .factoryClass(BazelJ2ObjcLibrary.class) + .factoryClass(BaseRuleClasses.EmptyRuleConfiguredTargetFactory.class) .ancestors(J2ObjcLibraryBaseRule.class, ObjcRuleClasses.CrosstoolRule.class) .build(); } diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcAspect.java b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcAspect.java deleted file mode 100644 index 6045ba019edaaf..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcAspect.java +++ /dev/null @@ -1,962 +0,0 @@ -// Copyright 2015 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.lib.rules.objc; - -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.collect.ImmutableList.toImmutableList; -import static com.google.devtools.build.lib.packages.Attribute.attr; -import static com.google.devtools.build.lib.packages.BuildType.LABEL; -import static java.nio.charset.StandardCharsets.ISO_8859_1; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; -import com.google.devtools.build.lib.actions.ParamFileInfo; -import com.google.devtools.build.lib.actions.ParameterFile; -import com.google.devtools.build.lib.actions.ParameterFile.ParameterFileType; -import com.google.devtools.build.lib.analysis.ConfiguredAspect; -import com.google.devtools.build.lib.analysis.ConfiguredAspectFactory; -import com.google.devtools.build.lib.analysis.ConfiguredTarget; -import com.google.devtools.build.lib.analysis.RuleContext; -import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment; -import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; -import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; -import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.VectorArg; -import com.google.devtools.build.lib.analysis.actions.SpawnAction; -import 
com.google.devtools.build.lib.analysis.config.ConfigAwareAspectBuilder; -import com.google.devtools.build.lib.analysis.config.ExecutionTransitionFactory; -import com.google.devtools.build.lib.analysis.config.ToolchainTypeRequirement; -import com.google.devtools.build.lib.analysis.platform.ToolchainInfo; -import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.cmdline.RepositoryName; -import com.google.devtools.build.lib.collect.nestedset.NestedSet; -import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; -import com.google.devtools.build.lib.packages.AspectDefinition; -import com.google.devtools.build.lib.packages.AspectParameters; -import com.google.devtools.build.lib.packages.Attribute.LabelLateBoundDefault; -import com.google.devtools.build.lib.packages.Attribute.LateBoundDefault.Resolver; -import com.google.devtools.build.lib.packages.BuildType; -import com.google.devtools.build.lib.packages.ExecGroup; -import com.google.devtools.build.lib.packages.NativeAspectClass; -import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; -import com.google.devtools.build.lib.packages.StarlarkInfo; -import com.google.devtools.build.lib.packages.StarlarkProviderIdentifier; -import com.google.devtools.build.lib.rules.apple.AppleConfiguration; -import com.google.devtools.build.lib.rules.apple.AppleToolchain; -import com.google.devtools.build.lib.rules.apple.XcodeConfigRule; -import com.google.devtools.build.lib.rules.cpp.CcCompilationContext; -import com.google.devtools.build.lib.rules.cpp.CcInfo; -import com.google.devtools.build.lib.rules.cpp.CcLinkingContext; -import com.google.devtools.build.lib.rules.cpp.CcToolchain; -import com.google.devtools.build.lib.rules.cpp.CcToolchainProvider; -import com.google.devtools.build.lib.rules.cpp.CppConfiguration; -import com.google.devtools.build.lib.rules.cpp.CppHelper; -import 
com.google.devtools.build.lib.rules.cpp.CppModuleMap.UmbrellaHeaderStrategy; -import com.google.devtools.build.lib.rules.cpp.CppRuleClasses; -import com.google.devtools.build.lib.rules.cpp.CppSemantics; -import com.google.devtools.build.lib.rules.java.JavaInfo; -import com.google.devtools.build.lib.rules.java.JavaRuleClasses; -import com.google.devtools.build.lib.rules.java.JavaSemantics; -import com.google.devtools.build.lib.rules.java.JavaToolchainProvider; -import com.google.devtools.build.lib.rules.objc.IntermediateArtifacts.AlwaysLink; -import com.google.devtools.build.lib.rules.objc.J2ObjcSource.SourceType; -import com.google.devtools.build.lib.rules.proto.ProtoCommon; -import com.google.devtools.build.lib.rules.proto.ProtoConfiguration; -import com.google.devtools.build.lib.rules.proto.ProtoInfo; -import com.google.devtools.build.lib.rules.proto.ProtoLangToolchainProvider; -import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec; -import com.google.devtools.build.lib.skyframe.serialization.autocodec.SerializationConstant; -import com.google.devtools.build.lib.vfs.PathFragment; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import javax.annotation.Nullable; -import net.starlark.java.eval.EvalException; -import net.starlark.java.eval.StarlarkList; -import net.starlark.java.eval.Tuple; - -/** J2ObjC transpilation aspect for Java and proto rules. 
*/ -public class J2ObjcAspect extends NativeAspectClass implements ConfiguredAspectFactory { - public static final String NAME = "J2ObjcAspect"; - - private static LabelLateBoundDefault getProtoToolchainLabel( - String defaultValue) { - return LabelLateBoundDefault.fromTargetConfiguration( - ProtoConfiguration.class, - Label.parseCanonicalUnchecked(defaultValue), - (Resolver & Serializable) - (rule, attributes, protoConfig) -> protoConfig.protoToolchainForJ2objc()); - } - - private static final ImmutableList JAVA_DEPENDENT_ATTRIBUTES = - ImmutableList.of("$jre_lib", "deps", "exports", "runtime_deps"); - - private static final ImmutableList PROTO_DEPENDENT_ATTRIBUTES = ImmutableList.of("deps"); - - private static final String J2OBJC_PROTO_TOOLCHAIN_ATTR = ":j2objc_proto_toolchain"; - - @SerializationConstant @AutoCodec.VisibleForSerialization - static final LabelLateBoundDefault DEAD_CODE_REPORT = - LabelLateBoundDefault.fromTargetConfiguration( - J2ObjcConfiguration.class, - null, - (rule, attributes, j2objcConfig) -> j2objcConfig.deadCodeReport()); - - private final RepositoryName toolsRepository; - private final Label ccToolchainType; - private final LabelLateBoundDefault ccToolchain; - private final ToolchainTypeRequirement javaToolchainTypeRequirement; - - public J2ObjcAspect(RuleDefinitionEnvironment env, CppSemantics cppSemantics) { - this.toolsRepository = checkNotNull(env.getToolsRepository()); - this.ccToolchainType = CppRuleClasses.ccToolchainTypeAttribute(env); - this.ccToolchain = CppRuleClasses.ccToolchainAttribute(env); - this.javaToolchainTypeRequirement = JavaRuleClasses.javaToolchainTypeRequirement(env); - } - - @Override - public AspectDefinition getDefinition(AspectParameters aspectParameters) { - return ConfigAwareAspectBuilder.of(new AspectDefinition.Builder(this)) - .originalBuilder() - .propagateAlongAttribute("deps") - .propagateAlongAttribute("exports") - .propagateAlongAttribute("runtime_deps") - 
.requireStarlarkProviders(StarlarkProviderIdentifier.forKey(JavaInfo.PROVIDER.getKey())) - .requireStarlarkProviders(ProtoInfo.PROVIDER.id()) - .advertiseProvider(ImmutableList.of(ObjcProvider.STARLARK_CONSTRUCTOR.id())) - .requiresConfigurationFragments( - AppleConfiguration.class, - CppConfiguration.class, - J2ObjcConfiguration.class, - ObjcConfiguration.class, - ProtoConfiguration.class) - .addToolchainTypes(CppRuleClasses.ccToolchainTypeRequirement(ccToolchainType)) - .add( - attr("$grep_includes", LABEL) - .cfg(ExecutionTransitionFactory.createFactory()) - .value( - Label.parseCanonicalUnchecked(toolsRepository + "//tools/cpp:grep-includes"))) - .add( - attr("$j2objc", LABEL) - .cfg(ExecutionTransitionFactory.createFactory("j2objc")) - .exec() - .value( - Label.parseCanonicalUnchecked( - toolsRepository + "//tools/j2objc:j2objc_deploy.jar"))) - .add( - attr("$j2objc_wrapper", LABEL) - .cfg(ExecutionTransitionFactory.createFactory("j2objc")) - .exec() - .legacyAllowAnyFileType() - .value( - Label.parseCanonicalUnchecked( - toolsRepository + "//tools/j2objc:j2objc_wrapper_binary"))) - .add( - attr("$j2objc_header_map", LABEL) - .cfg(ExecutionTransitionFactory.createFactory("j2objc")) - .exec() - .legacyAllowAnyFileType() - .value( - Label.parseCanonicalUnchecked( - toolsRepository + "//tools/j2objc:j2objc_header_map_binary"))) - .add( - attr("$jre_emul_jar", LABEL) - .cfg(ExecutionTransitionFactory.createFactory("j2objc")) - .value( - Label.parseCanonicalUnchecked( - toolsRepository + "//third_party/java/j2objc:jre_emul.jar"))) - .add( - attr("$jre_emul_module", LABEL) - .cfg(ExecutionTransitionFactory.createFactory("j2objc")) - .value( - Label.parseCanonicalUnchecked( - toolsRepository + "//third_party/java/j2objc:jre_emul_module"))) - .add( - attr(":dead_code_report", LABEL) - .cfg(ExecutionTransitionFactory.createFactory("j2objc")) - .value(DEAD_CODE_REPORT)) - .add( - attr("$jre_lib", LABEL) - .value( - Label.parseCanonicalUnchecked( - toolsRepository + 
"//third_party/java/j2objc:jre_core_lib"))) - .add( - attr("$xcrunwrapper", LABEL) - .cfg(ExecutionTransitionFactory.createFactory()) - .exec() - .value( - Label.parseCanonicalUnchecked(toolsRepository + "//tools/objc:xcrunwrapper"))) - .add( - attr(XcodeConfigRule.XCODE_CONFIG_ATTR_NAME, LABEL) - .allowedRuleClasses("xcode_config") - .checkConstraints() - .value(AppleToolchain.getXcodeConfigLabel(toolsRepository))) - .add( - attr("$zipper", LABEL) - .cfg(ExecutionTransitionFactory.createFactory()) - .exec() - .value(Label.parseCanonicalUnchecked(toolsRepository + "//tools/zip:zipper"))) - .add( - attr(J2OBJC_PROTO_TOOLCHAIN_ATTR, LABEL) - .legacyAllowAnyFileType() - .value( - getProtoToolchainLabel( - toolsRepository + "//tools/j2objc:j2objc_proto_toolchain"))) - .add( - attr(JavaRuleClasses.JAVA_TOOLCHAIN_TYPE_ATTRIBUTE_NAME, LABEL) - .value(javaToolchainTypeRequirement.toolchainType())) - .add( - attr(CcToolchain.CC_TOOLCHAIN_DEFAULT_ATTRIBUTE_NAME, LABEL) - .mandatoryProviders(CcToolchainProvider.PROVIDER.id()) - .value(ccToolchain)) - .execGroups( - ImmutableMap.of( - "proto_compiler", - ExecGroup.builder().build(), - "j2objc", - ExecGroup.builder().addToolchainType(javaToolchainTypeRequirement).build())) - .build(); - } - - @Override - public ConfiguredAspect create( - Label targetLabel, - ConfiguredTarget ct, - RuleContext ruleContext, - AspectParameters parameters, - RepositoryName toolsRepository) - throws InterruptedException, ActionConflictException { - if (isProtoRule(ct)) { - return proto(ct, ruleContext); - } - try { - return java(ct, ruleContext); - } catch (EvalException | RuleErrorException e) { - ruleContext.ruleError(e.getMessage()); - return null; - } - } - - /** - * Returns a {@link IntermediateArtifacts} to be used to compile and link the ObjC source files - * generated by J2ObjC. 
- */ - private static IntermediateArtifacts j2objcIntermediateArtifacts(RuleContext ruleContext) { - // We need to append "_j2objc" to the name of the generated archive file to distinguish it from - // the C/C++ archive file created by proto_library targets with attribute cc_api_version - // specified. - // Generate an umbrella header for the module map. The headers declared in module maps are - // compiled using the #import directives which are incompatible with J2ObjC segmented headers. - // We need to #iclude all the headers in an umbrella header and then declare the umbrella header - // in the module map. - return new IntermediateArtifacts( - ruleContext, - /* archiveFileNameSuffix= */ "_j2objc", - UmbrellaHeaderStrategy.GENERATE, - AlwaysLink.TRUE); - } - - private ConfiguredAspect buildAspect( - ConfiguredTarget base, - RuleContext ruleContext, - J2ObjcSource j2ObjcSource, - J2ObjcMappingFileProvider directJ2ObjcMappingFileProvider, - List depAttributes, - List otherDeps) - throws InterruptedException, ActionConflictException { - ConfiguredAspect.Builder builder = new ConfiguredAspect.Builder(ruleContext); - ObjcCommon common; - CcCompilationContext ccCompilationContext = null; - CcLinkingContext ccLinkingContext = null; - - IntermediateArtifacts intermediateArtifacts = j2objcIntermediateArtifacts(ruleContext); - if (!j2ObjcSource.getObjcSrcs().isEmpty()) { - common = - common( - ObjcCommon.Purpose.COMPILE_AND_LINK, - ruleContext, - intermediateArtifacts, - j2ObjcSource.getObjcSrcs(), - j2ObjcSource.getObjcHdrs(), - j2ObjcSource.getHeaderSearchPaths(), - depAttributes, - otherDeps); - - try { - CcToolchainProvider ccToolchain = - CppHelper.getToolchain( - ruleContext, - ruleContext.getPrerequisite(CcToolchain.CC_TOOLCHAIN_DEFAULT_ATTRIBUTE_NAME), - ccToolchainType); - ImmutableList extraCompileArgs = - j2objcCompileWithARC(ruleContext) - ? 
ImmutableList.of("-fno-strict-overflow", "-fobjc-arc-exceptions") - : ImmutableList.of("-fno-strict-overflow", "-fobjc-weak"); - - Object starlarkFunc = - ruleContext.getStarlarkDefinedBuiltin( - "register_compile_and_archive_actions_for_j2objc"); - ruleContext.initStarlarkRuleContext(); - Tuple compilationResult = - (Tuple) - ruleContext.callStarlarkOrThrowRuleError( - starlarkFunc, - ImmutableList.of( - ruleContext.getStarlarkRuleContext(), - ccToolchain, - intermediateArtifacts, - common.getCompilationArtifacts().get(), - common.getObjcCompilationContext(), - StarlarkList.immutableCopyOf(common.getCcLinkingContexts()), - StarlarkList.immutableCopyOf(extraCompileArgs)), - new HashMap<>()); - - ccCompilationContext = (CcCompilationContext) compilationResult.get(0); - ccLinkingContext = (CcLinkingContext) compilationResult.get(1); - } catch (RuleErrorException e) { - ruleContext.ruleError(e.getMessage()); - } - } else { - common = - common( - ObjcCommon.Purpose.LINK_ONLY, - ruleContext, - intermediateArtifacts, - ImmutableList.of(), - ImmutableList.of(), - ImmutableList.of(), - depAttributes, - otherDeps); - ccCompilationContext = common.createCcCompilationContext(); - ccLinkingContext = common.createCcLinkingContext(); - } - - return builder - .addNativeDeclaredProvider( - exportedJ2ObjcMappingFileProvider(base, ruleContext, directJ2ObjcMappingFileProvider)) - .addNativeDeclaredProvider(common.getObjcProvider()) - .addNativeDeclaredProvider( - CcInfo.builder() - .setCcCompilationContext(ccCompilationContext) - .setCcLinkingContext(ccLinkingContext) - .build()) - .build(); - } - - private ConfiguredAspect java(ConfiguredTarget base, RuleContext ruleContext) - throws InterruptedException, ActionConflictException, EvalException, RuleErrorException { - NestedSet compileTimeJars = JavaInfo.transitiveCompileTimeJars(base); - ImmutableSet.Builder javaSourceFilesBuilder = ImmutableSet.builder(); - ImmutableSet.Builder javaSourceJarsBuilder = ImmutableSet.builder(); - - 
for (Artifact srcArtifact : ruleContext.getPrerequisiteArtifacts("srcs").list()) { - String srcFilename = srcArtifact.getExecPathString(); - if (JavaSemantics.SOURCE_JAR.apply(srcFilename)) { - javaSourceJarsBuilder.add(srcArtifact); - } else if (JavaSemantics.JAVA_SOURCE.apply(srcFilename)) { - javaSourceFilesBuilder.add(srcArtifact); - } - } - Artifact srcJar = - ruleContext.attributes().has("srcjar") - ? ruleContext.getPrerequisiteArtifact("srcjar") - : null; - if (srcJar != null) { - javaSourceJarsBuilder.add(srcJar); - } - - JavaInfo.genSourceJar(base).ifPresent(javaSourceJarsBuilder::add); - - ImmutableList javaSourceFiles = javaSourceFilesBuilder.build().asList(); - ImmutableList javaSourceJars = javaSourceJarsBuilder.build().asList(); - J2ObjcSource j2ObjcSource = javaJ2ObjcSource(ruleContext, javaSourceFiles, javaSourceJars); - J2ObjcMappingFileProvider depJ2ObjcMappingFileProvider = - depJ2ObjcMappingFileProvider(ruleContext); - - J2ObjcMappingFileProvider directJ2ObjcMappingFileProvider; - if (j2ObjcSource.getObjcSrcs().isEmpty()) { - directJ2ObjcMappingFileProvider = new J2ObjcMappingFileProvider.Builder().build(); - } else { - directJ2ObjcMappingFileProvider = - createJ2ObjcTranspilationAction( - ruleContext, - javaSourceFiles, - javaSourceJars, - depJ2ObjcMappingFileProvider, - compileTimeJars, - j2ObjcSource); - } - return buildAspect( - base, - ruleContext, - j2ObjcSource, - directJ2ObjcMappingFileProvider, - JAVA_DEPENDENT_ATTRIBUTES, - ImmutableList.of()); - } - - @Nullable - private ConfiguredAspect proto(ConfiguredTarget base, RuleContext ruleContext) - throws InterruptedException, ActionConflictException { - ProtoLangToolchainProvider protoToolchain = - ProtoLangToolchainProvider.get(ruleContext, J2OBJC_PROTO_TOOLCHAIN_ATTR); - StarlarkInfo starlarkProtoToolchain = - ProtoLangToolchainProvider.getStarlarkProvider(ruleContext, J2OBJC_PROTO_TOOLCHAIN_ATTR); - try { - // Avoid pulling in any generated files from forbidden protos. 
- ImmutableList filteredProtoSources = - ImmutableList.copyOf( - ProtoCommon.filterSources(ruleContext, base, starlarkProtoToolchain)); - - J2ObjcSource j2ObjcSource = protoJ2ObjcSource(ruleContext, base, filteredProtoSources); - - J2ObjcMappingFileProvider directJ2ObjcMappingFileProvider; - if (j2ObjcSource.getObjcSrcs().isEmpty()) { - directJ2ObjcMappingFileProvider = new J2ObjcMappingFileProvider.Builder().build(); - } else { - - directJ2ObjcMappingFileProvider = - createJ2ObjcProtoCompileActions( - base, starlarkProtoToolchain, ruleContext, filteredProtoSources, j2ObjcSource); - } - - return buildAspect( - base, - ruleContext, - j2ObjcSource, - directJ2ObjcMappingFileProvider, - PROTO_DEPENDENT_ATTRIBUTES, - ImmutableList.of(protoToolchain.runtime())); - } catch (RuleErrorException e) { - ruleContext.ruleError(e.getMessage()); - return null; - } - } - - private static J2ObjcMappingFileProvider exportedJ2ObjcMappingFileProvider( - ConfiguredTarget base, - RuleContext ruleContext, - J2ObjcMappingFileProvider directJ2ObjcMappingFileProvider) { - J2ObjcMappingFileProvider depJ2ObjcMappingFileProvider = - depJ2ObjcMappingFileProvider(ruleContext); - - NestedSetBuilder exportedHeaderMappingFiles = - NestedSetBuilder.stableOrder() - .addTransitive(directJ2ObjcMappingFileProvider.getHeaderMappingFiles()); - - NestedSetBuilder exportedClassMappingFiles = - NestedSetBuilder.stableOrder() - .addTransitive(directJ2ObjcMappingFileProvider.getClassMappingFiles()) - .addTransitive(depJ2ObjcMappingFileProvider.getClassMappingFiles()); - - NestedSetBuilder exportedDependencyMappingFiles = - NestedSetBuilder.stableOrder() - .addTransitive(directJ2ObjcMappingFileProvider.getDependencyMappingFiles()) - .addTransitive(depJ2ObjcMappingFileProvider.getDependencyMappingFiles()); - - NestedSetBuilder archiveSourceMappingFiles = - NestedSetBuilder.stableOrder() - .addTransitive(directJ2ObjcMappingFileProvider.getArchiveSourceMappingFiles()) - 
.addTransitive(depJ2ObjcMappingFileProvider.getArchiveSourceMappingFiles()); - - // J2ObjC merges all transitive input header mapping files into one header mapping file, - // so we only need to re-export other dependent output header mapping files in proto rules and - // rules where J2ObjC is not run (e.g., no sources). - // We also add the transitive header mapping files if experimental J2ObjC header mapping is - // turned on. The experimental support does not merge transitive input header mapping files. - boolean experimentalJ2ObjcHeaderMap = - ruleContext.getFragment(J2ObjcConfiguration.class).experimentalJ2ObjcHeaderMap(); - if (isProtoRule(base) || exportedHeaderMappingFiles.isEmpty() || experimentalJ2ObjcHeaderMap) { - exportedHeaderMappingFiles.addTransitive( - depJ2ObjcMappingFileProvider.getHeaderMappingFiles()); - } - - return new J2ObjcMappingFileProvider( - exportedHeaderMappingFiles.build(), - exportedClassMappingFiles.build(), - exportedDependencyMappingFiles.build(), - archiveSourceMappingFiles.build()); - } - - private static J2ObjcMappingFileProvider depJ2ObjcMappingFileProvider(RuleContext ruleContext) { - NestedSetBuilder depsHeaderMappingsBuilder = NestedSetBuilder.stableOrder(); - NestedSetBuilder depsClassMappingsBuilder = NestedSetBuilder.stableOrder(); - NestedSetBuilder depsDependencyMappingsBuilder = NestedSetBuilder.stableOrder(); - NestedSetBuilder depsArchiveSourceMappingsBuilder = NestedSetBuilder.stableOrder(); - - for (J2ObjcMappingFileProvider mapping : getJ2ObjCMappings(ruleContext)) { - depsHeaderMappingsBuilder.addTransitive(mapping.getHeaderMappingFiles()); - depsClassMappingsBuilder.addTransitive(mapping.getClassMappingFiles()); - depsDependencyMappingsBuilder.addTransitive(mapping.getDependencyMappingFiles()); - depsArchiveSourceMappingsBuilder.addTransitive(mapping.getArchiveSourceMappingFiles()); - } - - return new J2ObjcMappingFileProvider( - depsHeaderMappingsBuilder.build(), - depsClassMappingsBuilder.build(), - 
depsDependencyMappingsBuilder.build(), - depsArchiveSourceMappingsBuilder.build()); - } - - private static ImmutableList sourceJarFlags(RuleContext ruleContext) { - return ImmutableList.of( - "--output_gen_source_dir", - j2ObjcSourceJarTranslatedSourceTreeArtifact(ruleContext).getExecPathString(), - "--output_gen_header_dir", - j2objcSourceJarTranslatedHeaderTreeArtifact(ruleContext).getExecPathString()); - } - - private static J2ObjcMappingFileProvider createJ2ObjcTranspilationAction( - RuleContext ruleContext, - ImmutableList sources, - ImmutableList sourceJars, - J2ObjcMappingFileProvider depJ2ObjcMappingFileProvider, - NestedSet compileTimeJars, - J2ObjcSource j2ObjcSource) - throws EvalException { - CustomCommandLine.Builder argBuilder = CustomCommandLine.builder(); - ToolchainInfo toolchainInfo = - ruleContext - .getToolchainContexts() - .getToolchainContext("j2objc") - .forToolchainType( - ruleContext - .getPrerequisite(JavaRuleClasses.JAVA_TOOLCHAIN_TYPE_ATTRIBUTE_NAME) - .getLabel()); - JavaToolchainProvider provider = (JavaToolchainProvider) toolchainInfo.getValue("java"); - PathFragment javaExecutable = provider.getJavaRuntime().javaBinaryExecPathFragment(); - argBuilder.add("--java", javaExecutable.getPathString()); - - Artifact j2ObjcDeployJar = ruleContext.getPrerequisiteArtifact("$j2objc"); - argBuilder.addExecPath("--j2objc", j2ObjcDeployJar); - - argBuilder.add("--main_class").add("com.google.devtools.j2objc.J2ObjC"); - argBuilder.add("--objc_file_path").addPath(j2ObjcSource.getObjcFilePath()); - - Artifact outputDependencyMappingFile = j2ObjcOutputDependencyMappingFile(ruleContext); - argBuilder.addExecPath("--output_dependency_mapping_file", outputDependencyMappingFile); - - if (!sourceJars.isEmpty()) { - argBuilder.addExecPaths( - "--src_jars", VectorArg.join(",").each(ImmutableList.copyOf(sourceJars))); - argBuilder.addAll(sourceJarFlags(ruleContext)); - } - - List translationFlags = - 
ruleContext.getFragment(J2ObjcConfiguration.class).getTranslationFlags(); - argBuilder.addAll(ImmutableList.copyOf(translationFlags)); - - NestedSet depsHeaderMappingFiles = - depJ2ObjcMappingFileProvider.getHeaderMappingFiles(); - if (!depsHeaderMappingFiles.isEmpty()) { - argBuilder.addExecPaths("--header-mapping", VectorArg.join(",").each(depsHeaderMappingFiles)); - } - - boolean experimentalJ2ObjcHeaderMap = - ruleContext.getFragment(J2ObjcConfiguration.class).experimentalJ2ObjcHeaderMap(); - Artifact outputHeaderMappingFile = j2ObjcOutputHeaderMappingFile(ruleContext); - if (!experimentalJ2ObjcHeaderMap) { - argBuilder.addExecPath("--output-header-mapping", outputHeaderMappingFile); - } - - NestedSet depsClassMappingFiles = depJ2ObjcMappingFileProvider.getClassMappingFiles(); - if (!depsClassMappingFiles.isEmpty()) { - argBuilder.addExecPaths("--mapping", VectorArg.join(",").each(depsClassMappingFiles)); - } - - Artifact archiveSourceMappingFile = j2ObjcOutputArchiveSourceMappingFile(ruleContext); - argBuilder.addExecPath("--output_archive_source_mapping_file", archiveSourceMappingFile); - - Artifact compiledLibrary = j2objcIntermediateArtifacts(ruleContext).archive(); - argBuilder.addExecPath("--compiled_archive_file_path", compiledLibrary); - - Artifact bootclasspathJar = ruleContext.getPrerequisiteArtifact("$jre_emul_jar"); - argBuilder.addFormatted("-Xbootclasspath:%s", bootclasspathJar); - - // A valid Java system module contains 3 files. The top directory contains a file "release". 
- ImmutableList moduleFiles = - ruleContext.getPrerequisiteArtifacts("$jre_emul_module").list(); - for (Artifact a : moduleFiles) { - if (a.getFilename().equals("release")) { - argBuilder.add("--system", a.getDirname()); - break; - } - } - - Artifact deadCodeReport = ruleContext.getPrerequisiteArtifact(":dead_code_report"); - if (deadCodeReport != null) { - argBuilder.addExecPath("--dead-code-report", deadCodeReport); - } - - argBuilder.add("-d").addPath(j2ObjcSource.getObjcFilePath()); - - if (!compileTimeJars.isEmpty()) { - argBuilder.addExecPaths("-classpath", VectorArg.join(":").each(compileTimeJars)); - } - - argBuilder.addExecPaths(sources); - - SpawnAction.Builder transpilationAction = - new SpawnAction.Builder() - .setMnemonic("TranspilingJ2objc") - .setExecutable(ruleContext.getExecutablePrerequisite("$j2objc_wrapper")) - .addInput(j2ObjcDeployJar) - .addInput(bootclasspathJar) - .addInputs(moduleFiles) - .addInputs(sources) - .addInputs(sourceJars) - .addTransitiveInputs(compileTimeJars) - .addTransitiveInputs(provider.getJavaRuntime().javaBaseInputs()) - .addTransitiveInputs(depsHeaderMappingFiles) - .addTransitiveInputs(depsClassMappingFiles) - .addCommandLine( - argBuilder.build(), - ParamFileInfo.builder(ParameterFile.ParameterFileType.UNQUOTED) - .setCharset(ISO_8859_1) - .setUseAlways(true) - .build()) - .addOutputs(j2ObjcSource.getObjcSrcs()) - .addOutputs(j2ObjcSource.getObjcHdrs()) - .addOutput(outputDependencyMappingFile) - .addOutput(archiveSourceMappingFile) - .setExecGroup("j2objc"); - - if (deadCodeReport != null) { - transpilationAction.addInput(deadCodeReport); - } - - if (!experimentalJ2ObjcHeaderMap) { - transpilationAction.addOutput(outputHeaderMappingFile); - } - ruleContext.registerAction(transpilationAction.build(ruleContext)); - - if (experimentalJ2ObjcHeaderMap) { - CustomCommandLine.Builder headerMapCommandLine = CustomCommandLine.builder(); - if (!sources.isEmpty()) { - headerMapCommandLine.addExecPaths("--source_files", 
VectorArg.join(",").each(sources)); - } - if (!sourceJars.isEmpty()) { - headerMapCommandLine.addExecPaths("--source_jars", VectorArg.join(",").each(sourceJars)); - } - headerMapCommandLine.addExecPath("--output_mapping_file", outputHeaderMappingFile); - ruleContext.registerAction( - new SpawnAction.Builder() - .setMnemonic("GenerateJ2objcHeaderMap") - .setExecutable(ruleContext.getExecutablePrerequisite("$j2objc_header_map")) - .addInputs(sources) - .addInputs(sourceJars) - .addCommandLine( - headerMapCommandLine.build(), - ParamFileInfo.builder(ParameterFileType.SHELL_QUOTED).build()) - .addOutput(outputHeaderMappingFile) - .setExecGroup("j2objc") - .build(ruleContext)); - } - - return new J2ObjcMappingFileProvider( - NestedSetBuilder.stableOrder().add(outputHeaderMappingFile).build(), - NestedSetBuilder.stableOrder().build(), - NestedSetBuilder.stableOrder().add(outputDependencyMappingFile).build(), - NestedSetBuilder.stableOrder().add(archiveSourceMappingFile).build()); - } - - private J2ObjcMappingFileProvider createJ2ObjcProtoCompileActions( - ConfiguredTarget base, - StarlarkInfo protoToolchain, - RuleContext ruleContext, - ImmutableList filteredProtoSources, - J2ObjcSource j2ObjcSource) - throws RuleErrorException, InterruptedException { - ImmutableList outputHeaderMappingFiles = - filteredProtoSources.isEmpty() - ? ImmutableList.of() - : ProtoCommon.declareGeneratedFiles(ruleContext, base, ".j2objc.mapping"); - ImmutableList outputClassMappingFiles = - filteredProtoSources.isEmpty() - ? 
ImmutableList.of() - : ProtoCommon.declareGeneratedFiles(ruleContext, base, ".clsmap.properties"); - ImmutableList outputs = - ImmutableList.builder() - .addAll(j2ObjcSource.getObjcSrcs()) - .addAll(j2ObjcSource.getObjcHdrs()) - .addAll(outputHeaderMappingFiles) - .addAll(outputClassMappingFiles) - .build(); - - String bindirPath = getProtoOutputRoot(ruleContext).getPathString(); - - ProtoCommon.compile( - ruleContext, - base, - checkNotNull(protoToolchain), - outputs, - bindirPath, - "Generating j2objc proto_library %{label}", - "proto_compiler"); - - return new J2ObjcMappingFileProvider( - NestedSetBuilder.stableOrder().addAll(outputHeaderMappingFiles).build(), - NestedSetBuilder.stableOrder().addAll(outputClassMappingFiles).build(), - NestedSetBuilder.stableOrder().build(), - NestedSetBuilder.stableOrder().build()); - } - - private static List getJ2ObjCMappings(RuleContext context) { - ImmutableList.Builder mappingFileProviderBuilder = - new ImmutableList.Builder<>(); - addJ2ObjCMappingsForAttribute(mappingFileProviderBuilder, context, "deps"); - addJ2ObjCMappingsForAttribute(mappingFileProviderBuilder, context, "runtime_deps"); - addJ2ObjCMappingsForAttribute(mappingFileProviderBuilder, context, "exports"); - return mappingFileProviderBuilder.build(); - } - - private static void addJ2ObjCMappingsForAttribute( - ImmutableList.Builder builder, - RuleContext context, - String attributeName) { - if (context.attributes().has(attributeName, BuildType.LABEL_LIST)) { - for (TransitiveInfoCollection dependencyInfoDatum : context.getPrerequisites(attributeName)) { - J2ObjcMappingFileProvider provider = - dependencyInfoDatum.get(J2ObjcMappingFileProvider.PROVIDER); - if (provider != null) { - builder.add(provider); - } - } - } - } - - private static Artifact j2ObjcOutputHeaderMappingFile(RuleContext ruleContext) { - return ObjcRuleClasses.artifactByAppendingToBaseName(ruleContext, ".mapping.j2objc"); - } - - private static Artifact 
j2ObjcOutputDependencyMappingFile(RuleContext ruleContext) { - return ObjcRuleClasses.artifactByAppendingToBaseName(ruleContext, ".dependency_mapping.j2objc"); - } - - private static Artifact j2ObjcOutputArchiveSourceMappingFile(RuleContext ruleContext) { - return ObjcRuleClasses.artifactByAppendingToBaseName( - ruleContext, ".archive_source_mapping.j2objc"); - } - - private static Artifact j2ObjcSourceJarTranslatedSourceTreeArtifact(RuleContext ruleContext) { - PathFragment rootRelativePath = - ruleContext.getUniqueDirectory("_j2objc/src_jar_files").getRelative("source_files"); - return ruleContext.getTreeArtifact(rootRelativePath, ruleContext.getBinOrGenfilesDirectory()); - } - - /** - * Returns a unique path fragment for j2objc headers. The slightly shorter path is useful for very - * large app builds, which otherwise may have command lines that are too long to be executable. - */ - private static String j2objcHeaderBase(RuleContext ruleContext) { - boolean shorterPath = - ruleContext.getFragment(J2ObjcConfiguration.class).experimentalShorterHeaderPath(); - return shorterPath ? 
"_ios" : "_j2objc"; - } - - private static Artifact j2objcSourceJarTranslatedHeaderTreeArtifact(RuleContext ruleContext) { - String uniqueDirectoryPath = j2objcHeaderBase(ruleContext) + "/src_jar_files"; - PathFragment rootRelativePath = - ruleContext.getUniqueDirectory(uniqueDirectoryPath).getRelative("header_files"); - return ruleContext.getTreeArtifact(rootRelativePath, ruleContext.getBinOrGenfilesDirectory()); - } - - private static boolean j2objcCompileWithARC(RuleContext ruleContext) { - return ruleContext.getFragment(J2ObjcConfiguration.class).compileWithARC(); - } - - private static J2ObjcSource javaJ2ObjcSource( - RuleContext ruleContext, - ImmutableList javaInputSourceFiles, - ImmutableList javaSourceJarFiles) { - PathFragment objcFileRootRelativePath = - ruleContext.getUniqueDirectory(j2objcHeaderBase(ruleContext)); - PathFragment objcFileRootExecPath = - ruleContext.getBinFragment().getRelative(objcFileRootRelativePath); - - // Note that these are mutable lists so that we can add the translated file info below. - List objcSrcs = - getOutputObjcFiles(ruleContext, javaInputSourceFiles, objcFileRootRelativePath, ".m"); - List objcHdrs = - getOutputObjcFiles(ruleContext, javaInputSourceFiles, objcFileRootRelativePath, ".h"); - List headerSearchPaths = - j2objcSourceHeaderSearchPaths(ruleContext, objcFileRootExecPath, javaInputSourceFiles); - if (!javaSourceJarFiles.isEmpty()) { - // Add the translated source + header files. 
- objcSrcs.add(j2ObjcSourceJarTranslatedSourceTreeArtifact(ruleContext)); - Artifact translatedHeader = j2objcSourceJarTranslatedHeaderTreeArtifact(ruleContext); - objcHdrs.add(translatedHeader); - headerSearchPaths.add(translatedHeader.getExecPath()); - } - - return new J2ObjcSource( - ruleContext.getRule().getLabel(), - objcSrcs, - objcHdrs, - objcFileRootExecPath, - SourceType.JAVA, - headerSearchPaths, - j2objcCompileWithARC(ruleContext)); - } - - private static J2ObjcSource protoJ2ObjcSource( - RuleContext ruleContext, ConfiguredTarget protoTarget, ImmutableList protoSources) - throws RuleErrorException, InterruptedException { - PathFragment objcFileRootExecPath = getProtoOutputRoot(ruleContext); - - List headerSearchPaths = - j2objcSourceHeaderSearchPaths(ruleContext, objcFileRootExecPath, protoSources); - - return new J2ObjcSource( - ruleContext.getTarget().getLabel(), - protoSources.isEmpty() - ? ImmutableList.of() - : ProtoCommon.declareGeneratedFiles(ruleContext, protoTarget, ".j2objc.pb.m"), - protoSources.isEmpty() - ? ImmutableList.of() - : ProtoCommon.declareGeneratedFiles(ruleContext, protoTarget, ".j2objc.pb.h"), - objcFileRootExecPath, - SourceType.PROTO, - headerSearchPaths, - /*compileWithARC=*/ false); // generated protos do not support ARC. - } - - private static PathFragment getProtoOutputRoot(RuleContext ruleContext) { - if (ruleContext.getConfiguration().isSiblingRepositoryLayout()) { - return ruleContext.getBinFragment(); - } - return ruleContext - .getBinFragment() - .getRelative(ruleContext.getLabel().getRepository().getExecPath(false)); - } - - private static boolean isProtoRule(ConfiguredTarget base) { - try { - return base.get(ProtoInfo.PROVIDER) != null; - } catch (RuleErrorException e) { - return false; - } - } - - /** Returns a mutable List of objc output files. 
*/ - private static List getOutputObjcFiles( - RuleContext ruleContext, - Collection javaSrcs, - PathFragment objcFileRootRelativePath, - String suffix) { - List objcSources = new ArrayList<>(); - for (Artifact javaSrc : javaSrcs) { - objcSources.add( - ruleContext.getRelatedArtifact( - objcFileRootRelativePath.getRelative(javaSrc.getExecPath()), suffix)); - } - return objcSources; - } - - /** - * Returns a mutable list of header search paths necessary to compile the J2ObjC-generated code - * from a single target. - * - * @param ruleContext the rule context - * @param objcFileRootExecPath the exec path under which all J2ObjC-generated file resides - * @param sourcesToTranslate the source files to be translated by J2ObjC in a single target - */ - private static List j2objcSourceHeaderSearchPaths( - RuleContext ruleContext, - PathFragment objcFileRootExecPath, - Collection sourcesToTranslate) { - PathFragment genRoot = ruleContext.getGenfilesFragment(); - List headerSearchPaths = new ArrayList<>(); - headerSearchPaths.add(objcFileRootExecPath); - // We add another header search path with gen root if we have generated sources to translate. - for (Artifact sourceToTranslate : sourcesToTranslate) { - if (!sourceToTranslate.isSourceArtifact()) { - headerSearchPaths.add(objcFileRootExecPath.getRelative(genRoot)); - return headerSearchPaths; - } - } - - return headerSearchPaths; - } - - /** Sets up and returns an {@link ObjcCommon} object containing the J2ObjC-translated code. 
*/ - private static ObjcCommon common( - ObjcCommon.Purpose purpose, - RuleContext ruleContext, - IntermediateArtifacts intermediateArtifacts, - List transpiledSources, - List transpiledHeaders, - List headerSearchPaths, - List dependentAttributes, - List otherDeps) - throws InterruptedException { - ObjcCommon.Builder builder = new ObjcCommon.Builder(purpose, ruleContext); - - if (!transpiledSources.isEmpty() || !transpiledHeaders.isEmpty()) { - CompilationArtifacts compilationArtifacts; - if (j2objcCompileWithARC(ruleContext)) { - compilationArtifacts = - new CompilationArtifacts( - transpiledSources, - /* nonArcSrcs= */ ImmutableList.of(), - transpiledHeaders, - intermediateArtifacts); - } else { - compilationArtifacts = - new CompilationArtifacts( - /* srcs= */ ImmutableList.of(), - transpiledSources, - transpiledHeaders, - intermediateArtifacts); - } - builder.setCompilationArtifacts(compilationArtifacts); - builder.setHasModuleMap(); - } - - ImmutableList.Builder ccInfos = new ImmutableList.Builder<>(); - for (String attrName : dependentAttributes) { - if (ruleContext.attributes().has(attrName, BuildType.LABEL_LIST) - || ruleContext.attributes().has(attrName, BuildType.LABEL)) { - for (TransitiveInfoCollection dep : ruleContext.getPrerequisites(attrName)) { - CcInfo ccInfo = dep.get(CcInfo.PROVIDER); - if (ccInfo != null) { - ccInfos.add(ccInfo); - } - } - builder.addObjcProviders( - ruleContext.getPrerequisites(attrName, ObjcProvider.STARLARK_CONSTRUCTOR)); - } - } - builder.addCcInfos(ccInfos.build()); - - // We can't just use addDeps since that now takes ConfiguredTargetAndData and we only have - // TransitiveInfoCollections - builder.addObjcProviders( - otherDeps.stream() - .map(d -> d.get(ObjcProvider.STARLARK_CONSTRUCTOR)) - .collect(toImmutableList())); - builder.addCcInfos( - otherDeps.stream().map(d -> d.get(CcInfo.PROVIDER)).collect(toImmutableList())); - - return builder - .addIncludes(headerSearchPaths) - 
.setIntermediateArtifacts(intermediateArtifacts) - .build(); - } -} diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcEntryClassProvider.java b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcEntryClassProvider.java deleted file mode 100644 index 632d6ec55bf6d5..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcEntryClassProvider.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2014 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.lib.rules.objc; - -import com.google.devtools.build.lib.analysis.RuleContext; -import com.google.devtools.build.lib.collect.nestedset.NestedSet; -import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; -import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; -import com.google.devtools.build.lib.packages.BuildType; -import com.google.devtools.build.lib.packages.BuiltinProvider; -import com.google.devtools.build.lib.packages.NativeInfo; -import com.google.errorprone.annotations.CanIgnoreReturnValue; - -/** - * This provider is exported by j2objc_library to export entry class information necessary for - * J2ObjC dead code removal performed at the binary level in ObjC rules. 
- */ -@Immutable -public final class J2ObjcEntryClassProvider extends NativeInfo { - private final NestedSet entryClasses; - - public static final String NAME = "J2ObjcEntryClassInfo"; - public static final J2ObjcEntryClassProvider.Provider PROVIDER = - new J2ObjcEntryClassProvider.Provider(); - - @Override - public BuiltinProvider getProvider() { - return PROVIDER; - } - - /** - * A builder for J2ObjcEntryClassProvider. - */ - public static class Builder { - private final NestedSetBuilder entryClassesBuilder = NestedSetBuilder.stableOrder(); - - /** - * Constructs a new, empty J2ObjcEntryClassProvider builder. - */ - public Builder() {} - - /** - * Transitively adds the given {@link J2ObjcEntryClassProvider} and all its properties to this - * builder. - * - * @param provider the J2ObjcEntryClassProvider to add - * @return this builder - */ - @CanIgnoreReturnValue - public Builder addTransitive(J2ObjcEntryClassProvider provider) { - entryClassesBuilder.addTransitive(provider.getEntryClasses()); - return this; - } - - /** - * Transitively adds the given {@link J2ObjcEntryClassProvider}s and all their properties to - * this builder. - * - * @param providers the J2ObjcEntryClassProviders to add - * @return this builder - */ - @CanIgnoreReturnValue - public Builder addTransitive(Iterable providers) { - for (J2ObjcEntryClassProvider provider : providers) { - addTransitive(provider); - } - return this; - } - - /** - * Transitively adds all the J2ObjcEntryClassProviders and all their properties that can be - * reached through the "deps" attribute. - * - * @param ruleContext the rule context - * @return this builder - */ - @CanIgnoreReturnValue - public Builder addTransitive(RuleContext ruleContext) { - if (ruleContext.attributes().has("deps", BuildType.LABEL_LIST)) { - addTransitive(ruleContext.getPrerequisites("deps", J2ObjcEntryClassProvider.PROVIDER)); - } - - return this; - } - - /** - * Adds the given entry classes to this builder. See {@link #getEntryClasses()}. 
- * - * @param entryClasses the entry classes to add - * @return this builder - */ - @CanIgnoreReturnValue - public Builder addEntryClasses(Iterable entryClasses) { - entryClassesBuilder.addAll(entryClasses); - return this; - } - - /** - * Builds a J2ObjcEntryClassProvider from the information in this builder. - * - * @return the J2ObjcEntryClassProvider to be built - */ - public J2ObjcEntryClassProvider build() { - return new J2ObjcEntryClassProvider(entryClassesBuilder.build()); - } - } - - /** - * Constructs a {@link J2ObjcEntryClassProvider} to supply J2ObjC-translated ObjC sources to - * objc_binary for compilation and linking. - * - * @param entryClasses a set of names of Java classes to used as entry point for J2ObjC dead code - * analysis. The Java class names should be in canonical format as defined by the Java - * Language Specification. - */ - private J2ObjcEntryClassProvider(NestedSet entryClasses) { - this.entryClasses = entryClasses; - } - - /** - * Returns a set of entry classes specified on attribute entry_classes of j2objc_library targets - * transitively. - */ - public NestedSet getEntryClasses() { - return entryClasses; - } - - /** Provider */ - public static class Provider extends BuiltinProvider { - public Provider() { - super(J2ObjcEntryClassProvider.NAME, J2ObjcEntryClassProvider.class); - } - } -} diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibrary.java b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibrary.java deleted file mode 100644 index 9bbe0a5ff6a8d3..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibrary.java +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2015 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.lib.rules.objc; - -import static com.google.devtools.build.lib.collect.nestedset.Order.STABLE_ORDER; - -import com.google.common.collect.ImmutableList; -import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; -import com.google.devtools.build.lib.analysis.ConfiguredTarget; -import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder; -import com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory; -import com.google.devtools.build.lib.analysis.RuleContext; -import com.google.devtools.build.lib.analysis.RunfilesProvider; -import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; -import com.google.devtools.build.lib.packages.BuildType; -import com.google.devtools.build.lib.packages.Type; -import com.google.devtools.build.lib.rules.cpp.CcInfo; -import com.google.devtools.build.lib.rules.cpp.CppSemantics; -import java.util.List; -import javax.annotation.Nullable; - -/** - * Implementation for the "j2objc_library" rule, which exports ObjC source files translated from - * Java source files in java_library rules to dependent objc_binary rules for compilation and - * linking into the final application bundle. See {@link J2ObjcLibraryBaseRule} for details. 
- */ -public class J2ObjcLibrary implements RuleConfiguredTargetFactory { - - protected J2ObjcLibrary(CppSemantics cppSemantics) {} - - public static final String NO_ENTRY_CLASS_ERROR_MSG = - "Entry classes must be specified when flag --compilation_mode=opt is on in order to" - + " perform J2ObjC dead code stripping."; - - public static final ImmutableList J2OBJC_SUPPORTED_RULES = - ImmutableList.of("java_import", "java_library", "java_proto_library", "proto_library"); - - private ObjcCommon common(RuleContext ruleContext) throws InterruptedException { - List depsCcInfos = ruleContext.getPrerequisites("deps", CcInfo.PROVIDER); - return new ObjcCommon.Builder(ObjcCommon.Purpose.LINK_ONLY, ruleContext) - .setCompilationAttributes( - CompilationAttributes.Builder.fromRuleContext(ruleContext).build()) - .addDeps(ruleContext.getPrerequisites("deps")) - .addCcLinkingContexts(depsCcInfos) - .addDeps(ruleContext.getPrerequisites("jre_deps")) - .addDirectCcCompilationContexts(depsCcInfos) - .setIntermediateArtifacts(new IntermediateArtifacts(ruleContext)) - .build(); - } - - private static void j2objcLibraryLockdown(RuleContext ruleContext) throws RuleErrorException { - if (!ruleContext.getFragment(J2ObjcConfiguration.class).j2objcLibraryMigration()) { - return; - } - - if (!ruleContext - .getRule() - .getRuleTags() - .contains("__J2OBJC_LIBRARY_MIGRATION_DO_NOT_USE_WILL_BREAK__")) { - throw ruleContext.throwWithRuleError( - "j2objc_library is locked. 
Please do not use this rule since it will be deleted in the" - + " future."); - } - } - - @Override - @Nullable - public ConfiguredTarget create(RuleContext ruleContext) - throws InterruptedException, RuleErrorException, ActionConflictException { - j2objcLibraryLockdown(ruleContext); - - checkAttributes(ruleContext); - - if (ruleContext.hasErrors()) { - return null; - } - - J2ObjcEntryClassProvider j2ObjcEntryClassProvider = new J2ObjcEntryClassProvider.Builder() - .addTransitive(ruleContext) - .addEntryClasses(ruleContext.attributes().get("entry_classes", Type.STRING_LIST)) - .build(); - - ObjcCommon common = common(ruleContext); - ObjcProvider objcProvider = common.getObjcProvider(); - - J2ObjcMappingFileProvider j2ObjcMappingFileProvider = - J2ObjcMappingFileProvider.union( - ruleContext.getPrerequisites("deps", J2ObjcMappingFileProvider.PROVIDER)); - - return new RuleConfiguredTargetBuilder(ruleContext) - .setFilesToBuild(NestedSetBuilder.emptySet(STABLE_ORDER)) - .add(RunfilesProvider.class, RunfilesProvider.EMPTY) - .addNativeDeclaredProvider(j2ObjcEntryClassProvider) - .addNativeDeclaredProvider(j2ObjcMappingFileProvider) - .addNativeDeclaredProvider(objcProvider) - .addNativeDeclaredProvider(common.createCcInfo()) - .addStarlarkTransitiveInfo(ObjcProvider.STARLARK_NAME, objcProvider) - .build(); - } - - private static void checkAttributes(RuleContext ruleContext) { - checkAttributes(ruleContext, "deps"); - checkAttributes(ruleContext, "exports"); - } - - private static void checkAttributes(RuleContext ruleContext, String attributeName) { - if (!ruleContext.attributes().has(attributeName, BuildType.LABEL_LIST)) { - return; - } - - List entryClasses = ruleContext.attributes().get("entry_classes", Type.STRING_LIST); - J2ObjcConfiguration j2objcConfiguration = ruleContext.getFragment(J2ObjcConfiguration.class); - if (j2objcConfiguration.removeDeadCode() && (entryClasses == null || entryClasses.isEmpty())) { - ruleContext.attributeError("entry_classes", 
NO_ENTRY_CLASS_ERROR_MSG); - } - } -} diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibraryBaseRule.java b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibraryBaseRule.java index 87e11a0a7d2d04..90147504a0dc6b 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibraryBaseRule.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcLibraryBaseRule.java @@ -33,14 +33,10 @@ /** * Abstract rule definition for j2objc_library. + * + *

      This rule is implemented in Starlark. This class remains only for doc-gen purposes. */ public class J2ObjcLibraryBaseRule implements RuleDefinition { - private final J2ObjcAspect j2ObjcAspect; - - public J2ObjcLibraryBaseRule(J2ObjcAspect j2ObjcAspect) { - this.j2ObjcAspect = j2ObjcAspect; - } - @Override public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) { // TODO(rduan): Add support for package prefixes. @@ -81,7 +77,6 @@ public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) */ .add( attr("deps", LABEL_LIST) - .aspect(j2ObjcAspect) .allowedRuleClasses( "j2objc_library", "java_library", "java_import", "java_proto_library") .allowedFileTypes()) diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcMappingFileProvider.java b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcMappingFileProvider.java deleted file mode 100644 index 2940f8a9ae3274..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcMappingFileProvider.java +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2014 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package com.google.devtools.build.lib.rules.objc; - -import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.collect.nestedset.NestedSet; -import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; -import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; -import com.google.devtools.build.lib.packages.BuiltinProvider; -import com.google.devtools.build.lib.packages.NativeInfo; -import com.google.errorprone.annotations.CanIgnoreReturnValue; - -/** - * This provider is exported by java_library rules and proto_library rules via the j2objc aspect. - */ -@Immutable -public final class J2ObjcMappingFileProvider extends NativeInfo { - - private final NestedSet headerMappingFiles; - private final NestedSet classMappingFiles; - private final NestedSet dependencyMappingFiles; - private final NestedSet archiveSourceMappingFiles; - - public static final String NAME = "J2ObjcMappingFileInfo"; - public static final J2ObjcMappingFileProvider.Provider PROVIDER = - new J2ObjcMappingFileProvider.Provider(); - - @Override - public BuiltinProvider getProvider() { - return PROVIDER; - } - - /** - * Returns a {@link J2ObjcMappingFileProvider} which combines all input - * {@link J2ObjcMappingFileProvider}s. All mapping files present in any of the input providers - * will be present in the output provider. - */ - public static J2ObjcMappingFileProvider union(Iterable providers) { - J2ObjcMappingFileProvider.Builder builder = new J2ObjcMappingFileProvider.Builder(); - for (J2ObjcMappingFileProvider provider : providers) { - builder.addTransitive(provider); - } - - return builder.build(); - } - - /** - * Constructs a {@link J2ObjcMappingFileProvider} with mapping files to export mappings required - * by J2ObjC translation and proto compilation. - * - * @param headerMappingFiles a nested set of header mapping files which map Java classes to - * their associated translated ObjC header. 
Used by J2ObjC to output correct import directives - * during translation. - * @param classMappingFiles a nested set of class mapping files which map Java class names to - * their associated ObjC class names. Used to support J2ObjC package prefixes. - * @param dependencyMappingFiles a nested set of dependency mapping files which map translated - * ObjC files to their translated direct dependency files. Used to support J2ObjC dead code - * analysis and removal. - * @param archiveSourceMappingFiles a nested set of files containing mappings between J2ObjC - * static library archives and their associated J2ObjC-translated source files. - */ - public J2ObjcMappingFileProvider(NestedSet headerMappingFiles, - NestedSet classMappingFiles, NestedSet dependencyMappingFiles, - NestedSet archiveSourceMappingFiles) { - this.headerMappingFiles = headerMappingFiles; - this.classMappingFiles = classMappingFiles; - this.dependencyMappingFiles = dependencyMappingFiles; - this.archiveSourceMappingFiles = archiveSourceMappingFiles; - } - - /** - * Returns the ObjC header to Java type mapping files for J2ObjC translation. J2ObjC needs these - * mapping files to be able to output translated files with correct header import paths in the - * same directories of the Java source files. - */ - public NestedSet getHeaderMappingFiles() { - return headerMappingFiles; - } - - /** - * Returns the Java class name to ObjC class name mapping files. J2ObjC transpiler and J2ObjC - * proto plugin needs this mapping files to support "objc_class_prefix" proto option, which sets - * the ObjC class prefix on generated protos. - */ - public NestedSet getClassMappingFiles() { - return classMappingFiles; - } - - /** - * Returns the mapping files containing file dependency information among the translated ObjC - * source files. When flag --j2objc_dead_code_removal is specified, they are used to strip unused - * object files inside J2ObjC static libraries before the linking action at binary level. 
- */ - public NestedSet getDependencyMappingFiles() { - return dependencyMappingFiles; - } - - /** - * Returns the files containing mappings between J2ObjC static library archives and their - * associated J2ObjC-translated source files. When flag --j2objc_dead_code_removal is specified, - * they are used to strip unused object files inside J2ObjC static libraries before the linking - * action at binary level. - */ - public NestedSet getArchiveSourceMappingFiles() { - return archiveSourceMappingFiles; - } - - /** - * A builder for this provider that is optimized for collection information from transitive - * dependencies. - */ - public static final class Builder { - private final NestedSetBuilder headerMappingFiles = NestedSetBuilder.stableOrder(); - private final NestedSetBuilder classMappingFiles = NestedSetBuilder.stableOrder(); - private final NestedSetBuilder depEntryFiles = NestedSetBuilder.stableOrder(); - private final NestedSetBuilder archiveSourceMappingFiles = - NestedSetBuilder.stableOrder(); - - @CanIgnoreReturnValue - public Builder addTransitive(J2ObjcMappingFileProvider provider) { - headerMappingFiles.addTransitive(provider.getHeaderMappingFiles()); - classMappingFiles.addTransitive(provider.getClassMappingFiles()); - depEntryFiles.addTransitive(provider.getDependencyMappingFiles()); - archiveSourceMappingFiles.addTransitive(provider.getArchiveSourceMappingFiles()); - - return this; - } - - public J2ObjcMappingFileProvider build() { - return new J2ObjcMappingFileProvider( - headerMappingFiles.build(), - classMappingFiles.build(), - depEntryFiles.build(), - archiveSourceMappingFiles.build()); - } - } - - /** Provider */ - public static class Provider extends BuiltinProvider { - public Provider() { - super(J2ObjcMappingFileProvider.NAME, J2ObjcMappingFileProvider.class); - } - } -} diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcSource.java b/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcSource.java deleted 
file mode 100644 index 46465335a47501..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/J2ObjcSource.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2014 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package com.google.devtools.build.lib.rules.objc; - -import com.google.common.base.Objects; -import com.google.common.collect.ImmutableList; -import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.vfs.PathFragment; -import java.util.List; - -/** An object that captures information of ObjC files generated by J2ObjC in a single target. */ -public final class J2ObjcSource { - - /** - * Indicates the type of files from which the ObjC files included in {@link J2ObjcSource} are - * generated. - */ - public enum SourceType { - /** - * Indicates the original file type is java source file. - */ - JAVA, - - /** - * Indicates the original file type is proto file. - */ - PROTO; - } - - private final Label targetLabel; - private final List objcSrcs; - private final List objcHdrs; - private final PathFragment objcFilePath; - private final SourceType sourceType; - private final List headerSearchPaths; - private final boolean compileWithARC; - - /** - * Constructs a J2ObjcSource containing target information for j2objc transpilation. - * - * @param targetLabel the @{code Label} of the associated target. 
- * @param objcSrcs the {@code Iterable} containing objc source files generated by J2ObjC - * @param objcHdrs the {@code Iterable} containing objc header files generated by J2ObjC - * @param objcFilePath the {@code PathFragment} under which all the generated objc files are. It - * can be used as header search path for objc compilations. - * @param sourceType the type of files from which the ObjC files are generated. - * @param headerSearchPaths the {@code Iterable} of header search paths necessary for compiling - * the generated J2ObjC sources in {@code objcSrcs} - * @param compileWithARC whether the source files were generated to support ARC compilation - */ - public J2ObjcSource( - Label targetLabel, - List objcSrcs, - List objcHdrs, - PathFragment objcFilePath, - SourceType sourceType, - List headerSearchPaths, - boolean compileWithARC) { - this.targetLabel = targetLabel; - this.objcSrcs = ImmutableList.copyOf(objcSrcs); - this.objcHdrs = ImmutableList.copyOf(objcHdrs); - this.objcFilePath = objcFilePath; - this.sourceType = sourceType; - this.headerSearchPaths = ImmutableList.copyOf(headerSearchPaths); - this.compileWithARC = compileWithARC; - } - - /** - * Returns the label of the associated target. - */ - public Label getTargetLabel() { - return targetLabel; - } - - /** Returns the objc source files generated by J2ObjC. */ - public List getObjcSrcs() { - return objcSrcs; - } - - /* - * Returns the objc header files generated by J2ObjC - */ - public List getObjcHdrs() { - return objcHdrs; - } - - /** - * Returns the {@code PathFragment} which represents a directory where the generated ObjC files - * reside. - */ - public PathFragment getObjcFilePath() { - return objcFilePath; - } - - /** Returns a list of header search paths necessary for compiling the generated J2ObjC sources. */ - public List getHeaderSearchPaths() { - return headerSearchPaths; - } - - /** Returns whether output files were generated to support ARC compilation. 
*/ - public boolean compileWithARC() { - return compileWithARC; - } - - @Override - public final boolean equals(Object other) { - if (!(other instanceof J2ObjcSource)) { - return false; - } - - J2ObjcSource that = (J2ObjcSource) other; - return Objects.equal(this.targetLabel, that.targetLabel) - && this.objcSrcs.equals(that.objcSrcs) - && this.objcHdrs.equals(that.objcHdrs) - && Objects.equal(this.objcFilePath, that.objcFilePath) - && this.sourceType == that.sourceType - && this.headerSearchPaths.equals(that.headerSearchPaths) - && this.compileWithARC == that.compileWithARC; - } - - @Override - public int hashCode() { - return Objects.hashCode( - targetLabel, - objcSrcs, - objcHdrs, - objcFilePath, - sourceType, - headerSearchPaths, - compileWithARC); - } -} - diff --git a/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcCommon.java b/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcCommon.java index bf1354700798ad..ef3eee2fe32a7b 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcCommon.java +++ b/src/main/java/com/google/devtools/build/lib/rules/objc/ObjcCommon.java @@ -118,6 +118,9 @@ static class Builder { // TODO(b/171413861): remove after objc link info migration. private final List ccLinkingContextsForMerging = new ArrayList<>(); + private static final ImmutableSet J2OBJC_SUPPORTED_RULES = + ImmutableSet.of("java_import", "java_library", "java_proto_library", "proto_library"); + /** * Builder for {@link ObjcCommon} obtaining both attribute data and configuration data from the * given rule context. 
@@ -373,7 +376,7 @@ ObjcCommon build() { FileType.filter(artifacts.getSrcs(), HEADERS)); if (artifacts.getArchive().isPresent() - && J2ObjcLibrary.J2OBJC_SUPPORTED_RULES.contains(context.getRule().getRuleClass())) { + && J2OBJC_SUPPORTED_RULES.contains(context.getRule().getRuleClass())) { objcProvider.addAll(J2OBJC_LIBRARY, artifacts.getArchive().asSet()); } } diff --git a/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoCommon.java b/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoCommon.java deleted file mode 100644 index 05f88ce7138c55..00000000000000 --- a/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoCommon.java +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright 2015 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package com.google.devtools.build.lib.rules.proto; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Interner; -import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.analysis.ConfiguredTarget; -import com.google.devtools.build.lib.analysis.RuleContext; -import com.google.devtools.build.lib.cmdline.BazelModuleContext; -import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.concurrent.BlazeInterners; -import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; -import com.google.devtools.build.lib.packages.StarlarkInfo; -import com.google.devtools.build.lib.vfs.PathFragment; -import javax.annotation.Nullable; -import net.starlark.java.eval.EvalException; -import net.starlark.java.eval.Module; -import net.starlark.java.eval.Sequence; -import net.starlark.java.eval.Starlark; -import net.starlark.java.eval.StarlarkFunction; -import net.starlark.java.eval.StarlarkList; -import net.starlark.java.eval.StarlarkThread; -import net.starlark.java.eval.Tuple; - -/** Utility functions for proto_library and proto aspect implementations. */ -public class ProtoCommon { - private ProtoCommon() { - throw new UnsupportedOperationException(); - } - - private static final Interner PROTO_SOURCE_ROOT_INTERNER = - BlazeInterners.newWeakInterner(); - - /** - * Returns a memory efficient version of the passed protoSourceRoot. - * - *

      Any sizable proto graph will contain many {@code .proto} sources with the same source root. - * We can't afford to have all of them represented as individual objects in memory. - * - * @param protoSourceRoot - * @return - */ - static PathFragment memoryEfficientProtoSourceRoot(PathFragment protoSourceRoot) { - return PROTO_SOURCE_ROOT_INTERNER.intern(protoSourceRoot); - } - - public static void checkPrivateStarlarkificationAllowlist(StarlarkThread thread) - throws EvalException { - Label label = - ((BazelModuleContext) Module.ofInnermostEnclosingStarlarkFunction(thread).getClientData()) - .label(); - if (!label.getPackageIdentifier().getRepository().toString().equals("@_builtins")) { - throw Starlark.errorf("Rule in '%s' cannot use private API", label.getPackageName()); - } - } - - public static ImmutableList declareGeneratedFiles( - RuleContext ruleContext, ConfiguredTarget protoTarget, String extension) - throws RuleErrorException, InterruptedException { - StarlarkFunction declareGeneratedFiles = - (StarlarkFunction) - ruleContext.getStarlarkDefinedBuiltin("proto_common_declare_generated_files"); - ruleContext.initStarlarkRuleContext(); - Sequence outputs = - (Sequence) - ruleContext.callStarlarkOrThrowRuleError( - declareGeneratedFiles, - ImmutableList.of( - /* actions */ ruleContext.getStarlarkRuleContext().actions(), - /* proto_info */ protoTarget.get(ProtoInfo.PROVIDER.getKey()), - /* extension */ extension), - ImmutableMap.of()); - try { - return Sequence.cast(outputs, Artifact.class, "declare_generated_files").getImmutableList(); - } catch (EvalException e) { - throw new RuleErrorException(e.getMessageWithStack()); - } - } - - public static void compile( - RuleContext ruleContext, - ConfiguredTarget protoTarget, - StarlarkInfo protoLangToolchainInfo, - Iterable generatedFiles, - @Nullable Object pluginOutput, - String progressMessage, - String execGroup) - throws RuleErrorException, InterruptedException { - StarlarkFunction compile = - 
(StarlarkFunction) ruleContext.getStarlarkDefinedBuiltin("proto_common_compile"); - ruleContext.initStarlarkRuleContext(); - ruleContext.callStarlarkOrThrowRuleError( - compile, - ImmutableList.of( - /* actions */ ruleContext.getStarlarkRuleContext().actions(), - /* proto_info */ protoTarget.get(ProtoInfo.PROVIDER.getKey()), - /* proto_lang_toolchain_info */ protoLangToolchainInfo, - /* generated_files */ StarlarkList.immutableCopyOf(generatedFiles), - /* plugin_output */ pluginOutput == null ? Starlark.NONE : pluginOutput), - ImmutableMap.of( - "experimental_progress_message", - progressMessage, - "experimental_exec_group", - execGroup)); - } - - public static Sequence filterSources( - RuleContext ruleContext, ConfiguredTarget protoTarget, StarlarkInfo protoLangToolchainInfo) - throws RuleErrorException, InterruptedException { - StarlarkFunction filterSources = - (StarlarkFunction) - ruleContext.getStarlarkDefinedBuiltin("proto_common_experimental_filter_sources"); - ruleContext.initStarlarkRuleContext(); - try { - return Sequence.cast( - ((Tuple) - ruleContext.callStarlarkOrThrowRuleError( - filterSources, - ImmutableList.of( - /* proto_info */ protoTarget.get(ProtoInfo.PROVIDER.getKey()), - /* proto_lang_toolchain_info */ protoLangToolchainInfo), - ImmutableMap.of())) - .get(0), - Artifact.class, - "included"); - } catch (EvalException e) { - - throw new RuleErrorException(e.getMessageWithStack()); - } - } -} diff --git a/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoConfiguration.java b/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoConfiguration.java index 99d5b112c04219..8fa47304690328 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoConfiguration.java +++ b/src/main/java/com/google/devtools/build/lib/rules/proto/ProtoConfiguration.java @@ -22,6 +22,7 @@ import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.build.lib.analysis.config.RequiresOptions; import 
com.google.devtools.build.lib.analysis.starlark.annotations.StarlarkConfigurationField; +import com.google.devtools.build.lib.cmdline.BazelModuleContext; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.starlarkbuildapi.ProtoConfigurationApi; @@ -33,6 +34,8 @@ import java.util.List; import net.starlark.java.annot.StarlarkMethod; import net.starlark.java.eval.EvalException; +import net.starlark.java.eval.Module; +import net.starlark.java.eval.Starlark; import net.starlark.java.eval.StarlarkThread; /** Configuration for Protocol Buffer Libraries. */ @@ -203,13 +206,23 @@ public ImmutableList protocOpts() { return protocOpts; } + private static void checkPrivateStarlarkificationAllowlist(StarlarkThread thread) + throws EvalException { + Label label = + ((BazelModuleContext) Module.ofInnermostEnclosingStarlarkFunction(thread).getClientData()) + .label(); + if (!label.getPackageIdentifier().getRepository().toString().equals("@_builtins")) { + throw Starlark.errorf("Rule in '%s' cannot use private API", label.getPackageName()); + } + } + @StarlarkMethod( name = "experimental_proto_descriptorsets_include_source_info", useStarlarkThread = true, documented = false) public boolean experimentalProtoDescriptorSetsIncludeSourceInfoForStarlark(StarlarkThread thread) throws EvalException { - ProtoCommon.checkPrivateStarlarkificationAllowlist(thread); + checkPrivateStarlarkificationAllowlist(thread); return experimentalProtoDescriptorSetsIncludeSourceInfo(); } @@ -268,13 +281,13 @@ public Label protoToolchainForCc() { @StarlarkMethod(name = "strict_proto_deps", useStarlarkThread = true, documented = false) public String strictProtoDepsForStarlark(StarlarkThread thread) throws EvalException { - ProtoCommon.checkPrivateStarlarkificationAllowlist(thread); + checkPrivateStarlarkificationAllowlist(thread); return strictProtoDeps().toString(); } @StarlarkMethod(name = 
"strict_public_imports", useStarlarkThread = true, documented = false) public String strictPublicImportsForStarlark(StarlarkThread thread) throws EvalException { - ProtoCommon.checkPrivateStarlarkificationAllowlist(thread); + checkPrivateStarlarkificationAllowlist(thread); return options.strictPublicImports.toString(); } @@ -288,7 +301,7 @@ public StrictDepsMode strictProtoDeps() { documented = false) public List ccProtoLibraryHeaderSuffixesForStarlark(StarlarkThread thread) throws EvalException { - ProtoCommon.checkPrivateStarlarkificationAllowlist(thread); + checkPrivateStarlarkificationAllowlist(thread); return ccProtoLibraryHeaderSuffixes(); } @@ -302,7 +315,7 @@ public List ccProtoLibraryHeaderSuffixes() { documented = false) public List ccProtoLibrarySourceSuffixesForStarlark(StarlarkThread thread) throws EvalException { - ProtoCommon.checkPrivateStarlarkificationAllowlist(thread); + checkPrivateStarlarkificationAllowlist(thread); return ccProtoLibrarySourceSuffixes(); } diff --git a/src/test/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProviderTest.java b/src/test/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProviderTest.java index 60cbc8a9e593cf..87770babb5b860 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProviderTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProviderTest.java @@ -147,11 +147,6 @@ public void objcConsistency() { checkModule(ObjcRules.INSTANCE); } - @Test - public void j2objcConsistency() { - checkModule(J2ObjcRules.INSTANCE); - } - @Test public void variousWorkspaceConsistency() { checkModule(BazelRuleClassProvider.VARIOUS_WORKSPACE_RULES); diff --git a/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java b/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java index af0941594d4928..018e99a51790f4 100644 --- 
a/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java +++ b/src/test/java/com/google/devtools/build/lib/rules/objc/BazelJ2ObjcLibraryTest.java @@ -466,7 +466,8 @@ public void testMissingEntryClassesError() throws Exception { checkError( "java/com/google/dummy", "transpile", - J2ObjcLibrary.NO_ENTRY_CLASS_ERROR_MSG, + "Entry classes must be specified when flag --compilation_mode=opt is on in order to perform" + + " J2ObjC dead code stripping.", "j2objc_library(", " name = 'transpile',", " deps = ['//java/com/google/dummy/test:test'],", diff --git a/src/test/java/com/google/devtools/build/lib/rules/objc/J2ObjcSourceTest.java b/src/test/java/com/google/devtools/build/lib/rules/objc/J2ObjcSourceTest.java deleted file mode 100644 index fc7b6c1a1ab0db..00000000000000 --- a/src/test/java/com/google/devtools/build/lib/rules/objc/J2ObjcSourceTest.java +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2017 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package com.google.devtools.build.lib.rules.objc; - -import com.google.common.collect.ImmutableList; -import com.google.common.testing.EqualsTester; -import com.google.devtools.build.lib.actions.Artifact; -import com.google.devtools.build.lib.actions.ArtifactRoot; -import com.google.devtools.build.lib.actions.ArtifactRoot.RootType; -import com.google.devtools.build.lib.actions.util.ActionsTestUtil; -import com.google.devtools.build.lib.cmdline.Label; -import com.google.devtools.build.lib.testutil.Scratch; -import com.google.devtools.build.lib.vfs.Path; -import com.google.devtools.build.lib.vfs.PathFragment; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - -/** - * Unit test for {@link J2ObjcSource}. - */ -@RunWith(JUnit4.class) -public class J2ObjcSourceTest { - private ArtifactRoot rootDir; - - @Before - public final void setRootDir() throws Exception { - Scratch scratch = new Scratch(); - Path execRoot = scratch.getFileSystem().getPath("/exec"); - String outSegment = "root"; - execRoot.getChild(outSegment).createDirectoryAndParents(); - rootDir = ArtifactRoot.asDerivedRoot(execRoot, RootType.Output, outSegment); - } - - @Test - public void testEqualsAndHashCode() throws Exception { - new EqualsTester() - .addEqualityGroup( - getJ2ObjcSource("//a/b:c", "sourceA", J2ObjcSource.SourceType.JAVA, false), - getJ2ObjcSource("//a/b:c", "sourceA", J2ObjcSource.SourceType.JAVA, false)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:d", "sourceA", J2ObjcSource.SourceType.JAVA, false), - getJ2ObjcSource("//a/b:d", "sourceA", J2ObjcSource.SourceType.JAVA, false)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.JAVA, false), - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.JAVA, false)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.PROTO, false), - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.PROTO, 
false)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:c", "sourceA", J2ObjcSource.SourceType.JAVA, true), - getJ2ObjcSource("//a/b:c", "sourceA", J2ObjcSource.SourceType.JAVA, true)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:d", "sourceA", J2ObjcSource.SourceType.JAVA, true), - getJ2ObjcSource("//a/b:d", "sourceA", J2ObjcSource.SourceType.JAVA, true)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.JAVA, true), - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.JAVA, true)) - .addEqualityGroup( - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.PROTO, true), - getJ2ObjcSource("//a/b:d", "sourceC", J2ObjcSource.SourceType.PROTO, true)) - .testEquals(); - } - - private J2ObjcSource getJ2ObjcSource( - String label, String fileName, J2ObjcSource.SourceType sourceType, boolean compileWithARC) - throws Exception { - Label ruleLabel = Label.parseCanonical(label); - PathFragment path = ruleLabel.toPathFragment(); - return new J2ObjcSource( - ruleLabel, - ImmutableList.of(getArtifactForTest(path.getRelative(fileName + ".m").toString())), - ImmutableList.of(getArtifactForTest(path.getRelative(fileName + ".h").toString())), - path, - sourceType, - ImmutableList.of(path), - compileWithARC); - } - - private Artifact getArtifactForTest(String path) throws Exception { - return ActionsTestUtil.createArtifact(rootDir, path); - } -} From 33a689c5c15dea0df7150c8440271bb1fa551d27 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 07:57:25 -0700 Subject: [PATCH 49/68] Consider max transitive source version in `FileSystemValueCheckerInferringAncestors` logic. The mtsv for nodes in incremental builds will always be null, so this is a no-op change. Separate out `FileSystemValueCheckerInferringAncestorsTestBase` for `FileSystemValueCheckerInferringAncestorsTest`. 
Add helper function `SkyframeExecutor#getSkyValueDirtinessCheckerForFiles` PiperOrigin-RevId: 544356414 Change-Id: Ia2beafe2b88cfddef9de2ae64ae6e7f331714a10 --- ...eSystemValueCheckerInferringAncestors.java | 108 ++++++++--- .../skyframe/SkyValueDirtinessChecker.java | 9 + .../build/lib/skyframe/SkyframeExecutor.java | 10 +- ...temValueCheckerInferringAncestorsTest.java | 172 +++++------------- ...alueCheckerInferringAncestorsTestBase.java | 144 +++++++++++++++ .../LocalDiffAwarenessIntegrationTest.java | 2 +- 6 files changed, 288 insertions(+), 157 deletions(-) create mode 100644 src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTestBase.java diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java b/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java index d78c598012585f..14f3571ff33788 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestors.java @@ -16,6 +16,8 @@ import static com.google.common.collect.ImmutableList.toImmutableList; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; @@ -37,6 +39,7 @@ import com.google.devtools.build.skyframe.InMemoryGraph; import com.google.devtools.build.skyframe.InMemoryNodeEntry; import com.google.devtools.build.skyframe.SkyKey; +import com.google.devtools.build.skyframe.Version; import java.io.IOException; import java.util.Collections; import java.util.HashMap; @@ -63,12 +66,13 @@ * affected ancestor entries of nodes. It is also resilient to diffs which report only a root of * deleted subtree. 
*/ -final class FileSystemValueCheckerInferringAncestors { +public final class FileSystemValueCheckerInferringAncestors { @Nullable private final TimestampGranularityMonitor tsgm; - private final InMemoryGraph inMemoryGraph; private final Map nodeStates; private final SyscallCache syscallCache; + private final SkyValueDirtinessChecker skyValueDirtinessChecker; + private final Set valuesToInvalidate = Sets.newConcurrentHashSet(); private final ConcurrentMap valuesToInject = new ConcurrentHashMap<>(); @@ -118,21 +122,37 @@ private FileSystemValueCheckerInferringAncestors( @Nullable TimestampGranularityMonitor tsgm, InMemoryGraph inMemoryGraph, Map nodeStates, - SyscallCache syscallCache) { + SyscallCache syscallCache, + SkyValueDirtinessChecker skyValueDirtinessChecker) { this.tsgm = tsgm; this.nodeStates = nodeStates; this.syscallCache = syscallCache; + this.skyValueDirtinessChecker = skyValueDirtinessChecker; this.inMemoryGraph = inMemoryGraph; } + @VisibleForTesting @SuppressWarnings("ReferenceEquality") - static ImmutableDiff getDiffWithInferredAncestors( + public static ImmutableDiff getDiffWithInferredAncestors( @Nullable TimestampGranularityMonitor tsgm, InMemoryGraph inMemoryGraph, Iterable modifiedKeys, int nThreads, - SyscallCache syscallCache) + SyscallCache syscallCache, + SkyValueDirtinessChecker skyValueDirtinessChecker) throws InterruptedException, AbruptExitException { + Map nodeStates = makeNodeVisitStates(modifiedKeys); + return new FileSystemValueCheckerInferringAncestors( + tsgm, + inMemoryGraph, + Collections.unmodifiableMap(nodeStates), + syscallCache, + skyValueDirtinessChecker) + .processEntries(nThreads); + } + + private static Map makeNodeVisitStates( + Iterable modifiedKeys) { Map nodeStates = new HashMap<>(); for (FileStateKey fileStateKey : modifiedKeys) { RootedPath top = fileStateKey.argument(); @@ -159,10 +179,7 @@ static ImmutableDiff getDiffWithInferredAncestors( lastCreated = existingState == null; } } - - return new 
FileSystemValueCheckerInferringAncestors( - tsgm, inMemoryGraph, Collections.unmodifiableMap(nodeStates), syscallCache) - .processEntries(nThreads); + return nodeStates; } private ImmutableDiff processEntries(int nThreads) @@ -248,8 +265,8 @@ private boolean visitEntry( throws StatFailedException { FileStateKey key = FileStateValue.key(path); @Nullable InMemoryNodeEntry fsvNode = inMemoryGraph.getIfPresent(key); - @Nullable FileStateValue fsv = fsvNode != null ? (FileStateValue) fsvNode.toValue() : null; - if (fsv == null) { + @Nullable FileStateValue oldFsv = fsvNode != null ? (FileStateValue) fsvNode.toValue() : null; + if (oldFsv == null) { visitUnknownEntry(key, isInferredDirectory, parentState); parentState.addMaybeDeletedChild(path.getRootRelativePath().getBaseName()); return true; @@ -259,33 +276,58 @@ private boolean visitEntry( || (maybeDeletedChildren != null && listingHasEntriesOutsideOf(path, maybeDeletedChildren))) { parentState.markInferredDirectory(); - if (fsv.getType().isDirectory()) { + if (oldFsv.getType().isDirectory()) { return false; } + // TODO(b/287632270) - handle this scenario valuesToInject.put(key, Delta.justNew(FileStateValue.DIRECTORY_FILE_STATE_NODE)); parentListingKey(path).ifPresent(valuesToInvalidate::add); return true; } - FileStateValue newFsv = getNewFileStateValueFromFileSystem(path); - if (!newFsv.equals(fsv)) { - valuesToInject.put(key, Delta.justNew(newFsv)); - } - + @Nullable FileStateValue newFsv = injectAndGetNewFileStateValueIfDirty(path, fsvNode, oldFsv); if (newFsv.getType().exists()) { parentState.markInferredDirectory(); - } else if (fsv.getType().exists()) { + } else if (oldFsv.getType().exists()) { // exists -> not exists -- deletion. 
parentState.addMaybeDeletedChild(path.getRootRelativePath().getBaseName()); } - boolean typeChanged = newFsv.getType() != fsv.getType(); + boolean typeChanged = newFsv.getType() != oldFsv.getType(); if (typeChanged) { parentListingKey(path).ifPresent(valuesToInvalidate::add); } return typeChanged; } + /** + * Injects the new file state value if dirty. Returns the old file state value if not dirty and + * the new file state value if dirty. + */ + private FileStateValue injectAndGetNewFileStateValueIfDirty( + RootedPath path, InMemoryNodeEntry oldFsvNode, FileStateValue oldFsv) + throws StatFailedException { + Preconditions.checkState(oldFsv != null, "Unexpected null FileStateValue."); + @Nullable Version oldMtsv = oldFsvNode.getMaxTransitiveSourceVersion(); + SkyValueDirtinessChecker.DirtyResult dirtyResult = + skyValueDirtinessChecker.check(oldFsvNode.getKey(), oldFsv, oldMtsv, syscallCache, tsgm); + if (!dirtyResult.isDirty()) { + return oldFsv; + } + @Nullable FileStateValue newFsv = (FileStateValue) dirtyResult.getNewValue(); + if (newFsv == null) { + throw new StatFailedException(path, new IOException("Filesystem access failed.")); + } + @Nullable Version newMtsv = dirtyResult.getNewMaxTransitiveSourceVersion(); + if (newMtsv == null && !skyValueDirtinessChecker.nullMaxTransitiveSourceVersionOk()) { + // TODO(b/287632270) - add test coverage for unexpected null mtsv's + throw new StatFailedException(path, new IOException("Filesystem access failed.")); + } + + valuesToInject.put(oldFsvNode.getKey(), Delta.justNew(newFsv, newMtsv)); + return newFsv; + } + private void visitUnknownEntry( FileStateKey key, boolean isInferredDirectory, NodeVisitState parentState) throws StatFailedException { @@ -313,9 +355,9 @@ private void visitUnknownEntry( // We don't take advantage of isInferredDirectory because we set it only in cases of a present // descendant/done listing which normally cannot exist without having FileStateValue for // ancestors. 
- FileStateValue value = getNewFileStateValueFromFileSystem(path); - valuesToInject.put(key, Delta.justNew(value)); - if (isInferredDirectory || value.getType().exists()) { + @Nullable FileStateValue newValue = injectAndGetNewFileStateValueForUnknownEntry(path, key); + + if (isInferredDirectory || newValue.getType().exists()) { parentState.markInferredDirectory(); } @@ -323,18 +365,28 @@ private void visitUnknownEntry( Dirent dirent = parentListing.getDirents().maybeGetDirent(path.getRootRelativePath().getBaseName()); @Nullable Dirent.Type typeInListing = dirent != null ? dirent.getType() : null; - if (!Objects.equals(typeInListing, direntTypeFromFileStateType(value.getType()))) { + if (!Objects.equals(typeInListing, direntTypeFromFileStateType(newValue.getType()))) { valuesToInvalidate.add(parentListingKey.get()); } } - private FileStateValue getNewFileStateValueFromFileSystem(RootedPath path) + /** Injects the new file state value for unknown entry. */ + private FileStateValue injectAndGetNewFileStateValueForUnknownEntry(RootedPath path, SkyKey key) throws StatFailedException { - try { - return FileStateValue.create(path, syscallCache, tsgm); - } catch (IOException e) { - throw new StatFailedException(path, e); + @Nullable + FileStateValue newValue = + (FileStateValue) skyValueDirtinessChecker.createNewValue(path, syscallCache, tsgm); + if (newValue == null) { + throw new StatFailedException(path, new IOException("Filesystem access failed.")); + } + Version newMtsv = + skyValueDirtinessChecker.getMaxTransitiveSourceVersionForNewValue(key, newValue); + if (newMtsv == null && !skyValueDirtinessChecker.nullMaxTransitiveSourceVersionOk()) { + // TODO(b/287632270) - add test coverage for unexpected null mtsv's + throw new StatFailedException(path, new IOException("Filesystem access failed.")); } + valuesToInject.put(key, Delta.justNew(newValue, newMtsv)); + return newValue; } private boolean listingHasEntriesOutsideOf(RootedPath path, Set allAffectedEntries) { diff 
--git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyValueDirtinessChecker.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyValueDirtinessChecker.java index 2bea163a8844c4..5451bce1de6b6e 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyValueDirtinessChecker.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyValueDirtinessChecker.java @@ -51,6 +51,15 @@ public Version getMaxTransitiveSourceVersionForNewValue(SkyKey key, SkyValue val return null; } + /** + * Returns whether it is ok for this {@link SkyValueDirtinessChecker} to return a null max + * transitive source version. If this method returns false, a null mtsv would indicate an {@link + * java.io.IOException} was thrown. + */ + public boolean nullMaxTransitiveSourceVersionOk() { + return true; + } + /** * If {@code applies(key)}, returns the result of checking whether this key's value is up to date. */ diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java index 8d15d1a95a43bb..f478a211510dda 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java @@ -186,6 +186,7 @@ import com.google.devtools.build.lib.skyframe.BuildDriverFunction.TransitiveActionLookupValuesHelper; import com.google.devtools.build.lib.skyframe.DiffAwarenessManager.ProcessableModifiedFileSet; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.ExternalDirtinessChecker; +import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.FileDirtinessChecker; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.MissingDiffDirtinessChecker; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.UnionDirtinessChecker; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; @@ -1375,7 +1376,14 
@@ protected Differencer.Diff getDiff( memoizingEvaluator.getInMemoryGraph(), dirtyFileStateSkyKeys, fsvcThreads, - syscallCache); + syscallCache, + getSkyValueDirtinessCheckerForFiles()); + } + + /** Returns the {@link SkyValueDirtinessChecker} relevant for files. */ + @ForOverride + protected SkyValueDirtinessChecker getSkyValueDirtinessCheckerForFiles() { + return new FileDirtinessChecker(); } /** diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java index 6dad42c2f80117..138dfa8acba290 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java +++ b/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTest.java @@ -22,101 +22,41 @@ import static com.google.devtools.build.lib.testing.common.DirectoryListingHelper.file; import static com.google.devtools.build.lib.testing.common.DirectoryListingHelper.symlink; import static org.junit.Assert.assertThrows; -import static org.junit.Assert.fail; -import com.google.common.base.Throwables; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.collect.Streams; import com.google.devtools.build.lib.actions.FileStateValue; import com.google.devtools.build.lib.server.FailureDetails.DiffAwareness.Code; -import com.google.devtools.build.lib.testutil.Scratch; +import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.FileDirtinessChecker; import com.google.devtools.build.lib.util.AbruptExitException; -import com.google.devtools.build.lib.vfs.DelegateFileSystem; -import com.google.devtools.build.lib.vfs.Dirent; import com.google.devtools.build.lib.vfs.FileStateKey; -import 
com.google.devtools.build.lib.vfs.FileStatus; -import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; -import com.google.devtools.build.lib.vfs.Root; -import com.google.devtools.build.lib.vfs.RootedPath; -import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.Differencer.DiffWithDelta.Delta; import com.google.devtools.build.skyframe.ImmutableDiff; -import com.google.devtools.build.skyframe.InMemoryGraph; -import com.google.devtools.build.skyframe.InMemoryNodeEntry; -import com.google.devtools.build.skyframe.NodeBatch; -import com.google.devtools.build.skyframe.NodeEntry.DirtyType; -import com.google.devtools.build.skyframe.QueryableGraph.Reason; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; -import com.google.devtools.build.skyframe.Version; -import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.testing.junit.testparameterinjector.TestParameter; import com.google.testing.junit.testparameterinjector.TestParameterInjector; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map.Entry; -import javax.annotation.Nullable; -import org.junit.After; -import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; /** Unit tests for {@link FileSystemValueCheckerInferringAncestors}. 
*/ @RunWith(TestParameterInjector.class) -public final class FileSystemValueCheckerInferringAncestorsTest { +public final class FileSystemValueCheckerInferringAncestorsTest + extends FileSystemValueCheckerInferringAncestorsTestBase { private static final Delta DIRECTORY_FILE_STATE_NODE_DELTA = Delta.justNew(DIRECTORY_FILE_STATE_NODE); private static final Delta NONEXISTENT_FILE_STATE_NODE_DELTA = Delta.justNew(NONEXISTENT_FILE_STATE_NODE); - private final Scratch scratch = new Scratch(); - private final List statedPaths = new ArrayList<>(); - private final DefaultSyscallCache syscallCache = DefaultSyscallCache.newBuilder().build(); - private Root root; - private Root untrackedRoot; - private Exception throwOnStat; - - private final InMemoryGraph inMemoryGraph = InMemoryGraph.create(); + private final SkyValueDirtinessChecker skyValueDirtinessChecker = new FileDirtinessChecker(); @TestParameter({"1", "16"}) private int fsvcThreads; - @Before - public void createRoot() throws IOException { - Path srcRootPath = scratch.dir("/src"); - PathFragment srcRoot = srcRootPath.asFragment(); - FileSystem trackingFileSystem = - new DelegateFileSystem(scratch.getFileSystem()) { - @Nullable - @Override - public synchronized FileStatus statIfFound(PathFragment path, boolean followSymlinks) - throws IOException { - if (throwOnStat != null) { - Exception toThrow = throwOnStat; - throwOnStat = null; - Throwables.propagateIfPossible(toThrow, IOException.class); - fail("Unexpected exception type"); - } - statedPaths.add(path.relativeTo(srcRoot).toString()); - return super.statIfFound(path, followSymlinks); - } - }; - root = Root.fromPath(trackingFileSystem.getPath(srcRoot)); - scratch.setWorkingDir("/src"); - untrackedRoot = Root.fromPath(srcRootPath); - } - - @After - public void checkExceptionThrown() { - assertThat(throwOnStat).isNull(); - syscallCache.clear(); - } - @Test public void getDiffWithInferredAncestors_unknownFileChanged_returnsFileAndDirs() throws Exception { @@ -126,7 
+66,8 @@ public void getDiffWithInferredAncestors_unknownFileChanged_returnsFileAndDirs() inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("foo/file")), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithoutNewValues()) .containsExactly( @@ -153,7 +94,8 @@ public void getDiffWithInferredAncestors_fileModified_returnsFileWithValues() th inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); Delta newValue = fileStateValueDelta("file"); assertThat(diff.changedKeysWithNewValues()).containsExactly(key, newValue); @@ -175,7 +117,8 @@ public void getDiffWithInferredAncestors_fileAdded_returnsFileAndDirListing() th inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); Delta delta = fileStateValueDelta("file"); assertThat(diff.changedKeysWithNewValues()).containsExactly(key, delta); @@ -206,7 +149,8 @@ public void getDiffWithInferredAncestors_fileWithDirsAdded_returnsFileAndInjects inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileKey), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); Delta delta = fileStateValueDelta("a/b/file"); assertThat(diff.changedKeysWithNewValues()) @@ -247,7 +191,8 @@ public void getDiffWithInferredAncestors_addedFileWithReportedDirs_returnsFileAn inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileKey, fileStateValueKey("a")), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); Delta newState = fileStateValueDelta("a/b/file"); assertThat(diff.changedKeysWithNewValues()) @@ -287,7 +232,8 @@ public void getDiffWithInferredAncestors_fileWithUnknownDirsAdded_returnsFileAnd inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("a/b/c/d")), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); 
assertThat(diff.changedKeysWithoutNewValues()) .containsExactly( @@ -319,7 +265,8 @@ public void getDiffWithInferredAncestors_addEmptyDir_returnsDirAndParentListing( inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()).containsExactly(key, delta); assertThat(diff.changedKeysWithoutNewValues()) @@ -343,7 +290,8 @@ public void getDiffWithInferredAncestors_deleteFile_returnsFileParentListing() t inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly(key, NONEXISTENT_FILE_STATE_NODE_DELTA); @@ -372,7 +320,8 @@ public void getDiffWithInferredAncestors_deleteFileFromDirWithListing_skipsDirSt inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly(key, NONEXISTENT_FILE_STATE_NODE_DELTA); @@ -403,7 +352,8 @@ public void getDiffWithInferredAncestors_deleteLastFileFromDir_ignoresInvalidate inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly(key, NONEXISTENT_FILE_STATE_NODE_DELTA); @@ -434,7 +384,8 @@ public void getDiffWithInferredAncestors_modifyAllUnknownEntriesInDirWithListing inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileKey, symlinkKey), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly( @@ -464,7 +415,8 @@ public void getDiffWithInferredAncestors_replaceUnknownEntriesInDirWithListing_s inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(file1Key, file2Key), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertIsSubsetOf( 
diff.changedKeysWithNewValues().entrySet(), @@ -519,7 +471,8 @@ public void getDiffWithInferredAncestors_deleteAllFilesFromDir_returnsFilesAndDi inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(key1, key2, key3), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly( @@ -559,7 +512,8 @@ public void getDiffWithInferredAncestors_deleteFileWithDirs_returnsFileAndDirs() inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(abcFileKey), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly( @@ -602,7 +556,8 @@ public void getDiffWithInferredAncestors_deleteFileWithReportedDirs_returnsFileA inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(abcFileKey, abKey), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly( @@ -643,7 +598,8 @@ public void getDiffWithInferredAncestors_deleteFile_infersDirFromModifiedSibling inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(file1Key, file2Key, fileStateValueKey("dir")), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); Delta file2NewValue = fileStateValueDelta("dir/file2"); assertThat(diff.changedKeysWithNewValues()) @@ -681,7 +637,8 @@ public void getDiffWithInferredAncestors_deleteDirReportDirOnly_returnsDir() thr inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(dirKey), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithNewValues()) .containsExactly(dirKey, NONEXISTENT_FILE_STATE_NODE_DELTA); @@ -702,7 +659,8 @@ public void getDiffWithInferredAncestors_phantomChangeForNonexistentEntry_return inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("file")), fsvcThreads, - syscallCache); + syscallCache, + skyValueDirtinessChecker); assertThat(diff.changedKeysWithoutNewValues()).isEmpty(); 
assertThat(diff.changedKeysWithNewValues()).isEmpty(); @@ -724,7 +682,8 @@ public void getDiffWithInferredAncestors_statFails_fails() throws Exception { inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("file")), fsvcThreads, - syscallCache)); + syscallCache, + skyValueDirtinessChecker)); assertThat(e.getDetailedExitCode().getFailureDetail().hasDiffAwareness()).isTrue(); assertThat(e.getDetailedExitCode().getFailureDetail().getDiffAwareness().getCode()) @@ -746,7 +705,8 @@ public void getDiffWithInferredAncestors_statCrashes_fails() throws Exception { inMemoryGraph, /* modifiedKeys= */ ImmutableSet.of(fileStateValueKey("file")), fsvcThreads, - syscallCache)); + syscallCache, + skyValueDirtinessChecker)); } private static void assertIsSubsetOf(Iterable list, T... elements) { @@ -756,53 +716,11 @@ private static void assertIsSubsetOf(Iterable list, T... elements) { .containsAtLeastElementsIn(list); } - private FileStateKey fileStateValueKey(String relativePath) { - return FileStateValue.key( - RootedPath.toRootedPath(root, root.asPath().getRelative(relativePath))); - } - - private DirectoryListingStateValue.Key directoryListingStateValueKey(String relativePath) { - return DirectoryListingStateValue.key( - RootedPath.toRootedPath(root, root.asPath().getRelative(relativePath))); - } - - private static DirectoryListingStateValue directoryListingStateValue(Dirent... 
dirents) { - return DirectoryListingStateValue.create(ImmutableList.copyOf(dirents)); - } - - private FileStateValue fileStateValue(String relativePath) throws IOException { - return FileStateValue.create( - RootedPath.toRootedPath( - untrackedRoot, untrackedRoot.asPath().asFragment().getRelative(relativePath)), - SyscallCache.NO_CACHE, - /*tsgm=*/ null); - } - private Delta fileStateValueDelta(String relativePath) throws IOException { return Delta.justNew(fileStateValue(relativePath)); } - private void addDoneNodesAndThenMarkChanged(ImmutableMap values) - throws InterruptedException { - for (Entry entry : values.entrySet()) { - InMemoryNodeEntry node = addDoneNode(entry.getKey(), entry.getValue()); - node.markDirty(DirtyType.CHANGE); - } - } - private void addDoneNodes(ImmutableMap values) throws InterruptedException { - for (Entry entry : values.entrySet()) { - addDoneNode(entry.getKey(), entry.getValue()); - } - } - - @CanIgnoreReturnValue - private InMemoryNodeEntry addDoneNode(SkyKey key, SkyValue value) throws InterruptedException { - NodeBatch batch = inMemoryGraph.createIfAbsentBatch(null, Reason.OTHER, ImmutableList.of(key)); - InMemoryNodeEntry entry = (InMemoryNodeEntry) batch.get(key); - entry.addReverseDepAndCheckIfDone(null); - entry.markRebuilding(); - entry.setValue(value, Version.minimal(), null); - return entry; + addDoneNodes(values, /* mtsv= */ null); } } diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTestBase.java b/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTestBase.java new file mode 100644 index 00000000000000..aa871af188344f --- /dev/null +++ b/src/test/java/com/google/devtools/build/lib/skyframe/FileSystemValueCheckerInferringAncestorsTestBase.java @@ -0,0 +1,144 @@ +// Copyright 2023 The Bazel Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package com.google.devtools.build.lib.skyframe; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.fail; + +import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.devtools.build.lib.actions.FileStateValue; +import com.google.devtools.build.lib.testutil.Scratch; +import com.google.devtools.build.lib.vfs.DelegateFileSystem; +import com.google.devtools.build.lib.vfs.Dirent; +import com.google.devtools.build.lib.vfs.FileStateKey; +import com.google.devtools.build.lib.vfs.FileStatus; +import com.google.devtools.build.lib.vfs.FileSystem; +import com.google.devtools.build.lib.vfs.Path; +import com.google.devtools.build.lib.vfs.PathFragment; +import com.google.devtools.build.lib.vfs.Root; +import com.google.devtools.build.lib.vfs.RootedPath; +import com.google.devtools.build.lib.vfs.SyscallCache; +import com.google.devtools.build.skyframe.InMemoryGraph; +import com.google.devtools.build.skyframe.InMemoryNodeEntry; +import com.google.devtools.build.skyframe.NodeBatch; +import com.google.devtools.build.skyframe.NodeEntry.DirtyType; +import com.google.devtools.build.skyframe.QueryableGraph.Reason; +import com.google.devtools.build.skyframe.SkyKey; +import com.google.devtools.build.skyframe.SkyValue; +import com.google.devtools.build.skyframe.Version; +import 
com.google.errorprone.annotations.CanIgnoreReturnValue; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map.Entry; +import javax.annotation.Nullable; +import org.junit.After; +import org.junit.Before; + +public class FileSystemValueCheckerInferringAncestorsTestBase { + protected final Scratch scratch = new Scratch(); + protected final List statedPaths = new ArrayList<>(); + protected DefaultSyscallCache syscallCache = DefaultSyscallCache.newBuilder().build(); + protected Root root; + protected final InMemoryGraph inMemoryGraph = InMemoryGraph.create(); + + private Root untrackedRoot; + Exception throwOnStat; + + @Before + public void createRoot() throws IOException { + Path srcRootPath = scratch.dir("/src"); + PathFragment srcRoot = srcRootPath.asFragment(); + FileSystem trackingFileSystem = + new DelegateFileSystem(scratch.getFileSystem()) { + @Nullable + @Override + public synchronized FileStatus statIfFound(PathFragment path, boolean followSymlinks) + throws IOException { + if (throwOnStat != null) { + Exception toThrow = throwOnStat; + throwOnStat = null; + Throwables.propagateIfPossible(toThrow, IOException.class); + fail("Unexpected exception type"); + } + statedPaths.add(path.relativeTo(srcRoot).toString()); + return super.statIfFound(path, followSymlinks); + } + }; + root = Root.fromPath(trackingFileSystem.getPath(srcRoot)); + scratch.setWorkingDir("/src"); + untrackedRoot = Root.fromPath(srcRootPath); + } + + @After + public void checkExceptionThrown() { + assertThat(throwOnStat).isNull(); + syscallCache.clear(); + } + + protected FileStateKey fileStateValueKey(String relativePath) { + return FileStateValue.key( + RootedPath.toRootedPath(root, root.asPath().getRelative(relativePath))); + } + + protected DirectoryListingStateValue.Key directoryListingStateValueKey(String relativePath) { + return DirectoryListingStateValue.key( + RootedPath.toRootedPath(root, root.asPath().getRelative(relativePath))); + } + 
+ protected FileStateValue fileStateValue(String relativePath) throws IOException { + return FileStateValue.create( + RootedPath.toRootedPath( + untrackedRoot, untrackedRoot.asPath().asFragment().getRelative(relativePath)), + SyscallCache.NO_CACHE, + /* tsgm= */ null); + } + + protected static DirectoryListingStateValue directoryListingStateValue(Dirent... dirents) { + return DirectoryListingStateValue.create(ImmutableList.copyOf(dirents)); + } + + protected void addDoneNodesAndThenMarkChanged(ImmutableMap values) + throws InterruptedException { + addDoneNodesAndThenMarkChanged(values, /* mtsv= */ null); + } + + protected void addDoneNodesAndThenMarkChanged( + ImmutableMap values, @Nullable Version mtsv) throws InterruptedException { + for (Entry entry : values.entrySet()) { + InMemoryNodeEntry node = addDoneNode(entry.getKey(), entry.getValue(), mtsv); + node.markDirty(DirtyType.CHANGE); + } + } + + protected void addDoneNodes(ImmutableMap values, @Nullable Version mtsv) + throws InterruptedException { + for (Entry entry : values.entrySet()) { + addDoneNode(entry.getKey(), entry.getValue(), mtsv); + } + } + + @CanIgnoreReturnValue + private InMemoryNodeEntry addDoneNode(SkyKey key, SkyValue value, @Nullable Version mtsv) + throws InterruptedException { + NodeBatch batch = inMemoryGraph.createIfAbsentBatch(null, Reason.OTHER, ImmutableList.of(key)); + InMemoryNodeEntry entry = (InMemoryNodeEntry) batch.get(key); + entry.addReverseDepAndCheckIfDone(null); + entry.markRebuilding(); + entry.setValue(value, Version.minimal(), /* maxTransitiveSourceVersion= */ mtsv); + return entry; + } +} diff --git a/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessIntegrationTest.java b/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessIntegrationTest.java index e1ba1f8cd3c0e3..72e93e2485d1ce 100644 --- a/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessIntegrationTest.java +++ 
b/src/test/java/com/google/devtools/build/lib/skyframe/LocalDiffAwarenessIntegrationTest.java @@ -132,7 +132,7 @@ public void changedFile_statFails_throwsError() throws Exception { AbruptExitException.class, () -> buildTargetWithRetryUntilSeesChange("//foo", "foo/BUILD")); - assertThat(e).hasCauseThat().hasCauseThat().hasCauseThat().isSameInstanceAs(injectedException); + assertThat(e).hasCauseThat().hasCauseThat().hasCauseThat().isInstanceOf(IOException.class); } /** From 22d81a2ae2cf7bb3561450917bee062142918af0 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 08:34:44 -0700 Subject: [PATCH 50/68] Fix bazel_rules_java_test.sh with non-release version of rules_java When we pin rules_java to a source archive generated by GitHub, the dirs are different, so only build @rules_java//java/... Related: https://github.com/bazelbuild/bazel/pull/18810 PiperOrigin-RevId: 544365273 Change-Id: Id11e8d2217f75f23d77dcd368675934dde965026 --- src/test/shell/bazel/bazel_rules_java_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/shell/bazel/bazel_rules_java_test.sh b/src/test/shell/bazel/bazel_rules_java_test.sh index 0e09cafacc30a0..55d1d21a1515fa 100755 --- a/src/test/shell/bazel/bazel_rules_java_test.sh +++ b/src/test/shell/bazel/bazel_rules_java_test.sh @@ -94,7 +94,7 @@ function test_rules_java_repository_builds_itself() { setup_skylib_support # We test that a built-in @rules_java repository is buildable. - bazel build -- @rules_java//... -@rules_java//toolchains/... &> $TEST_log \ + bazel build -- @rules_java//java/... 
&> $TEST_log \ || fail "Build failed unexpectedly" } From f00439d8b7dd5b10ac357c413928418e0e1059c7 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 08:39:42 -0700 Subject: [PATCH 51/68] On Unix, encode spawned process argv as utf-8 if required by sun.jnu.encoding Starting with JDK 19, on Unix platforms, the argument vector passed to ProcessBuilder gets encoded to bytes by java.lang.ProcessImpl using the sun.jnu.encoding encoding. This causes a problem on macOS, where * we switched to JDK 20 as of https://github.com/bazelbuild/bazel/commit/ecf9b9a82aa231cd1fa9178dd9ab64dd67adaf25 * (as on all platforms) argument strings originating from Starlark are in Bazel's pseudo latin1 encoding (i.e. byte arrays stored as strings) * sun.jnu.encoding is hard-coded as utf-8 by the JVM, regardless of what we set for the file.encoding property. This means we need to recode argv from pseudo latin1 to utf-8 before passing them to ProcessBuilder, so that ProcessImpl can reverse the process and pass correctly encoded byte arrays to the OS. 
Partially addresses #18792 PiperOrigin-RevId: 544366528 Change-Id: I1acb70e489123e0baa190c569e6625259c39de78 --- .../com/google/devtools/build/lib/shell/BUILD | 2 + .../lib/shell/JavaSubprocessFactory.java | 15 +++- src/test/shell/integration/unicode_test.bzl | 53 ++++++++++++++ src/test/shell/integration/unicode_test.sh | 71 +++++++++++++++++++ src/test/shell/integration/unicode_test_BUILD | 33 +++++++++ .../integration/unicode_test_expected.txt | 1 + 6 files changed, 174 insertions(+), 1 deletion(-) create mode 100644 src/test/shell/integration/unicode_test.bzl create mode 100755 src/test/shell/integration/unicode_test.sh create mode 100644 src/test/shell/integration/unicode_test_BUILD create mode 100644 src/test/shell/integration/unicode_test_expected.txt diff --git a/src/main/java/com/google/devtools/build/lib/shell/BUILD b/src/main/java/com/google/devtools/build/lib/shell/BUILD index 6ea795813746e9..d48b9ccbae4f61 100644 --- a/src/main/java/com/google/devtools/build/lib/shell/BUILD +++ b/src/main/java/com/google/devtools/build/lib/shell/BUILD @@ -20,6 +20,7 @@ java_library( "//src/main/java/com/google/devtools/build/lib/jni", "//src/main/java/com/google/devtools/build/lib/util:describable_execution_unit", "//src/main/java/com/google/devtools/build/lib/util:os", + "//src/main/java/com/google/devtools/build/lib/util:string", "//src/main/java/com/google/devtools/build/lib/vfs", "//src/main/java/com/google/devtools/build/lib/vfs:pathfragment", "//src/main/java/com/google/devtools/build/lib/windows:processes", @@ -45,6 +46,7 @@ bootstrap_java_library( "//src/main/java/com/google/devtools/build/lib/util:describable_execution_unit", "//src/main/java/com/google/devtools/build/lib/util:filetype", "//src/main/java/com/google/devtools/build/lib/util:os", + "//src/main/java/com/google/devtools/build/lib/util:string", "//src/main/java/com/google/devtools/build/lib/vfs:pathfragment", "//src/main/java/com/google/devtools/build/lib/windows:processes", 
"//third_party:auto_value-jars", diff --git a/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java b/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java index 1a45b8a3a275cf..649647c5f2ee24 100644 --- a/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java +++ b/src/main/java/com/google/devtools/build/lib/shell/JavaSubprocessFactory.java @@ -14,12 +14,17 @@ package com.google.devtools.build.lib.shell; +import static com.google.common.collect.ImmutableList.toImmutableList; + +import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.shell.SubprocessBuilder.StreamAction; +import com.google.devtools.build.lib.util.StringUtil; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.ProcessBuilder.Redirect; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -150,7 +155,15 @@ private synchronized Process start(ProcessBuilder builder) throws IOException { @Override public Subprocess create(SubprocessBuilder params) throws IOException { ProcessBuilder builder = new ProcessBuilder(); - builder.command(params.getArgv()); + ImmutableList argv = params.getArgv(); + if (Runtime.version().feature() >= 19 + && Objects.equals(System.getProperty("sun.jnu.encoding"), "UTF-8")) { + // On JDK 19 and newer, java.lang.ProcessImpl#start encodes argv using sun.jnu.encoding, so if + // sun.jnu.encoding is set to UTF-8, our argv needs to be UTF-8. (Note that on some platforms, + // for example on macOS, sun.jnu.encoding is hard-coded in the JVM as UTF-8.) 
+ argv = argv.stream().map(StringUtil::decodeBytestringUtf8).collect(toImmutableList()); + } + builder.command(argv); if (params.getEnv() != null) { builder.environment().clear(); builder.environment().putAll(params.getEnv()); diff --git a/src/test/shell/integration/unicode_test.bzl b/src/test/shell/integration/unicode_test.bzl new file mode 100644 index 00000000000000..301a2faf7507c3 --- /dev/null +++ b/src/test/shell/integration/unicode_test.bzl @@ -0,0 +1,53 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Rule implementations exercised in unicode_test""" + +def _run_executable_rule_impl(ctx): + out = ctx.outputs.out + ctx.actions.run( + executable = ctx.executable.executable, + arguments = [out.path] + ctx.attr.extra_arguments, + outputs = [out], + ) + return [DefaultInfo(files = depset([out]))] + +run_executable_rule = rule( + implementation = _run_executable_rule_impl, + doc = "Runs `executable` via ctx.actions.run() with `out` as the first argument and `extra_arguments` as remaining arguments", + attrs = { + "executable": attr.label(allow_single_file = True, executable = True, cfg = "exec"), + "out": attr.output(), + "extra_arguments": attr.string_list(), + }, +) + +def _write_file_rule_impl(ctx): + out = ctx.outputs.out + ctx.actions.write( + output = out, + content = ctx.attr.content, + is_executable = ctx.attr.is_executable, + ) + return [DefaultInfo(files = depset([out]))] + +write_file_rule = rule( + implementation = _write_file_rule_impl, + doc = "Writes `content` to `out` via ctx.actions.write()", + attrs = { + "content": attr.string(), + "out": attr.output(), + "is_executable": attr.bool(), + }, +) diff --git a/src/test/shell/integration/unicode_test.sh b/src/test/shell/integration/unicode_test.sh new file mode 100755 index 00000000000000..4f222ef798e77d --- /dev/null +++ b/src/test/shell/integration/unicode_test.sh @@ -0,0 +1,71 @@ +#!/bin/bash +# +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Test of Bazel's unicode i/o in actions + +# --- begin runfiles.bash initialization v3 --- +# Copy-pasted from the Bazel Bash runfiles library v3. +set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash +source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \ + source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \ + source "$0.runfiles/$f" 2>/dev/null || \ + source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \ + source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \ + { echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e +# --- end runfiles.bash initialization v3 --- + +source "$(rlocation "io_bazel/src/test/shell/integration_test_setup.sh")" \ + || { echo "integration_test_setup.sh not found!" >&2; exit 1; } + +touch WORKSPACE +cp "$(rlocation "io_bazel/src/test/shell/integration/unicode_test_BUILD")" BUILD +cp "$(rlocation "io_bazel/src/test/shell/integration/unicode_test.bzl")" . +cp "$(rlocation "io_bazel/src/test/shell/integration/unicode_test_expected.txt")" . 
+ +function test_unicode_genrule_cmd { + local test_name="genrule_cmd" + bazel build --genrule_strategy=local --spawn_strategy=local \ + --verbose_failures "${test_name}" >& "$TEST_log" \ + || fail "expected build to succeed" + + diff -u "${PRODUCT_NAME}-genfiles/${test_name}.out" \ + unicode_test_expected.txt \ + >>"${TEST_log}" 2>&1 || fail "Output not as expected" +} + +function test_unicode_action_run_argument { + local test_name="action_run_argument" + bazel build --genrule_strategy=local --spawn_strategy=local \ + --verbose_failures "${test_name}" >& "$TEST_log" \ + || fail "expected build to succeed" + + diff -u "${PRODUCT_NAME}-bin/${test_name}.out" \ + unicode_test_expected.txt \ + >>"${TEST_log}" 2>&1 || fail "Output not as expected" +} + +function test_unicode_action_write_content { + local test_name="action_write_content" + bazel build --genrule_strategy=local --spawn_strategy=local \ + --verbose_failures "${test_name}" >& "$TEST_log" \ + || fail "expected build to succeed" + + diff -u "${PRODUCT_NAME}-bin/${test_name}.out" \ + unicode_test_expected.txt \ + >>"${TEST_log}" 2>&1 || fail "Output not as expected" +} + +run_suite "Integration tests for ${PRODUCT_NAME}'s unicode i/o in actions" \ No newline at end of file diff --git a/src/test/shell/integration/unicode_test_BUILD b/src/test/shell/integration/unicode_test_BUILD new file mode 100644 index 00000000000000..21c787dfda614f --- /dev/null +++ b/src/test/shell/integration/unicode_test_BUILD @@ -0,0 +1,33 @@ +# BUILD file for unicode_test +load(":unicode_test.bzl", "run_executable_rule", "write_file_rule") + +# In Russian and Bengali: "Down with mojibake! We want unicode!" +non_ascii_string = "Долой кракозябры! আমরা ইউনিকোড চাই!" 
+ +genrule( + name = "genrule_cmd", + cmd = "echo -n \"%s\" > \"$@\"" % non_ascii_string, + outs = ["genrule_cmd.out"], +) + +write_file_rule( + name = "shell_echo", + content = """#!/bin/bash +outfile=$1; shift +exec echo -n $@ > $outfile""", + out = "shell_echo.sh", + is_executable = True, +) + +run_executable_rule( + name = "action_run_argument", + executable = "shell_echo.sh", + extra_arguments = [non_ascii_string], + out = "action_run_argument.out", +) + +write_file_rule( + name = "action_write_content", + content = non_ascii_string, + out = "action_write_content.out", +) diff --git a/src/test/shell/integration/unicode_test_expected.txt b/src/test/shell/integration/unicode_test_expected.txt new file mode 100644 index 00000000000000..fe5568813c332e --- /dev/null +++ b/src/test/shell/integration/unicode_test_expected.txt @@ -0,0 +1 @@ +Долой кракозябры! আমরা ইউনিকোড চাই! \ No newline at end of file From 19014e7ad004d3b3571b031853b8753c88a9c8ee Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 10:26:25 -0700 Subject: [PATCH 52/68] Give a name to the repo fetching worker thread pool. RELNOTES: None. 
PiperOrigin-RevId: 544394910 Change-Id: I7fa8178c70bf7af24912cd6c6f04f3ddede3e34b --- .../devtools/build/lib/bazel/BazelRepositoryModule.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java b/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java index 38fede1b7f3aa9..1bbcd0110f90b5 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/BazelRepositoryModule.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.analysis.RuleDefinition; @@ -157,7 +158,9 @@ public class BazelRepositoryModule extends BlazeModule { private LockfileMode bazelLockfileMode = LockfileMode.OFF; private List allowedYankedVersions = ImmutableList.of(); private SingleExtensionEvalFunction singleExtensionEvalFunction; - private final ExecutorService repoFetchingWorkerThreadPool = Executors.newFixedThreadPool(100); + private final ExecutorService repoFetchingWorkerThreadPool = + Executors.newFixedThreadPool( + 100, new ThreadFactoryBuilder().setNameFormat("repo-fetching-worker-%d").build()); @Nullable private CredentialModule credentialModule; From 990d97e576d4ec7d0c45f3efa5732171492d50b1 Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 11:25:10 -0700 Subject: [PATCH 53/68] Automated rollback of commit 0bda661e589ded1caad9edd58c9bebc3f647e41d. 
*** Reason for rollback *** b/289354550 *** Original change description *** Clean up Label Interner flag and relevant unused code PiperOrigin-RevId: 544412858 Change-Id: Ibbdc6e0f0768702be236ab25c5622e226d524d25 --- .../devtools/build/lib/cmdline/Label.java | 12 ++++- .../com/google/devtools/build/skyframe/BUILD | 1 + .../build/skyframe/InMemoryGraphImpl.java | 8 +++- .../skyframe/UsePooledLabelInterningFlag.java | 44 +++++++++++++++++++ .../google/devtools/build/lib/cmdline/BUILD | 1 + .../com/google/devtools/build/skyframe/BUILD | 1 + 6 files changed, 63 insertions(+), 4 deletions(-) create mode 100644 src/main/java/com/google/devtools/build/skyframe/UsePooledLabelInterningFlag.java diff --git a/src/main/java/com/google/devtools/build/lib/cmdline/Label.java b/src/main/java/com/google/devtools/build/lib/cmdline/Label.java index 0b8f66c94580e3..25e432e4b9346d 100644 --- a/src/main/java/com/google/devtools/build/lib/cmdline/Label.java +++ b/src/main/java/com/google/devtools/build/lib/cmdline/Label.java @@ -19,6 +19,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ComparisonChain; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Interner; import com.google.common.util.concurrent.Striped; import com.google.devtools.build.docgen.annot.DocCategory; import com.google.devtools.build.lib.actions.CommandLineItem; @@ -33,6 +34,7 @@ import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; +import com.google.devtools.build.skyframe.UsePooledLabelInterningFlag; import java.util.Arrays; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; @@ -84,10 +86,16 @@ public final class Label implements Comparable

      When this flag is true, {@code LabelInterner} will be applied for all {@code Label}s so that + * they are able to switch between interning the instances between the regular bazel weak interner + * and the static global pool. + * + *

      Applying {@code LabelInterner} can reduce memory overhead of having duplicate {@code Label} + * instances in both weak interner and {@link InMemoryGraphImpl}. + */ +// TODO(b/250641010): This flag is temporary to facilitate a controlled rollout. So it should be +// removed after the new pooled interning for `Label` instances is fully released and stable. +public final class UsePooledLabelInterningFlag { + + private static final boolean USE_POOLED_LABEL_INTERNER = + Objects.equals(System.getProperty("BAZEL_USE_POOLED_LABEL_INTERNER"), "1") + || TestType.isInTest(); + + public static boolean usePooledLabelInterningFlag() { + return USE_POOLED_LABEL_INTERNER; + } + + private UsePooledLabelInterningFlag() {} +} diff --git a/src/test/java/com/google/devtools/build/lib/cmdline/BUILD b/src/test/java/com/google/devtools/build/lib/cmdline/BUILD index 4c67386a6584b6..94ea4e6b45f90b 100644 --- a/src/test/java/com/google/devtools/build/lib/cmdline/BUILD +++ b/src/test/java/com/google/devtools/build/lib/cmdline/BUILD @@ -50,6 +50,7 @@ java_test( java_test( name = "LabelInternerIntegrationTest", srcs = ["LabelInternerIntegrationTest.java"], + jvm_flags = ["-DBAZEL_USE_POOLED_LABEL_INTERNER=1"], deps = [ "//src/main/java/com/google/devtools/build/lib/cmdline", "//src/main/java/com/google/devtools/build/lib/concurrent", diff --git a/src/test/java/com/google/devtools/build/skyframe/BUILD b/src/test/java/com/google/devtools/build/skyframe/BUILD index 56a79a1e1e4772..b508c52cbaf32b 100644 --- a/src/test/java/com/google/devtools/build/skyframe/BUILD +++ b/src/test/java/com/google/devtools/build/skyframe/BUILD @@ -87,6 +87,7 @@ java_library( java_test( name = "SkyframeTests", size = "medium", + jvm_flags = ["-DBAZEL_USE_POOLED_LABEL_INTERNER=1"], shard_count = 2, tags = ["not_run:arm"], test_class = "com.google.devtools.build.skyframe.AllTests", From dac1780085c682f088aa069cf0b9694a29b0552d Mon Sep 17 00:00:00 2001 From: Googler Date: Thu, 29 Jun 2023 13:48:48 -0700 Subject: 
[PATCH 54/68] Add unicode_test to Bazel shell integration tests Missing from https://github.com/bazelbuild/bazel/commit/f00439d8b7dd5b10ac357c413928418e0e1059c7 PiperOrigin-RevId: 544451711 Change-Id: Ie7d897e4743d4cba4beaef986b6e978d80c3680a --- src/test/shell/integration/BUILD | 14 ++++++++++++++ src/test/shell/integration/unicode_test.sh | 18 +++++++++++------- 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/src/test/shell/integration/BUILD b/src/test/shell/integration/BUILD index 902eca78092354..05755b31570c25 100644 --- a/src/test/shell/integration/BUILD +++ b/src/test/shell/integration/BUILD @@ -862,6 +862,20 @@ sh_test( tags = ["no_windows"], ) +sh_test( + name = "unicode_test", + srcs = ["unicode_test.sh"], + data = [ + "unicode_test.bzl", + "unicode_test_BUILD", + "unicode_test_expected.txt", + ":test-deps", + "@bazel_tools//tools/bash/runfiles", + ], + # TODO(arostovtsev): figure out how to make this test Windows-compatible. + tags = ["no_windows"], +) + ######################################################################## # Test suites. diff --git a/src/test/shell/integration/unicode_test.sh b/src/test/shell/integration/unicode_test.sh index 4f222ef798e77d..7ef3a334c706ac 100755 --- a/src/test/shell/integration/unicode_test.sh +++ b/src/test/shell/integration/unicode_test.sh @@ -30,15 +30,19 @@ source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \ source "$(rlocation "io_bazel/src/test/shell/integration_test_setup.sh")" \ || { echo "integration_test_setup.sh not found!" >&2; exit 1; } -touch WORKSPACE -cp "$(rlocation "io_bazel/src/test/shell/integration/unicode_test_BUILD")" BUILD -cp "$(rlocation "io_bazel/src/test/shell/integration/unicode_test.bzl")" . -cp "$(rlocation "io_bazel/src/test/shell/integration/unicode_test_expected.txt")" . 
+export LC_ALL="C.UTF-8" + +function set_up { + touch WORKSPACE + cp -f "$(rlocation "io_bazel/src/test/shell/integration/unicode_test_BUILD")" BUILD + cp -f "$(rlocation "io_bazel/src/test/shell/integration/unicode_test.bzl")" . + cp -f "$(rlocation "io_bazel/src/test/shell/integration/unicode_test_expected.txt")" . +} function test_unicode_genrule_cmd { local test_name="genrule_cmd" bazel build --genrule_strategy=local --spawn_strategy=local \ - --verbose_failures "${test_name}" >& "$TEST_log" \ + --verbose_failures "//:${test_name}" >& "$TEST_log" \ || fail "expected build to succeed" diff -u "${PRODUCT_NAME}-genfiles/${test_name}.out" \ @@ -49,7 +53,7 @@ function test_unicode_genrule_cmd { function test_unicode_action_run_argument { local test_name="action_run_argument" bazel build --genrule_strategy=local --spawn_strategy=local \ - --verbose_failures "${test_name}" >& "$TEST_log" \ + --verbose_failures "//:${test_name}" >& "$TEST_log" \ || fail "expected build to succeed" diff -u "${PRODUCT_NAME}-bin/${test_name}.out" \ @@ -60,7 +64,7 @@ function test_unicode_action_run_argument { function test_unicode_action_write_content { local test_name="action_write_content" bazel build --genrule_strategy=local --spawn_strategy=local \ - --verbose_failures "${test_name}" >& "$TEST_log" \ + --verbose_failures "//:${test_name}" >& "$TEST_log" \ || fail "expected build to succeed" diff -u "${PRODUCT_NAME}-bin/${test_name}.out" \ From ebe7cb3423c92e4a5c837474ac46383b5fd3d925 Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 01:40:26 -0700 Subject: [PATCH 55/68] Automatic code cleanup. 
PiperOrigin-RevId: 544590207 Change-Id: I48b835dafbb45cb0f1dc0a9820fa2123e004fa30 --- .../events/util/EventCollectionApparatus.java | 18 ---- .../util/BazelEvaluationTestCase.java | 82 ------------------- .../build/lib/testutil/MoreAsserts.java | 65 --------------- 3 files changed, 165 deletions(-) diff --git a/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java b/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java index 888153751d1490..cc734fb69207b1 100644 --- a/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java +++ b/src/test/java/com/google/devtools/build/lib/events/util/EventCollectionApparatus.java @@ -191,14 +191,6 @@ public Event assertContainsWarning(String expectedMessage) { return MoreAsserts.assertContainsEvent(eventCollector, expectedMessage, EventKind.WARNING); } - /** - * Utility method: Assert that the {@link #collector()} has received a - * debug message with the {@code expectedMessage}. - */ - public Event assertContainsDebug(String expectedMessage) { - return MoreAsserts.assertContainsEvent(eventCollector, expectedMessage, EventKind.DEBUG); - } - /** * Utility method: Assert that the {@link #collector()} has received an event of the given type * and with the {@code expectedMessage}. @@ -213,16 +205,6 @@ public List assertContainsEventWithFrequency(String expectedMessage, expectedFrequency); } - /** - * Utility method: Assert that the {@link #collector()} has received an - * event with the {@code expectedMessage} in quotes. - */ - - public Event assertContainsEventWithWordsInQuotes(String... 
words) { - return MoreAsserts.assertContainsEventWithWordsInQuotes( - eventCollector, words); - } - public void assertDoesNotContainEvent(String unexpectedEvent) { MoreAsserts.assertDoesNotContainEvent(eventCollector, unexpectedEvent); } diff --git a/src/test/java/com/google/devtools/build/lib/starlark/util/BazelEvaluationTestCase.java b/src/test/java/com/google/devtools/build/lib/starlark/util/BazelEvaluationTestCase.java index c8d817893dea32..2afbb508886ff0 100644 --- a/src/test/java/com/google/devtools/build/lib/starlark/util/BazelEvaluationTestCase.java +++ b/src/test/java/com/google/devtools/build/lib/starlark/util/BazelEvaluationTestCase.java @@ -44,7 +44,6 @@ import net.starlark.java.eval.Starlark; import net.starlark.java.eval.StarlarkSemantics; import net.starlark.java.eval.StarlarkThread; -import net.starlark.java.syntax.Expression; import net.starlark.java.syntax.FileOptions; import net.starlark.java.syntax.ParserInput; import net.starlark.java.syntax.SyntaxError; @@ -83,15 +82,6 @@ public ExtendedEventHandler getEventHandler() { return eventCollectionApparatus.reporter(); } - // TODO(adonovan): don't let subclasses inherit vaguely specified "helpers". - // Separate all the tests clearly into tests of the scanner, parser, resolver, - // and evaluation. - - /** Parses an expression. */ - final Expression parseExpression(String... lines) throws SyntaxError.Exception { - return Expression.parse(ParserInput.fromLines(lines)); - } - /** Updates a global binding in the module. */ // TODO(adonovan): rename setGlobal. @CanIgnoreReturnValue @@ -191,14 +181,6 @@ public void checkEvalErrorContains(String msg, String... input) throws Exception } } - public void checkEvalErrorDoesNotContain(String msg, String... 
input) throws Exception { - try { - exec(input); - } catch (SyntaxError.Exception | EvalException | EventCollectionApparatus.FailFastException e) { - assertThat(e).hasMessageThat().doesNotContain(msg); - } - } - // Forward relevant methods to the EventCollectionApparatus @CanIgnoreReturnValue public BazelEvaluationTestCase setFailFast(boolean failFast) { @@ -206,12 +188,6 @@ public BazelEvaluationTestCase setFailFast(boolean failFast) { return this; } - @CanIgnoreReturnValue - public BazelEvaluationTestCase assertNoWarningsOrErrors() { - eventCollectionApparatus.assertNoWarningsOrErrors(); - return this; - } - public EventCollector getEventCollector() { return eventCollectionApparatus.collector(); } @@ -220,20 +196,6 @@ public Event assertContainsError(String expectedMessage) { return eventCollectionApparatus.assertContainsError(expectedMessage); } - public Event assertContainsWarning(String expectedMessage) { - return eventCollectionApparatus.assertContainsWarning(expectedMessage); - } - - public Event assertContainsDebug(String expectedMessage) { - return eventCollectionApparatus.assertContainsDebug(expectedMessage); - } - - @CanIgnoreReturnValue - public BazelEvaluationTestCase clearEvents() { - eventCollectionApparatus.clear(); - return this; - } - /** Encapsulates a separate test which can be executed by a Scenario. */ protected interface Testable { void run() throws Exception; @@ -297,13 +259,6 @@ public Scenario testExpression(String src, Object expected) throws Exception { return this; } - /** Evaluates an expression and compares its result to the ordered list of expected objects. */ - @CanIgnoreReturnValue - public Scenario testExactOrder(String src, Object... items) throws Exception { - runTest(collectionTestable(src, items)); - return this; - } - /** Evaluates an expression and checks whether it fails with the expected error. */ @CanIgnoreReturnValue public Scenario testIfExactError(String expectedError, String... 
lines) throws Exception { @@ -318,13 +273,6 @@ public Scenario testIfErrorContains(String expectedError, String... lines) throw return this; } - /** Looks up the value of the specified variable and compares it to the expected value. */ - @CanIgnoreReturnValue - public Scenario testLookup(String name, Object expected) throws Exception { - runTest(createLookUpTestable(name, expected)); - return this; - } - /** * Creates a Testable that checks whether the evaluation of the given expression fails with the * expected error. @@ -345,19 +293,6 @@ public void run() throws Exception { }; } - /** - * Creates a Testable that checks whether the value of the expression is a sequence containing - * the expected elements. - */ - private Testable collectionTestable(final String src, final Object... expected) { - return new Testable() { - @Override - public void run() throws Exception { - assertThat((Iterable) eval(src)).containsExactly(expected).inOrder(); - } - }; - } - /** * Creates a testable that compares the value of the expression to a specified result. 
* @@ -386,23 +321,6 @@ public void run() throws Exception { }; } - /** - * Creates a Testable that looks up the given variable and compares its value to the expected - * value - * - * @param name - * @param expected - * @return An instance of Testable that does both lookup and comparison - */ - private Testable createLookUpTestable(final String name, final Object expected) { - return new Testable() { - @Override - public void run() throws Exception { - assertThat(lookup(name)).isEqualTo(expected); - } - }; - } - /** Executes the given Testable */ void runTest(Testable testable) throws Exception { run(new TestableDecorator(setup, testable)); diff --git a/src/test/java/com/google/devtools/build/lib/testutil/MoreAsserts.java b/src/test/java/com/google/devtools/build/lib/testutil/MoreAsserts.java index 87519c318e93ed..d2f77939dbbd21 100644 --- a/src/test/java/com/google/devtools/build/lib/testutil/MoreAsserts.java +++ b/src/test/java/com/google/devtools/build/lib/testutil/MoreAsserts.java @@ -18,13 +18,11 @@ import static org.junit.Assert.fail; import com.google.common.base.Function; -import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventCollector; import com.google.devtools.build.lib.events.EventKind; @@ -35,7 +33,6 @@ import java.util.ArrayDeque; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; @@ -48,10 +45,6 @@ */ public class MoreAsserts { - public static void assertEquals(T expected, T actual, Comparator comp) { - assertThat(comp.compare(expected, actual)).isEqualTo(0); - } - /** * Scans if an instance of given 
class is strongly reachable from a given * object. @@ -145,21 +138,6 @@ private static boolean isRetained(Predicate predicate, Object start) { return false; } - private static String getClassDescription(Object object) { - return object == null - ? "null" - : ("instance of " + object.getClass().getName()); - } - - public static String chattyFormat(String message, Object expected, Object actual) { - String expectedClass = getClassDescription(expected); - String actualClass = getClassDescription(actual); - - return Joiner.on('\n').join((message != null) ? ("\n" + message) : "", - " expected " + expectedClass + ": <" + expected + ">", - " but was " + actualClass + ": <" + actual + ">"); - } - public static void assertEqualsUnifyingLineEnds(String expected, String actual) { if (actual != null) { actual = actual.replaceAll(System.getProperty("line.separator"), "\n"); @@ -215,13 +193,6 @@ public static void assertEqualWithStdoutAndErr( } } - public static void assertStdoutContainsString(String expected, String stdout, String stderr) { - if (!stdout.contains(expected)) { - fail("expected stdout to contain string <" + expected + "> but stdout was <" - + stdout + "> and stderr was <" + stderr + ">"); - } - } - public static void assertStderrContainsString(String expected, String stdout, String stderr) { if (!stderr.contains(expected)) { fail("expected stderr to contain string <" + expected + "> but stdout was <" @@ -245,14 +216,6 @@ public static void assertStderrContainsRegex(String expectedRegex, } } - public static Set asStringSet(Iterable collection) { - Set set = Sets.newTreeSet(); - for (Object o : collection) { - set.add("\"" + o + "\""); - } - return set; - } - /** * If the specified EventCollector contains any events, an informative * assertion fails in the context of the specified TestCase. 
@@ -376,34 +339,6 @@ public static void assertDoesNotContainEvent(Iterable eventCollector, } } - /** - * If the specified EventCollector does not contain an event which has - * each of {@code words} surrounded by single quotes as a substring, an - * informative assertion fails. Otherwise the matching event is returned. - */ - public static Event assertContainsEventWithWordsInQuotes( - Iterable eventCollector, - String... words) { - for (Event event : eventCollector) { - boolean found = true; - for (String word : words) { - if (!event.getMessage().contains("'" + word + "'")) { - found = false; - break; - } - } - if (found) { - return event; - } - } - String eventsString = eventsToString(eventCollector); - assertWithMessage("Event containing words " + Arrays.toString(words) + " in " - + "single quotes not found" - + (eventsString.length() == 0 ? "" : ("; found these though:" + eventsString))) - .that(false).isTrue(); - return null; // unreachable - } - /** * Returns a string consisting of each event in the specified collector, * preceded by a newline. From b7aad6ca70687c5f2f4d62182f348df1edc21d22 Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 01:42:52 -0700 Subject: [PATCH 56/68] Remove usage of `java_common.compile()` private APIs in AutoExecGroupsTest These are unnecessary for what the test is interested in, namely that the resource jar action be registered - for which `resources` are sufficient. Motivation for this change is that these parameters will no longer be available once `java_common` is in Starlark. 
PiperOrigin-RevId: 544590584 Change-Id: I6965667828a88789131a3ba4c797bbbe96c41c19 --- .../build/lib/analysis/AutoExecGroupsTest.java | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/src/test/java/com/google/devtools/build/lib/analysis/AutoExecGroupsTest.java b/src/test/java/com/google/devtools/build/lib/analysis/AutoExecGroupsTest.java index fc3b8ac43655f5..2344f17117ff5d 100644 --- a/src/test/java/com/google/devtools/build/lib/analysis/AutoExecGroupsTest.java +++ b/src/test/java/com/google/devtools/build/lib/analysis/AutoExecGroupsTest.java @@ -1399,8 +1399,6 @@ public void javaCommonCompile_automaticExecGroupsDisabled_lazyActionExecutesOnSe " output = output_jar,", " java_toolchain = ctx.toolchains['" + TestConstants.JAVA_TOOLCHAIN_TYPE + "'].java,", " resources = ctx.files.resources,", - " resource_jars = ctx.files.resource_jars,", - " classpath_resources = ctx.files.classpath_resources,", " )", " return [java_info, DefaultInfo(files = depset([output_jar]))]", "custom_rule = rule(", @@ -1408,8 +1406,6 @@ public void javaCommonCompile_automaticExecGroupsDisabled_lazyActionExecutesOnSe " toolchains = ['//rule:toolchain_type_2', '" + TestConstants.JAVA_TOOLCHAIN_TYPE + "'],", " attrs = {", " 'resources': attr.label_list(allow_files = True),", - " 'resource_jars': attr.label_list(allow_files = True),", - " 'classpath_resources': attr.label_list(allow_files = True),", " },", " provides = [JavaInfo],", " fragments = ['java']", @@ -1417,8 +1413,7 @@ public void javaCommonCompile_automaticExecGroupsDisabled_lazyActionExecutesOnSe scratch.file( "bazel_internal/test/BUILD", "load('//bazel_internal/test:defs.bzl', 'custom_rule')", - "custom_rule(name = 'custom_rule_name', resources = ['Resources.java'], resource_jars =" - + " ['ResourceJars.java'], classpath_resources = ['ClasspathResources.java'])"); + "custom_rule(name = 'custom_rule_name', resources = ['Resources.java'])"); useConfiguration( "--incompatible_auto_exec_groups", 
"--experimental_turbine_annotation_processing"); @@ -1446,8 +1441,6 @@ public void javaCommonCompile_automaticExecGroupsDisabled_lazyActionExecutesOnSe " output = output_jar,", " java_toolchain = ctx.toolchains['" + TestConstants.JAVA_TOOLCHAIN_TYPE + "'].java,", " resources = ctx.files.resources,", - " resource_jars = ctx.files.resource_jars,", - " classpath_resources = ctx.files.classpath_resources,", " )", " return [java_info, DefaultInfo(files = depset([output_jar]))]", "custom_rule = rule(", @@ -1455,8 +1448,6 @@ public void javaCommonCompile_automaticExecGroupsDisabled_lazyActionExecutesOnSe " toolchains = ['//rule:toolchain_type_2', '" + TestConstants.JAVA_TOOLCHAIN_TYPE + "'],", " attrs = {", " 'resources': attr.label_list(allow_files = True),", - " 'resource_jars': attr.label_list(allow_files = True),", - " 'classpath_resources': attr.label_list(allow_files = True),", " },", " provides = [JavaInfo],", " fragments = ['java']", @@ -1464,8 +1455,7 @@ public void javaCommonCompile_automaticExecGroupsDisabled_lazyActionExecutesOnSe scratch.file( "bazel_internal/test/BUILD", "load('//bazel_internal/test:defs.bzl', 'custom_rule')", - "custom_rule(name = 'custom_rule_name', resources = ['Resources.java'], resource_jars =" - + " ['ResourceJars.java'], classpath_resources = ['ClasspathResources.java'])"); + "custom_rule(name = 'custom_rule_name', resources = ['Resources.java'])"); useConfiguration("--experimental_turbine_annotation_processing"); ImmutableList actions = getActions("//bazel_internal/test:custom_rule_name"); From 86dbeb59ec50f3c57c3330776bf0217875b65d60 Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 02:02:03 -0700 Subject: [PATCH 57/68] Remove mention of `experimental_worker_multiplex` from tests. 
Instead of this flag we should use `worker_multiplex` PiperOrigin-RevId: 544593756 Change-Id: I78f2bc3760128fec99bf46a0ab2acb6f2ad5a013 --- src/test/shell/bazel/android/desugarer_integration_test.sh | 2 +- .../shell/bazel/android/resource_processing_integration_test.sh | 2 +- src/test/shell/integration/bazel_worker_multiplexer_test.sh | 2 +- src/test/shell/integration/bazel_worker_test.sh | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/shell/bazel/android/desugarer_integration_test.sh b/src/test/shell/bazel/android/desugarer_integration_test.sh index ac6841e1f8d536..fa25ace0938de3 100755 --- a/src/test/shell/bazel/android/desugarer_integration_test.sh +++ b/src/test/shell/bazel/android/desugarer_integration_test.sh @@ -137,7 +137,7 @@ function test_java_8_android_binary_multiplex_worker_strategy() { create_java_8_android_binary assert_build //java/bazel:bin \ - --experimental_worker_multiplex \ + --worker_multiplex \ --persistent_multiplex_android_dex_desugar \ --worker_verbose &> $TEST_log expect_log "Created new non-sandboxed Desugar multiplex-worker (id [0-9]\+, key hash -\?[0-9]\+)" diff --git a/src/test/shell/bazel/android/resource_processing_integration_test.sh b/src/test/shell/bazel/android/resource_processing_integration_test.sh index cc302fc986a1c3..18fef259b8e456 100755 --- a/src/test/shell/bazel/android/resource_processing_integration_test.sh +++ b/src/test/shell/bazel/android/resource_processing_integration_test.sh @@ -126,7 +126,7 @@ function test_persistent_multiplex_resource_processor() { create_android_binary setup_font_resources - assert_build //java/bazel:bin --experimental_worker_multiplex \ + assert_build //java/bazel:bin --worker_multiplex \ --persistent_multiplex_android_tools \ --worker_verbose &> $TEST_log expect_log "Created new non-sandboxed AndroidResourceParser multiplex-worker (id [0-9]\+, key hash -\?[0-9]\+)" diff --git a/src/test/shell/integration/bazel_worker_multiplexer_test.sh 
b/src/test/shell/integration/bazel_worker_multiplexer_test.sh index d4e7dd294d85e4..1225cf0322655d 100755 --- a/src/test/shell/integration/bazel_worker_multiplexer_test.sh +++ b/src/test/shell/integration/bazel_worker_multiplexer_test.sh @@ -36,7 +36,7 @@ add_to_bazelrc "build -s" add_to_bazelrc "build --spawn_strategy=worker,standalone" add_to_bazelrc "build --worker_verbose --worker_max_instances=3" add_to_bazelrc "build --debug_print_action_contexts" -add_to_bazelrc "build --experimental_worker_multiplex" +add_to_bazelrc "build --worker_multiplex" add_to_bazelrc "build ${ADDITIONAL_BUILD_FLAGS}" function set_up() { diff --git a/src/test/shell/integration/bazel_worker_test.sh b/src/test/shell/integration/bazel_worker_test.sh index 95f902493ade6c..0a4201ef75ced6 100755 --- a/src/test/shell/integration/bazel_worker_test.sh +++ b/src/test/shell/integration/bazel_worker_test.sh @@ -37,7 +37,7 @@ add_to_bazelrc "build -s" add_to_bazelrc "build --spawn_strategy=worker,standalone" add_to_bazelrc "build --worker_verbose --worker_max_instances=1" add_to_bazelrc "build --debug_print_action_contexts" -add_to_bazelrc "build --noexperimental_worker_multiplex" +add_to_bazelrc "build --noworker_multiplex" add_to_bazelrc "build ${ADDITIONAL_BUILD_FLAGS}" function set_up() { From 828edc8e1f280023f80b51b67b617e1862fadd17 Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 04:14:22 -0700 Subject: [PATCH 58/68] Update mirror_request.yml PiperOrigin-RevId: 544617842 Change-Id: Ia1f8cd36609476bf1a27d14f13519771126656e5 --- .github/ISSUE_TEMPLATE/mirror_request.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/mirror_request.yml b/.github/ISSUE_TEMPLATE/mirror_request.yml index 2f315bad771969..5521cf18d9f937 100644 --- a/.github/ISSUE_TEMPLATE/mirror_request.yml +++ b/.github/ISSUE_TEMPLATE/mirror_request.yml @@ -9,12 +9,11 @@ body: - type: markdown attributes: value: > - **Attention:** if the archive you're trying to 
mirror is a GitHub release archive, - please use URLs of the form `https://github.com/$USER/$REPO/archive/refs/tags/$TAG`, - instead of the form without the `refs/tags/` part. The latter is *not* guaranteed to - have a stable hash (see - https://github.com/bazel-contrib/SIG-rules-authors/issues/11#issuecomment-1029861300 - for more details). + **Attention:** if the archive you're trying to mirror is from GitHub, + please use URLs in the form of `https://github.com/$USER/$REPO/releases/download/...` if available. + If you are the project maintainer, you should create and upload such a release archive. + GitHub doesn't guarantee a stable checksum of source archives in the form of `https://github.com/$USER/$REPO/archive/...`, which are generated on demand. + Check [GitHub Archive Checksum Outage](https://blog.bazel.build/2023/02/15/github-archive-checksum.html) for more details. - type: textarea id: urls attributes: From 5c4940a40bbb3923bc0de860d94c21017844db10 Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 06:31:32 -0700 Subject: [PATCH 59/68] Don't drop GLOB nodes after analysis when in Skymeld mode.
PiperOrigin-RevId: 544640160 Change-Id: I38a8fae395c096264859a484b36db1e29ddddd4e --- .../google/devtools/build/lib/skyframe/BUILD | 15 +++++ .../NodeDroppingInconsistencyReceiver.java | 10 +++- .../skyframe/SequencedSkyframeExecutor.java | 18 ++++-- .../build/lib/skyframe/SkyframeExecutor.java | 5 ++ .../SkymeldInconsistencyReceiver.java | 60 +++++++++++++++++++ 5 files changed, 102 insertions(+), 6 deletions(-) create mode 100644 src/main/java/com/google/devtools/build/lib/skyframe/SkymeldInconsistencyReceiver.java diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/BUILD b/src/main/java/com/google/devtools/build/lib/skyframe/BUILD index e7100409b5a26c..0838022f2d0d18 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/BUILD +++ b/src/main/java/com/google/devtools/build/lib/skyframe/BUILD @@ -178,6 +178,7 @@ java_library( ":skyframe_error_processor", ":skyframe_executor_repository_helpers_holder", ":skyframe_incremental_build_monitor", + ":skymeld_inconsistency_receiver", ":starlark_builtins_value", ":state_informing_sky_function_environment", ":target_completion_value", @@ -2973,3 +2974,17 @@ java_library( "//third_party:jsr305", ], ) + +java_library( + name = "skymeld_inconsistency_receiver", + srcs = ["SkymeldInconsistencyReceiver.java"], + deps = [ + ":node_dropping_inconsistency_receiver", + ":sky_functions", + "//src/main/java/com/google/devtools/build/skyframe", + "//src/main/java/com/google/devtools/build/skyframe:graph_inconsistency_java_proto", + "//src/main/java/com/google/devtools/build/skyframe:skyframe-objects", + "//third_party:guava", + "//third_party:jsr305", + ], +) diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/NodeDroppingInconsistencyReceiver.java b/src/main/java/com/google/devtools/build/lib/skyframe/NodeDroppingInconsistencyReceiver.java index ae2a6ef669483c..5283fa8c43911b 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/NodeDroppingInconsistencyReceiver.java +++ 
b/src/main/java/com/google/devtools/build/lib/skyframe/NodeDroppingInconsistencyReceiver.java @@ -61,7 +61,15 @@ public void noteInconsistencyAndMaybeThrow( */ public static boolean isExpectedInconsistency( SkyKey key, @Nullable Collection otherKeys, Inconsistency inconsistency) { - SkyFunctionName expectedMissingChildType = EXPECTED_MISSING_CHILDREN.get(key.functionName()); + return isExpectedInconsistency(key, otherKeys, inconsistency, EXPECTED_MISSING_CHILDREN); + } + + static boolean isExpectedInconsistency( + SkyKey key, + @Nullable Collection otherKeys, + Inconsistency inconsistency, + ImmutableMap expectedMissingChildrenTypes) { + SkyFunctionName expectedMissingChildType = expectedMissingChildrenTypes.get(key.functionName()); if (expectedMissingChildType == null) { return false; } diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutor.java index 10b313f8df89da..bdf21b7fa4ba42 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SequencedSkyframeExecutor.java @@ -29,6 +29,7 @@ import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.FileValue; import com.google.devtools.build.lib.actions.RemoteArtifactChecker; +import com.google.devtools.build.lib.analysis.AnalysisOptions; import com.google.devtools.build.lib.analysis.AspectValue; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ConfiguredTarget; @@ -236,14 +237,21 @@ public WorkspaceInfoFromDiff sync( throws InterruptedException, AbruptExitException { if (evaluatorNeedsReset) { if (rewindingPermitted(options)) { + // Currently incompatible with Skymeld i.e. this code path won't be run in Skymeld mode. 
We + // may need to combine these GraphInconsistencyReceiver implementations in the future. var rewindableReceiver = new RewindableGraphInconsistencyReceiver(); rewindableReceiver.setHeuristicallyDropNodes(heuristicallyDropNodes); - this.inconsistencyReceiver = rewindableReceiver; + inconsistencyReceiver = rewindableReceiver; + } else if (isMergedSkyframeAnalysisExecution() + && ((options.getOptions(AnalysisOptions.class) != null + && options.getOptions(AnalysisOptions.class).discardAnalysisCache) + || !tracksStateForIncrementality() + || heuristicallyDropNodes)) { + inconsistencyReceiver = new SkymeldInconsistencyReceiver(heuristicallyDropNodes); + } else if (heuristicallyDropNodes) { + inconsistencyReceiver = new NodeDroppingInconsistencyReceiver(); } else { - inconsistencyReceiver = - heuristicallyDropNodes - ? new NodeDroppingInconsistencyReceiver() - : GraphInconsistencyReceiver.THROWING; + inconsistencyReceiver = GraphInconsistencyReceiver.THROWING; } // Recreate MemoizingEvaluator so that graph is recreated with correct edge-clearing status, diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java index f478a211510dda..2e4ba6b327bf56 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkyframeExecutor.java @@ -1497,6 +1497,11 @@ public void setMergedSkyframeAnalysisExecutionSupplier( this.mergedSkyframeAnalysisExecutionSupplier = mergedSkyframeAnalysisExecutionSupplier; } + boolean isMergedSkyframeAnalysisExecution() { + return mergedSkyframeAnalysisExecutionSupplier != null + && mergedSkyframeAnalysisExecutionSupplier.get(); + } + /** Sets the eventBus to use for posting events. 
*/ public void setEventBus(@Nullable EventBus eventBus) { this.eventBus.set(eventBus); diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/SkymeldInconsistencyReceiver.java b/src/main/java/com/google/devtools/build/lib/skyframe/SkymeldInconsistencyReceiver.java new file mode 100644 index 00000000000000..4c3c15d50c484a --- /dev/null +++ b/src/main/java/com/google/devtools/build/lib/skyframe/SkymeldInconsistencyReceiver.java @@ -0,0 +1,60 @@ +// Copyright 2023 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package com.google.devtools.build.lib.skyframe; + +import static com.google.common.base.Preconditions.checkState; + +import com.google.common.collect.ImmutableMap; +import com.google.devtools.build.skyframe.GraphInconsistencyReceiver; +import com.google.devtools.build.skyframe.SkyFunctionName; +import com.google.devtools.build.skyframe.SkyKey; +import com.google.devtools.build.skyframe.proto.GraphInconsistency.Inconsistency; +import java.util.Collection; +import javax.annotation.Nullable; + +/** + * The {@link GraphInconsistencyReceiver} that tolerates inconsistencies resulting from dropping + * pre-execution nodes in Skymeld mode.
+ */ +public class SkymeldInconsistencyReceiver implements GraphInconsistencyReceiver { + private static final ImmutableMap + SKYMELD_EXPECTED_MISSING_CHILDREN = + ImmutableMap.of(SkyFunctions.ACTION_EXECUTION, SkyFunctions.GLOB); + + private final boolean heuristicallyDropNodes; + + public SkymeldInconsistencyReceiver(boolean heuristicallyDropNodes) { + this.heuristicallyDropNodes = heuristicallyDropNodes; + } + + @Override + public void noteInconsistencyAndMaybeThrow( + SkyKey key, @Nullable Collection otherKeys, Inconsistency inconsistency) { + if (heuristicallyDropNodes + && NodeDroppingInconsistencyReceiver.isExpectedInconsistency( + key, otherKeys, inconsistency)) { + // If `--heuristically_drop_nodes` is enabled, check whether the inconsistency is caused by + // dropped state node. If so, tolerate the inconsistency and return. + return; + } + + checkState( + NodeDroppingInconsistencyReceiver.isExpectedInconsistency( + key, otherKeys, inconsistency, SKYMELD_EXPECTED_MISSING_CHILDREN), + "Unexpected inconsistency: %s, %s, %s", + key, + otherKeys, + inconsistency); + } +} From 3071bee24cea3a4499260c8729555cb13850d4df Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 08:08:17 -0700 Subject: [PATCH 60/68] Increase the test size for DexFileSplitter_synthetic_classes_test because the failure case runs very close to the timeout for a "medium" test (300s), leading to a flaky timeout rather than a (correct) failure. RELNOTES: None. 
PiperOrigin-RevId: 544658707 Change-Id: If179b25749f40e25731f0c68ac22334ff74dc27b --- src/test/shell/bazel/android/BUILD | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/shell/bazel/android/BUILD b/src/test/shell/bazel/android/BUILD index db8ee37d3105df..19f90c5e95e84c 100644 --- a/src/test/shell/bazel/android/BUILD +++ b/src/test/shell/bazel/android/BUILD @@ -187,7 +187,7 @@ android_sh_test( android_sh_test( name = "DexFileSplitter_synthetic_classes_test", - size = "medium", + size = "large", srcs = ["DexFileSplitter_synthetic_classes_test.sh"], data = [ ":android_helper", From bf6e469eddfe76ea019b0d19ce59af0476e43222 Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 08:32:01 -0700 Subject: [PATCH 61/68] Avoid creating a new map of predeclared bindings for each starlark module. This was done only so that `GuardedValue` could be filtered. Instead, filter it when the value is requested. This allows starlark modules to share a map instance. PiperOrigin-RevId: 544663574 Change-Id: I5922077a54fa9845f97586cb16961c5653bec723 --- .../lib/skyframe/BzlCompileFunction.java | 4 +- .../java/net/starlark/java/eval/Module.java | 69 +++++++++++-------- 2 files changed, 42 insertions(+), 31 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileFunction.java b/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileFunction.java index bcfdd33e3a887f..d18bfe2ca3e699 100644 --- a/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileFunction.java +++ b/src/main/java/com/google/devtools/build/lib/skyframe/BzlCompileFunction.java @@ -14,6 +14,7 @@ package com.google.devtools.build.lib.skyframe; +import com.google.common.collect.ImmutableMap; import com.google.common.hash.HashFunction; import com.google.devtools.build.lib.actions.FileValue; import com.google.devtools.build.lib.cmdline.BazelCompileContext; @@ -29,7 +30,6 @@ import com.google.devtools.build.skyframe.SkyKey; import 
com.google.devtools.build.skyframe.SkyValue; import java.io.IOException; -import java.util.Map; import javax.annotation.Nullable; import net.starlark.java.eval.Module; import net.starlark.java.eval.StarlarkSemantics; @@ -143,7 +143,7 @@ static BzlCompileValue computeInline( return null; } - Map predeclared; + ImmutableMap predeclared; if (key.isSclDialect()) { predeclared = bazelStarlarkEnvironment.getStarlarkGlobals().getSclToplevels(); } else if (key.kind == BzlCompileValue.Kind.BUILTINS) { diff --git a/src/main/java/net/starlark/java/eval/Module.java b/src/main/java/net/starlark/java/eval/Module.java index 3ddf8dbd39a316..740f275247c5bc 100644 --- a/src/main/java/net/starlark/java/eval/Module.java +++ b/src/main/java/net/starlark/java/eval/Module.java @@ -16,6 +16,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedHashMap; @@ -50,8 +51,9 @@ */ public final class Module implements Resolver.Module { - // The module's predeclared environment. Excludes UNIVERSE bindings. - private ImmutableMap predeclared; + // The module's predeclared environment. Excludes UNIVERSE bindings. Values that are conditionally + // present are stored as GuardedValues regardless of whether they are actually enabled. + private final ImmutableMap predeclared; // The module's global variables, in order of creation. private final LinkedHashMap globalIndex = new LinkedHashMap<>(); @@ -61,12 +63,18 @@ public final class Module implements Resolver.Module { // Its toString appears to Starlark in str(function): "". @Nullable private final Object clientData; + private final StarlarkSemantics semantics; + // An optional doc string for the module. Set after construction when evaluating a .bzl file. 
@Nullable private String documentation; - private Module(ImmutableMap predeclared, Object clientData) { + private Module( + ImmutableMap predeclared, + @Nullable Object clientData, + StarlarkSemantics semantics) { this.predeclared = predeclared; this.clientData = clientData; + this.semantics = semantics; } /** @@ -85,7 +93,7 @@ public static Module withPredeclared( */ public static Module withPredeclaredAndData( StarlarkSemantics semantics, Map predeclared, @Nullable Object clientData) { - return new Module(filter(predeclared, semantics, clientData), clientData); + return new Module(ImmutableMap.copyOf(predeclared), clientData, semantics); } /** @@ -93,7 +101,8 @@ public static Module withPredeclaredAndData( * Starlark#UNIVERSE}, and with no client data. */ public static Module create() { - return new Module(/*predeclared=*/ ImmutableMap.of(), null); + return new Module( + /* predeclared= */ ImmutableMap.of(), /* clientData= */ null, StarlarkSemantics.DEFAULT); } /** @@ -128,26 +137,18 @@ public static Module ofInnermostEnclosingStarlarkFunction(StarlarkThread thread) } /** - * Returns a map in which each {@link GuardedValue} that is enabled has been replaced by the value - * it guards. Disabled {@code GuardedValues} are left in place for error reporting upon access, - * and should be treated as unavailable. + * Replaces an enabled {@link GuardedValue} with the value it guards. * - *

      The iteration order is unchanged. + *

      A disabled {@link GuardedValue} is left in place for error reporting upon access, and should + * be treated as unavailable. */ - private static ImmutableMap filter( - Map predeclared, StarlarkSemantics semantics, @Nullable Object clientData) { - ImmutableMap.Builder filtered = ImmutableMap.builder(); - for (Map.Entry bind : predeclared.entrySet()) { - Object v = bind.getValue(); - if (v instanceof GuardedValue) { - GuardedValue gv = (GuardedValue) bind.getValue(); - if (gv.isObjectAccessibleUsingSemantics(semantics, clientData)) { - v = gv.getObject(); - } - } - filtered.put(bind.getKey(), v); + private Object filterGuardedValue(Object v) { + Preconditions.checkNotNull(v); + if (!(v instanceof GuardedValue)) { + return v; } - return filtered.build(); + GuardedValue gv = (GuardedValue) v; + return gv.isObjectAccessibleUsingSemantics(semantics, clientData) ? gv.getObject() : gv; } /** Returns the client data associated with this module. */ @@ -175,9 +176,19 @@ public String getDocumentation() { return documentation; } - /** Returns the value of a predeclared (not universal) binding in this module. */ - Object getPredeclared(String name) { - return predeclared.get(name); + /** + * Returns the value of a predeclared (not universal) binding in this module. + * + *

      In the case that the predeclared is a {@link GuardedValue}: If it is enabled, the underlying + * value is returned, otherwise the {@code GuardedValue} itself is returned for error reporting. + */ + @Nullable + public Object getPredeclared(String name) { + var value = predeclared.get(name); + if (value == null) { + return null; + } + return filterGuardedValue(value); } /** @@ -186,8 +197,8 @@ Object getPredeclared(String name) { *

      The map reflects any filtering of {@link GuardedValue}: enabled ones are replaced by the * underlying values that they guard, while disabled ones are left in place for error reporting. */ - public ImmutableMap getPredeclaredBindings() { - return predeclared; + public Map getPredeclaredBindings() { + return Maps.transformValues(predeclared, this::filterGuardedValue); } /** @@ -205,7 +216,7 @@ public ImmutableMap getGlobals() { m.put(e.getKey(), v); } } - return m.build(); + return m.buildOrThrow(); } /** Implements the resolver's module interface. */ @@ -217,7 +228,7 @@ public Resolver.Scope resolve(String name) throws Undefined { } // predeclared? - Object v = predeclared.get(name); + Object v = getPredeclared(name); if (v != null) { if (v instanceof GuardedValue) { // Name is correctly spelled, but access is disabled by a flag or by client data. From 0e899c5d9289771908648ce4362c45c2171def8d Mon Sep 17 00:00:00 2001 From: Googler Date: Fri, 30 Jun 2023 08:44:25 -0700 Subject: [PATCH 62/68] Better centralize package defaults into PackageArgs Package defaults are currently somewhat haphazardly specified in Package and PackageArgs. Instead, more consistently get package defaults from PackageArgs: * Remove all getDefault??? parameters from Package and Package.Builder. Instead declare getPackageArgs to get the container of all package defaults. * Move defaultHdrsCheck into PackageArgs (was in Package) as it IS a package default being set by the package() function. See NOTE below. * PackageArgs.processParams now uses a select statement for both readability and mild theoretical performance gains. * AttributeMap now just needs to simply declare an abstract getPackageArgs rather than an ever-growing list of getDefault??? Ultimately, this should clarify mutability a bit. The result of Package.getPackageArgs is 'done' and thus should no longer change. In contrast, Package.Builder.getPartialPackageArgs is subject to change as the build file is further processed. 
NOTE: This does allow Bazel users to specify default_hdrs_check inside of package() (in order to make handling this attribute more consistent with all of the others). As future work: PackageSerializer can almost assuredly be simplified to more clearly and directly serialize/deserialize PackageArgs rather than inline this ser-des process into itself. Remove Package.Builder.getPartialPackageArgs. This is clearly incomplete and the result of these reads depends on the location of package() in the BUILD file. default_hdrs_check likely needs further cleanup to act more consistently with the other attributes. Need to be more consistent on where getPackageArgs is called (in a ComputedDefault attached to an attribute versus transitively in ConfiguredTargetFunction via RuleContext lookups). This is likely the underlying culprit for some attributes acting strangely under query. The default_settings portion of the output formatters should likely be removed in favor of something more principled (either just relying on having those attributes be populated via ComputedDefault or dumping all of PackageArgs into default_settings).
PiperOrigin-RevId: 544666167 Change-Id: I2251aaf7b4f2d21372eec60d333679cc6d851b7f --- .../analysis/AspectAwareAttributeMapper.java | 20 +-- .../build/lib/analysis/BaseRuleClasses.java | 4 +- .../lib/analysis/DependencyResolver.java | 4 +- .../build/lib/analysis/RuleContext.java | 2 +- .../lib/packages/AbstractAttributeMapper.java | 19 +-- .../build/lib/packages/AttributeMap.java | 13 +- .../packages/DelegatingAttributeMapper.java | 19 +-- .../build/lib/packages/FileTarget.java | 4 +- .../build/lib/packages/InputFile.java | 2 +- .../devtools/build/lib/packages/Package.java | 104 +----------- .../build/lib/packages/PackageArgs.java | 159 ++++++++++-------- .../devtools/build/lib/packages/Rule.java | 6 +- .../build/lib/packages/RuleClass.java | 9 +- .../VisibilityLicenseSpecifiedInputFile.java | 4 +- .../query/output/ProtoOutputFormatter.java | 4 +- .../query/output/XmlOutputFormatter.java | 4 +- .../lib/rules/cpp/CcStarlarkInternal.java | 32 +++- .../select/AbstractAttributeMapperTest.java | 6 +- .../lib/packages/PackageFactoryTest.java | 15 +- .../lib/pkgcache/PackageLoadingTest.java | 2 +- .../google/devtools/build/lib/testutil/BUILD | 1 + .../lib/testutil/FakeAttributeMapper.java | 20 +-- 22 files changed, 172 insertions(+), 281 deletions(-) diff --git a/src/main/java/com/google/devtools/build/lib/analysis/AspectAwareAttributeMapper.java b/src/main/java/com/google/devtools/build/lib/analysis/AspectAwareAttributeMapper.java index 3ea773f1a6879e..d61e714daab92b 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/AspectAwareAttributeMapper.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/AspectAwareAttributeMapper.java @@ -20,6 +20,7 @@ import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.AttributeMap; import com.google.devtools.build.lib.packages.DependencyFilter; +import com.google.devtools.build.lib.packages.PackageArgs; import com.google.devtools.build.lib.packages.Type; import 
java.util.function.BiConsumer; import java.util.function.Consumer; @@ -137,23 +138,8 @@ public void visitLabels(DependencyFilter filter, BiConsumer co } @Override - public String getPackageDefaultHdrsCheck() { - return ruleAttributes.getPackageDefaultHdrsCheck(); - } - - @Override - public boolean isPackageDefaultHdrsCheckSet() { - return ruleAttributes.isPackageDefaultHdrsCheckSet(); - } - - @Override - public Boolean getPackageDefaultTestOnly() { - return ruleAttributes.getPackageDefaultTestOnly(); - } - - @Override - public String getPackageDefaultDeprecation() { - return ruleAttributes.getPackageDefaultDeprecation(); + public PackageArgs getPackageArgs() { + return ruleAttributes.getPackageArgs(); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/analysis/BaseRuleClasses.java b/src/main/java/com/google/devtools/build/lib/analysis/BaseRuleClasses.java index 4045acc3cf1b7c..ac600e3f5d4f34 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/BaseRuleClasses.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/BaseRuleClasses.java @@ -68,7 +68,7 @@ private BaseRuleClasses() {} new Attribute.ComputedDefault() { @Override public Object getDefault(AttributeMap rule) { - return rule.getPackageDefaultTestOnly(); + return rule.getPackageArgs().defaultTestOnly(); } @Override @@ -82,7 +82,7 @@ public boolean resolvableWithRawAttributes() { new Attribute.ComputedDefault() { @Override public Object getDefault(AttributeMap rule) { - return rule.getPackageDefaultDeprecation(); + return rule.getPackageArgs().defaultDeprecation(); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolver.java b/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolver.java index 8080517b9d725b..9178f98318f4e4 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolver.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/DependencyResolver.java @@ -590,14 +590,14 @@ 
private static void visitRule( outgoingLabels, rule, RuleClass.COMPATIBLE_ENVIRONMENT_ATTR, - rule.getPackage().getDefaultCompatibleWith()); + rule.getPackage().getPackageArgs().defaultCompatibleWith()); } if (!rule.isAttributeValueExplicitlySpecified(RuleClass.RESTRICTED_ENVIRONMENT_ATTR)) { addExplicitDeps( outgoingLabels, rule, RuleClass.RESTRICTED_ENVIRONMENT_ATTR, - rule.getPackage().getDefaultRestrictedTo()); + rule.getPackage().getPackageArgs().defaultRestrictedTo()); } addToolchainDeps(toolchainContexts, outgoingLabels); diff --git a/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java b/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java index f897c66994ae9d..bb2999a1833afb 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java @@ -224,7 +224,7 @@ private RuleContext( } private FeatureSet computeFeatures() { - FeatureSet pkg = rule.getPackage().getFeatures(); + FeatureSet pkg = rule.getPackage().getPackageArgs().features(); FeatureSet rule = attributes().has("features", Type.STRING_LIST) ? 
FeatureSet.parse(attributes().get("features", Type.STRING_LIST)) diff --git a/src/main/java/com/google/devtools/build/lib/packages/AbstractAttributeMapper.java b/src/main/java/com/google/devtools/build/lib/packages/AbstractAttributeMapper.java index 164b17b5f105a4..8eddb8d13edf9c 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/AbstractAttributeMapper.java +++ b/src/main/java/com/google/devtools/build/lib/packages/AbstractAttributeMapper.java @@ -135,23 +135,8 @@ public boolean isAttributeValueExplicitlySpecified(String attributeName) { } @Override - public String getPackageDefaultHdrsCheck() { - return rule.getPackage().getDefaultHdrsCheck(); - } - - @Override - public boolean isPackageDefaultHdrsCheckSet() { - return rule.getPackage().isDefaultHdrsCheckSet(); - } - - @Override - public Boolean getPackageDefaultTestOnly() { - return rule.getPackage().getDefaultTestOnly(); - } - - @Override - public String getPackageDefaultDeprecation() { - return rule.getPackage().getDefaultDeprecation(); + public PackageArgs getPackageArgs() { + return rule.getPackage().getPackageArgs(); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/packages/AttributeMap.java b/src/main/java/com/google/devtools/build/lib/packages/AttributeMap.java index 944339122d9cb6..6874045fee1fc0 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/AttributeMap.java +++ b/src/main/java/com/google/devtools/build/lib/packages/AttributeMap.java @@ -130,16 +130,5 @@ default T getOrDefault(String attributeName, Type type, T defaultValue) { /** Same as {@link #visitAllLabels} but for attributes matching a {@link DependencyFilter}. */ void visitLabels(DependencyFilter filter, BiConsumer consumer); - // TODO(bazel-team): These methods are here to support computed defaults that inherit - // package-level default values. Instead, we should auto-inherit and remove the computed - // defaults. 
If we really need to give access to package-level defaults, we should come up with - // a more generic interface. - String getPackageDefaultHdrsCheck(); - - boolean isPackageDefaultHdrsCheckSet(); - - Boolean getPackageDefaultTestOnly(); - - String getPackageDefaultDeprecation(); - + PackageArgs getPackageArgs(); } diff --git a/src/main/java/com/google/devtools/build/lib/packages/DelegatingAttributeMapper.java b/src/main/java/com/google/devtools/build/lib/packages/DelegatingAttributeMapper.java index 8737da4795b735..9c0f9c93a497d5 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/DelegatingAttributeMapper.java +++ b/src/main/java/com/google/devtools/build/lib/packages/DelegatingAttributeMapper.java @@ -88,23 +88,8 @@ public void visitLabels(DependencyFilter filter, BiConsumer co } @Override - public String getPackageDefaultHdrsCheck() { - return delegate.getPackageDefaultHdrsCheck(); - } - - @Override - public boolean isPackageDefaultHdrsCheckSet() { - return delegate.isPackageDefaultHdrsCheckSet(); - } - - @Override - public Boolean getPackageDefaultTestOnly() { - return delegate.getPackageDefaultTestOnly(); - } - - @Override - public String getPackageDefaultDeprecation() { - return delegate.getPackageDefaultDeprecation(); + public PackageArgs getPackageArgs() { + return delegate.getPackageArgs(); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/packages/FileTarget.java b/src/main/java/com/google/devtools/build/lib/packages/FileTarget.java index 72bb4737a76b61..853dc11d7988cc 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/FileTarget.java +++ b/src/main/java/com/google/devtools/build/lib/packages/FileTarget.java @@ -66,7 +66,7 @@ public String toString() { @Override public Set getDistributions() { - return getPackage().getDefaultDistribs(); + return getPackage().getPackageArgs().distribs(); } /** @@ -83,6 +83,6 @@ public Set getDistributions() { */ @Override public License getLicense() { - return 
getPackage().getDefaultLicense(); + return getPackage().getPackageArgs().license(); } } diff --git a/src/main/java/com/google/devtools/build/lib/packages/InputFile.java b/src/main/java/com/google/devtools/build/lib/packages/InputFile.java index ce6cef9abf1578..75b47b3213ec4a 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/InputFile.java +++ b/src/main/java/com/google/devtools/build/lib/packages/InputFile.java @@ -64,7 +64,7 @@ public boolean isVisibilitySpecified() { @Override public RuleVisibility getVisibility() { - return pkg.getDefaultVisibility(); + return pkg.getPackageArgs().defaultVisibility(); } @Override diff --git a/src/main/java/com/google/devtools/build/lib/packages/Package.java b/src/main/java/com/google/devtools/build/lib/packages/Package.java index c191e2caa1b430..522d6d35f88db5 100644 --- a/src/main/java/com/google/devtools/build/lib/packages/Package.java +++ b/src/main/java/com/google/devtools/build/lib/packages/Package.java @@ -28,7 +28,6 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.devtools.build.lib.analysis.config.FeatureSet; import com.google.devtools.build.lib.bugreport.BugReport; import com.google.devtools.build.lib.cmdline.BazelModuleContext; import com.google.devtools.build.lib.cmdline.BazelModuleContext.LoadGraphVisitor; @@ -172,6 +171,10 @@ private NameConflictException(String message) { private PackageArgs packageArgs = PackageArgs.DEFAULT; + public PackageArgs getPackageArgs() { + return packageArgs; + } + /** * How to enforce config_setting visibility settings. * @@ -190,11 +193,6 @@ public enum ConfigSettingVisibilityPolicy { private ConfigSettingVisibilityPolicy configSettingVisibilityPolicy; - /** - * Default header strictness checking for rules that do not specify it. - */ - private String defaultHdrsCheck; - /** * The InputFile target corresponding to this package's BUILD file. 
*/ @@ -325,18 +323,6 @@ public RepositoryMapping getRepositoryMapping() { return this.externalPackageRepositoryMappings; } - /** - * Package initialization: part 2 of 3: sets this package's default header - * strictness checking. - * - *

      This is needed to support C++-related rule classes - * which accesses {@link #getDefaultHdrsCheck} from the still-under-construction - * package. - */ - private void setDefaultHdrsCheck(String defaultHdrsCheck) { - this.defaultHdrsCheck = defaultHdrsCheck; - } - /** * Returns the source root (a directory) beneath which this package's BUILD file was found, or * {@link Optional#empty} if this package was derived from a workspace file. @@ -643,11 +629,6 @@ public String getWorkspaceName() { return workspaceName; } - /** Returns the features specified in the package() declaration. */ - public FeatureSet getFeatures() { - return packageArgs.features(); - } - /** * Returns the target (a member of this package) whose name is "targetName". * First rules are searched, then output files, then input files. The target @@ -723,13 +704,6 @@ private String getAlternateTargetSuggestion(String targetName) { } } - /** - * Returns the default visibility for this package. - */ - public RuleVisibility getDefaultVisibility() { - return packageArgs.defaultVisibility(); - } - /** * How to enforce visibility on config_setting See * {@link ConfigSettingVisibilityPolicy} for details. @@ -738,63 +712,6 @@ public ConfigSettingVisibilityPolicy getConfigSettingVisibilityPolicy() { return configSettingVisibilityPolicy; } - /** - * Returns the default testonly value. - */ - public Boolean getDefaultTestOnly() { - return packageArgs.defaultTestOnly(); - } - - /** - * Returns the default deprecation value. - */ - public String getDefaultDeprecation() { - return packageArgs.defaultDeprecation(); - } - - /** Gets the default header checking mode. */ - public String getDefaultHdrsCheck() { - return defaultHdrsCheck != null ? defaultHdrsCheck : "strict"; - } - - /** - * Returns whether the default header checking mode has been set or it is the - * default value. 
- */ - public boolean isDefaultHdrsCheckSet() { - return defaultHdrsCheck != null; - } - - /** Gets the package metadata list for the default metadata declared by this package. */ - ImmutableList

      Unless otherwise specified, these are only used when the rule does not provide an explicit + * override value in the associated attribute. */ @AutoValue public abstract class PackageArgs { @@ -46,40 +49,61 @@ public abstract class PackageArgs { .setDefaultPackageMetadata(ImmutableList.of()) .build(); - /** See {@link Package#getDefaultVisibility()}. */ + /** The default visibility value for the package. */ @Nullable - abstract RuleVisibility defaultVisibility(); + public abstract RuleVisibility defaultVisibility(); - /** See {@link Package#getDefaultTestOnly()}. */ + /** The default testonly value for the package. */ @Nullable - abstract Boolean defaultTestOnly(); + public abstract Boolean defaultTestOnly(); - /** See {@link Package#getDefaultDeprecation()}. */ + /** The default deprecation value for the package. */ @Nullable - abstract String defaultDeprecation(); + public abstract String defaultDeprecation(); - /** See {@link Package#getFeatures()}. */ - abstract FeatureSet features(); + /** + * The default (generally C/C++) features value for the package. + * + *

      Note that this is actually additive with features set by a rule where the rule has priority + * for turning specific features on or off. + */ + public abstract FeatureSet features(); - /** See {@link Package#getDefaultLicense()}. */ + /** The default license value for the package. */ @Nullable - abstract License license(); + public abstract License license(); - /** See {@link Package#getDefaultDistribs()}. */ + /** The default distributions value for the package. */ @Nullable - abstract ImmutableSet distribs(); + public abstract ImmutableSet distribs(); - /** See {@link Package#getDefaultCompatibleWith()}. */ + /** The default {@link RuleClass#COMPATIBLE_ENVIRONMENT_ATTR} value for the package. */ @Nullable - abstract ImmutableSet