From a5e84a085d129f7ff6a1101b4c26b3d1077a1740 Mon Sep 17 00:00:00 2001 From: He-Pin Date: Sat, 13 Jan 2024 15:07:59 +0800 Subject: [PATCH] feat: Add mapWithResource stream operator. --- .../protobuf/msg/TwoPhaseSetMessages.java | 722 ++++++++++-------- .../persistence/proto/FlightAppModels.java | 435 ++++++----- .../Source-or-Flow/mapWithResource.md | 69 ++ .../main/paradox/stream/operators/index.md | 2 + .../sourceorflow/MapWithResource.java | 80 ++ .../sourceorflow/MapWithResource.scala | 70 ++ .../scaladsl/FlowMapWithResourceSpec.scala | 416 ++++++++++ .../org/apache/pekko/stream/impl/Stages.scala | 1 + .../apache/pekko/stream/impl/fusing/Ops.scala | 2 +- .../apache/pekko/stream/javadsl/Flow.scala | 40 + .../apache/pekko/stream/javadsl/Source.scala | 40 + .../apache/pekko/stream/javadsl/SubFlow.scala | 40 + .../pekko/stream/javadsl/SubSource.scala | 40 + .../apache/pekko/stream/scaladsl/Flow.scala | 41 +- 14 files changed, 1516 insertions(+), 482 deletions(-) create mode 100644 docs/src/main/paradox/stream/operators/Source-or-Flow/mapWithResource.md create mode 100644 docs/src/test/java/jdocs/stream/operators/sourceorflow/MapWithResource.java create mode 100644 docs/src/test/scala/docs/stream/operators/sourceorflow/MapWithResource.scala create mode 100644 stream-tests/src/test/scala/org/apache/pekko/stream/scaladsl/FlowMapWithResourceSpec.scala diff --git a/docs/src/main/java/docs/ddata/protobuf/msg/TwoPhaseSetMessages.java b/docs/src/main/java/docs/ddata/protobuf/msg/TwoPhaseSetMessages.java index 33bcb26072e..e70e8d68146 100644 --- a/docs/src/main/java/docs/ddata/protobuf/msg/TwoPhaseSetMessages.java +++ b/docs/src/main/java/docs/ddata/protobuf/msg/TwoPhaseSetMessages.java @@ -18,81 +18,87 @@ public final class TwoPhaseSetMessages { private TwoPhaseSetMessages() {} + public static void registerAllExtensions( - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite registry) { - } + org.apache.pekko.protobufv3.internal.ExtensionRegistryLite registry) {} public static void registerAllExtensions( org.apache.pekko.protobufv3.internal.ExtensionRegistry registry) { - registerAllExtensions( - (org.apache.pekko.protobufv3.internal.ExtensionRegistryLite) registry); + registerAllExtensions((org.apache.pekko.protobufv3.internal.ExtensionRegistryLite) registry); } - public interface TwoPhaseSetOrBuilder extends + + public interface TwoPhaseSetOrBuilder + extends // @@protoc_insertion_point(interface_extends:docs.ddata.TwoPhaseSet) org.apache.pekko.protobufv3.internal.MessageOrBuilder { /** * repeated string adds = 1; + * * @return A list containing the adds. */ - java.util.List - getAddsList(); + java.util.List getAddsList(); /** * repeated string adds = 1; + * * @return The count of adds. */ int getAddsCount(); /** * repeated string adds = 1; + * * @param index The index of the element to return. * @return The adds at the given index. */ java.lang.String getAdds(int index); /** * repeated string adds = 1; + * * @param index The index of the value to return. * @return The bytes of the adds at the given index. */ - org.apache.pekko.protobufv3.internal.ByteString - getAddsBytes(int index); + org.apache.pekko.protobufv3.internal.ByteString getAddsBytes(int index); /** * repeated string removals = 2; + * * @return A list containing the removals. */ - java.util.List - getRemovalsList(); + java.util.List getRemovalsList(); /** * repeated string removals = 2; + * * @return The count of removals. 
*/ int getRemovalsCount(); /** * repeated string removals = 2; + * * @param index The index of the element to return. * @return The removals at the given index. */ java.lang.String getRemovals(int index); /** * repeated string removals = 2; + * * @param index The index of the value to return. * @return The bytes of the removals at the given index. */ - org.apache.pekko.protobufv3.internal.ByteString - getRemovalsBytes(int index); + org.apache.pekko.protobufv3.internal.ByteString getRemovalsBytes(int index); } - /** - * Protobuf type {@code docs.ddata.TwoPhaseSet} - */ - public static final class TwoPhaseSet extends - org.apache.pekko.protobufv3.internal.GeneratedMessageV3 implements + /** Protobuf type {@code docs.ddata.TwoPhaseSet} */ + public static final class TwoPhaseSet + extends org.apache.pekko.protobufv3.internal.GeneratedMessageV3 + implements // @@protoc_insertion_point(message_implements:docs.ddata.TwoPhaseSet) TwoPhaseSetOrBuilder { - private static final long serialVersionUID = 0L; + private static final long serialVersionUID = 0L; // Use TwoPhaseSet.newBuilder() to construct. - private TwoPhaseSet(org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder builder) { + private TwoPhaseSet( + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder builder) { super(builder); } + private TwoPhaseSet() { adds_ = org.apache.pekko.protobufv3.internal.LazyStringArrayList.EMPTY; removals_ = org.apache.pekko.protobufv3.internal.LazyStringArrayList.EMPTY; @@ -106,10 +112,10 @@ protected java.lang.Object newInstance( } @java.lang.Override - public final org.apache.pekko.protobufv3.internal.UnknownFieldSet - getUnknownFields() { + public final org.apache.pekko.protobufv3.internal.UnknownFieldSet getUnknownFields() { return this.unknownFields; } + private TwoPhaseSet( org.apache.pekko.protobufv3.internal.CodedInputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) @@ -129,38 +135,40 @@ private TwoPhaseSet( case 0: done = true; break; - case 10: { - org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - adds_ = new org.apache.pekko.protobufv3.internal.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; + case 10: + { + org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + adds_ = new org.apache.pekko.protobufv3.internal.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + adds_.add(bs); + break; } - adds_.add(bs); - break; - } - case 18: { - org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); - if (!((mutable_bitField0_ & 0x00000002) != 0)) { - removals_ = new org.apache.pekko.protobufv3.internal.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000002; + case 18: + { + org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); + if (!((mutable_bitField0_ & 0x00000002) != 0)) { + removals_ = new org.apache.pekko.protobufv3.internal.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000002; + } + removals_.add(bs); + break; } - removals_.add(bs); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; } - break; - } } } } catch (org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - 
throw new org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); + throw new org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException(e) + .setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { adds_ = adds_.getUnmodifiableView(); @@ -172,31 +180,36 @@ private TwoPhaseSet( makeExtensionsImmutable(); } } + public static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptor() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet_descriptor; + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet_descriptor; } @java.lang.Override protected org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable .ensureFieldAccessorsInitialized( - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.class, docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.Builder.class); + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.class, + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.Builder.class); } public static final int ADDS_FIELD_NUMBER = 1; private org.apache.pekko.protobufv3.internal.LazyStringList adds_; /** * repeated string adds = 1; + * * @return A list containing the adds. */ - public org.apache.pekko.protobufv3.internal.ProtocolStringList - getAddsList() { + public org.apache.pekko.protobufv3.internal.ProtocolStringList getAddsList() { return adds_; } /** * repeated string adds = 1; + * * @return The count of adds. */ public int getAddsCount() { @@ -204,6 +217,7 @@ public int getAddsCount() { } /** * repeated string adds = 1; + * * @param index The index of the element to return. * @return The adds at the given index. */ @@ -212,11 +226,11 @@ public java.lang.String getAdds(int index) { } /** * repeated string adds = 1; + * * @param index The index of the value to return. * @return The bytes of the adds at the given index. */ - public org.apache.pekko.protobufv3.internal.ByteString - getAddsBytes(int index) { + public org.apache.pekko.protobufv3.internal.ByteString getAddsBytes(int index) { return adds_.getByteString(index); } @@ -224,14 +238,15 @@ public java.lang.String getAdds(int index) { private org.apache.pekko.protobufv3.internal.LazyStringList removals_; /** * repeated string removals = 2; + * * @return A list containing the removals. */ - public org.apache.pekko.protobufv3.internal.ProtocolStringList - getRemovalsList() { + public org.apache.pekko.protobufv3.internal.ProtocolStringList getRemovalsList() { return removals_; } /** * repeated string removals = 2; + * * @return The count of removals. */ public int getRemovalsCount() { @@ -239,6 +254,7 @@ public int getRemovalsCount() { } /** * repeated string removals = 2; + * * @param index The index of the element to return. * @return The removals at the given index. */ @@ -247,15 +263,16 @@ public java.lang.String getRemovals(int index) { } /** * repeated string removals = 2; + * * @param index The index of the value to return. * @return The bytes of the removals at the given index. 
*/ - public org.apache.pekko.protobufv3.internal.ByteString - getRemovalsBytes(int index) { + public org.apache.pekko.protobufv3.internal.ByteString getRemovalsBytes(int index) { return removals_.getByteString(index); } private byte memoizedIsInitialized = -1; + @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; @@ -268,12 +285,14 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(org.apache.pekko.protobufv3.internal.CodedOutputStream output) - throws java.io.IOException { + throws java.io.IOException { for (int i = 0; i < adds_.size(); i++) { - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.writeString(output, 1, adds_.getRaw(i)); + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.writeString( + output, 1, adds_.getRaw(i)); } for (int i = 0; i < removals_.size(); i++) { - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.writeString(output, 2, removals_.getRaw(i)); + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.writeString( + output, 2, removals_.getRaw(i)); } unknownFields.writeTo(output); } @@ -308,17 +327,16 @@ public int getSerializedSize() { @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { - return true; + return true; } if (!(obj instanceof docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet)) { return super.equals(obj); } - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet other = (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet) obj; + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet other = + (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet) obj; - if (!getAddsList() - .equals(other.getAddsList())) return false; - if (!getRemovalsList() - .equals(other.getRemovalsList())) return false; + if (!getAddsList().equals(other.getAddsList())) return false; + if (!getRemovalsList().equals(other.getRemovalsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -348,83 +366,97 @@ public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( java.nio.ByteBuffer data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( org.apache.pekko.protobufv3.internal.ByteString data) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( org.apache.pekko.protobufv3.internal.ByteString data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom(byte[] data) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( - byte[] data, - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) + byte[] data, 
org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input); + + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( + java.io.InputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( java.io.InputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); } - public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); + + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseDelimitedFrom( java.io.InputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( - org.apache.pekko.protobufv3.internal.CodedInputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input); + org.apache.pekko.protobufv3.internal.CodedInputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet parseFrom( org.apache.pekko.protobufv3.internal.CodedInputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); } @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() { + return newBuilder(); + } + public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet prototype) { + + public static Builder newBuilder( + 
docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } + @java.lang.Override public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override @@ -433,24 +465,26 @@ protected Builder newBuilderForType( Builder builder = new Builder(parent); return builder; } - /** - * Protobuf type {@code docs.ddata.TwoPhaseSet} - */ - public static final class Builder extends - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder implements + /** Protobuf type {@code docs.ddata.TwoPhaseSet} */ + public static final class Builder + extends org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder + implements // @@protoc_insertion_point(builder_implements:docs.ddata.TwoPhaseSet) docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSetOrBuilder { public static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptor() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet_descriptor; + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet_descriptor; } @java.lang.Override protected org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable .ensureFieldAccessorsInitialized( - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.class, docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.Builder.class); + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.class, + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.Builder.class); } // Construct using docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.newBuilder() @@ -463,11 +497,11 @@ private Builder( super(parent); maybeForceBuilderInitialization(); } + private void maybeForceBuilderInitialization() { - if (org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } + if (org.apache.pekko.protobufv3.internal.GeneratedMessageV3.alwaysUseFieldBuilders) {} } + @java.lang.Override public Builder clear() { super.clear(); @@ -479,9 +513,9 @@ public Builder clear() { } @java.lang.Override - public org.apache.pekko.protobufv3.internal.Descriptors.Descriptor - getDescriptorForType() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet_descriptor; + public org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptorForType() { + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet_descriptor; } @java.lang.Override @@ -500,7 +534,8 @@ public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet build() { @java.lang.Override public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet buildPartial() { - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet result = new docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet(this); + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet result = + new docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { adds_ = adds_.getUnmodifiableView(); @@ -520,38 +555,45 @@ public 
docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet buildPartial() { public Builder clone() { return super.clone(); } + @java.lang.Override public Builder setField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } + @java.lang.Override public Builder clearField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field) { return super.clearField(field); } + @java.lang.Override public Builder clearOneof( org.apache.pekko.protobufv3.internal.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } + @java.lang.Override public Builder setRepeatedField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { + int index, + java.lang.Object value) { return super.setRepeatedField(field, index, value); } + @java.lang.Override public Builder addRepeatedField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } + @java.lang.Override public Builder mergeFrom(org.apache.pekko.protobufv3.internal.Message other) { if (other instanceof docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet) { - return mergeFrom((docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet)other); + return mergeFrom((docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet) other); } else { super.mergeFrom(other); return this; @@ -559,7 +601,8 @@ public Builder mergeFrom(org.apache.pekko.protobufv3.internal.Message other) { } public Builder mergeFrom(docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet other) { - if (other == docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.getDefaultInstance()) return this; + if (other == docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet.getDefaultInstance()) + return this; if (!other.adds_.isEmpty()) { if (adds_.isEmpty()) { adds_ = other.adds_; @@ -599,7 +642,8 @@ public Builder mergeFrom( try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException e) { - parsedMessage = (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet) e.getUnfinishedMessage(); + parsedMessage = + (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -608,25 +652,29 @@ public Builder mergeFrom( } return this; } + private int bitField0_; - private org.apache.pekko.protobufv3.internal.LazyStringList adds_ = org.apache.pekko.protobufv3.internal.LazyStringArrayList.EMPTY; + private org.apache.pekko.protobufv3.internal.LazyStringList adds_ = + org.apache.pekko.protobufv3.internal.LazyStringArrayList.EMPTY; + private void ensureAddsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { adds_ = new org.apache.pekko.protobufv3.internal.LazyStringArrayList(adds_); bitField0_ |= 0x00000001; - } + } } /** * repeated string adds = 1; + * * @return A list containing the adds. */ - public org.apache.pekko.protobufv3.internal.ProtocolStringList - getAddsList() { + public org.apache.pekko.protobufv3.internal.ProtocolStringList getAddsList() { return adds_.getUnmodifiableView(); } /** * repeated string adds = 1; + * * @return The count of adds. */ public int getAddsCount() { @@ -634,6 +682,7 @@ public int getAddsCount() { } /** * repeated string adds = 1; + * * @param index The index of the element to return. * @return The adds at the given index. 
*/ @@ -642,59 +691,59 @@ public java.lang.String getAdds(int index) { } /** * repeated string adds = 1; + * * @param index The index of the value to return. * @return The bytes of the adds at the given index. */ - public org.apache.pekko.protobufv3.internal.ByteString - getAddsBytes(int index) { + public org.apache.pekko.protobufv3.internal.ByteString getAddsBytes(int index) { return adds_.getByteString(index); } /** * repeated string adds = 1; + * * @param index The index to set the value at. * @param value The adds to set. * @return This builder for chaining. */ - public Builder setAdds( - int index, java.lang.String value) { + public Builder setAdds(int index, java.lang.String value) { if (value == null) { - throw new NullPointerException(); - } - ensureAddsIsMutable(); + throw new NullPointerException(); + } + ensureAddsIsMutable(); adds_.set(index, value); onChanged(); return this; } /** * repeated string adds = 1; + * * @param value The adds to add. * @return This builder for chaining. */ - public Builder addAdds( - java.lang.String value) { + public Builder addAdds(java.lang.String value) { if (value == null) { - throw new NullPointerException(); - } - ensureAddsIsMutable(); + throw new NullPointerException(); + } + ensureAddsIsMutable(); adds_.add(value); onChanged(); return this; } /** * repeated string adds = 1; + * * @param values The adds to add. * @return This builder for chaining. */ - public Builder addAllAdds( - java.lang.Iterable values) { + public Builder addAllAdds(java.lang.Iterable values) { ensureAddsIsMutable(); - org.apache.pekko.protobufv3.internal.AbstractMessageLite.Builder.addAll( - values, adds_); + org.apache.pekko.protobufv3.internal.AbstractMessageLite.Builder.addAll(values, adds_); onChanged(); return this; } /** * repeated string adds = 1; + * * @return This builder for chaining. */ public Builder clearAdds() { @@ -705,37 +754,40 @@ public Builder clearAdds() { } /** * repeated string adds = 1; + * * @param value The bytes of the adds to add. * @return This builder for chaining. */ - public Builder addAddsBytes( - org.apache.pekko.protobufv3.internal.ByteString value) { + public Builder addAddsBytes(org.apache.pekko.protobufv3.internal.ByteString value) { if (value == null) { - throw new NullPointerException(); - } - ensureAddsIsMutable(); + throw new NullPointerException(); + } + ensureAddsIsMutable(); adds_.add(value); onChanged(); return this; } - private org.apache.pekko.protobufv3.internal.LazyStringList removals_ = org.apache.pekko.protobufv3.internal.LazyStringArrayList.EMPTY; + private org.apache.pekko.protobufv3.internal.LazyStringList removals_ = + org.apache.pekko.protobufv3.internal.LazyStringArrayList.EMPTY; + private void ensureRemovalsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { removals_ = new org.apache.pekko.protobufv3.internal.LazyStringArrayList(removals_); bitField0_ |= 0x00000002; - } + } } /** * repeated string removals = 2; + * * @return A list containing the removals. */ - public org.apache.pekko.protobufv3.internal.ProtocolStringList - getRemovalsList() { + public org.apache.pekko.protobufv3.internal.ProtocolStringList getRemovalsList() { return removals_.getUnmodifiableView(); } /** * repeated string removals = 2; + * * @return The count of removals. */ public int getRemovalsCount() { @@ -743,6 +795,7 @@ public int getRemovalsCount() { } /** * repeated string removals = 2; + * * @param index The index of the element to return. * @return The removals at the given index. 
*/ @@ -751,59 +804,59 @@ public java.lang.String getRemovals(int index) { } /** * repeated string removals = 2; + * * @param index The index of the value to return. * @return The bytes of the removals at the given index. */ - public org.apache.pekko.protobufv3.internal.ByteString - getRemovalsBytes(int index) { + public org.apache.pekko.protobufv3.internal.ByteString getRemovalsBytes(int index) { return removals_.getByteString(index); } /** * repeated string removals = 2; + * * @param index The index to set the value at. * @param value The removals to set. * @return This builder for chaining. */ - public Builder setRemovals( - int index, java.lang.String value) { + public Builder setRemovals(int index, java.lang.String value) { if (value == null) { - throw new NullPointerException(); - } - ensureRemovalsIsMutable(); + throw new NullPointerException(); + } + ensureRemovalsIsMutable(); removals_.set(index, value); onChanged(); return this; } /** * repeated string removals = 2; + * * @param value The removals to add. * @return This builder for chaining. */ - public Builder addRemovals( - java.lang.String value) { + public Builder addRemovals(java.lang.String value) { if (value == null) { - throw new NullPointerException(); - } - ensureRemovalsIsMutable(); + throw new NullPointerException(); + } + ensureRemovalsIsMutable(); removals_.add(value); onChanged(); return this; } /** * repeated string removals = 2; + * * @param values The removals to add. * @return This builder for chaining. */ - public Builder addAllRemovals( - java.lang.Iterable values) { + public Builder addAllRemovals(java.lang.Iterable values) { ensureRemovalsIsMutable(); - org.apache.pekko.protobufv3.internal.AbstractMessageLite.Builder.addAll( - values, removals_); + org.apache.pekko.protobufv3.internal.AbstractMessageLite.Builder.addAll(values, removals_); onChanged(); return this; } /** * repeated string removals = 2; + * * @return This builder for chaining. */ public Builder clearRemovals() { @@ -814,19 +867,20 @@ public Builder clearRemovals() { } /** * repeated string removals = 2; + * * @param value The bytes of the removals to add. * @return This builder for chaining. 
*/ - public Builder addRemovalsBytes( - org.apache.pekko.protobufv3.internal.ByteString value) { + public Builder addRemovalsBytes(org.apache.pekko.protobufv3.internal.ByteString value) { if (value == null) { - throw new NullPointerException(); - } - ensureRemovalsIsMutable(); + throw new NullPointerException(); + } + ensureRemovalsIsMutable(); removals_.add(value); onChanged(); return this; } + @java.lang.Override public final Builder setUnknownFields( final org.apache.pekko.protobufv3.internal.UnknownFieldSet unknownFields) { @@ -839,12 +893,12 @@ public final Builder mergeUnknownFields( return super.mergeUnknownFields(unknownFields); } - // @@protoc_insertion_point(builder_scope:docs.ddata.TwoPhaseSet) } // @@protoc_insertion_point(class_scope:docs.ddata.TwoPhaseSet) private static final docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet DEFAULT_INSTANCE; + static { DEFAULT_INSTANCE = new docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet(); } @@ -853,16 +907,17 @@ public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet getDefault return DEFAULT_INSTANCE; } - @java.lang.Deprecated public static final org.apache.pekko.protobufv3.internal.Parser - PARSER = new org.apache.pekko.protobufv3.internal.AbstractParser() { - @java.lang.Override - public TwoPhaseSet parsePartialFrom( - org.apache.pekko.protobufv3.internal.CodedInputStream input, - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) - throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { - return new TwoPhaseSet(input, extensionRegistry); - } - }; + @java.lang.Deprecated + public static final org.apache.pekko.protobufv3.internal.Parser PARSER = + new org.apache.pekko.protobufv3.internal.AbstractParser() { + @java.lang.Override + public TwoPhaseSet parsePartialFrom( + org.apache.pekko.protobufv3.internal.CodedInputStream input, + org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) + throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { + return new TwoPhaseSet(input, extensionRegistry); + } + }; public static org.apache.pekko.protobufv3.internal.Parser parser() { return PARSER; @@ -877,51 +932,60 @@ public org.apache.pekko.protobufv3.internal.Parser getParserForType public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet getDefaultInstanceForType() { return DEFAULT_INSTANCE; } - } - public interface TwoPhaseSet2OrBuilder extends + public interface TwoPhaseSet2OrBuilder + extends // @@protoc_insertion_point(interface_extends:docs.ddata.TwoPhaseSet2) org.apache.pekko.protobufv3.internal.MessageOrBuilder { /** * optional bytes adds = 1; + * * @return Whether the adds field is set. */ boolean hasAdds(); /** * optional bytes adds = 1; + * * @return The adds. */ org.apache.pekko.protobufv3.internal.ByteString getAdds(); /** * optional bytes removals = 2; + * * @return Whether the removals field is set. */ boolean hasRemovals(); /** * optional bytes removals = 2; + * * @return The removals. */ org.apache.pekko.protobufv3.internal.ByteString getRemovals(); } /** + * + * *
-   *#twophaseset2
+   * #twophaseset2
    * 
* * Protobuf type {@code docs.ddata.TwoPhaseSet2} */ - public static final class TwoPhaseSet2 extends - org.apache.pekko.protobufv3.internal.GeneratedMessageV3 implements + public static final class TwoPhaseSet2 + extends org.apache.pekko.protobufv3.internal.GeneratedMessageV3 + implements // @@protoc_insertion_point(message_implements:docs.ddata.TwoPhaseSet2) TwoPhaseSet2OrBuilder { - private static final long serialVersionUID = 0L; + private static final long serialVersionUID = 0L; // Use TwoPhaseSet2.newBuilder() to construct. - private TwoPhaseSet2(org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder builder) { + private TwoPhaseSet2( + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder builder) { super(builder); } + private TwoPhaseSet2() { adds_ = org.apache.pekko.protobufv3.internal.ByteString.EMPTY; removals_ = org.apache.pekko.protobufv3.internal.ByteString.EMPTY; @@ -935,10 +999,10 @@ protected java.lang.Object newInstance( } @java.lang.Override - public final org.apache.pekko.protobufv3.internal.UnknownFieldSet - getUnknownFields() { + public final org.apache.pekko.protobufv3.internal.UnknownFieldSet getUnknownFields() { return this.unknownFields; } + private TwoPhaseSet2( org.apache.pekko.protobufv3.internal.CodedInputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) @@ -958,46 +1022,52 @@ private TwoPhaseSet2( case 0: done = true; break; - case 10: { - bitField0_ |= 0x00000001; - adds_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - removals_ = input.readBytes(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; + case 10: + { + bitField0_ |= 0x00000001; + adds_ = input.readBytes(); + break; + } + case 18: + { + bitField0_ |= 0x00000002; + removals_ = input.readBytes(); + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; } - break; - } } } } catch (org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); + throw new org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException(e) + .setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } + public static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptor() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet2_descriptor; + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet2_descriptor; } @java.lang.Override protected org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable .ensureFieldAccessorsInitialized( - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.class, docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.Builder.class); + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.class, + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.Builder.class); } private int bitField0_; @@ -1005,6 +1075,7 @@ private 
TwoPhaseSet2( private org.apache.pekko.protobufv3.internal.ByteString adds_; /** * optional bytes adds = 1; + * * @return Whether the adds field is set. */ public boolean hasAdds() { @@ -1012,6 +1083,7 @@ public boolean hasAdds() { } /** * optional bytes adds = 1; + * * @return The adds. */ public org.apache.pekko.protobufv3.internal.ByteString getAdds() { @@ -1022,6 +1094,7 @@ public org.apache.pekko.protobufv3.internal.ByteString getAdds() { private org.apache.pekko.protobufv3.internal.ByteString removals_; /** * optional bytes removals = 2; + * * @return Whether the removals field is set. */ public boolean hasRemovals() { @@ -1029,6 +1102,7 @@ public boolean hasRemovals() { } /** * optional bytes removals = 2; + * * @return The removals. */ public org.apache.pekko.protobufv3.internal.ByteString getRemovals() { @@ -1036,6 +1110,7 @@ public org.apache.pekko.protobufv3.internal.ByteString getRemovals() { } private byte memoizedIsInitialized = -1; + @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; @@ -1048,7 +1123,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(org.apache.pekko.protobufv3.internal.CodedOutputStream output) - throws java.io.IOException { + throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeBytes(1, adds_); } @@ -1065,12 +1140,11 @@ public int getSerializedSize() { size = 0; if (((bitField0_ & 0x00000001) != 0)) { - size += org.apache.pekko.protobufv3.internal.CodedOutputStream - .computeBytesSize(1, adds_); + size += org.apache.pekko.protobufv3.internal.CodedOutputStream.computeBytesSize(1, adds_); } if (((bitField0_ & 0x00000002) != 0)) { - size += org.apache.pekko.protobufv3.internal.CodedOutputStream - .computeBytesSize(2, removals_); + size += + org.apache.pekko.protobufv3.internal.CodedOutputStream.computeBytesSize(2, removals_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -1080,22 +1154,21 @@ public int getSerializedSize() { @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { - return true; + return true; } if (!(obj instanceof docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2)) { return super.equals(obj); } - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 other = (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2) obj; + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 other = + (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2) obj; if (hasAdds() != other.hasAdds()) return false; if (hasAdds()) { - if (!getAdds() - .equals(other.getAdds())) return false; + if (!getAdds().equals(other.getAdds())) return false; } if (hasRemovals() != other.hasRemovals()) return false; if (hasRemovals()) { - if (!getRemovals() - .equals(other.getRemovals())) return false; + if (!getRemovals().equals(other.getRemovals())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -1126,83 +1199,97 @@ public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( java.nio.ByteBuffer data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static 
docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( org.apache.pekko.protobufv3.internal.ByteString data) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( org.apache.pekko.protobufv3.internal.ByteString data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom(byte[] data) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( - byte[] data, - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) + byte[] data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input); + + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( + java.io.InputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( java.io.InputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); } - public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); + + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseDelimitedFrom( java.io.InputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( - org.apache.pekko.protobufv3.internal.CodedInputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input); + org.apache.pekko.protobufv3.internal.CodedInputStream input) throws java.io.IOException { + 
return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input); } + public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 parseFrom( org.apache.pekko.protobufv3.internal.CodedInputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); } @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() { + return newBuilder(); + } + public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 prototype) { + + public static Builder newBuilder( + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } + @java.lang.Override public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override @@ -1212,27 +1299,33 @@ protected Builder newBuilderForType( return builder; } /** + * + * *
-     *#twophaseset2
+     * #twophaseset2
      * 
* * Protobuf type {@code docs.ddata.TwoPhaseSet2} */ - public static final class Builder extends - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder implements + public static final class Builder + extends org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder + implements // @@protoc_insertion_point(builder_implements:docs.ddata.TwoPhaseSet2) docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2OrBuilder { public static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptor() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet2_descriptor; + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet2_descriptor; } @java.lang.Override protected org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable .ensureFieldAccessorsInitialized( - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.class, docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.Builder.class); + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.class, + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.Builder.class); } // Construct using docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.newBuilder() @@ -1245,11 +1338,11 @@ private Builder( super(parent); maybeForceBuilderInitialization(); } + private void maybeForceBuilderInitialization() { - if (org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } + if (org.apache.pekko.protobufv3.internal.GeneratedMessageV3.alwaysUseFieldBuilders) {} } + @java.lang.Override public Builder clear() { super.clear(); @@ -1261,9 +1354,9 @@ public Builder clear() { } @java.lang.Override - public org.apache.pekko.protobufv3.internal.Descriptors.Descriptor - getDescriptorForType() { - return docs.ddata.protobuf.msg.TwoPhaseSetMessages.internal_static_docs_ddata_TwoPhaseSet2_descriptor; + public org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptorForType() { + return docs.ddata.protobuf.msg.TwoPhaseSetMessages + .internal_static_docs_ddata_TwoPhaseSet2_descriptor; } @java.lang.Override @@ -1282,7 +1375,8 @@ public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 build() { @java.lang.Override public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 buildPartial() { - docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 result = new docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2(this); + docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 result = + new docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { @@ -1302,38 +1396,45 @@ public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 buildPartial() { public Builder clone() { return super.clone(); } + @java.lang.Override public Builder setField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } + @java.lang.Override public Builder clearField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field) { return super.clearField(field); } + @java.lang.Override public Builder clearOneof( 
org.apache.pekko.protobufv3.internal.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } + @java.lang.Override public Builder setRepeatedField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { + int index, + java.lang.Object value) { return super.setRepeatedField(field, index, value); } + @java.lang.Override public Builder addRepeatedField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } + @java.lang.Override public Builder mergeFrom(org.apache.pekko.protobufv3.internal.Message other) { if (other instanceof docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2) { - return mergeFrom((docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2)other); + return mergeFrom((docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2) other); } else { super.mergeFrom(other); return this; @@ -1341,7 +1442,8 @@ public Builder mergeFrom(org.apache.pekko.protobufv3.internal.Message other) { } public Builder mergeFrom(docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 other) { - if (other == docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.getDefaultInstance()) return this; + if (other == docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2.getDefaultInstance()) + return this; if (other.hasAdds()) { setAdds(other.getAdds()); } @@ -1367,7 +1469,8 @@ public Builder mergeFrom( try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException e) { - parsedMessage = (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2) e.getUnfinishedMessage(); + parsedMessage = + (docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -1376,11 +1479,14 @@ public Builder mergeFrom( } return this; } + private int bitField0_; - private org.apache.pekko.protobufv3.internal.ByteString adds_ = org.apache.pekko.protobufv3.internal.ByteString.EMPTY; + private org.apache.pekko.protobufv3.internal.ByteString adds_ = + org.apache.pekko.protobufv3.internal.ByteString.EMPTY; /** * optional bytes adds = 1; + * * @return Whether the adds field is set. */ public boolean hasAdds() { @@ -1388,6 +1494,7 @@ public boolean hasAdds() { } /** * optional bytes adds = 1; + * * @return The adds. */ public org.apache.pekko.protobufv3.internal.ByteString getAdds() { @@ -1395,20 +1502,22 @@ public org.apache.pekko.protobufv3.internal.ByteString getAdds() { } /** * optional bytes adds = 1; + * * @param value The adds to set. * @return This builder for chaining. */ public Builder setAdds(org.apache.pekko.protobufv3.internal.ByteString value) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; adds_ = value; onChanged(); return this; } /** * optional bytes adds = 1; + * * @return This builder for chaining. */ public Builder clearAdds() { @@ -1418,9 +1527,11 @@ public Builder clearAdds() { return this; } - private org.apache.pekko.protobufv3.internal.ByteString removals_ = org.apache.pekko.protobufv3.internal.ByteString.EMPTY; + private org.apache.pekko.protobufv3.internal.ByteString removals_ = + org.apache.pekko.protobufv3.internal.ByteString.EMPTY; /** * optional bytes removals = 2; + * * @return Whether the removals field is set. 
*/ public boolean hasRemovals() { @@ -1428,6 +1539,7 @@ public boolean hasRemovals() { } /** * optional bytes removals = 2; + * * @return The removals. */ public org.apache.pekko.protobufv3.internal.ByteString getRemovals() { @@ -1435,20 +1547,22 @@ public org.apache.pekko.protobufv3.internal.ByteString getRemovals() { } /** * optional bytes removals = 2; + * * @param value The removals to set. * @return This builder for chaining. */ public Builder setRemovals(org.apache.pekko.protobufv3.internal.ByteString value) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; removals_ = value; onChanged(); return this; } /** * optional bytes removals = 2; + * * @return This builder for chaining. */ public Builder clearRemovals() { @@ -1457,6 +1571,7 @@ public Builder clearRemovals() { onChanged(); return this; } + @java.lang.Override public final Builder setUnknownFields( final org.apache.pekko.protobufv3.internal.UnknownFieldSet unknownFields) { @@ -1469,12 +1584,12 @@ public final Builder mergeUnknownFields( return super.mergeUnknownFields(unknownFields); } - // @@protoc_insertion_point(builder_scope:docs.ddata.TwoPhaseSet2) } // @@protoc_insertion_point(class_scope:docs.ddata.TwoPhaseSet2) private static final docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 DEFAULT_INSTANCE; + static { DEFAULT_INSTANCE = new docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2(); } @@ -1483,16 +1598,17 @@ public static docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 getDefaul return DEFAULT_INSTANCE; } - @java.lang.Deprecated public static final org.apache.pekko.protobufv3.internal.Parser - PARSER = new org.apache.pekko.protobufv3.internal.AbstractParser() { - @java.lang.Override - public TwoPhaseSet2 parsePartialFrom( - org.apache.pekko.protobufv3.internal.CodedInputStream input, - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) - throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { - return new TwoPhaseSet2(input, extensionRegistry); - } - }; + @java.lang.Deprecated + public static final org.apache.pekko.protobufv3.internal.Parser PARSER = + new org.apache.pekko.protobufv3.internal.AbstractParser() { + @java.lang.Override + public TwoPhaseSet2 parsePartialFrom( + org.apache.pekko.protobufv3.internal.CodedInputStream input, + org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) + throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { + return new TwoPhaseSet2(input, extensionRegistry); + } + }; public static org.apache.pekko.protobufv3.internal.Parser parser() { return PARSER; @@ -1507,50 +1623,50 @@ public org.apache.pekko.protobufv3.internal.Parser getParserForTyp public docs.ddata.protobuf.msg.TwoPhaseSetMessages.TwoPhaseSet2 getDefaultInstanceForType() { return DEFAULT_INSTANCE; } - } private static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor - internal_static_docs_ddata_TwoPhaseSet_descriptor; - private static final - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable + internal_static_docs_ddata_TwoPhaseSet_descriptor; + private static final org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable; private static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor - internal_static_docs_ddata_TwoPhaseSet2_descriptor; - private static final - 
org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable + internal_static_docs_ddata_TwoPhaseSet2_descriptor; + private static final org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable; - public static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor - getDescriptor() { + public static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor - descriptor; + + private static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor descriptor; + static { java.lang.String[] descriptorData = { - "\n\031TwoPhaseSetMessages.proto\022\ndocs.ddata\"" + - "-\n\013TwoPhaseSet\022\014\n\004adds\030\001 \003(\t\022\020\n\010removals" + - "\030\002 \003(\t\".\n\014TwoPhaseSet2\022\014\n\004adds\030\001 \001(\014\022\020\n\010" + - "removals\030\002 \001(\014B\033\n\027docs.ddata.protobuf.ms" + - "gH\001" + "\n\031TwoPhaseSetMessages.proto\022\ndocs.ddata\"" + + "-\n\013TwoPhaseSet\022\014\n\004adds\030\001 \003(\t\022\020\n\010removals" + + "\030\002 \003(\t\".\n\014TwoPhaseSet2\022\014\n\004adds\030\001 \001(\014\022\020\n\010" + + "removals\030\002 \001(\014B\033\n\027docs.ddata.protobuf.ms" + + "gH\001" }; - descriptor = org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor[] { - }); - internal_static_docs_ddata_TwoPhaseSet_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable = new - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable( - internal_static_docs_ddata_TwoPhaseSet_descriptor, - new java.lang.String[] { "Adds", "Removals", }); - internal_static_docs_ddata_TwoPhaseSet2_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable = new - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable( - internal_static_docs_ddata_TwoPhaseSet2_descriptor, - new java.lang.String[] { "Adds", "Removals", }); + descriptor = + org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom( + descriptorData, + new org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor[] {}); + internal_static_docs_ddata_TwoPhaseSet_descriptor = getDescriptor().getMessageTypes().get(0); + internal_static_docs_ddata_TwoPhaseSet_fieldAccessorTable = + new org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable( + internal_static_docs_ddata_TwoPhaseSet_descriptor, + new java.lang.String[] { + "Adds", "Removals", + }); + internal_static_docs_ddata_TwoPhaseSet2_descriptor = getDescriptor().getMessageTypes().get(1); + internal_static_docs_ddata_TwoPhaseSet2_fieldAccessorTable = + new org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable( + internal_static_docs_ddata_TwoPhaseSet2_descriptor, + new java.lang.String[] { + "Adds", "Removals", + }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/docs/src/main/java/docs/persistence/proto/FlightAppModels.java b/docs/src/main/java/docs/persistence/proto/FlightAppModels.java index d4f46265df7..15317f061af 100644 --- a/docs/src/main/java/docs/persistence/proto/FlightAppModels.java +++ b/docs/src/main/java/docs/persistence/proto/FlightAppModels.java @@ -18,88 
+18,102 @@ public final class FlightAppModels { private FlightAppModels() {} + public static void registerAllExtensions( - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite registry) { - } + org.apache.pekko.protobufv3.internal.ExtensionRegistryLite registry) {} public static void registerAllExtensions( org.apache.pekko.protobufv3.internal.ExtensionRegistry registry) { - registerAllExtensions( - (org.apache.pekko.protobufv3.internal.ExtensionRegistryLite) registry); + registerAllExtensions((org.apache.pekko.protobufv3.internal.ExtensionRegistryLite) registry); } - public interface SeatReservedOrBuilder extends + + public interface SeatReservedOrBuilder + extends // @@protoc_insertion_point(interface_extends:docs.persistence.SeatReserved) org.apache.pekko.protobufv3.internal.MessageOrBuilder { /** * required string letter = 1; + * * @return Whether the letter field is set. */ boolean hasLetter(); /** * required string letter = 1; + * * @return The letter. */ java.lang.String getLetter(); /** * required string letter = 1; + * * @return The bytes for letter. */ - org.apache.pekko.protobufv3.internal.ByteString - getLetterBytes(); + org.apache.pekko.protobufv3.internal.ByteString getLetterBytes(); /** * required uint32 row = 2; + * * @return Whether the row field is set. */ boolean hasRow(); /** * required uint32 row = 2; + * * @return The row. */ int getRow(); /** + * + * *
      * the new field
      * 
* * optional string seatType = 3; + * * @return Whether the seatType field is set. */ boolean hasSeatType(); /** + * + * *
      * the new field
      * 
* * optional string seatType = 3; + * * @return The seatType. */ java.lang.String getSeatType(); /** + * + * *
      * the new field
      * 
* * optional string seatType = 3; + * * @return The bytes for seatType. */ - org.apache.pekko.protobufv3.internal.ByteString - getSeatTypeBytes(); + org.apache.pekko.protobufv3.internal.ByteString getSeatTypeBytes(); } - /** - * Protobuf type {@code docs.persistence.SeatReserved} - */ - public static final class SeatReserved extends - org.apache.pekko.protobufv3.internal.GeneratedMessageV3 implements + /** Protobuf type {@code docs.persistence.SeatReserved} */ + public static final class SeatReserved + extends org.apache.pekko.protobufv3.internal.GeneratedMessageV3 + implements // @@protoc_insertion_point(message_implements:docs.persistence.SeatReserved) SeatReservedOrBuilder { - private static final long serialVersionUID = 0L; + private static final long serialVersionUID = 0L; // Use SeatReserved.newBuilder() to construct. - private SeatReserved(org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder builder) { + private SeatReserved( + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder builder) { super(builder); } + private SeatReserved() { letter_ = ""; seatType_ = ""; @@ -113,10 +127,10 @@ protected java.lang.Object newInstance( } @java.lang.Override - public final org.apache.pekko.protobufv3.internal.UnknownFieldSet - getUnknownFields() { + public final org.apache.pekko.protobufv3.internal.UnknownFieldSet getUnknownFields() { return this.unknownFields; } + private SeatReserved( org.apache.pekko.protobufv3.internal.CodedInputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) @@ -136,53 +150,60 @@ private SeatReserved( case 0: done = true; break; - case 10: { - org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000001; - letter_ = bs; - break; - } - case 16: { - bitField0_ |= 0x00000002; - row_ = input.readUInt32(); - break; - } - case 26: { - org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000004; - seatType_ = bs; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; + case 10: + { + org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + letter_ = bs; + break; + } + case 16: + { + bitField0_ |= 0x00000002; + row_ = input.readUInt32(); + break; + } + case 26: + { + org.apache.pekko.protobufv3.internal.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000004; + seatType_ = bs; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; } - break; - } } } } catch (org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); + throw new org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException(e) + .setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } + public static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptor() { - return docs.persistence.proto.FlightAppModels.internal_static_docs_persistence_SeatReserved_descriptor; + return docs.persistence.proto.FlightAppModels + .internal_static_docs_persistence_SeatReserved_descriptor; } @java.lang.Override protected org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { 
- return docs.persistence.proto.FlightAppModels.internal_static_docs_persistence_SeatReserved_fieldAccessorTable + return docs.persistence.proto.FlightAppModels + .internal_static_docs_persistence_SeatReserved_fieldAccessorTable .ensureFieldAccessorsInitialized( - docs.persistence.proto.FlightAppModels.SeatReserved.class, docs.persistence.proto.FlightAppModels.SeatReserved.Builder.class); + docs.persistence.proto.FlightAppModels.SeatReserved.class, + docs.persistence.proto.FlightAppModels.SeatReserved.Builder.class); } private int bitField0_; @@ -190,6 +211,7 @@ private SeatReserved( private volatile java.lang.Object letter_; /** * required string letter = 1; + * * @return Whether the letter field is set. */ public boolean hasLetter() { @@ -197,6 +219,7 @@ public boolean hasLetter() { } /** * required string letter = 1; + * * @return The letter. */ public java.lang.String getLetter() { @@ -204,7 +227,7 @@ public java.lang.String getLetter() { if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { - org.apache.pekko.protobufv3.internal.ByteString bs = + org.apache.pekko.protobufv3.internal.ByteString bs = (org.apache.pekko.protobufv3.internal.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { @@ -215,15 +238,14 @@ public java.lang.String getLetter() { } /** * required string letter = 1; + * * @return The bytes for letter. */ - public org.apache.pekko.protobufv3.internal.ByteString - getLetterBytes() { + public org.apache.pekko.protobufv3.internal.ByteString getLetterBytes() { java.lang.Object ref = letter_; if (ref instanceof java.lang.String) { - org.apache.pekko.protobufv3.internal.ByteString b = - org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8( - (java.lang.String) ref); + org.apache.pekko.protobufv3.internal.ByteString b = + org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8((java.lang.String) ref); letter_ = b; return b; } else { @@ -235,6 +257,7 @@ public java.lang.String getLetter() { private int row_; /** * required uint32 row = 2; + * * @return Whether the row field is set. */ public boolean hasRow() { @@ -242,6 +265,7 @@ public boolean hasRow() { } /** * required uint32 row = 2; + * * @return The row. */ public int getRow() { @@ -251,22 +275,28 @@ public int getRow() { public static final int SEATTYPE_FIELD_NUMBER = 3; private volatile java.lang.Object seatType_; /** + * + * *
      * the new field
      * 
* * optional string seatType = 3; + * * @return Whether the seatType field is set. */ public boolean hasSeatType() { return ((bitField0_ & 0x00000004) != 0); } /** + * + * *
      * the new field
      * 
* * optional string seatType = 3; + * * @return The seatType. */ public java.lang.String getSeatType() { @@ -274,7 +304,7 @@ public java.lang.String getSeatType() { if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { - org.apache.pekko.protobufv3.internal.ByteString bs = + org.apache.pekko.protobufv3.internal.ByteString bs = (org.apache.pekko.protobufv3.internal.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { @@ -284,20 +314,21 @@ public java.lang.String getSeatType() { } } /** + * + * *
      * the new field
      * 
* * optional string seatType = 3; + * * @return The bytes for seatType. */ - public org.apache.pekko.protobufv3.internal.ByteString - getSeatTypeBytes() { + public org.apache.pekko.protobufv3.internal.ByteString getSeatTypeBytes() { java.lang.Object ref = seatType_; if (ref instanceof java.lang.String) { - org.apache.pekko.protobufv3.internal.ByteString b = - org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8( - (java.lang.String) ref); + org.apache.pekko.protobufv3.internal.ByteString b = + org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8((java.lang.String) ref); seatType_ = b; return b; } else { @@ -306,6 +337,7 @@ public java.lang.String getSeatType() { } private byte memoizedIsInitialized = -1; + @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; @@ -326,7 +358,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(org.apache.pekko.protobufv3.internal.CodedOutputStream output) - throws java.io.IOException { + throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { org.apache.pekko.protobufv3.internal.GeneratedMessageV3.writeString(output, 1, letter_); } @@ -346,14 +378,15 @@ public int getSerializedSize() { size = 0; if (((bitField0_ & 0x00000001) != 0)) { - size += org.apache.pekko.protobufv3.internal.GeneratedMessageV3.computeStringSize(1, letter_); + size += + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.computeStringSize(1, letter_); } if (((bitField0_ & 0x00000002) != 0)) { - size += org.apache.pekko.protobufv3.internal.CodedOutputStream - .computeUInt32Size(2, row_); + size += org.apache.pekko.protobufv3.internal.CodedOutputStream.computeUInt32Size(2, row_); } if (((bitField0_ & 0x00000004) != 0)) { - size += org.apache.pekko.protobufv3.internal.GeneratedMessageV3.computeStringSize(3, seatType_); + size += + org.apache.pekko.protobufv3.internal.GeneratedMessageV3.computeStringSize(3, seatType_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -363,27 +396,25 @@ public int getSerializedSize() { @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { - return true; + return true; } if (!(obj instanceof docs.persistence.proto.FlightAppModels.SeatReserved)) { return super.equals(obj); } - docs.persistence.proto.FlightAppModels.SeatReserved other = (docs.persistence.proto.FlightAppModels.SeatReserved) obj; + docs.persistence.proto.FlightAppModels.SeatReserved other = + (docs.persistence.proto.FlightAppModels.SeatReserved) obj; if (hasLetter() != other.hasLetter()) return false; if (hasLetter()) { - if (!getLetter() - .equals(other.getLetter())) return false; + if (!getLetter().equals(other.getLetter())) return false; } if (hasRow() != other.hasRow()) return false; if (hasRow()) { - if (getRow() - != other.getRow()) return false; + if (getRow() != other.getRow()) return false; } if (hasSeatType() != other.hasSeatType()) return false; if (hasSeatType()) { - if (!getSeatType() - .equals(other.getSeatType())) return false; + if (!getSeatType().equals(other.getSeatType())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -418,83 +449,97 @@ public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( java.nio.ByteBuffer data, 
org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( org.apache.pekko.protobufv3.internal.ByteString data) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( org.apache.pekko.protobufv3.internal.ByteString data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom(byte[] data) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( - byte[] data, - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) + byte[] data, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input); + + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( + java.io.InputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( java.io.InputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); } - public static docs.persistence.proto.FlightAppModels.SeatReserved parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); + + public static docs.persistence.proto.FlightAppModels.SeatReserved parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseDelimitedFrom( java.io.InputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( - org.apache.pekko.protobufv3.internal.CodedInputStream input) - throws java.io.IOException { - return 
org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input); + org.apache.pekko.protobufv3.internal.CodedInputStream input) throws java.io.IOException { + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input); } + public static docs.persistence.proto.FlightAppModels.SeatReserved parseFrom( org.apache.pekko.protobufv3.internal.CodedInputStream input, org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + return org.apache.pekko.protobufv3.internal.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); } @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() { + return newBuilder(); + } + public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(docs.persistence.proto.FlightAppModels.SeatReserved prototype) { + + public static Builder newBuilder( + docs.persistence.proto.FlightAppModels.SeatReserved prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } + @java.lang.Override public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override @@ -503,24 +548,26 @@ protected Builder newBuilderForType( Builder builder = new Builder(parent); return builder; } - /** - * Protobuf type {@code docs.persistence.SeatReserved} - */ - public static final class Builder extends - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder implements + /** Protobuf type {@code docs.persistence.SeatReserved} */ + public static final class Builder + extends org.apache.pekko.protobufv3.internal.GeneratedMessageV3.Builder + implements // @@protoc_insertion_point(builder_implements:docs.persistence.SeatReserved) docs.persistence.proto.FlightAppModels.SeatReservedOrBuilder { public static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptor() { - return docs.persistence.proto.FlightAppModels.internal_static_docs_persistence_SeatReserved_descriptor; + return docs.persistence.proto.FlightAppModels + .internal_static_docs_persistence_SeatReserved_descriptor; } @java.lang.Override protected org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return docs.persistence.proto.FlightAppModels.internal_static_docs_persistence_SeatReserved_fieldAccessorTable + return docs.persistence.proto.FlightAppModels + .internal_static_docs_persistence_SeatReserved_fieldAccessorTable .ensureFieldAccessorsInitialized( - docs.persistence.proto.FlightAppModels.SeatReserved.class, docs.persistence.proto.FlightAppModels.SeatReserved.Builder.class); + docs.persistence.proto.FlightAppModels.SeatReserved.class, + docs.persistence.proto.FlightAppModels.SeatReserved.Builder.class); } // Construct using docs.persistence.proto.FlightAppModels.SeatReserved.newBuilder() @@ -533,11 +580,11 @@ private Builder( super(parent); maybeForceBuilderInitialization(); } + private void maybeForceBuilderInitialization() { - if (org.apache.pekko.protobufv3.internal.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } + if (org.apache.pekko.protobufv3.internal.GeneratedMessageV3.alwaysUseFieldBuilders) {} } + 
@java.lang.Override public Builder clear() { super.clear(); @@ -551,9 +598,9 @@ public Builder clear() { } @java.lang.Override - public org.apache.pekko.protobufv3.internal.Descriptors.Descriptor - getDescriptorForType() { - return docs.persistence.proto.FlightAppModels.internal_static_docs_persistence_SeatReserved_descriptor; + public org.apache.pekko.protobufv3.internal.Descriptors.Descriptor getDescriptorForType() { + return docs.persistence.proto.FlightAppModels + .internal_static_docs_persistence_SeatReserved_descriptor; } @java.lang.Override @@ -572,7 +619,8 @@ public docs.persistence.proto.FlightAppModels.SeatReserved build() { @java.lang.Override public docs.persistence.proto.FlightAppModels.SeatReserved buildPartial() { - docs.persistence.proto.FlightAppModels.SeatReserved result = new docs.persistence.proto.FlightAppModels.SeatReserved(this); + docs.persistence.proto.FlightAppModels.SeatReserved result = + new docs.persistence.proto.FlightAppModels.SeatReserved(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { @@ -596,38 +644,45 @@ public docs.persistence.proto.FlightAppModels.SeatReserved buildPartial() { public Builder clone() { return super.clone(); } + @java.lang.Override public Builder setField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } + @java.lang.Override public Builder clearField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field) { return super.clearField(field); } + @java.lang.Override public Builder clearOneof( org.apache.pekko.protobufv3.internal.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } + @java.lang.Override public Builder setRepeatedField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { + int index, + java.lang.Object value) { return super.setRepeatedField(field, index, value); } + @java.lang.Override public Builder addRepeatedField( org.apache.pekko.protobufv3.internal.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } + @java.lang.Override public Builder mergeFrom(org.apache.pekko.protobufv3.internal.Message other) { if (other instanceof docs.persistence.proto.FlightAppModels.SeatReserved) { - return mergeFrom((docs.persistence.proto.FlightAppModels.SeatReserved)other); + return mergeFrom((docs.persistence.proto.FlightAppModels.SeatReserved) other); } else { super.mergeFrom(other); return this; @@ -635,7 +690,8 @@ public Builder mergeFrom(org.apache.pekko.protobufv3.internal.Message other) { } public Builder mergeFrom(docs.persistence.proto.FlightAppModels.SeatReserved other) { - if (other == docs.persistence.proto.FlightAppModels.SeatReserved.getDefaultInstance()) return this; + if (other == docs.persistence.proto.FlightAppModels.SeatReserved.getDefaultInstance()) + return this; if (other.hasLetter()) { bitField0_ |= 0x00000001; letter_ = other.letter_; @@ -674,7 +730,8 @@ public Builder mergeFrom( try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException e) { - parsedMessage = (docs.persistence.proto.FlightAppModels.SeatReserved) e.getUnfinishedMessage(); + parsedMessage = + (docs.persistence.proto.FlightAppModels.SeatReserved) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -683,11 +740,13 @@ public 
Builder mergeFrom( } return this; } + private int bitField0_; private java.lang.Object letter_ = ""; /** * required string letter = 1; + * * @return Whether the letter field is set. */ public boolean hasLetter() { @@ -695,6 +754,7 @@ public boolean hasLetter() { } /** * required string letter = 1; + * * @return The letter. */ public java.lang.String getLetter() { @@ -713,15 +773,14 @@ public java.lang.String getLetter() { } /** * required string letter = 1; + * * @return The bytes for letter. */ - public org.apache.pekko.protobufv3.internal.ByteString - getLetterBytes() { + public org.apache.pekko.protobufv3.internal.ByteString getLetterBytes() { java.lang.Object ref = letter_; if (ref instanceof String) { - org.apache.pekko.protobufv3.internal.ByteString b = - org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8( - (java.lang.String) ref); + org.apache.pekko.protobufv3.internal.ByteString b = + org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8((java.lang.String) ref); letter_ = b; return b; } else { @@ -730,21 +789,22 @@ public java.lang.String getLetter() { } /** * required string letter = 1; + * * @param value The letter to set. * @return This builder for chaining. */ - public Builder setLetter( - java.lang.String value) { + public Builder setLetter(java.lang.String value) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; letter_ = value; onChanged(); return this; } /** * required string letter = 1; + * * @return This builder for chaining. */ public Builder clearLetter() { @@ -755,23 +815,24 @@ public Builder clearLetter() { } /** * required string letter = 1; + * * @param value The bytes for letter to set. * @return This builder for chaining. */ - public Builder setLetterBytes( - org.apache.pekko.protobufv3.internal.ByteString value) { + public Builder setLetterBytes(org.apache.pekko.protobufv3.internal.ByteString value) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; letter_ = value; onChanged(); return this; } - private int row_ ; + private int row_; /** * required uint32 row = 2; + * * @return Whether the row field is set. */ public boolean hasRow() { @@ -779,6 +840,7 @@ public boolean hasRow() { } /** * required uint32 row = 2; + * * @return The row. */ public int getRow() { @@ -786,6 +848,7 @@ public int getRow() { } /** * required uint32 row = 2; + * * @param value The row to set. * @return This builder for chaining. */ @@ -797,6 +860,7 @@ public Builder setRow(int value) { } /** * required uint32 row = 2; + * * @return This builder for chaining. */ public Builder clearRow() { @@ -808,22 +872,28 @@ public Builder clearRow() { private java.lang.Object seatType_ = ""; /** + * + * *
        * the new field
        * 
* * optional string seatType = 3; + * * @return Whether the seatType field is set. */ public boolean hasSeatType() { return ((bitField0_ & 0x00000004) != 0); } /** + * + * *
        * the new field
        * 
* * optional string seatType = 3; + * * @return The seatType. */ public java.lang.String getSeatType() { @@ -841,20 +911,21 @@ public java.lang.String getSeatType() { } } /** + * + * *
        * the new field
        * 
* * optional string seatType = 3; + * * @return The bytes for seatType. */ - public org.apache.pekko.protobufv3.internal.ByteString - getSeatTypeBytes() { + public org.apache.pekko.protobufv3.internal.ByteString getSeatTypeBytes() { java.lang.Object ref = seatType_; if (ref instanceof String) { - org.apache.pekko.protobufv3.internal.ByteString b = - org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8( - (java.lang.String) ref); + org.apache.pekko.protobufv3.internal.ByteString b = + org.apache.pekko.protobufv3.internal.ByteString.copyFromUtf8((java.lang.String) ref); seatType_ = b; return b; } else { @@ -862,30 +933,35 @@ public java.lang.String getSeatType() { } } /** + * + * *
        * the new field
        * 
* * optional string seatType = 3; + * * @param value The seatType to set. * @return This builder for chaining. */ - public Builder setSeatType( - java.lang.String value) { + public Builder setSeatType(java.lang.String value) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; seatType_ = value; onChanged(); return this; } /** + * + * *
        * the new field
        * 
* * optional string seatType = 3; + * * @return This builder for chaining. */ public Builder clearSeatType() { @@ -895,24 +971,27 @@ public Builder clearSeatType() { return this; } /** + * + * *
        * the new field
        * 
* * optional string seatType = 3; + * * @param value The bytes for seatType to set. * @return This builder for chaining. */ - public Builder setSeatTypeBytes( - org.apache.pekko.protobufv3.internal.ByteString value) { + public Builder setSeatTypeBytes(org.apache.pekko.protobufv3.internal.ByteString value) { if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; seatType_ = value; onChanged(); return this; } + @java.lang.Override public final Builder setUnknownFields( final org.apache.pekko.protobufv3.internal.UnknownFieldSet unknownFields) { @@ -925,12 +1004,12 @@ public final Builder mergeUnknownFields( return super.mergeUnknownFields(unknownFields); } - // @@protoc_insertion_point(builder_scope:docs.persistence.SeatReserved) } // @@protoc_insertion_point(class_scope:docs.persistence.SeatReserved) private static final docs.persistence.proto.FlightAppModels.SeatReserved DEFAULT_INSTANCE; + static { DEFAULT_INSTANCE = new docs.persistence.proto.FlightAppModels.SeatReserved(); } @@ -939,16 +1018,17 @@ public static docs.persistence.proto.FlightAppModels.SeatReserved getDefaultInst return DEFAULT_INSTANCE; } - @java.lang.Deprecated public static final org.apache.pekko.protobufv3.internal.Parser - PARSER = new org.apache.pekko.protobufv3.internal.AbstractParser() { - @java.lang.Override - public SeatReserved parsePartialFrom( - org.apache.pekko.protobufv3.internal.CodedInputStream input, - org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) - throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { - return new SeatReserved(input, extensionRegistry); - } - }; + @java.lang.Deprecated + public static final org.apache.pekko.protobufv3.internal.Parser PARSER = + new org.apache.pekko.protobufv3.internal.AbstractParser() { + @java.lang.Override + public SeatReserved parsePartialFrom( + org.apache.pekko.protobufv3.internal.CodedInputStream input, + org.apache.pekko.protobufv3.internal.ExtensionRegistryLite extensionRegistry) + throws org.apache.pekko.protobufv3.internal.InvalidProtocolBufferException { + return new SeatReserved(input, extensionRegistry); + } + }; public static org.apache.pekko.protobufv3.internal.Parser parser() { return PARSER; @@ -963,38 +1043,39 @@ public org.apache.pekko.protobufv3.internal.Parser getParserForTyp public docs.persistence.proto.FlightAppModels.SeatReserved getDefaultInstanceForType() { return DEFAULT_INSTANCE; } - } private static final org.apache.pekko.protobufv3.internal.Descriptors.Descriptor - internal_static_docs_persistence_SeatReserved_descriptor; - private static final - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable + internal_static_docs_persistence_SeatReserved_descriptor; + private static final org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable internal_static_docs_persistence_SeatReserved_fieldAccessorTable; - public static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor - getDescriptor() { + public static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor - descriptor; + + private static org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor descriptor; + static { java.lang.String[] descriptorData = { - "\n\025FlightAppModels.proto\022\020docs.persistenc" + - 
"e\"=\n\014SeatReserved\022\016\n\006letter\030\001 \002(\t\022\013\n\003row" + - "\030\002 \002(\r\022\020\n\010seatType\030\003 \001(\tB\032\n\026docs.persist" + - "ence.protoH\001" + "\n\025FlightAppModels.proto\022\020docs.persistenc" + + "e\"=\n\014SeatReserved\022\016\n\006letter\030\001 \002(\t\022\013\n\003row" + + "\030\002 \002(\r\022\020\n\010seatType\030\003 \001(\tB\032\n\026docs.persist" + + "ence.protoH\001" }; - descriptor = org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor[] { - }); + descriptor = + org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom( + descriptorData, + new org.apache.pekko.protobufv3.internal.Descriptors.FileDescriptor[] {}); internal_static_docs_persistence_SeatReserved_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_docs_persistence_SeatReserved_fieldAccessorTable = new - org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable( - internal_static_docs_persistence_SeatReserved_descriptor, - new java.lang.String[] { "Letter", "Row", "SeatType", }); + getDescriptor().getMessageTypes().get(0); + internal_static_docs_persistence_SeatReserved_fieldAccessorTable = + new org.apache.pekko.protobufv3.internal.GeneratedMessageV3.FieldAccessorTable( + internal_static_docs_persistence_SeatReserved_descriptor, + new java.lang.String[] { + "Letter", "Row", "SeatType", + }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/docs/src/main/paradox/stream/operators/Source-or-Flow/mapWithResource.md b/docs/src/main/paradox/stream/operators/Source-or-Flow/mapWithResource.md new file mode 100644 index 00000000000..9fbc093bd53 --- /dev/null +++ b/docs/src/main/paradox/stream/operators/Source-or-Flow/mapWithResource.md @@ -0,0 +1,69 @@ +# mapWithResource + +Map elements with the help of a resource that can be opened, transform each element (in a blocking way) and closed. + +@ref[Simple operators](../index.md#simple-operators) + +## Signature + +@apidoc[Flow.mapWithResource](Flow) { scala="#mapWithResource%5BS%2C%20T%5D%28create%3A%20%28%29%20%3D%3E%20S%29%28f%3A%20%28S%2C%20Out%29%20%3D%3E%20T%2C%20close%3A%20S%20%3D%3E%20Option%5BT%5D%29%3A%20Repr%5BT%5D" java="#mapWithResource(java.util.function.Supplier,java.util.function.BiFunction,java.util.function.Function)" } +1. `create`: Open or Create the resource. +2. `f`: Transform each element inputs with the help of resource. +3. `close`: Close the resource, invoked on end of stream or if the stream fails, optionally outputting a last element. + +## Description + +Transform each stream element with the help of a resource. +The functions are by default called on Pekko's dispatcher for blocking IO to avoid interfering with other stream operations. +See @ref:[Blocking Needs Careful Management](../../../typed/dispatchers.md#blocking-needs-careful-management) for an explanation on why this is important. +The resource creation function is invoked once when the stream is materialized and the returned resource is passed to the mapping function for mapping the first element. The mapping function returns a mapped element to emit downstream. The returned T MUST NOT be null as it is illegal as stream element - according to the Reactive Streams specification. + +The close function is called when upstream or downstream completes normally or exceptionally, and will be called only once. 
+- when upstream completes or fails, the optional value returned by `close` will be emitted downstream if defined.
+- when downstream cancels or fails, the optional value returned by `close` will be ignored.
+- when the stream shuts down abruptly, the optional value returned by `close` will be ignored.
+  In all cases, `close` is the place to do any necessary clean-up.
+
+Early completion can be achieved in combination with the @apidoc[Flow.takeWhile](Flow) operator.
+
+See also @ref:[unfoldResource](../Source/unfoldResource.md), @ref:[unfoldResourceAsync](../Source/unfoldResourceAsync.md).
+
+You can configure the default dispatcher for this operator by changing the `pekko.stream.materializer.blocking-io-dispatcher`
+or set it for a given operator by using ActorAttributes.
+
+## Examples
+
+Imagine we have a database API which may block when we perform a query,
+and whose connection can be reused for each query.
+
+Scala
+: @@snip [MapWithResource.scala](/docs/src/test/scala/docs/stream/operators/sourceorflow/MapWithResource.scala) { #mapWithResource-blocking-api }
+
+Java
+: @@snip [MapWithResource.java](/docs/src/test/java/jdocs/stream/operators/sourceorflow/MapWithResource.java) { #mapWithResource-blocking-api }
+
+Let's see how to use the API above safely with `mapWithResource`:
+
+Scala
+: @@snip [MapWithResource.scala](/docs/src/test/scala/docs/stream/operators/sourceorflow/MapWithResource.scala) { #mapWithResource }
+
+Java
+: @@snip [MapWithResource.java](/docs/src/test/java/jdocs/stream/operators/sourceorflow/MapWithResource.java) { #mapWithResource }
+
+In this example we retrieve data from two tables over the same shared connection, and flatten the results
+into individual records with @scala[`mapConcat(identity)`]@java[`mapConcat(elems -> elems)`]; once done, the connection is closed.
+
+
+## Reactive Streams semantics
+
+@@@div { .callout }
+
+**emits** the mapping function returns an element and downstream is ready to consume it
+
+**backpressures** downstream backpressures
+
+**completes** upstream completes
+
+**cancels** downstream cancels
+
+@@@
\ No newline at end of file
diff --git a/docs/src/main/paradox/stream/operators/index.md b/docs/src/main/paradox/stream/operators/index.md
index d797f53942d..3cc66e654f1 100644
--- a/docs/src/main/paradox/stream/operators/index.md
+++ b/docs/src/main/paradox/stream/operators/index.md
@@ -170,6 +170,7 @@ depending on being backpressured by downstream or not. 
|Source/Flow|@ref[logWithMarker](Source-or-Flow/logWithMarker.md)|Log elements flowing through the stream as well as completion and erroring.|
 |Source/Flow|@ref[map](Source-or-Flow/map.md)|Transform each element in the stream by calling a mapping function with it and passing the returned value downstream.|
 |Source/Flow|@ref[mapConcat](Source-or-Flow/mapConcat.md)|Transform each element into zero or more elements that are individually passed downstream.|
+|Source/Flow|@ref[mapWithResource](Source-or-Flow/mapWithResource.md)|Map elements with the help of a resource that can be opened, used to transform each element (in a blocking way), and then closed.|
 |Source/Flow|@ref[preMaterialize](Source-or-Flow/preMaterialize.md)|Materializes this Graph, immediately returning (1) its materialized value, and (2) a new pre-materialized Graph.|
 |Source/Flow|@ref[reduce](Source-or-Flow/reduce.md)|Start with first element and then apply the current and next value to the given function, when upstream complete the current value is emitted downstream.|
 |Source/Flow|@ref[scan](Source-or-Flow/scan.md)|Emit its current value, which starts at `zero`, and then apply the current and next value to the given function, emitting the next current value.|
@@ -520,6 +521,7 @@ For more background see the @ref[Error Handling in Streams](../stream-error.md)
 * [mapAsyncUnordered](Source-or-Flow/mapAsyncUnordered.md)
 * [mapConcat](Source-or-Flow/mapConcat.md)
 * [mapError](Source-or-Flow/mapError.md)
+* [mapWithResource](Source-or-Flow/mapWithResource.md)
 * [maybe](Source/maybe.md)
 * [merge](Source-or-Flow/merge.md)
 * [mergeAll](Source-or-Flow/mergeAll.md)
diff --git a/docs/src/test/java/jdocs/stream/operators/sourceorflow/MapWithResource.java b/docs/src/test/java/jdocs/stream/operators/sourceorflow/MapWithResource.java
new file mode 100644
index 00000000000..02c9ba85945
--- /dev/null
+++ b/docs/src/test/java/jdocs/stream/operators/sourceorflow/MapWithResource.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. 
+ */ + +package jdocs.stream.operators.sourceorflow; + +import org.apache.pekko.actor.ActorSystem; +import org.apache.pekko.stream.javadsl.Source; + +import java.net.URL; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; + +public interface MapWithResource { + // #mapWithResource-blocking-api + interface DBDriver { + Connection create(URL url, String userName, String password); + } + + interface Connection { + void close(); + } + + interface Database { + // blocking query + QueryResult doQuery(Connection connection, String query); + } + + interface QueryResult { + boolean hasMore(); + + // potentially blocking retrieval of each element + DatabaseRecord next(); + + // potentially blocking retrieval all element + List toList(); + } + + interface DatabaseRecord {} + // #mapWithResource-blocking-api + + default void mapWithResourceExample() { + final ActorSystem system = null; + final URL url = null; + final String userName = "Akka"; + final String password = "Hakking"; + final DBDriver dbDriver = null; + // #mapWithResource + // some database for JVM + final Database db = null; + Source.from( + Arrays.asList( + "SELECT * FROM shop ORDER BY article-0000 order by gmtModified desc limit 100;", + "SELECT * FROM shop ORDER BY article-0001 order by gmtModified desc limit 100;")) + .mapWithResource( + () -> dbDriver.create(url, userName, password), + (connection, query) -> db.doQuery(connection, query).toList(), + connection -> { + connection.close(); + return Optional.empty(); + }) + .mapConcat(elems -> elems) + .runForeach(System.out::println, system); + // #mapWithResource + } +} diff --git a/docs/src/test/scala/docs/stream/operators/sourceorflow/MapWithResource.scala b/docs/src/test/scala/docs/stream/operators/sourceorflow/MapWithResource.scala new file mode 100644 index 00000000000..bc866244cfe --- /dev/null +++ b/docs/src/test/scala/docs/stream/operators/sourceorflow/MapWithResource.scala @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package docs.stream.operators.sourceorflow + +import org.apache.pekko.actor.ActorSystem +import org.apache.pekko.stream.scaladsl.Source + +import java.net.URL + +object MapWithResource { + implicit val actorSystem: ActorSystem = ??? + + // #mapWithResource-blocking-api + trait DBDriver { + def create(url: URL, userName: String, password: String): Connection + } + trait Connection { + def close(): Unit + } + trait Database { + // blocking query + def doQuery(connection: Connection, query: String): QueryResult = ??? 
+ } + trait QueryResult { + def hasMore: Boolean + // potentially blocking retrieval of each element + def next(): DataBaseRecord + // potentially blocking retrieval all element + def toList(): List[DataBaseRecord] + } + trait DataBaseRecord + // #mapWithResource-blocking-api + val url: URL = ??? + val userName = "Akka" + val password = "Hakking" + val dbDriver: DBDriver = ??? + def mapWithResourceExample(): Unit = { + // #mapWithResource + // some database for JVM + val db: Database = ??? + Source( + List( + "SELECT * FROM shop ORDER BY article-0000 order by gmtModified desc limit 100;", + "SELECT * FROM shop ORDER BY article-0001 order by gmtModified desc limit 100;")) + .mapWithResource(() => dbDriver.create(url, userName, password))( + (connection, query) => db.doQuery(connection, query).toList(), + conn => { + conn.close() + None + }) + .mapConcat(identity) + .runForeach(println) + // #mapWithResource + } +} diff --git a/stream-tests/src/test/scala/org/apache/pekko/stream/scaladsl/FlowMapWithResourceSpec.scala b/stream-tests/src/test/scala/org/apache/pekko/stream/scaladsl/FlowMapWithResourceSpec.scala new file mode 100644 index 00000000000..d316548f6aa --- /dev/null +++ b/stream-tests/src/test/scala/org/apache/pekko/stream/scaladsl/FlowMapWithResourceSpec.scala @@ -0,0 +1,416 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.pekko.stream.scaladsl + +import java.io.BufferedReader +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.util.concurrent.atomic.AtomicInteger + +import scala.annotation.{ nowarn, tailrec } +import scala.collection.mutable.ListBuffer +import scala.concurrent.{ Await, Promise } +import scala.concurrent.duration.DurationInt +import scala.util.Success +import scala.util.control.NoStackTrace + +import com.google.common.jimfs.{ Configuration, Jimfs } +import org.apache.pekko +import pekko.Done +import pekko.stream.{ AbruptTerminationException, ActorAttributes, ActorMaterializer, SystemMaterializer } +import pekko.stream.ActorAttributes.supervisionStrategy +import pekko.stream.Supervision.{ restartingDecider, resumingDecider } +import pekko.stream.impl.{ PhasedFusingActorMaterializer, StreamSupervisor } +import pekko.stream.impl.StreamSupervisor.Children +import pekko.stream.testkit.{ StreamSpec, TestSubscriber } +import pekko.stream.testkit.Utils.{ assertDispatcher, TE, UnboundedMailboxConfig } +import pekko.stream.testkit.scaladsl.{ TestSink, TestSource } +import pekko.testkit.EventFilter +import pekko.util.ByteString + +class FlowMapWithResourceSpec extends StreamSpec(UnboundedMailboxConfig) { + + private val fs = Jimfs.newFileSystem("MapWithResourceSpec", Configuration.unix()) + private val ex = new Exception("TEST") with NoStackTrace + + private val manyLines = { + ("a" * 100 + "\n") * 10 + + ("b" * 100 + "\n") * 10 + + ("c" * 100 + "\n") * 10 + + ("d" * 100 + "\n") * 10 + + ("e" * 100 + "\n") * 10 + + ("f" * 100 + "\n") * 10 + } + private val manyLinesArray = manyLines.split("\n") + + private val manyLinesPath = { + val file = Files.createFile(fs.getPath("/testMapWithResource.dat")) + Files.write(file, manyLines.getBytes(StandardCharsets.UTF_8)) + } + private def newBufferedReader() = Files.newBufferedReader(manyLinesPath, StandardCharsets.UTF_8) + + private def readLines(reader: BufferedReader, maxCount: Int): List[String] = { + if (maxCount == 0) { + return List.empty + } + + @tailrec + def loop(builder: ListBuffer[String], n: Int): ListBuffer[String] = { + if (n == 0) { + builder + } else { + val line = reader.readLine() + if (line eq null) + builder + else { + builder += line + loop(builder, n - 1) + } + } + } + loop(ListBuffer.empty, maxCount).result() + } + + "MapWithResource" must { + "can read contents from a file" in { + val p = Source(List(1, 10, 20, 30)) + .mapWithResource(() => newBufferedReader())((reader, count) => { + readLines(reader, count) + }, + reader => { + reader.close() + None + }) + .mapConcat(identity) + .runWith(Sink.asPublisher(false)) + + val c = TestSubscriber.manualProbe[String]() + p.subscribe(c) + val sub = c.expectSubscription() + + val chunks = manyLinesArray.toList.iterator + + sub.request(1) + c.expectNext() should ===(chunks.next()) + sub.request(1) + c.expectNext() should ===(chunks.next()) + c.expectNoMessage(300.millis) + + while (chunks.hasNext) { + sub.request(1) + c.expectNext() should ===(chunks.next()) + } + sub.request(1) + + c.expectComplete() + } + + "continue when Strategy is Resume and exception happened" in { + val p = Source + .repeat(1) + .take(100) + .mapWithResource(() => newBufferedReader())((reader, _) => { + val s = reader.readLine() + if (s != null && s.contains("b")) throw TE("") else s + }, + reader => { + reader.close() + None + }) + .withAttributes(supervisionStrategy(resumingDecider)) + .runWith(Sink.asPublisher(false)) + val c = TestSubscriber.manualProbe[String]() + 
+ p.subscribe(c) + val sub = c.expectSubscription() + + (0 to 49).foreach(i => { + sub.request(1) + c.expectNext() should ===(if (i < 10) manyLinesArray(i) else manyLinesArray(i + 10)) + }) + sub.request(1) + c.expectComplete() + } + + "close and open stream again when Strategy is Restart" in { + val p = Source + .repeat(1) + .take(100) + .mapWithResource(() => newBufferedReader())((reader, _) => { + val s = reader.readLine() + if (s != null && s.contains("b")) throw TE("") else s + }, + reader => { + reader.close() + None + }) + .withAttributes(supervisionStrategy(restartingDecider)) + .runWith(Sink.asPublisher(false)) + val c = TestSubscriber.manualProbe[String]() + + p.subscribe(c) + val sub = c.expectSubscription() + + (0 to 19).foreach(_ => { + sub.request(1) + c.expectNext() should ===(manyLinesArray(0)) + }) + sub.cancel() + } + + "work with ByteString as well" in { + val chunkSize = 50 + val buffer = new Array[Char](chunkSize) + val p = Source + .repeat(1) + .mapWithResource(() => newBufferedReader())((reader, _) => { + val s = reader.read(buffer) + if (s > 0) Some(ByteString(buffer.mkString("")).take(s)) else None + }, + reader => { + reader.close() + None + }) + .takeWhile(_.isDefined) + .collect { + case Some(bytes) => bytes + } + .runWith(Sink.asPublisher(false)) + val c = TestSubscriber.manualProbe[ByteString]() + + var remaining = manyLines + def nextChunk() = { + val (chunk, rest) = remaining.splitAt(chunkSize) + remaining = rest + chunk + } + + p.subscribe(c) + val sub = c.expectSubscription() + + (0 to 121).foreach(_ => { + sub.request(1) + c.expectNext().utf8String should ===(nextChunk()) + }) + sub.request(1) + c.expectComplete() + } + + "use dedicated blocking-io-dispatcher by default" in { + val p = Source + .single(1) + .mapWithResource(() => newBufferedReader())((reader, _) => Option(reader.readLine()), + reader => { + reader.close() + None + }) + .runWith(TestSink.probe) + + SystemMaterializer(system).materializer + .asInstanceOf[PhasedFusingActorMaterializer] + .supervisor + .tell(StreamSupervisor.GetChildren, testActor) + val ref = expectMsgType[Children].children.find(_.path.toString contains "mapWithResource").get + try assertDispatcher(ref, ActorAttributes.IODispatcher.dispatcher) + finally p.cancel() + } + + "fail when create throws exception" in { + EventFilter[TE](occurrences = 1).intercept { + val p = Source + .single(1) + .mapWithResource[BufferedReader, String](() => throw TE(""))((reader, _) => reader.readLine(), + reader => { + reader.close() + None + }) + .runWith(Sink.asPublisher(false)) + val c = TestSubscriber.manualProbe[String]() + p.subscribe(c) + + c.expectSubscription() + c.expectError(TE("")) + } + } + + "fail when close throws exception" in { + val (pub, sub) = TestSource[Int]() + .mapWithResource(() => Iterator("a"))((it, _) => if (it.hasNext) Some(it.next()) else None, _ => throw TE("")) + .collect { case Some(line) => line } + .toMat(TestSink())(Keep.both) + .run() + + pub.ensureSubscription() + sub.ensureSubscription() + sub.request(1) + pub.sendNext(1) + sub.expectNext("a") + pub.sendComplete() + sub.expectError(TE("")) + } + + "not close the resource twice when read fails" in { + val closedCounter = new AtomicInteger(0) + val probe = Source + .repeat(1) + .mapWithResource(() => 23)( // the best resource there is + (_, _) => throw TE("failing read"), + _ => { + closedCounter.incrementAndGet() + None + }) + .runWith(TestSink.probe[Int]) + + probe.request(1) + probe.expectError(TE("failing read")) + closedCounter.get() should ===(1) + } + 
+ "not close the resource twice when read fails and then close fails" in { + val closedCounter = new AtomicInteger(0) + val probe = Source + .repeat(1) + .mapWithResource(() => 23)((_, _) => throw TE("failing read"), + _ => { + closedCounter.incrementAndGet() + if (closedCounter.get == 1) throw TE("boom") + None + }) + .runWith(TestSink.probe[Int]) + + EventFilter[TE](occurrences = 1).intercept { + probe.request(1) + probe.expectError(TE("boom")) + } + closedCounter.get() should ===(1) + } + + "will close the resource when upstream complete" in { + val closedCounter = new AtomicInteger(0) + val (pub, sub) = TestSource + .probe[Int] + .mapWithResource(() => newBufferedReader())((reader, count) => readLines(reader, count), + reader => { + reader.close() + closedCounter.incrementAndGet() + Some(List("End")) + }) + .mapConcat(identity) + .toMat(TestSink.probe)(Keep.both) + .run() + sub.expectSubscription().request(2) + pub.sendNext(1) + sub.expectNext(manyLinesArray(0)) + pub.sendComplete() + sub.expectNext("End") + sub.expectComplete() + closedCounter.get shouldBe 1 + } + + "will close the resource when upstream fail" in { + val closedCounter = new AtomicInteger(0) + val (pub, sub) = TestSource + .probe[Int] + .mapWithResource(() => newBufferedReader())((reader, count) => readLines(reader, count), + reader => { + reader.close() + closedCounter.incrementAndGet() + Some(List("End")) + }) + .mapConcat(identity) + .toMat(TestSink.probe)(Keep.both) + .run() + sub.expectSubscription().request(2) + pub.sendNext(1) + sub.expectNext(manyLinesArray(0)) + pub.sendError(ex) + sub.expectNext("End") + sub.expectError(ex) + closedCounter.get shouldBe 1 + } + + "will close the resource when downstream cancel" in { + val closedCounter = new AtomicInteger(0) + val (pub, sub) = TestSource + .probe[Int] + .mapWithResource(() => newBufferedReader())((reader, count) => readLines(reader, count), + reader => { + reader.close() + closedCounter.incrementAndGet() + Some(List("End")) + }) + .mapConcat(identity) + .toMat(TestSink.probe)(Keep.both) + .run() + val subscription = sub.expectSubscription() + subscription.request(2) + pub.sendNext(1) + sub.expectNext(manyLinesArray(0)) + subscription.cancel() + pub.expectCancellation() + closedCounter.get shouldBe 1 + } + + "will close the resource when downstream fail" in { + val closedCounter = new AtomicInteger(0) + val (pub, sub) = TestSource + .probe[Int] + .mapWithResource(() => newBufferedReader())((reader, count) => readLines(reader, count), + reader => { + reader.close() + closedCounter.incrementAndGet() + Some(List("End")) + }) + .mapConcat(identity) + .toMat(TestSink.probe)(Keep.both) + .run() + sub.request(2) + pub.sendNext(2) + sub.expectNext(manyLinesArray(0)) + sub.expectNext(manyLinesArray(1)) + sub.cancel(ex) + pub.expectCancellationWithCause(ex) + closedCounter.get shouldBe 1 + } + + "will close the resource on abrupt materializer termination" in { + @nowarn("msg=deprecated") + val mat = ActorMaterializer() + val promise = Promise[Done]() + val matVal = Source + .single(1) + .mapWithResource(() => { + newBufferedReader() + })((reader, count) => readLines(reader, count), + reader => { + reader.close() + promise.complete(Success(Done)) + Some(List("End")) + }) + .mapConcat(identity) + .runWith(Sink.never)(mat) + mat.shutdown() + matVal.failed.futureValue shouldBe an[AbruptTerminationException] + Await.result(promise.future, 3.seconds) shouldBe Done + } + + } + override def afterTermination(): Unit = { + fs.close() + } +} diff --git 
a/stream/src/main/scala/org/apache/pekko/stream/impl/Stages.scala b/stream/src/main/scala/org/apache/pekko/stream/impl/Stages.scala index 8ec7409dc30..8d6b67501e4 100755 --- a/stream/src/main/scala/org/apache/pekko/stream/impl/Stages.scala +++ b/stream/src/main/scala/org/apache/pekko/stream/impl/Stages.scala @@ -42,6 +42,7 @@ import pekko.stream.Attributes._ val mapAsyncUnordered = name("mapAsyncUnordered") val mapAsyncPartition = name("mapAsyncPartition") val mapAsyncPartitionUnordered = name("mapAsyncPartitionUnordered") + val mapWithResource = name("mapWithResource") and IODispatcher val ask = name("ask") val grouped = name("grouped") val groupedWithin = name("groupedWithin") diff --git a/stream/src/main/scala/org/apache/pekko/stream/impl/fusing/Ops.scala b/stream/src/main/scala/org/apache/pekko/stream/impl/fusing/Ops.scala index cd8e26de59e..468f287d365 100644 --- a/stream/src/main/scala/org/apache/pekko/stream/impl/fusing/Ops.scala +++ b/stream/src/main/scala/org/apache/pekko/stream/impl/fusing/Ops.scala @@ -2225,7 +2225,7 @@ private[pekko] final class StatefulMap[S, In, Out](create: () => S, f: (S, In) = private val out = Outlet[Out]("StatefulMap.out") override val shape: FlowShape[In, Out] = FlowShape(in, out) - override protected def initialAttributes: Attributes = DefaultAttributes.statefulMap and SourceLocation.forLambda(f) + override protected def initialAttributes: Attributes = Attributes(SourceLocation.forLambda(f)) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) with InHandler with OutHandler { diff --git a/stream/src/main/scala/org/apache/pekko/stream/javadsl/Flow.scala b/stream/src/main/scala/org/apache/pekko/stream/javadsl/Flow.scala index 030f3ebfa4e..4bd0bfd2dba 100755 --- a/stream/src/main/scala/org/apache/pekko/stream/javadsl/Flow.scala +++ b/stream/src/main/scala/org/apache/pekko/stream/javadsl/Flow.scala @@ -770,6 +770,46 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr (s: S, out: Out) => f.apply(s, out).toScala, (s: S) => onComplete.apply(s).toScala)) + /** + * Transform each stream element with the help of a resource. + * + * The resource creation function is invoked once when the stream is materialized and the returned resource is passed to + * the mapping function for mapping the first element. The mapping function returns a mapped element to emit + * downstream. The returned `T` MUST NOT be `null` as it is illegal as stream element - according to the Reactive Streams specification. + * + * The `close` function is called only once when the upstream or downstream finishes or fails. You can do some clean-up here, + * and if the returned value is not empty, it will be emitted to the downstream if available, otherwise the value will be dropped. + * + * Early completion can be done with combination of the [[takeWhile]] operator. + * + * Adheres to the [[ActorAttributes.SupervisionStrategy]] attribute. + * + * You can configure the default dispatcher for this Source by changing the `akka.stream.materializer.blocking-io-dispatcher` or + * set it for a given Source by using [[ActorAttributes]]. 
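+ *
+ * A minimal usage sketch showing the three callbacks in order (illustrative only, not part of this
+ * patch; the `openConnection()` and `lookup` helpers are hypothetical placeholders for a blocking
+ * client API):
+ * {{{
+ * flow.mapWithResource(
+ *     () -> openConnection(),                              // create: invoked once when the stream is materialized
+ *     (connection, element) -> connection.lookup(element), // f: map each element using the open resource
+ *     connection -> {                                      // close: invoked exactly once at the end
+ *       connection.close();
+ *       return Optional.empty();                           // nothing extra to emit downstream
+ *     });
+ * }}}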
+   *
+   * '''Emits when''' the mapping function returns an element and downstream is ready to consume it
+   *
+   * '''Backpressures when''' downstream backpressures
+   *
+   * '''Completes when''' upstream completes
+   *
+   * '''Cancels when''' downstream cancels
+   *
+   * @tparam R the type of the resource
+   * @tparam T the type of the output elements
+   * @param create function that creates the resource
+   * @param f function that transforms the upstream element and the resource to output element
+   * @param close function that closes the resource, optionally outputting a last element
+   */
+  def mapWithResource[R, T](
+      create: java.util.function.Supplier[R],
+      f: java.util.function.BiFunction[R, Out, T],
+      close: java.util.function.Function[R, Optional[T]]): javadsl.Flow[In, T, Mat] =
+    new Flow(
+      delegate.mapWithResource(() => create.get())(
+        (resource, out) => f(resource, out),
+        resource => close.apply(resource).toScala))
+
   /**
    * Transform each input element into an `Iterable` of output elements that is
    * then flattened into the output stream. The transformation is meant to be stateful,
diff --git a/stream/src/main/scala/org/apache/pekko/stream/javadsl/Source.scala b/stream/src/main/scala/org/apache/pekko/stream/javadsl/Source.scala
index 36cb1823f9b..733b98a21cd 100755
--- a/stream/src/main/scala/org/apache/pekko/stream/javadsl/Source.scala
+++ b/stream/src/main/scala/org/apache/pekko/stream/javadsl/Source.scala
@@ -2500,6 +2500,46 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
         (s: S, out: Out) => f.apply(s, out).toScala,
         (s: S) => onComplete.apply(s).toScala))

+  /**
+   * Transform each stream element with the help of a resource.
+   *
+   * The resource creation function is invoked once when the stream is materialized, and the returned resource is passed to
+   * the mapping function for mapping the first element. The mapping function returns a mapped element to emit
+   * downstream. The returned `T` MUST NOT be `null`, as `null` is not a legal stream element according to the Reactive Streams specification.
+   *
+   * The `close` function is called only once, when the upstream or downstream finishes or fails. You can do some clean-up here,
+   * and if the returned value is non-empty, it will be emitted downstream if the downstream is still available; otherwise it will be dropped.
+   *
+   * Early completion can be achieved in combination with the [[takeWhile]] operator.
+   *
+   * Adheres to the [[ActorAttributes.SupervisionStrategy]] attribute.
+   *
+   * You can configure the default dispatcher for this operator by changing the `pekko.stream.materializer.blocking-io-dispatcher` or
+   * set it for a given operator by using [[ActorAttributes]].
+   *
+   * '''Emits when''' the mapping function returns an element and downstream is ready to consume it
+   *
+   * '''Backpressures when''' downstream backpressures
+   *
+   * '''Completes when''' upstream completes
+   *
+   * '''Cancels when''' downstream cancels
+   *
+   * @tparam R the type of the resource
+   * @tparam T the type of the output elements
+   * @param create function that creates the resource
+   * @param f function that transforms the upstream element and the resource to output element
+   * @param close function that closes the resource, optionally outputting a last element
+   */
+  def mapWithResource[R, T](
+      create: java.util.function.Supplier[R],
+      f: java.util.function.BiFunction[R, Out, T],
+      close: java.util.function.Function[R, Optional[T]]): javadsl.Source[T, Mat] =
+    new Source(
+      delegate.mapWithResource(() => create.get())(
+        (resource, out) => f(resource, out),
+        resource => close.apply(resource).toScala))
+
   /**
    * Transform each input element into an `Iterable` of output elements that is
    * then flattened into the output stream. The transformation is meant to be stateful,
diff --git a/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubFlow.scala b/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubFlow.scala
index eee009f8242..0680aae1257 100755
--- a/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubFlow.scala
+++ b/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubFlow.scala
@@ -244,6 +244,46 @@ class SubFlow[In, Out, Mat](
         (s: S, out: Out) => f.apply(s, out).toScala,
         (s: S) => onComplete.apply(s).toScala))

+  /**
+   * Transform each stream element with the help of a resource.
+   *
+   * The resource creation function is invoked once when the stream is materialized, and the returned resource is passed to
+   * the mapping function for mapping the first element. The mapping function returns a mapped element to emit
+   * downstream. The returned `T` MUST NOT be `null`, as `null` is not a legal stream element according to the Reactive Streams specification.
+   *
+   * The `close` function is called only once, when the upstream or downstream finishes or fails. You can do some clean-up here,
+   * and if the returned value is non-empty, it will be emitted downstream if the downstream is still available; otherwise it will be dropped.
+   *
+   * Early completion can be achieved in combination with the [[takeWhile]] operator.
+   *
+   * Adheres to the [[ActorAttributes.SupervisionStrategy]] attribute.
+   *
+   * You can configure the default dispatcher for this operator by changing the `pekko.stream.materializer.blocking-io-dispatcher` or
+   * set it for a given operator by using [[ActorAttributes]].
+   *
+   * '''Emits when''' the mapping function returns an element and downstream is ready to consume it
+   *
+   * '''Backpressures when''' downstream backpressures
+   *
+   * '''Completes when''' upstream completes
+   *
+   * '''Cancels when''' downstream cancels
+   *
+   * @tparam R the type of the resource
+   * @tparam T the type of the output elements
+   * @param create function that creates the resource
+   * @param f function that transforms the upstream element and the resource to output element
+   * @param close function that closes the resource, optionally outputting a last element
+   */
+  def mapWithResource[R, T](
+      create: java.util.function.Supplier[R],
+      f: java.util.function.BiFunction[R, Out, T],
+      close: java.util.function.Function[R, Optional[T]]): javadsl.SubFlow[In, T, Mat] =
+    new SubFlow(
+      delegate.mapWithResource(() => create.get())(
+        (resource, out) => f(resource, out),
+        resource => close.apply(resource).toScala))
+
   /**
    * Transform each input element into an `Iterable` of output elements that is
    * then flattened into the output stream. The transformation is meant to be stateful,
diff --git a/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubSource.scala b/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubSource.scala
index eb7e4d38a6f..4669021cdaf 100755
--- a/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubSource.scala
+++ b/stream/src/main/scala/org/apache/pekko/stream/javadsl/SubSource.scala
@@ -235,6 +235,46 @@ class SubSource[Out, Mat](
         (s: S, out: Out) => f.apply(s, out).toScala,
         (s: S) => onComplete.apply(s).toScala))

+  /**
+   * Transform each stream element with the help of a resource.
+   *
+   * The resource creation function is invoked once when the stream is materialized, and the returned resource is passed to
+   * the mapping function for mapping the first element. The mapping function returns a mapped element to emit
+   * downstream. The returned `T` MUST NOT be `null`, as `null` is not a legal stream element according to the Reactive Streams specification.
+   *
+   * The `close` function is called only once, when the upstream or downstream finishes or fails. You can do some clean-up here,
+   * and if the returned value is non-empty, it will be emitted downstream if the downstream is still available; otherwise it will be dropped.
+   *
+   * Early completion can be achieved in combination with the [[takeWhile]] operator.
+   *
+   * Adheres to the [[ActorAttributes.SupervisionStrategy]] attribute.
+   *
+   * You can configure the default dispatcher for this operator by changing the `pekko.stream.materializer.blocking-io-dispatcher` or
+   * set it for a given operator by using [[ActorAttributes]].
+   *
+   * '''Emits when''' the mapping function returns an element and downstream is ready to consume it
+   *
+   * '''Backpressures when''' downstream backpressures
+   *
+   * '''Completes when''' upstream completes
+   *
+   * '''Cancels when''' downstream cancels
+   *
+   * @tparam R the type of the resource
+   * @tparam T the type of the output elements
+   * @param create function that creates the resource
+   * @param f function that transforms the upstream element and the resource to output element
+   * @param close function that closes the resource, optionally outputting a last element
+   */
+  def mapWithResource[R, T](
+      create: java.util.function.Supplier[R],
+      f: java.util.function.BiFunction[R, Out, T],
+      close: java.util.function.Function[R, Optional[T]]): javadsl.SubSource[T, Mat] =
+    new SubSource(
+      delegate.mapWithResource(() => create.get())(
+        (resource, out) => f(resource, out),
+        resource => close.apply(resource).toScala))
+
   /**
    * Transform each input element into an `Iterable` of output elements that is
    * then flattened into the output stream. The transformation is meant to be stateful,
diff --git a/stream/src/main/scala/org/apache/pekko/stream/scaladsl/Flow.scala b/stream/src/main/scala/org/apache/pekko/stream/scaladsl/Flow.scala
index 9452ffc972e..aa7482f8b80 100755
--- a/stream/src/main/scala/org/apache/pekko/stream/scaladsl/Flow.scala
+++ b/stream/src/main/scala/org/apache/pekko/stream/scaladsl/Flow.scala
@@ -1082,7 +1082,46 @@ trait FlowOps[+Out, +Mat] {
    * @param onComplete a function that transforms the ongoing state into an optional output element
    */
   def statefulMap[S, T](create: () => S)(f: (S, Out) => (S, T), onComplete: S => Option[T]): Repr[T] =
-    via(new StatefulMap[S, Out, T](create, f, onComplete))
+    via(new StatefulMap[S, Out, T](create, f, onComplete).withAttributes(DefaultAttributes.statefulMap))
+
+  /**
+   * Transform each stream element with the help of a resource.
+   *
+   * The resource creation function is invoked once when the stream is materialized, and the returned resource is passed to
+   * the mapping function for mapping the first element. The mapping function returns a mapped element to emit
+   * downstream. The returned `T` MUST NOT be `null`, as `null` is not a legal stream element according to the Reactive Streams specification.
+   *
+   * The `close` function is called only once, when the upstream or downstream finishes or fails. You can do some clean-up here,
+   * and if the returned value is non-empty, it will be emitted downstream if the downstream is still available; otherwise it will be dropped.
+   *
+   * Early completion can be achieved in combination with the [[takeWhile]] operator.
+   *
+   * Adheres to the [[ActorAttributes.SupervisionStrategy]] attribute.
+   *
+   * You can configure the default dispatcher for this operator by changing the `pekko.stream.materializer.blocking-io-dispatcher` or
+   * set it for a given operator by using [[ActorAttributes]].
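+   *
+   * For example, a minimal illustrative sketch of mapping every element through a shared reader (the
+   * `lines.txt` file and the `BufferedReader` usage are assumptions for demonstration only, not part of this API):
+   * {{{
+   * Source(1 to 10)
+   *   .mapWithResource(() => new java.io.BufferedReader(new java.io.FileReader("lines.txt")))(
+   *     (reader, i) => s"$i: ${reader.readLine()}", // reuse the single reader for every element
+   *     reader => {
+   *       reader.close() // clean up once, when the stream ends for any reason
+   *       None           // or Some(lastElement) to emit a final element after closing
+   *     })
+   *   .runForeach(println)
+   * }}}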
+   *
+   * '''Emits when''' the mapping function returns an element and downstream is ready to consume it
+   *
+   * '''Backpressures when''' downstream backpressures
+   *
+   * '''Completes when''' upstream completes
+   *
+   * '''Cancels when''' downstream cancels
+   *
+   * @tparam R the type of the resource
+   * @tparam T the type of the output elements
+   * @param create function that creates the resource
+   * @param f function that transforms the upstream element and the resource to output element
+   * @param close function that closes the resource, optionally outputting a last element
+   */
+  def mapWithResource[R, T](create: () => R)(f: (R, Out) => T, close: R => Option[T]): Repr[T] =
+    via(
+      new StatefulMap[R, Out, T](
+        create,
+        (resource, out) => (resource, f(resource, out)),
+        resource => close(resource))
+        .withAttributes(DefaultAttributes.mapWithResource))

   /**
    * Transform each input element into an `Iterable` of output elements that is