+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -11925,22 +10641,24 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getWorkSpecFieldBuilder();
getFragmentRuntimeInfoFieldBuilder();
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
if (workSpecBuilder_ == null) {
- workSpec_ = null;
+ workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
} else {
workSpecBuilder_.clear();
}
@@ -11960,7 +10678,7 @@ public Builder clear() {
credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000080);
if (fragmentRuntimeInfoBuilder_ == null) {
- fragmentRuntimeInfo_ = null;
+ fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
} else {
fragmentRuntimeInfoBuilder_.clear();
}
@@ -11978,18 +10696,19 @@ public Builder clear() {
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -11998,113 +10717,79 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWor
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) != 0)) {
- if (workSpecBuilder_ == null) {
- result.workSpec_ = workSpec_;
- } else {
- result.workSpec_ = workSpecBuilder_.build();
- }
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
- if (((from_bitField0_ & 0x00000002) != 0)) {
+ if (workSpecBuilder_ == null) {
+ result.workSpec_ = workSpec_;
+ } else {
+ result.workSpec_ = workSpecBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.workSpecSignature_ = workSpecSignature_;
- if (((from_bitField0_ & 0x00000004) != 0)) {
- result.fragmentNumber_ = fragmentNumber_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
- if (((from_bitField0_ & 0x00000008) != 0)) {
- result.attemptNumber_ = attemptNumber_;
+ result.fragmentNumber_ = fragmentNumber_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
- if (((from_bitField0_ & 0x00000010) != 0)) {
+ result.attemptNumber_ = attemptNumber_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.containerIdString_ = containerIdString_;
- if (((from_bitField0_ & 0x00000020) != 0)) {
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.amHost_ = amHost_;
- if (((from_bitField0_ & 0x00000040) != 0)) {
- result.amPort_ = amPort_;
+ if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
- if (((from_bitField0_ & 0x00000080) != 0)) {
+ result.amPort_ = amPort_;
+ if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000080;
}
result.credentialsBinary_ = credentialsBinary_;
- if (((from_bitField0_ & 0x00000100) != 0)) {
- if (fragmentRuntimeInfoBuilder_ == null) {
- result.fragmentRuntimeInfo_ = fragmentRuntimeInfo_;
- } else {
- result.fragmentRuntimeInfo_ = fragmentRuntimeInfoBuilder_.build();
- }
+ if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000100;
}
- if (((from_bitField0_ & 0x00000200) != 0)) {
+ if (fragmentRuntimeInfoBuilder_ == null) {
+ result.fragmentRuntimeInfo_ = fragmentRuntimeInfo_;
+ } else {
+ result.fragmentRuntimeInfo_ = fragmentRuntimeInfoBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000200;
}
result.initialEventBytes_ = initialEventBytes_;
- if (((from_bitField0_ & 0x00000400) != 0)) {
+ if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000400;
}
result.initialEventSignature_ = initialEventSignature_;
- if (((from_bitField0_ & 0x00000800) != 0)) {
- result.isGuaranteed_ = isGuaranteed_;
+ if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
to_bitField0_ |= 0x00000800;
}
- if (((from_bitField0_ & 0x00001000) != 0)) {
+ result.isGuaranteed_ = isGuaranteed_;
+ if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
to_bitField0_ |= 0x00001000;
}
result.jwt_ = jwt_;
- if (((from_bitField0_ & 0x00002000) != 0)) {
- result.isExternalClientRequest_ = isExternalClientRequest_;
+ if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
to_bitField0_ |= 0x00002000;
}
+ result.isExternalClientRequest_ = isExternalClientRequest_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto)other);
@@ -12164,17 +10849,14 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasIsExternalClientRequest()) {
setIsExternalClientRequest(other.getIsExternalClientRequest());
}
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -12184,7 +10866,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -12194,23 +10876,22 @@ public Builder mergeFrom(
}
private int bitField0_;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary workSpec_;
- private com.google.protobuf.SingleFieldBuilderV3<
+ // optional .VertexOrBinary work_spec = 1;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder> workSpecBuilder_;
/**
* optional .VertexOrBinary work_spec = 1;
- * @return Whether the workSpec field is set.
*/
public boolean hasWorkSpec() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .VertexOrBinary work_spec = 1;
- * @return The workSpec.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary getWorkSpec() {
if (workSpecBuilder_ == null) {
- return workSpec_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance() : workSpec_;
+ return workSpec_;
} else {
return workSpecBuilder_.getMessage();
}
@@ -12250,8 +10931,7 @@ public Builder setWorkSpec(
*/
public Builder mergeWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary value) {
if (workSpecBuilder_ == null) {
- if (((bitField0_ & 0x00000001) != 0) &&
- workSpec_ != null &&
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
workSpec_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance()) {
workSpec_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.newBuilder(workSpec_).mergeFrom(value).buildPartial();
@@ -12270,7 +10950,7 @@ public Builder mergeWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonPr
*/
public Builder clearWorkSpec() {
if (workSpecBuilder_ == null) {
- workSpec_ = null;
+ workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
onChanged();
} else {
workSpecBuilder_.clear();
@@ -12293,20 +10973,19 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrB
if (workSpecBuilder_ != null) {
return workSpecBuilder_.getMessageOrBuilder();
} else {
- return workSpec_ == null ?
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance() : workSpec_;
+ return workSpec_;
}
}
/**
* optional .VertexOrBinary work_spec = 1;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder>
getWorkSpecFieldBuilder() {
if (workSpecBuilder_ == null) {
- workSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ workSpecBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder>(
- getWorkSpec(),
+ workSpec_,
getParentForChildren(),
isClean());
workSpec_ = null;
@@ -12314,27 +10993,22 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrB
return workSpecBuilder_;
}
+ // optional bytes work_spec_signature = 2;
private com.google.protobuf.ByteString workSpecSignature_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes work_spec_signature = 2;
- * @return Whether the workSpecSignature field is set.
*/
- @java.lang.Override
public boolean hasWorkSpecSignature() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional bytes work_spec_signature = 2;
- * @return The workSpecSignature.
*/
- @java.lang.Override
public com.google.protobuf.ByteString getWorkSpecSignature() {
return workSpecSignature_;
}
/**
* optional bytes work_spec_signature = 2;
- * @param value The workSpecSignature to set.
- * @return This builder for chaining.
*/
public Builder setWorkSpecSignature(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -12347,7 +11021,6 @@ public Builder setWorkSpecSignature(com.google.protobuf.ByteString value) {
}
/**
* optional bytes work_spec_signature = 2;
- * @return This builder for chaining.
*/
public Builder clearWorkSpecSignature() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -12356,27 +11029,22 @@ public Builder clearWorkSpecSignature() {
return this;
}
+ // optional int32 fragment_number = 3;
private int fragmentNumber_ ;
/**
* optional int32 fragment_number = 3;
- * @return Whether the fragmentNumber field is set.
*/
- @java.lang.Override
public boolean hasFragmentNumber() {
- return ((bitField0_ & 0x00000004) != 0);
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional int32 fragment_number = 3;
- * @return The fragmentNumber.
*/
- @java.lang.Override
public int getFragmentNumber() {
return fragmentNumber_;
}
/**
* optional int32 fragment_number = 3;
- * @param value The fragmentNumber to set.
- * @return This builder for chaining.
*/
public Builder setFragmentNumber(int value) {
bitField0_ |= 0x00000004;
@@ -12386,7 +11054,6 @@ public Builder setFragmentNumber(int value) {
}
/**
* optional int32 fragment_number = 3;
- * @return This builder for chaining.
*/
public Builder clearFragmentNumber() {
bitField0_ = (bitField0_ & ~0x00000004);
@@ -12395,27 +11062,22 @@ public Builder clearFragmentNumber() {
return this;
}
+ // optional int32 attempt_number = 4;
private int attemptNumber_ ;
/**
* optional int32 attempt_number = 4;
- * @return Whether the attemptNumber field is set.
*/
- @java.lang.Override
public boolean hasAttemptNumber() {
- return ((bitField0_ & 0x00000008) != 0);
+ return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* optional int32 attempt_number = 4;
- * @return The attemptNumber.
*/
- @java.lang.Override
public int getAttemptNumber() {
return attemptNumber_;
}
/**
* optional int32 attempt_number = 4;
- * @param value The attemptNumber to set.
- * @return This builder for chaining.
*/
public Builder setAttemptNumber(int value) {
bitField0_ |= 0x00000008;
@@ -12425,7 +11087,6 @@ public Builder setAttemptNumber(int value) {
}
/**
* optional int32 attempt_number = 4;
- * @return This builder for chaining.
*/
public Builder clearAttemptNumber() {
bitField0_ = (bitField0_ & ~0x00000008);
@@ -12434,27 +11095,23 @@ public Builder clearAttemptNumber() {
return this;
}
+ // optional string container_id_string = 5;
private java.lang.Object containerIdString_ = "";
/**
* optional string container_id_string = 5;
- * @return Whether the containerIdString field is set.
*/
public boolean hasContainerIdString() {
- return ((bitField0_ & 0x00000010) != 0);
+ return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* optional string container_id_string = 5;
- * @return The containerIdString.
*/
public java.lang.String getContainerIdString() {
java.lang.Object ref = containerIdString_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- containerIdString_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ containerIdString_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -12462,7 +11119,6 @@ public java.lang.String getContainerIdString() {
}
/**
* optional string container_id_string = 5;
- * @return The bytes for containerIdString.
*/
public com.google.protobuf.ByteString
getContainerIdStringBytes() {
@@ -12479,8 +11135,6 @@ public java.lang.String getContainerIdString() {
}
/**
* optional string container_id_string = 5;
- * @param value The containerIdString to set.
- * @return This builder for chaining.
*/
public Builder setContainerIdString(
java.lang.String value) {
@@ -12494,7 +11148,6 @@ public Builder setContainerIdString(
}
/**
* optional string container_id_string = 5;
- * @return This builder for chaining.
*/
public Builder clearContainerIdString() {
bitField0_ = (bitField0_ & ~0x00000010);
@@ -12504,8 +11157,6 @@ public Builder clearContainerIdString() {
}
/**
* optional string container_id_string = 5;
- * @param value The bytes for containerIdString to set.
- * @return This builder for chaining.
*/
public Builder setContainerIdStringBytes(
com.google.protobuf.ByteString value) {
@@ -12518,27 +11169,23 @@ public Builder setContainerIdStringBytes(
return this;
}
+ // optional string am_host = 6;
private java.lang.Object amHost_ = "";
/**
* optional string am_host = 6;
- * @return Whether the amHost field is set.
*/
public boolean hasAmHost() {
- return ((bitField0_ & 0x00000020) != 0);
+ return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* optional string am_host = 6;
- * @return The amHost.
*/
public java.lang.String getAmHost() {
java.lang.Object ref = amHost_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- amHost_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ amHost_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -12546,7 +11193,6 @@ public java.lang.String getAmHost() {
}
/**
* optional string am_host = 6;
- * @return The bytes for amHost.
*/
public com.google.protobuf.ByteString
getAmHostBytes() {
@@ -12563,8 +11209,6 @@ public java.lang.String getAmHost() {
}
/**
* optional string am_host = 6;
- * @param value The amHost to set.
- * @return This builder for chaining.
*/
public Builder setAmHost(
java.lang.String value) {
@@ -12578,7 +11222,6 @@ public Builder setAmHost(
}
/**
* optional string am_host = 6;
- * @return This builder for chaining.
*/
public Builder clearAmHost() {
bitField0_ = (bitField0_ & ~0x00000020);
@@ -12588,8 +11231,6 @@ public Builder clearAmHost() {
}
/**
* optional string am_host = 6;
- * @param value The bytes for amHost to set.
- * @return This builder for chaining.
*/
public Builder setAmHostBytes(
com.google.protobuf.ByteString value) {
@@ -12602,27 +11243,22 @@ public Builder setAmHostBytes(
return this;
}
+ // optional int32 am_port = 7;
private int amPort_ ;
/**
* optional int32 am_port = 7;
- * @return Whether the amPort field is set.
*/
- @java.lang.Override
public boolean hasAmPort() {
- return ((bitField0_ & 0x00000040) != 0);
+ return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* optional int32 am_port = 7;
- * @return The amPort.
*/
- @java.lang.Override
public int getAmPort() {
return amPort_;
}
/**
* optional int32 am_port = 7;
- * @param value The amPort to set.
- * @return This builder for chaining.
*/
public Builder setAmPort(int value) {
bitField0_ |= 0x00000040;
@@ -12632,7 +11268,6 @@ public Builder setAmPort(int value) {
}
/**
* optional int32 am_port = 7;
- * @return This builder for chaining.
*/
public Builder clearAmPort() {
bitField0_ = (bitField0_ & ~0x00000040);
@@ -12641,39 +11276,34 @@ public Builder clearAmPort() {
return this;
}
+ // optional bytes credentials_binary = 8;
private com.google.protobuf.ByteString credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
/**
+ * optional bytes credentials_binary = 8;
+ *
*
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
*
- *
- * optional bytes credentials_binary = 8;
- * @return Whether the credentialsBinary field is set.
*/
- @java.lang.Override
public boolean hasCredentialsBinary() {
- return ((bitField0_ & 0x00000080) != 0);
+ return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
+ * optional bytes credentials_binary = 8;
+ *
*
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
*
- *
- * optional bytes credentials_binary = 8;
- * @return The credentialsBinary.
*/
- @java.lang.Override
public com.google.protobuf.ByteString getCredentialsBinary() {
return credentialsBinary_;
}
/**
+ * optional bytes credentials_binary = 8;
+ *
*
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
*
- *
- * optional bytes credentials_binary = 8;
- * @param value The credentialsBinary to set.
- * @return This builder for chaining.
*/
public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -12685,12 +11315,11 @@ public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
return this;
}
/**
+ * optional bytes credentials_binary = 8;
+ *
*
* Credentials are not signed - the client can add e.g. his own HDFS tokens.
*
- *
- * optional bytes credentials_binary = 8;
- * @return This builder for chaining.
*/
public Builder clearCredentialsBinary() {
bitField0_ = (bitField0_ & ~0x00000080);
@@ -12699,41 +11328,40 @@ public Builder clearCredentialsBinary() {
return this;
}
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo fragmentRuntimeInfo_;
- private com.google.protobuf.SingleFieldBuilderV3<
+ // optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder> fragmentRuntimeInfoBuilder_;
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
- * @return Whether the fragmentRuntimeInfo field is set.
*/
public boolean hasFragmentRuntimeInfo() {
- return ((bitField0_ & 0x00000100) != 0);
+ return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
- * @return The fragmentRuntimeInfo.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo getFragmentRuntimeInfo() {
if (fragmentRuntimeInfoBuilder_ == null) {
- return fragmentRuntimeInfo_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance() : fragmentRuntimeInfo_;
+ return fragmentRuntimeInfo_;
} else {
return fragmentRuntimeInfoBuilder_.getMessage();
}
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
public Builder setFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo value) {
if (fragmentRuntimeInfoBuilder_ == null) {
@@ -12749,11 +11377,11 @@ public Builder setFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.Lla
return this;
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
public Builder setFragmentRuntimeInfo(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder builderForValue) {
@@ -12767,16 +11395,15 @@ public Builder setFragmentRuntimeInfo(
return this;
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
public Builder mergeFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo value) {
if (fragmentRuntimeInfoBuilder_ == null) {
- if (((bitField0_ & 0x00000100) != 0) &&
- fragmentRuntimeInfo_ != null &&
+ if (((bitField0_ & 0x00000100) == 0x00000100) &&
fragmentRuntimeInfo_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance()) {
fragmentRuntimeInfo_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.newBuilder(fragmentRuntimeInfo_).mergeFrom(value).buildPartial();
@@ -12791,15 +11418,15 @@ public Builder mergeFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.L
return this;
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
public Builder clearFragmentRuntimeInfo() {
if (fragmentRuntimeInfoBuilder_ == null) {
- fragmentRuntimeInfo_ = null;
+ fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
onChanged();
} else {
fragmentRuntimeInfoBuilder_.clear();
@@ -12808,11 +11435,11 @@ public Builder clearFragmentRuntimeInfo() {
return this;
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder getFragmentRuntimeInfoBuilder() {
bitField0_ |= 0x00000100;
@@ -12820,34 +11447,33 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentR
return getFragmentRuntimeInfoFieldBuilder().getBuilder();
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder getFragmentRuntimeInfoOrBuilder() {
if (fragmentRuntimeInfoBuilder_ != null) {
return fragmentRuntimeInfoBuilder_.getMessageOrBuilder();
} else {
- return fragmentRuntimeInfo_ == null ?
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance() : fragmentRuntimeInfo_;
+ return fragmentRuntimeInfo_;
}
}
/**
+ * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
+ *
*
* Not supported/honored for external clients right now.
*
- *
- * optional .FragmentRuntimeInfo fragment_runtime_info = 9;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder>
getFragmentRuntimeInfoFieldBuilder() {
if (fragmentRuntimeInfoBuilder_ == null) {
- fragmentRuntimeInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ fragmentRuntimeInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder>(
- getFragmentRuntimeInfo(),
+ fragmentRuntimeInfo_,
getParentForChildren(),
isClean());
fragmentRuntimeInfo_ = null;
@@ -12855,39 +11481,34 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentR
return fragmentRuntimeInfoBuilder_;
}
+ // optional bytes initial_event_bytes = 10;
private com.google.protobuf.ByteString initialEventBytes_ = com.google.protobuf.ByteString.EMPTY;
/**
+ * optional bytes initial_event_bytes = 10;
+ *
*
* Serialized (and signed) NotTezEvent; used only for external clients for now.
*
- *
- * optional bytes initial_event_bytes = 10;
- * @return Whether the initialEventBytes field is set.
*/
- @java.lang.Override
public boolean hasInitialEventBytes() {
- return ((bitField0_ & 0x00000200) != 0);
+ return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
+ * optional bytes initial_event_bytes = 10;
+ *
*
* Serialized (and signed) NotTezEvent; used only for external clients for now.
*
- *
- * optional bytes initial_event_bytes = 10;
- * @return The initialEventBytes.
*/
- @java.lang.Override
public com.google.protobuf.ByteString getInitialEventBytes() {
return initialEventBytes_;
}
/**
+ * optional bytes initial_event_bytes = 10;
+ *
*
* Serialized (and signed) NotTezEvent; used only for external clients for now.
*
- *
- * optional bytes initial_event_bytes = 10;
- * @param value The initialEventBytes to set.
- * @return This builder for chaining.
*/
public Builder setInitialEventBytes(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -12899,12 +11520,11 @@ public Builder setInitialEventBytes(com.google.protobuf.ByteString value) {
return this;
}
/**
+ * optional bytes initial_event_bytes = 10;
+ *
*
* Serialized (and signed) NotTezEvent; used only for external clients for now.
*
- *
- * optional bytes initial_event_bytes = 10;
- * @return This builder for chaining.
*/
public Builder clearInitialEventBytes() {
bitField0_ = (bitField0_ & ~0x00000200);
@@ -12913,27 +11533,22 @@ public Builder clearInitialEventBytes() {
return this;
}
+ // optional bytes initial_event_signature = 11;
private com.google.protobuf.ByteString initialEventSignature_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes initial_event_signature = 11;
- * @return Whether the initialEventSignature field is set.
*/
- @java.lang.Override
public boolean hasInitialEventSignature() {
- return ((bitField0_ & 0x00000400) != 0);
+ return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* optional bytes initial_event_signature = 11;
- * @return The initialEventSignature.
*/
- @java.lang.Override
public com.google.protobuf.ByteString getInitialEventSignature() {
return initialEventSignature_;
}
/**
* optional bytes initial_event_signature = 11;
- * @param value The initialEventSignature to set.
- * @return This builder for chaining.
*/
public Builder setInitialEventSignature(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -12946,7 +11561,6 @@ public Builder setInitialEventSignature(com.google.protobuf.ByteString value) {
}
/**
* optional bytes initial_event_signature = 11;
- * @return This builder for chaining.
*/
public Builder clearInitialEventSignature() {
bitField0_ = (bitField0_ & ~0x00000400);
@@ -12955,27 +11569,22 @@ public Builder clearInitialEventSignature() {
return this;
}
+ // optional bool is_guaranteed = 12 [default = false];
private boolean isGuaranteed_ ;
/**
* optional bool is_guaranteed = 12 [default = false];
- * @return Whether the isGuaranteed field is set.
*/
- @java.lang.Override
public boolean hasIsGuaranteed() {
- return ((bitField0_ & 0x00000800) != 0);
+ return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* optional bool is_guaranteed = 12 [default = false];
- * @return The isGuaranteed.
*/
- @java.lang.Override
public boolean getIsGuaranteed() {
return isGuaranteed_;
}
/**
* optional bool is_guaranteed = 12 [default = false];
- * @param value The isGuaranteed to set.
- * @return This builder for chaining.
*/
public Builder setIsGuaranteed(boolean value) {
bitField0_ |= 0x00000800;
@@ -12985,7 +11594,6 @@ public Builder setIsGuaranteed(boolean value) {
}
/**
* optional bool is_guaranteed = 12 [default = false];
- * @return This builder for chaining.
*/
public Builder clearIsGuaranteed() {
bitField0_ = (bitField0_ & ~0x00000800);
@@ -12994,27 +11602,23 @@ public Builder clearIsGuaranteed() {
return this;
}
+ // optional string jwt = 13;
private java.lang.Object jwt_ = "";
/**
* optional string jwt = 13;
- * @return Whether the jwt field is set.
*/
public boolean hasJwt() {
- return ((bitField0_ & 0x00001000) != 0);
+ return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* optional string jwt = 13;
- * @return The jwt.
*/
public java.lang.String getJwt() {
java.lang.Object ref = jwt_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- jwt_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ jwt_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -13022,7 +11626,6 @@ public java.lang.String getJwt() {
}
/**
* optional string jwt = 13;
- * @return The bytes for jwt.
*/
public com.google.protobuf.ByteString
getJwtBytes() {
@@ -13039,8 +11642,6 @@ public java.lang.String getJwt() {
}
/**
* optional string jwt = 13;
- * @param value The jwt to set.
- * @return This builder for chaining.
*/
public Builder setJwt(
java.lang.String value) {
@@ -13054,7 +11655,6 @@ public Builder setJwt(
}
/**
* optional string jwt = 13;
- * @return This builder for chaining.
*/
public Builder clearJwt() {
bitField0_ = (bitField0_ & ~0x00001000);
@@ -13064,8 +11664,6 @@ public Builder clearJwt() {
}
/**
* optional string jwt = 13;
- * @param value The bytes for jwt to set.
- * @return This builder for chaining.
*/
public Builder setJwtBytes(
com.google.protobuf.ByteString value) {
@@ -13078,27 +11676,22 @@ public Builder setJwtBytes(
return this;
}
+ // optional bool is_external_client_request = 14 [default = false];
private boolean isExternalClientRequest_ ;
/**
* optional bool is_external_client_request = 14 [default = false];
- * @return Whether the isExternalClientRequest field is set.
*/
- @java.lang.Override
public boolean hasIsExternalClientRequest() {
- return ((bitField0_ & 0x00002000) != 0);
+ return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
* optional bool is_external_client_request = 14 [default = false];
- * @return The isExternalClientRequest.
*/
- @java.lang.Override
public boolean getIsExternalClientRequest() {
return isExternalClientRequest_;
}
/**
* optional bool is_external_client_request = 14 [default = false];
- * @param value The isExternalClientRequest to set.
- * @return This builder for chaining.
*/
public Builder setIsExternalClientRequest(boolean value) {
bitField0_ |= 0x00002000;
@@ -13108,7 +11701,6 @@ public Builder setIsExternalClientRequest(boolean value) {
}
/**
* optional bool is_external_client_request = 14 [default = false];
- * @return This builder for chaining.
*/
public Builder clearIsExternalClientRequest() {
bitField0_ = (bitField0_ & ~0x00002000);
@@ -13116,88 +11708,43 @@ public Builder clearIsExternalClientRequest() {
onChanged();
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:SubmitWorkRequestProto)
}
- // @@protoc_insertion_point(class_scope:SubmitWorkRequestProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<SubmitWorkRequestProto>
- PARSER = new com.google.protobuf.AbstractParser<SubmitWorkRequestProto>() {
- @java.lang.Override
- public SubmitWorkRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SubmitWorkRequestProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<SubmitWorkRequestProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<SubmitWorkRequestProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new SubmitWorkRequestProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:SubmitWorkRequestProto)
}
- public interface RegisterDagRequestProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:RegisterDagRequestProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface RegisterDagRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ // optional string user = 1;
/**
* optional string user = 1;
- * @return Whether the user field is set.
*/
boolean hasUser();
/**
* optional string user = 1;
- * @return The user.
*/
java.lang.String getUser();
/**
* optional string user = 1;
- * @return The bytes for user.
*/
com.google.protobuf.ByteString
getUserBytes();
+ // required .QueryIdentifierProto query_identifier = 2;
/**
* required .QueryIdentifierProto query_identifier = 2;
- * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* required .QueryIdentifierProto query_identifier = 2;
- * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -13205,14 +11752,13 @@ public interface RegisterDagRequestProtoOrBuilder extends
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
+ // optional bytes credentials_binary = 3;
/**
* optional bytes credentials_binary = 3;
- * @return Whether the credentialsBinary field is set.
*/
boolean hasCredentialsBinary();
/**
* optional bytes credentials_binary = 3;
- * @return The credentialsBinary.
*/
com.google.protobuf.ByteString getCredentialsBinary();
}
@@ -13220,39 +11766,35 @@ public interface RegisterDagRequestProtoOrBuilder extends
* Protobuf type {@code RegisterDagRequestProto}
*/
public static final class RegisterDagRequestProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:RegisterDagRequestProto)
- RegisterDagRequestProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements RegisterDagRequestProtoOrBuilder {
// Use RegisterDagRequestProto.newBuilder() to construct.
- private RegisterDagRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private RegisterDagRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private RegisterDagRequestProto() {
- user_ = "";
- credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
+ private RegisterDagRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final RegisterDagRequestProto defaultInstance;
+ public static RegisterDagRequestProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new RegisterDagRequestProto();
+ public RegisterDagRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private RegisterDagRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -13264,15 +11806,21 @@ private RegisterDagRequestProto(
case 0:
done = true;
break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
case 10: {
- com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
- user_ = bs;
+ user_ = input.readBytes();
break;
}
case 18: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000002) != 0)) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -13288,22 +11836,13 @@ private RegisterDagRequestProto(
credentialsBinary_ = input.readBytes();
break;
}
- default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -13314,30 +11853,41 @@ private RegisterDagRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.Builder.class);
}
+ public static com.google.protobuf.Parser<RegisterDagRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<RegisterDagRequestProto>() {
+ public RegisterDagRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RegisterDagRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<RegisterDagRequestProto> getParserForType() {
+ return PARSER;
+ }
+
private int bitField0_;
+ // optional string user = 1;
public static final int USER_FIELD_NUMBER = 1;
- private volatile java.lang.Object user_;
+ private java.lang.Object user_;
/**
* optional string user = 1;
- * @return Whether the user field is set.
*/
- @java.lang.Override
public boolean hasUser() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional string user = 1;
- * @return The user.
*/
- @java.lang.Override
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (ref instanceof java.lang.String) {
@@ -13354,9 +11904,7 @@ public java.lang.String getUser() {
}
/**
* optional string user = 1;
- * @return The bytes for user.
*/
- @java.lang.Override
public com.google.protobuf.ByteString
getUserBytes() {
java.lang.Object ref = user_;
@@ -13371,57 +11919,53 @@ public java.lang.String getUser() {
}
}
+ // required .QueryIdentifierProto query_identifier = 2;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 2;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* required .QueryIdentifierProto query_identifier = 2;
- * @return Whether the queryIdentifier field is set.
*/
- @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required .QueryIdentifierProto query_identifier = 2;
- * @return The queryIdentifier.
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
/**
* required .QueryIdentifierProto query_identifier = 2;
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
+ // optional bytes credentials_binary = 3;
public static final int CREDENTIALS_BINARY_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString credentialsBinary_;
/**
* optional bytes credentials_binary = 3;
- * @return Whether the credentialsBinary field is set.
*/
- @java.lang.Override
public boolean hasCredentialsBinary() {
- return ((bitField0_ & 0x00000004) != 0);
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional bytes credentials_binary = 3;
- * @return The credentialsBinary.
*/
- @java.lang.Override
public com.google.protobuf.ByteString getCredentialsBinary() {
return credentialsBinary_;
}
+ private void initFields() {
+ user_ = "";
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
+ }
private byte memoizedIsInitialized = -1;
- @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
if (!hasQueryIdentifier()) {
memoizedIsInitialized = 0;
@@ -13431,43 +11975,51 @@ public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (((bitField0_ & 0x00000001) != 0)) {
- com.google.protobuf.GeneratedMessageV3.writeString(output, 1, user_);
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getUserBytes());
}
- if (((bitField0_ & 0x00000002) != 0)) {
- output.writeMessage(2, getQueryIdentifier());
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, queryIdentifier_);
}
- if (((bitField0_ & 0x00000004) != 0)) {
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, credentialsBinary_);
}
- unknownFields.writeTo(output);
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) != 0)) {
- size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, user_);
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getUserBytes());
}
- if (((bitField0_ & 0x00000002) != 0)) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(2, getQueryIdentifier());
+ .computeMessageSize(2, queryIdentifier_);
}
- if (((bitField0_ & 0x00000004) != 0)) {
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, credentialsBinary_);
}
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -13478,32 +12030,35 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) obj;
- if (hasUser() != other.hasUser()) return false;
+ boolean result = true;
+ result = result && (hasUser() == other.hasUser());
if (hasUser()) {
- if (!getUser()
- .equals(other.getUser())) return false;
+ result = result && getUser()
+ .equals(other.getUser());
}
- if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
+ result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
if (hasQueryIdentifier()) {
- if (!getQueryIdentifier()
- .equals(other.getQueryIdentifier())) return false;
+ result = result && getQueryIdentifier()
+ .equals(other.getQueryIdentifier());
}
- if (hasCredentialsBinary() != other.hasCredentialsBinary()) return false;
+ result = result && (hasCredentialsBinary() == other.hasCredentialsBinary());
if (hasCredentialsBinary()) {
- if (!getCredentialsBinary()
- .equals(other.getCredentialsBinary())) return false;
+ result = result && getCredentialsBinary()
+ .equals(other.getCredentialsBinary());
}
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasUser()) {
hash = (37 * hash) + USER_FIELD_NUMBER;
hash = (53 * hash) + getUser().hashCode();
@@ -13516,22 +12071,11 @@ public int hashCode() {
hash = (37 * hash) + CREDENTIALS_BINARY_FIELD_NUMBER;
hash = (53 * hash) + getCredentialsBinary().hashCode();
}
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -13555,59 +12099,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Re
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -13615,16 +12146,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code RegisterDagRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:RegisterDagRequestProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -13637,23 +12166,25 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
user_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
} else {
queryIdentifierBuilder_.clear();
}
@@ -13663,18 +12194,19 @@ public Builder clear() {
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagRequestProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -13683,24 +12215,23 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterD
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.user_ = user_;
- if (((from_bitField0_ & 0x00000002) != 0)) {
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
- if (((from_bitField0_ & 0x00000004) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.credentialsBinary_ = credentialsBinary_;
@@ -13709,39 +12240,6 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterD
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto)other);
@@ -13764,20 +12262,18 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasCredentialsBinary()) {
setCredentialsBinary(other.getCredentialsBinary());
}
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
if (!hasQueryIdentifier()) {
+
return false;
}
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -13787,7 +12283,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -13797,27 +12293,23 @@ public Builder mergeFrom(
}
private int bitField0_;
+ // optional string user = 1;
private java.lang.Object user_ = "";
/**
* optional string user = 1;
- * @return Whether the user field is set.
*/
public boolean hasUser() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional string user = 1;
- * @return The user.
*/
public java.lang.String getUser() {
java.lang.Object ref = user_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- user_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ user_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -13825,7 +12317,6 @@ public java.lang.String getUser() {
}
/**
* optional string user = 1;
- * @return The bytes for user.
*/
public com.google.protobuf.ByteString
getUserBytes() {
@@ -13842,8 +12333,6 @@ public java.lang.String getUser() {
}
/**
* optional string user = 1;
- * @param value The user to set.
- * @return This builder for chaining.
*/
public Builder setUser(
java.lang.String value) {
@@ -13857,7 +12346,6 @@ public Builder setUser(
}
/**
* optional string user = 1;
- * @return This builder for chaining.
*/
public Builder clearUser() {
bitField0_ = (bitField0_ & ~0x00000001);
@@ -13867,8 +12355,6 @@ public Builder clearUser() {
}
/**
* optional string user = 1;
- * @param value The bytes for user to set.
- * @return This builder for chaining.
*/
public Builder setUserBytes(
com.google.protobuf.ByteString value) {
@@ -13881,23 +12367,22 @@ public Builder setUserBytes(
return this;
}
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
- private com.google.protobuf.SingleFieldBuilderV3<
+ // required .QueryIdentifierProto query_identifier = 2;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* required .QueryIdentifierProto query_identifier = 2;
- * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* required .QueryIdentifierProto query_identifier = 2;
- * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -13937,8 +12422,7 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000002) != 0) &&
- queryIdentifier_ != null &&
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -13957,7 +12441,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -13980,20 +12464,19 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_ == null ?
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
}
/**
* required .QueryIdentifierProto query_identifier = 2;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- getQueryIdentifier(),
+ queryIdentifier_,
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -14001,27 +12484,22 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
+ // optional bytes credentials_binary = 3;
private com.google.protobuf.ByteString credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
/**
* optional bytes credentials_binary = 3;
- * @return Whether the credentialsBinary field is set.
*/
- @java.lang.Override
public boolean hasCredentialsBinary() {
- return ((bitField0_ & 0x00000004) != 0);
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional bytes credentials_binary = 3;
- * @return The credentialsBinary.
*/
- @java.lang.Override
public com.google.protobuf.ByteString getCredentialsBinary() {
return credentialsBinary_;
}
/**
* optional bytes credentials_binary = 3;
- * @param value The credentialsBinary to set.
- * @return This builder for chaining.
*/
public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
if (value == null) {
@@ -14034,7 +12512,6 @@ public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
}
/**
* optional bytes credentials_binary = 3;
- * @return This builder for chaining.
*/
public Builder clearCredentialsBinary() {
bitField0_ = (bitField0_ & ~0x00000004);
@@ -14042,98 +12519,54 @@ public Builder clearCredentialsBinary() {
onChanged();
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:RegisterDagRequestProto)
}
- // @@protoc_insertion_point(class_scope:RegisterDagRequestProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto();
+ defaultInstance = new RegisterDagRequestProto(true);
+ defaultInstance.initFields();
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
+ // @@protoc_insertion_point(class_scope:RegisterDagRequestProto)
+ }
+
+ public interface RegisterDagResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code RegisterDagResponseProto}
+ */
+ public static final class RegisterDagResponseProto extends
+ com.google.protobuf.GeneratedMessage
+ implements RegisterDagResponseProtoOrBuilder {
+ // Use RegisterDagResponseProto.newBuilder() to construct.
+ private RegisterDagResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
+ private RegisterDagResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
- @java.lang.Deprecated public static final com.google.protobuf.Parser<RegisterDagRequestProto>
- PARSER = new com.google.protobuf.AbstractParser<RegisterDagRequestProto>() {
- @java.lang.Override
- public RegisterDagRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new RegisterDagRequestProto(input, extensionRegistry);
- }
- };
+ private static final RegisterDagResponseProto defaultInstance;
+ public static RegisterDagResponseProto getDefaultInstance() {
+ return defaultInstance;
+ }
- public static com.google.protobuf.Parser<RegisterDagRequestProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<RegisterDagRequestProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
- }
-
- }
-
- public interface RegisterDagResponseProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:RegisterDagResponseProto)
- com.google.protobuf.MessageOrBuilder {
- }
- /**
- * Protobuf type {@code RegisterDagResponseProto}
- */
- public static final class RegisterDagResponseProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:RegisterDagResponseProto)
- RegisterDagResponseProtoOrBuilder {
- private static final long serialVersionUID = 0L;
- // Use RegisterDagResponseProto.newBuilder() to construct.
- private RegisterDagResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
- super(builder);
- }
- private RegisterDagResponseProto() {
- }
-
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new RegisterDagResponseProto();
+ public RegisterDagResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private RegisterDagResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -14145,8 +12578,8 @@ private RegisterDagResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
done = true;
}
break;
@@ -14155,11 +12588,9 @@ private RegisterDagResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -14170,42 +12601,63 @@ private RegisterDagResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.Builder.class);
}
- private byte memoizedIsInitialized = -1;
+ public static com.google.protobuf.Parser<RegisterDagResponseProto> PARSER =
+ new com.google.protobuf.AbstractParser<RegisterDagResponseProto>() {
+ public RegisterDagResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RegisterDagResponseProto(input, extensionRegistry);
+ }
+ };
+
@java.lang.Override
+ public com.google.protobuf.Parser<RegisterDagResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- unknownFields.writeTo(output);
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -14216,33 +12668,25 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto) obj;
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -14266,59 +12710,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Re
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -14326,16 +12757,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code RegisterDagResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:RegisterDagResponseProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -14348,33 +12777,36 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_RegisterDagResponseProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -14383,46 +12815,12 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterD
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto(this);
onBuilt();
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto)other);
@@ -14434,17 +12832,14 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -14454,7 +12849,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -14462,87 +12857,42 @@ public Builder mergeFrom(
}
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:RegisterDagResponseProto)
}
- // @@protoc_insertion_point(class_scope:RegisterDagResponseProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<RegisterDagResponseProto>
- PARSER = new com.google.protobuf.AbstractParser<RegisterDagResponseProto>() {
- @java.lang.Override
- public RegisterDagResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new RegisterDagResponseProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<RegisterDagResponseProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<RegisterDagResponseProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagResponseProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new RegisterDagResponseProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:RegisterDagResponseProto)
}
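  // -------------------------------------------------------------------------
  // Illustrative sketch only; not part of the generated file or of this patch.
  // It shows how caller code typically drives the protobuf 2.5-style
  // (pre-GeneratedMessageV3) builder and parse API restored above. The helper
  // name, the "hive" user string, and the use of
  // QueryIdentifierProto.getDefaultInstance() are placeholder assumptions, and
  // the snippet presumes it sits inside LlapDaemonProtocolProtos so the nested
  // message classes resolve without qualification.
  private static RegisterDagRequestProto registerDagExample()
      throws com.google.protobuf.InvalidProtocolBufferException {
    RegisterDagRequestProto request = RegisterDagRequestProto.newBuilder()
        .setUser("hive")                                                // optional string user = 1
        .setQueryIdentifier(QueryIdentifierProto.getDefaultInstance())  // required .QueryIdentifierProto query_identifier = 2
        .build();  // build() throws if the required query_identifier field were left unset
    // Round-trip through the static parse helpers declared on the message class.
    return RegisterDagRequestProto.parseFrom(request.toByteString());
  }
  // -------------------------------------------------------------------------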
- public interface SubmitWorkResponseProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:SubmitWorkResponseProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface SubmitWorkResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ // optional .SubmissionStateProto submission_state = 1;
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return Whether the submissionState field is set.
*/
boolean hasSubmissionState();
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return The submissionState.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState();
+ // optional string unique_node_id = 2;
/**
* optional string unique_node_id = 2;
- * @return Whether the uniqueNodeId field is set.
*/
boolean hasUniqueNodeId();
/**
* optional string unique_node_id = 2;
- * @return The uniqueNodeId.
*/
java.lang.String getUniqueNodeId();
/**
* optional string unique_node_id = 2;
- * @return The bytes for uniqueNodeId.
*/
com.google.protobuf.ByteString
getUniqueNodeIdBytes();
@@ -14551,39 +12901,35 @@ public interface SubmitWorkResponseProtoOrBuilder extends
* Protobuf type {@code SubmitWorkResponseProto}
*/
public static final class SubmitWorkResponseProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:SubmitWorkResponseProto)
- SubmitWorkResponseProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements SubmitWorkResponseProtoOrBuilder {
// Use SubmitWorkResponseProto.newBuilder() to construct.
- private SubmitWorkResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private SubmitWorkResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private SubmitWorkResponseProto() {
- submissionState_ = 1;
- uniqueNodeId_ = "";
+ private SubmitWorkResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SubmitWorkResponseProto defaultInstance;
+ public static SubmitWorkResponseProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new SubmitWorkResponseProto();
+ public SubmitWorkResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private SubmitWorkResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -14595,40 +12941,36 @@ private SubmitWorkResponseProto(
case 0:
done = true;
break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
case 8: {
int rawValue = input.readEnum();
- @SuppressWarnings("deprecation")
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto value = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
- submissionState_ = rawValue;
+ submissionState_ = value;
}
break;
}
case 18: {
- com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- uniqueNodeId_ = bs;
- break;
- }
- default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
- done = true;
- }
+ uniqueNodeId_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -14639,49 +12981,57 @@ private SubmitWorkResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.Builder.class);
}
+ public static com.google.protobuf.Parser<SubmitWorkResponseProto> PARSER =
+ new com.google.protobuf.AbstractParser<SubmitWorkResponseProto>() {
+ public SubmitWorkResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SubmitWorkResponseProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SubmitWorkResponseProto> getParserForType() {
+ return PARSER;
+ }
+
private int bitField0_;
+ // optional .SubmissionStateProto submission_state = 1;
public static final int SUBMISSION_STATE_FIELD_NUMBER = 1;
- private int submissionState_;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto submissionState_;
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return Whether the submissionState field is set.
*/
- @java.lang.Override public boolean hasSubmissionState() {
- return ((bitField0_ & 0x00000001) != 0);
+ public boolean hasSubmissionState() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return The submissionState.
*/
- @java.lang.Override public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() {
- @SuppressWarnings("deprecation")
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(submissionState_);
- return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED : result;
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() {
+ return submissionState_;
}
+ // optional string unique_node_id = 2;
public static final int UNIQUE_NODE_ID_FIELD_NUMBER = 2;
- private volatile java.lang.Object uniqueNodeId_;
+ private java.lang.Object uniqueNodeId_;
/**
* optional string unique_node_id = 2;
- * @return Whether the uniqueNodeId field is set.
*/
- @java.lang.Override
public boolean hasUniqueNodeId() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional string unique_node_id = 2;
- * @return The uniqueNodeId.
*/
- @java.lang.Override
public java.lang.String getUniqueNodeId() {
java.lang.Object ref = uniqueNodeId_;
if (ref instanceof java.lang.String) {
@@ -14698,9 +13048,7 @@ public java.lang.String getUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
- * @return The bytes for uniqueNodeId.
*/
- @java.lang.Override
public com.google.protobuf.ByteString
getUniqueNodeIdBytes() {
java.lang.Object ref = uniqueNodeId_;
@@ -14715,47 +13063,57 @@ public java.lang.String getUniqueNodeId() {
}
}
+ private void initFields() {
+ submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
+ uniqueNodeId_ = "";
+ }
private byte memoizedIsInitialized = -1;
- @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (((bitField0_ & 0x00000001) != 0)) {
- output.writeEnum(1, submissionState_);
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeEnum(1, submissionState_.getNumber());
}
- if (((bitField0_ & 0x00000002) != 0)) {
- com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uniqueNodeId_);
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getUniqueNodeIdBytes());
}
- unknownFields.writeTo(output);
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(1, submissionState_);
+ .computeEnumSize(1, submissionState_.getNumber());
}
- if (((bitField0_ & 0x00000002) != 0)) {
- size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uniqueNodeId_);
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getUniqueNodeIdBytes());
}
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -14766,50 +13124,43 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) obj;
- if (hasSubmissionState() != other.hasSubmissionState()) return false;
+ boolean result = true;
+ result = result && (hasSubmissionState() == other.hasSubmissionState());
if (hasSubmissionState()) {
- if (submissionState_ != other.submissionState_) return false;
+ result = result &&
+ (getSubmissionState() == other.getSubmissionState());
}
- if (hasUniqueNodeId() != other.hasUniqueNodeId()) return false;
+ result = result && (hasUniqueNodeId() == other.hasUniqueNodeId());
if (hasUniqueNodeId()) {
- if (!getUniqueNodeId()
- .equals(other.getUniqueNodeId())) return false;
+ result = result && getUniqueNodeId()
+ .equals(other.getUniqueNodeId());
}
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSubmissionState()) {
hash = (37 * hash) + SUBMISSION_STATE_FIELD_NUMBER;
- hash = (53 * hash) + submissionState_;
+ hash = (53 * hash) + hashEnum(getSubmissionState());
}
if (hasUniqueNodeId()) {
hash = (37 * hash) + UNIQUE_NODE_ID_FIELD_NUMBER;
hash = (53 * hash) + getUniqueNodeId().hashCode();
}
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -14833,59 +13184,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Su
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -14893,16 +13231,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code SubmitWorkResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:SubmitWorkResponseProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -14915,37 +13251,40 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
- submissionState_ = 1;
+ submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
bitField0_ = (bitField0_ & ~0x00000001);
uniqueNodeId_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -14954,16 +13293,15 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWor
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) != 0)) {
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.submissionState_ = submissionState_;
- if (((from_bitField0_ & 0x00000002) != 0)) {
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.uniqueNodeId_ = uniqueNodeId_;
@@ -14972,39 +13310,6 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWor
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto)other);
@@ -15024,17 +13329,14 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
uniqueNodeId_ = other.uniqueNodeId_;
onChanged();
}
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -15044,7 +13346,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -15054,70 +13356,59 @@ public Builder mergeFrom(
}
private int bitField0_;
- private int submissionState_ = 1;
+ // optional .SubmissionStateProto submission_state = 1;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return Whether the submissionState field is set.
*/
- @java.lang.Override public boolean hasSubmissionState() {
- return ((bitField0_ & 0x00000001) != 0);
+ public boolean hasSubmissionState() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return The submissionState.
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() {
- @SuppressWarnings("deprecation")
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(submissionState_);
- return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED : result;
+ return submissionState_;
}
/**
* optional .SubmissionStateProto submission_state = 1;
- * @param value The submissionState to set.
- * @return This builder for chaining.
*/
public Builder setSubmissionState(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
- submissionState_ = value.getNumber();
+ submissionState_ = value;
onChanged();
return this;
}
/**
* optional .SubmissionStateProto submission_state = 1;
- * @return This builder for chaining.
*/
public Builder clearSubmissionState() {
bitField0_ = (bitField0_ & ~0x00000001);
- submissionState_ = 1;
+ submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED;
onChanged();
return this;
}
+ // optional string unique_node_id = 2;
private java.lang.Object uniqueNodeId_ = "";
/**
* optional string unique_node_id = 2;
- * @return Whether the uniqueNodeId field is set.
*/
public boolean hasUniqueNodeId() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional string unique_node_id = 2;
- * @return The uniqueNodeId.
*/
public java.lang.String getUniqueNodeId() {
java.lang.Object ref = uniqueNodeId_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- uniqueNodeId_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ uniqueNodeId_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -15125,7 +13416,6 @@ public java.lang.String getUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
- * @return The bytes for uniqueNodeId.
*/
public com.google.protobuf.ByteString
getUniqueNodeIdBytes() {
@@ -15142,8 +13432,6 @@ public java.lang.String getUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
- * @param value The uniqueNodeId to set.
- * @return This builder for chaining.
*/
public Builder setUniqueNodeId(
java.lang.String value) {
@@ -15157,7 +13445,6 @@ public Builder setUniqueNodeId(
}
/**
* optional string unique_node_id = 2;
- * @return This builder for chaining.
*/
public Builder clearUniqueNodeId() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -15167,8 +13454,6 @@ public Builder clearUniqueNodeId() {
}
/**
* optional string unique_node_id = 2;
- * @param value The bytes for uniqueNodeId to set.
- * @return This builder for chaining.
*/
public Builder setUniqueNodeIdBytes(
com.google.protobuf.ByteString value) {
@@ -15180,71 +13465,28 @@ public Builder setUniqueNodeIdBytes(
onChanged();
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:SubmitWorkResponseProto)
}
- // @@protoc_insertion_point(class_scope:SubmitWorkResponseProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<SubmitWorkResponseProto>
- PARSER = new com.google.protobuf.AbstractParser<SubmitWorkResponseProto>() {
- @java.lang.Override
- public SubmitWorkResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SubmitWorkResponseProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<SubmitWorkResponseProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<SubmitWorkResponseProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new SubmitWorkResponseProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:SubmitWorkResponseProto)
}
- public interface SourceStateUpdatedRequestProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:SourceStateUpdatedRequestProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface SourceStateUpdatedRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -15252,31 +13494,28 @@ public interface SourceStateUpdatedRequestProtoOrBuilder extends
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
+ // optional string src_name = 2;
/**
* optional string src_name = 2;
- * @return Whether the srcName field is set.
*/
boolean hasSrcName();
/**
* optional string src_name = 2;
- * @return The srcName.
*/
java.lang.String getSrcName();
/**
* optional string src_name = 2;
- * @return The bytes for srcName.
*/
com.google.protobuf.ByteString
getSrcNameBytes();
+ // optional .SourceStateProto state = 3;
/**
* optional .SourceStateProto state = 3;
- * @return Whether the state field is set.
*/
boolean hasState();
/**
* optional .SourceStateProto state = 3;
- * @return The state.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState();
}
@@ -15284,39 +13523,35 @@ public interface SourceStateUpdatedRequestProtoOrBuilder extends
* Protobuf type {@code SourceStateUpdatedRequestProto}
*/
public static final class SourceStateUpdatedRequestProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:SourceStateUpdatedRequestProto)
- SourceStateUpdatedRequestProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements SourceStateUpdatedRequestProtoOrBuilder {
// Use SourceStateUpdatedRequestProto.newBuilder() to construct.
- private SourceStateUpdatedRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private SourceStateUpdatedRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private SourceStateUpdatedRequestProto() {
- srcName_ = "";
- state_ = 1;
+ private SourceStateUpdatedRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SourceStateUpdatedRequestProto defaultInstance;
+ public static SourceStateUpdatedRequestProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new SourceStateUpdatedRequestProto();
+ public SourceStateUpdatedRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private SourceStateUpdatedRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -15328,9 +13563,16 @@ private SourceStateUpdatedRequestProto(
case 0:
done = true;
break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -15342,27 +13584,18 @@ private SourceStateUpdatedRequestProto(
break;
}
case 18: {
- com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- srcName_ = bs;
+ srcName_ = input.readBytes();
break;
}
case 24: {
int rawValue = input.readEnum();
- @SuppressWarnings("deprecation")
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto value = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(3, rawValue);
} else {
bitField0_ |= 0x00000004;
- state_ = rawValue;
- }
- break;
- }
- default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
- done = true;
+ state_ = value;
}
break;
}
@@ -15370,11 +13603,9 @@ private SourceStateUpdatedRequestProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -15385,56 +13616,63 @@ private SourceStateUpdatedRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.Builder.class);
}
+ public static com.google.protobuf.Parser<SourceStateUpdatedRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<SourceStateUpdatedRequestProto>() {
+ public SourceStateUpdatedRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SourceStateUpdatedRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SourceStateUpdatedRequestProto> getParserForType() {
+ return PARSER;
+ }
+
private int bitField0_;
+ // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
- @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
+ // optional string src_name = 2;
public static final int SRC_NAME_FIELD_NUMBER = 2;
- private volatile java.lang.Object srcName_;
+ private java.lang.Object srcName_;
/**
* optional string src_name = 2;
- * @return Whether the srcName field is set.
*/
- @java.lang.Override
public boolean hasSrcName() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional string src_name = 2;
- * @return The srcName.
*/
- @java.lang.Override
public java.lang.String getSrcName() {
java.lang.Object ref = srcName_;
if (ref instanceof java.lang.String) {
@@ -15451,9 +13689,7 @@ public java.lang.String getSrcName() {
}
/**
* optional string src_name = 2;
- * @return The bytes for srcName.
*/
- @java.lang.Override
public com.google.protobuf.ByteString
getSrcNameBytes() {
java.lang.Object ref = srcName_;
@@ -15468,73 +13704,81 @@ public java.lang.String getSrcName() {
}
}
+ // optional .SourceStateProto state = 3;
public static final int STATE_FIELD_NUMBER = 3;
- private int state_;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto state_;
/**
* optional .SourceStateProto state = 3;
- * @return Whether the state field is set.
*/
- @java.lang.Override public boolean hasState() {
- return ((bitField0_ & 0x00000004) != 0);
+ public boolean hasState() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional .SourceStateProto state = 3;
- * @return The state.
*/
- @java.lang.Override public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
- @SuppressWarnings("deprecation")
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(state_);
- return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED : result;
+ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
+ return state_;
}
+ private void initFields() {
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ srcName_ = "";
+ state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
+ }
private byte memoizedIsInitialized = -1;
- @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (((bitField0_ & 0x00000001) != 0)) {
- output.writeMessage(1, getQueryIdentifier());
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, queryIdentifier_);
}
- if (((bitField0_ & 0x00000002) != 0)) {
- com.google.protobuf.GeneratedMessageV3.writeString(output, 2, srcName_);
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getSrcNameBytes());
}
- if (((bitField0_ & 0x00000004) != 0)) {
- output.writeEnum(3, state_);
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeEnum(3, state_.getNumber());
}
- unknownFields.writeTo(output);
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, getQueryIdentifier());
+ .computeMessageSize(1, queryIdentifier_);
}
- if (((bitField0_ & 0x00000002) != 0)) {
- size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, srcName_);
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getSrcNameBytes());
}
- if (((bitField0_ & 0x00000004) != 0)) {
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(3, state_);
+ .computeEnumSize(3, state_.getNumber());
}
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -15545,31 +13789,35 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) obj;
- if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
+ boolean result = true;
+ result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
if (hasQueryIdentifier()) {
- if (!getQueryIdentifier()
- .equals(other.getQueryIdentifier())) return false;
+ result = result && getQueryIdentifier()
+ .equals(other.getQueryIdentifier());
}
- if (hasSrcName() != other.hasSrcName()) return false;
+ result = result && (hasSrcName() == other.hasSrcName());
if (hasSrcName()) {
- if (!getSrcName()
- .equals(other.getSrcName())) return false;
+ result = result && getSrcName()
+ .equals(other.getSrcName());
}
- if (hasState() != other.hasState()) return false;
+ result = result && (hasState() == other.hasState());
if (hasState()) {
- if (state_ != other.state_) return false;
+ result = result &&
+ (getState() == other.getState());
}
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
@@ -15580,24 +13828,13 @@ public int hashCode() {
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
- hash = (53 * hash) + state_;
+ hash = (53 * hash) + hashEnum(getState());
}
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -15621,59 +13858,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.So
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -15681,16 +13905,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code SourceStateUpdatedRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:SourceStateUpdatedRequestProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -15703,44 +13925,47 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
} else {
queryIdentifierBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
srcName_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
- state_ = 1;
+ state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -15749,24 +13974,23 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceSta
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) != 0)) {
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
- if (((from_bitField0_ & 0x00000002) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.srcName_ = srcName_;
- if (((from_bitField0_ & 0x00000004) != 0)) {
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.state_ = state_;
@@ -15775,39 +13999,6 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceSta
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto)other);
@@ -15830,17 +14021,14 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasState()) {
setState(other.getState());
}
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -15850,7 +14038,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -15860,23 +14048,22 @@ public Builder mergeFrom(
}
private int bitField0_;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
- private com.google.protobuf.SingleFieldBuilderV3<
+ // optional .QueryIdentifierProto query_identifier = 1;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -15916,8 +14103,7 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) != 0) &&
- queryIdentifier_ != null &&
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -15936,7 +14122,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -15959,20 +14145,19 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_ == null ?
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- getQueryIdentifier(),
+ queryIdentifier_,
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -15980,27 +14165,23 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
+ // optional string src_name = 2;
private java.lang.Object srcName_ = "";
/**
* optional string src_name = 2;
- * @return Whether the srcName field is set.
*/
public boolean hasSrcName() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional string src_name = 2;
- * @return The srcName.
*/
public java.lang.String getSrcName() {
java.lang.Object ref = srcName_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- srcName_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ srcName_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -16008,7 +14189,6 @@ public java.lang.String getSrcName() {
}
/**
* optional string src_name = 2;
- * @return The bytes for srcName.
*/
public com.google.protobuf.ByteString
getSrcNameBytes() {
@@ -16025,8 +14205,6 @@ public java.lang.String getSrcName() {
}
/**
* optional string src_name = 2;
- * @param value The srcName to set.
- * @return This builder for chaining.
*/
public Builder setSrcName(
java.lang.String value) {
@@ -16040,7 +14218,6 @@ public Builder setSrcName(
}
/**
* optional string src_name = 2;
- * @return This builder for chaining.
*/
public Builder clearSrcName() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -16050,8 +14227,6 @@ public Builder clearSrcName() {
}
/**
* optional string src_name = 2;
- * @param value The bytes for srcName to set.
- * @return This builder for chaining.
*/
public Builder setSrcNameBytes(
com.google.protobuf.ByteString value) {
@@ -16064,140 +14239,89 @@ public Builder setSrcNameBytes(
return this;
}
- private int state_ = 1;
+ // optional .SourceStateProto state = 3;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
/**
* optional .SourceStateProto state = 3;
- * @return Whether the state field is set.
*/
- @java.lang.Override public boolean hasState() {
- return ((bitField0_ & 0x00000004) != 0);
+ public boolean hasState() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* optional .SourceStateProto state = 3;
- * @return The state.
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
- @SuppressWarnings("deprecation")
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto result = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(state_);
- return result == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED : result;
+ return state_;
}
/**
* optional .SourceStateProto state = 3;
- * @param value The state to set.
- * @return This builder for chaining.
*/
public Builder setState(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
- state_ = value.getNumber();
+ state_ = value;
onChanged();
return this;
}
/**
* optional .SourceStateProto state = 3;
- * @return This builder for chaining.
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000004);
- state_ = 1;
+ state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
onChanged();
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:SourceStateUpdatedRequestProto)
}
- // @@protoc_insertion_point(class_scope:SourceStateUpdatedRequestProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<SourceStateUpdatedRequestProto>
- PARSER = new com.google.protobuf.AbstractParser<SourceStateUpdatedRequestProto>() {
- @java.lang.Override
- public SourceStateUpdatedRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SourceStateUpdatedRequestProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<SourceStateUpdatedRequestProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<SourceStateUpdatedRequestProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new SourceStateUpdatedRequestProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:SourceStateUpdatedRequestProto)
}
- public interface SourceStateUpdatedResponseProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:SourceStateUpdatedResponseProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface SourceStateUpdatedResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code SourceStateUpdatedResponseProto}
*/
public static final class SourceStateUpdatedResponseProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:SourceStateUpdatedResponseProto)
- SourceStateUpdatedResponseProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements SourceStateUpdatedResponseProtoOrBuilder {
// Use SourceStateUpdatedResponseProto.newBuilder() to construct.
- private SourceStateUpdatedResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private SourceStateUpdatedResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private SourceStateUpdatedResponseProto() {
+ private SourceStateUpdatedResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SourceStateUpdatedResponseProto defaultInstance;
+ public static SourceStateUpdatedResponseProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new SourceStateUpdatedResponseProto();
+ public SourceStateUpdatedResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private SourceStateUpdatedResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -16209,8 +14333,8 @@ private SourceStateUpdatedResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
done = true;
}
break;
@@ -16219,11 +14343,9 @@ private SourceStateUpdatedResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -16234,42 +14356,63 @@ private SourceStateUpdatedResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.Builder.class);
}
- private byte memoizedIsInitialized = -1;
+ public static com.google.protobuf.Parser<SourceStateUpdatedResponseProto> PARSER =
+ new com.google.protobuf.AbstractParser<SourceStateUpdatedResponseProto>() {
+ public SourceStateUpdatedResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SourceStateUpdatedResponseProto(input, extensionRegistry);
+ }
+ };
+
@java.lang.Override
+ public com.google.protobuf.Parser<SourceStateUpdatedResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- unknownFields.writeTo(output);
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -16280,33 +14423,25 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) obj;
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -16330,59 +14465,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.So
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -16390,16 +14512,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code SourceStateUpdatedResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:SourceStateUpdatedResponseProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -16412,33 +14532,36 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -16447,46 +14570,12 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceSta
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto(this);
onBuilt();
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto)other);
@@ -16498,17 +14587,14 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -16518,7 +14604,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -16526,71 +14612,28 @@ public Builder mergeFrom(
}
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:SourceStateUpdatedResponseProto)
}
- // @@protoc_insertion_point(class_scope:SourceStateUpdatedResponseProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<SourceStateUpdatedResponseProto>
- PARSER = new com.google.protobuf.AbstractParser<SourceStateUpdatedResponseProto>() {
- @java.lang.Override
- public SourceStateUpdatedResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new SourceStateUpdatedResponseProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<SourceStateUpdatedResponseProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<SourceStateUpdatedResponseProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new SourceStateUpdatedResponseProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:SourceStateUpdatedResponseProto)
}
- public interface QueryCompleteRequestProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:QueryCompleteRequestProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface QueryCompleteRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -16598,14 +14641,13 @@ public interface QueryCompleteRequestProtoOrBuilder extends
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
+ // optional int64 delete_delay = 2 [default = 0];
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return Whether the deleteDelay field is set.
*/
boolean hasDeleteDelay();
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return The deleteDelay.
*/
long getDeleteDelay();
}
@@ -16613,37 +14655,35 @@ public interface QueryCompleteRequestProtoOrBuilder extends
* Protobuf type {@code QueryCompleteRequestProto}
*/
public static final class QueryCompleteRequestProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:QueryCompleteRequestProto)
- QueryCompleteRequestProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements QueryCompleteRequestProtoOrBuilder {
// Use QueryCompleteRequestProto.newBuilder() to construct.
- private QueryCompleteRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private QueryCompleteRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private QueryCompleteRequestProto() {
+ private QueryCompleteRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final QueryCompleteRequestProto defaultInstance;
+ public static QueryCompleteRequestProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new QueryCompleteRequestProto();
+ public QueryCompleteRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private QueryCompleteRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -16655,9 +14695,16 @@ private QueryCompleteRequestProto(
case 0:
done = true;
break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -16673,22 +14720,13 @@ private QueryCompleteRequestProto(
deleteDelay_ = input.readInt64();
break;
}
- default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -16699,102 +14737,118 @@ private QueryCompleteRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.Builder.class);
}
+ public static com.google.protobuf.Parser<QueryCompleteRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<QueryCompleteRequestProto>() {
+ public QueryCompleteRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new QueryCompleteRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<QueryCompleteRequestProto> getParserForType() {
+ return PARSER;
+ }
+
private int bitField0_;
+ // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
- @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
+ // optional int64 delete_delay = 2 [default = 0];
public static final int DELETE_DELAY_FIELD_NUMBER = 2;
private long deleteDelay_;
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return Whether the deleteDelay field is set.
*/
- @java.lang.Override
public boolean hasDeleteDelay() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return The deleteDelay.
*/
- @java.lang.Override
public long getDeleteDelay() {
return deleteDelay_;
}
+ private void initFields() {
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ deleteDelay_ = 0L;
+ }
private byte memoizedIsInitialized = -1;
- @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (((bitField0_ & 0x00000001) != 0)) {
- output.writeMessage(1, getQueryIdentifier());
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, queryIdentifier_);
}
- if (((bitField0_ & 0x00000002) != 0)) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, deleteDelay_);
}
- unknownFields.writeTo(output);
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, getQueryIdentifier());
+ .computeMessageSize(1, queryIdentifier_);
}
- if (((bitField0_ & 0x00000002) != 0)) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, deleteDelay_);
}
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -16805,52 +14859,43 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) obj;
- if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
+ boolean result = true;
+ result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
if (hasQueryIdentifier()) {
- if (!getQueryIdentifier()
- .equals(other.getQueryIdentifier())) return false;
+ result = result && getQueryIdentifier()
+ .equals(other.getQueryIdentifier());
}
- if (hasDeleteDelay() != other.hasDeleteDelay()) return false;
+ result = result && (hasDeleteDelay() == other.hasDeleteDelay());
if (hasDeleteDelay()) {
- if (getDeleteDelay()
- != other.getDeleteDelay()) return false;
+ result = result && (getDeleteDelay()
+ == other.getDeleteDelay());
}
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
}
if (hasDeleteDelay()) {
hash = (37 * hash) + DELETE_DELAY_FIELD_NUMBER;
- hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
- getDeleteDelay());
+ hash = (53 * hash) + hashLong(getDeleteDelay());
}
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -16874,59 +14919,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Qu
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -16934,16 +14966,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code QueryCompleteRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:QueryCompleteRequestProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -16956,21 +14986,23 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
} else {
queryIdentifierBuilder_.clear();
}
@@ -16980,18 +15012,19 @@ public Builder clear() {
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -17000,61 +15033,27 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryComp
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) != 0)) {
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
- if (((from_bitField0_ & 0x00000002) != 0)) {
- result.deleteDelay_ = deleteDelay_;
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
+ result.deleteDelay_ = deleteDelay_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)other);
@@ -17072,17 +15071,14 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
if (other.hasDeleteDelay()) {
setDeleteDelay(other.getDeleteDelay());
}
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -17092,7 +15088,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -17102,23 +15098,22 @@ public Builder mergeFrom(
}
private int bitField0_;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
- private com.google.protobuf.SingleFieldBuilderV3<
+ // optional .QueryIdentifierProto query_identifier = 1;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -17158,8 +15153,7 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) != 0) &&
- queryIdentifier_ != null &&
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -17178,7 +15172,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -17201,20 +15195,19 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_ == null ?
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- getQueryIdentifier(),
+ queryIdentifier_,
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -17222,27 +15215,22 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
+ // optional int64 delete_delay = 2 [default = 0];
private long deleteDelay_ ;
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return Whether the deleteDelay field is set.
*/
- @java.lang.Override
public boolean hasDeleteDelay() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return The deleteDelay.
*/
- @java.lang.Override
public long getDeleteDelay() {
return deleteDelay_;
}
/**
* optional int64 delete_delay = 2 [default = 0];
- * @param value The deleteDelay to set.
- * @return This builder for chaining.
*/
public Builder setDeleteDelay(long value) {
bitField0_ |= 0x00000002;
@@ -17252,7 +15240,6 @@ public Builder setDeleteDelay(long value) {
}
/**
* optional int64 delete_delay = 2 [default = 0];
- * @return This builder for chaining.
*/
public Builder clearDeleteDelay() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -17260,98 +15247,54 @@ public Builder clearDeleteDelay() {
onChanged();
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:QueryCompleteRequestProto)
}
- // @@protoc_insertion_point(class_scope:QueryCompleteRequestProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<QueryCompleteRequestProto>
- PARSER = new com.google.protobuf.AbstractParser<QueryCompleteRequestProto>() {
- @java.lang.Override
- public QueryCompleteRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new QueryCompleteRequestProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<QueryCompleteRequestProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<QueryCompleteRequestProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new QueryCompleteRequestProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:QueryCompleteRequestProto)
}
- public interface QueryCompleteResponseProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:QueryCompleteResponseProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface QueryCompleteResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code QueryCompleteResponseProto}
*/
public static final class QueryCompleteResponseProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:QueryCompleteResponseProto)
- QueryCompleteResponseProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements QueryCompleteResponseProtoOrBuilder {
// Use QueryCompleteResponseProto.newBuilder() to construct.
- private QueryCompleteResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private QueryCompleteResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private QueryCompleteResponseProto() {
+ private QueryCompleteResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final QueryCompleteResponseProto defaultInstance;
+ public static QueryCompleteResponseProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new QueryCompleteResponseProto();
+ public QueryCompleteResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private QueryCompleteResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -17363,8 +15306,8 @@ private QueryCompleteResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
done = true;
}
break;
@@ -17373,11 +15316,9 @@ private QueryCompleteResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -17388,42 +15329,63 @@ private QueryCompleteResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.Builder.class);
}
- private byte memoizedIsInitialized = -1;
+ public static com.google.protobuf.Parser<QueryCompleteResponseProto> PARSER =
+ new com.google.protobuf.AbstractParser<QueryCompleteResponseProto>() {
+ public QueryCompleteResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new QueryCompleteResponseProto(input, extensionRegistry);
+ }
+ };
+
@java.lang.Override
+ public com.google.protobuf.Parser<QueryCompleteResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- unknownFields.writeTo(output);
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -17434,33 +15396,25 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) obj;
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -17484,59 +15438,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Qu
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -17544,16 +15485,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code QueryCompleteResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:QueryCompleteResponseProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -17566,33 +15505,36 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -17601,46 +15543,12 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryComp
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto(this);
onBuilt();
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto)other);
@@ -17652,17 +15560,14 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -17672,7 +15577,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -17680,71 +15585,28 @@ public Builder mergeFrom(
}
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
-
- // @@protoc_insertion_point(builder_scope:QueryCompleteResponseProto)
- }
-
- // @@protoc_insertion_point(class_scope:QueryCompleteResponseProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto DEFAULT_INSTANCE;
- static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<QueryCompleteResponseProto>
- PARSER = new com.google.protobuf.AbstractParser<QueryCompleteResponseProto>() {
- @java.lang.Override
- public QueryCompleteResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new QueryCompleteResponseProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<QueryCompleteResponseProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<QueryCompleteResponseProto> getParserForType() {
- return PARSER;
+
+ // @@protoc_insertion_point(builder_scope:QueryCompleteResponseProto)
}
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ static {
+ defaultInstance = new QueryCompleteResponseProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:QueryCompleteResponseProto)
}
- public interface TerminateFragmentRequestProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:TerminateFragmentRequestProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface TerminateFragmentRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -17752,19 +15614,17 @@ public interface TerminateFragmentRequestProtoOrBuilder extends
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
+ // optional string fragment_identifier_string = 2;
/**
* optional string fragment_identifier_string = 2;
- * @return Whether the fragmentIdentifierString field is set.
*/
boolean hasFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
- * @return The fragmentIdentifierString.
*/
java.lang.String getFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
- * @return The bytes for fragmentIdentifierString.
*/
com.google.protobuf.ByteString
getFragmentIdentifierStringBytes();
@@ -17773,38 +15633,35 @@ public interface TerminateFragmentRequestProtoOrBuilder extends
* Protobuf type {@code TerminateFragmentRequestProto}
*/
public static final class TerminateFragmentRequestProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:TerminateFragmentRequestProto)
- TerminateFragmentRequestProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements TerminateFragmentRequestProtoOrBuilder {
// Use TerminateFragmentRequestProto.newBuilder() to construct.
- private TerminateFragmentRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private TerminateFragmentRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private TerminateFragmentRequestProto() {
- fragmentIdentifierString_ = "";
+ private TerminateFragmentRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TerminateFragmentRequestProto defaultInstance;
+ public static TerminateFragmentRequestProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new TerminateFragmentRequestProto();
+ public TerminateFragmentRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private TerminateFragmentRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -17816,9 +15673,16 @@ private TerminateFragmentRequestProto(
case 0:
done = true;
break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -17830,27 +15694,17 @@ private TerminateFragmentRequestProto(
break;
}
case 18: {
- com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- fragmentIdentifierString_ = bs;
- break;
- }
- default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
- done = true;
- }
+ fragmentIdentifierString_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -17861,56 +15715,63 @@ private TerminateFragmentRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.Builder.class);
}
+ public static com.google.protobuf.Parser<TerminateFragmentRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<TerminateFragmentRequestProto>() {
+ public TerminateFragmentRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TerminateFragmentRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TerminateFragmentRequestProto> getParserForType() {
+ return PARSER;
+ }
+
private int bitField0_;
+ // optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
- @java.lang.Override
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
+ // optional string fragment_identifier_string = 2;
public static final int FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER = 2;
- private volatile java.lang.Object fragmentIdentifierString_;
+ private java.lang.Object fragmentIdentifierString_;
/**
* optional string fragment_identifier_string = 2;
- * @return Whether the fragmentIdentifierString field is set.
*/
- @java.lang.Override
public boolean hasFragmentIdentifierString() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional string fragment_identifier_string = 2;
- * @return The fragmentIdentifierString.
*/
- @java.lang.Override
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (ref instanceof java.lang.String) {
@@ -17927,9 +15788,7 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
- * @return The bytes for fragmentIdentifierString.
*/
- @java.lang.Override
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
java.lang.Object ref = fragmentIdentifierString_;
@@ -17944,47 +15803,57 @@ public java.lang.String getFragmentIdentifierString() {
}
}
+ private void initFields() {
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ fragmentIdentifierString_ = "";
+ }
private byte memoizedIsInitialized = -1;
- @java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (((bitField0_ & 0x00000001) != 0)) {
- output.writeMessage(1, getQueryIdentifier());
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, queryIdentifier_);
}
- if (((bitField0_ & 0x00000002) != 0)) {
- com.google.protobuf.GeneratedMessageV3.writeString(output, 2, fragmentIdentifierString_);
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getFragmentIdentifierStringBytes());
}
- unknownFields.writeTo(output);
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, getQueryIdentifier());
+ .computeMessageSize(1, queryIdentifier_);
}
- if (((bitField0_ & 0x00000002) != 0)) {
- size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, fragmentIdentifierString_);
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getFragmentIdentifierStringBytes());
}
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -17995,27 +15864,30 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) obj;
- if (hasQueryIdentifier() != other.hasQueryIdentifier()) return false;
+ boolean result = true;
+ result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
if (hasQueryIdentifier()) {
- if (!getQueryIdentifier()
- .equals(other.getQueryIdentifier())) return false;
+ result = result && getQueryIdentifier()
+ .equals(other.getQueryIdentifier());
}
- if (hasFragmentIdentifierString() != other.hasFragmentIdentifierString()) return false;
+ result = result && (hasFragmentIdentifierString() == other.hasFragmentIdentifierString());
if (hasFragmentIdentifierString()) {
- if (!getFragmentIdentifierString()
- .equals(other.getFragmentIdentifierString())) return false;
+ result = result && getFragmentIdentifierString()
+ .equals(other.getFragmentIdentifierString());
}
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
@@ -18024,22 +15896,11 @@ public int hashCode() {
hash = (37 * hash) + FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER;
hash = (53 * hash) + getFragmentIdentifierString().hashCode();
}
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -18063,59 +15924,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Te
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -18123,16 +15971,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code TerminateFragmentRequestProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:TerminateFragmentRequestProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -18145,21 +15991,23 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
} else {
queryIdentifierBuilder_.clear();
}
@@ -18169,18 +16017,19 @@ public Builder clear() {
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto result = buildPartial();
if (!result.isInitialized()) {
@@ -18189,20 +16038,19 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Terminate
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) != 0)) {
- if (queryIdentifierBuilder_ == null) {
- result.queryIdentifier_ = queryIdentifier_;
- } else {
- result.queryIdentifier_ = queryIdentifierBuilder_.build();
- }
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
- if (((from_bitField0_ & 0x00000002) != 0)) {
+ if (queryIdentifierBuilder_ == null) {
+ result.queryIdentifier_ = queryIdentifier_;
+ } else {
+ result.queryIdentifier_ = queryIdentifierBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.fragmentIdentifierString_ = fragmentIdentifierString_;
@@ -18211,39 +16059,6 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Terminate
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto)other);
@@ -18263,17 +16078,14 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
fragmentIdentifierString_ = other.fragmentIdentifierString_;
onChanged();
}
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -18283,7 +16095,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -18293,23 +16105,22 @@ public Builder mergeFrom(
}
private int bitField0_;
- private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
- private com.google.protobuf.SingleFieldBuilderV3<
+ // optional .QueryIdentifierProto query_identifier = 1;
+ private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
public boolean hasQueryIdentifier() {
- return ((bitField0_ & 0x00000001) != 0);
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- return queryIdentifier_ == null ? org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
} else {
return queryIdentifierBuilder_.getMessage();
}
@@ -18349,8 +16160,7 @@ public Builder setQueryIdentifier(
*/
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
if (queryIdentifierBuilder_ == null) {
- if (((bitField0_ & 0x00000001) != 0) &&
- queryIdentifier_ != null &&
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
queryIdentifier_ =
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -18369,7 +16179,7 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
*/
public Builder clearQueryIdentifier() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifier_ = null;
+ queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
onChanged();
} else {
queryIdentifierBuilder_.clear();
@@ -18392,20 +16202,19 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
if (queryIdentifierBuilder_ != null) {
return queryIdentifierBuilder_.getMessageOrBuilder();
} else {
- return queryIdentifier_ == null ?
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance() : queryIdentifier_;
+ return queryIdentifier_;
}
}
/**
* optional .QueryIdentifierProto query_identifier = 1;
*/
- private com.google.protobuf.SingleFieldBuilderV3<
+ private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
getQueryIdentifierFieldBuilder() {
if (queryIdentifierBuilder_ == null) {
- queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+ queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
- getQueryIdentifier(),
+ queryIdentifier_,
getParentForChildren(),
isClean());
queryIdentifier_ = null;
@@ -18413,27 +16222,23 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIden
return queryIdentifierBuilder_;
}
+ // optional string fragment_identifier_string = 2;
private java.lang.Object fragmentIdentifierString_ = "";
/**
* optional string fragment_identifier_string = 2;
- * @return Whether the fragmentIdentifierString field is set.
*/
public boolean hasFragmentIdentifierString() {
- return ((bitField0_ & 0x00000002) != 0);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* optional string fragment_identifier_string = 2;
- * @return The fragmentIdentifierString.
*/
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- fragmentIdentifierString_ = s;
- }
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ fragmentIdentifierString_ = s;
return s;
} else {
return (java.lang.String) ref;
@@ -18441,7 +16246,6 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
- * @return The bytes for fragmentIdentifierString.
*/
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
@@ -18458,8 +16262,6 @@ public java.lang.String getFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
- * @param value The fragmentIdentifierString to set.
- * @return This builder for chaining.
*/
public Builder setFragmentIdentifierString(
java.lang.String value) {
@@ -18473,7 +16275,6 @@ public Builder setFragmentIdentifierString(
}
/**
* optional string fragment_identifier_string = 2;
- * @return This builder for chaining.
*/
public Builder clearFragmentIdentifierString() {
bitField0_ = (bitField0_ & ~0x00000002);
@@ -18483,8 +16284,6 @@ public Builder clearFragmentIdentifierString() {
}
/**
* optional string fragment_identifier_string = 2;
- * @param value The bytes for fragmentIdentifierString to set.
- * @return This builder for chaining.
*/
public Builder setFragmentIdentifierStringBytes(
com.google.protobuf.ByteString value) {
@@ -18496,98 +16295,54 @@ public Builder setFragmentIdentifierStringBytes(
onChanged();
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:TerminateFragmentRequestProto)
}
- // @@protoc_insertion_point(class_scope:TerminateFragmentRequestProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<TerminateFragmentRequestProto>
- PARSER = new com.google.protobuf.AbstractParser<TerminateFragmentRequestProto>() {
- @java.lang.Override
- public TerminateFragmentRequestProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new TerminateFragmentRequestProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<TerminateFragmentRequestProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<TerminateFragmentRequestProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new TerminateFragmentRequestProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:TerminateFragmentRequestProto)
}
- public interface TerminateFragmentResponseProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:TerminateFragmentResponseProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface TerminateFragmentResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code TerminateFragmentResponseProto}
*/
public static final class TerminateFragmentResponseProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:TerminateFragmentResponseProto)
- TerminateFragmentResponseProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements TerminateFragmentResponseProtoOrBuilder {
// Use TerminateFragmentResponseProto.newBuilder() to construct.
- private TerminateFragmentResponseProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private TerminateFragmentResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private TerminateFragmentResponseProto() {
+ private TerminateFragmentResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TerminateFragmentResponseProto defaultInstance;
+ public static TerminateFragmentResponseProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new TerminateFragmentResponseProto();
+ public TerminateFragmentResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private TerminateFragmentResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
@@ -18599,8 +16354,8 @@ private TerminateFragmentResponseProto(
done = true;
break;
default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
done = true;
}
break;
@@ -18609,11 +16364,9 @@ private TerminateFragmentResponseProto(
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -18624,42 +16377,63 @@ private TerminateFragmentResponseProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.Builder.class);
}
- private byte memoizedIsInitialized = -1;
+ public static com.google.protobuf.Parser<TerminateFragmentResponseProto> PARSER =
+ new com.google.protobuf.AbstractParser<TerminateFragmentResponseProto>() {
+ public TerminateFragmentResponseProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TerminateFragmentResponseProto(input, extensionRegistry);
+ }
+ };
+
@java.lang.Override
+ public com.google.protobuf.Parser<TerminateFragmentResponseProto> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
+ if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
- @java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- unknownFields.writeTo(output);
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
}
- @java.lang.Override
+ private int memoizedSerializedSize = -1;
public int getSerializedSize() {
- int size = memoizedSize;
+ int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
- size += unknownFields.getSerializedSize();
- memoizedSize = size;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
return size;
}
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -18670,33 +16444,25 @@ public boolean equals(final java.lang.Object obj) {
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) obj;
- if (!unknownFields.equals(other.unknownFields)) return false;
- return true;
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
}
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
- hash = (29 * hash) + unknownFields.hashCode();
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
- java.nio.ByteBuffer data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
- java.nio.ByteBuffer data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -18720,59 +16486,46 @@ public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Te
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
+ return PARSER.parseFrom(input, extensionRegistry);
}
- @java.lang.Override
+ public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- @java.lang.Override
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
+ return newBuilder().mergeFrom(prototype);
}
+ public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -18780,16 +16533,14 @@ protected Builder newBuilderForType(
* Protobuf type {@code TerminateFragmentResponseProto}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:TerminateFragmentResponseProto)
- org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProtoOrBuilder {
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -18802,33 +16553,36 @@ private Builder() {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
- @java.lang.Override
+ private static Builder create() {
+ return new Builder();
+ }
+
public Builder clear() {
super.clear();
return this;
}
- @java.lang.Override
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance();
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto result = buildPartial();
if (!result.isInitialized()) {
@@ -18837,46 +16591,12 @@ public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.Terminate
return result;
}
- @java.lang.Override
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto(this);
onBuilt();
return result;
}
- @java.lang.Override
- public Builder clone() {
- return super.clone();
- }
- @java.lang.Override
- public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.setField(field, value);
- }
- @java.lang.Override
- public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
- return super.clearField(field);
- }
- @java.lang.Override
- public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return super.clearOneof(oneof);
- }
- @java.lang.Override
- public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, java.lang.Object value) {
- return super.setRepeatedField(field, index, value);
- }
- @java.lang.Override
- public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
- java.lang.Object value) {
- return super.addRepeatedField(field, value);
- }
- @java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto)other);
@@ -18888,17 +16608,14 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.unknownFields);
- onChanged();
+ this.mergeUnknownFields(other.getUnknownFields());
return this;
}
- @java.lang.Override
public final boolean isInitialized() {
return true;
}
- @java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -18908,7 +16625,7 @@ public Builder mergeFrom(
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) e.getUnfinishedMessage();
- throw e.unwrapIOException();
+ throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
@@ -18916,71 +16633,28 @@ public Builder mergeFrom(
}
return this;
}
- @java.lang.Override
- public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.setUnknownFields(unknownFields);
- }
-
- @java.lang.Override
- public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
- return super.mergeUnknownFields(unknownFields);
- }
-
// @@protoc_insertion_point(builder_scope:TerminateFragmentResponseProto)
}
- // @@protoc_insertion_point(class_scope:TerminateFragmentResponseProto)
- private static final org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto DEFAULT_INSTANCE;
static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto();
- }
-
- public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- @java.lang.Deprecated public static final com.google.protobuf.Parser<TerminateFragmentResponseProto>
- PARSER = new com.google.protobuf.AbstractParser<TerminateFragmentResponseProto>() {
- @java.lang.Override
- public TerminateFragmentResponseProto parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new TerminateFragmentResponseProto(input, extensionRegistry);
- }
- };
-
- public static com.google.protobuf.Parser<TerminateFragmentResponseProto> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public com.google.protobuf.Parser<TerminateFragmentResponseProto> getParserForType() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
+ defaultInstance = new TerminateFragmentResponseProto(true);
+ defaultInstance.initFields();
}
+ // @@protoc_insertion_point(class_scope:TerminateFragmentResponseProto)
}
- public interface UpdateFragmentRequestProtoOrBuilder extends
- // @@protoc_insertion_point(interface_extends:UpdateFragmentRequestProto)
- com.google.protobuf.MessageOrBuilder {
+ public interface UpdateFragmentRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ // optional .QueryIdentifierProto query_identifier = 1;
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return Whether the queryIdentifier field is set.
*/
boolean hasQueryIdentifier();
/**
* optional .QueryIdentifierProto query_identifier = 1;
- * @return The queryIdentifier.
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
@@ -18988,31 +16662,28 @@ public interface UpdateFragmentRequestProtoOrBuilder extends
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
+ // optional string fragment_identifier_string = 2;
/**
* optional string fragment_identifier_string = 2;
- * @return Whether the fragmentIdentifierString field is set.
*/
boolean hasFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
- * @return The fragmentIdentifierString.
*/
java.lang.String getFragmentIdentifierString();
/**
* optional string fragment_identifier_string = 2;
- * @return The bytes for fragmentIdentifierString.
*/
com.google.protobuf.ByteString
getFragmentIdentifierStringBytes();
+ // optional bool is_guaranteed = 3;
/**
* optional bool is_guaranteed = 3;
- * @return Whether the isGuaranteed field is set.
*/
boolean hasIsGuaranteed();
/**
* optional bool is_guaranteed = 3;
- * @return The isGuaranteed.
*/
boolean getIsGuaranteed();
}
@@ -19020,38 +16691,35 @@ public interface UpdateFragmentRequestProtoOrBuilder extends
* Protobuf type {@code UpdateFragmentRequestProto}
*/
public static final class UpdateFragmentRequestProto extends
- com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:UpdateFragmentRequestProto)
- UpdateFragmentRequestProtoOrBuilder {
- private static final long serialVersionUID = 0L;
+ com.google.protobuf.GeneratedMessage
+ implements UpdateFragmentRequestProtoOrBuilder {
// Use UpdateFragmentRequestProto.newBuilder() to construct.
- private UpdateFragmentRequestProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private UpdateFragmentRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private UpdateFragmentRequestProto() {
- fragmentIdentifierString_ = "";
+ private UpdateFragmentRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final UpdateFragmentRequestProto defaultInstance;
+ public static UpdateFragmentRequestProto getDefaultInstance() {
+ return defaultInstance;
}
- @java.lang.Override
- @SuppressWarnings({"unused"})
- protected java.lang.Object newInstance(
- UnusedPrivateParameter unused) {
- return new UpdateFragmentRequestProto();
+ public UpdateFragmentRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
}
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
+ getUnknownFields() {
return this.unknownFields;
}
private UpdateFragmentRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- this();
- if (extensionRegistry == null) {
- throw new java.lang.NullPointerException();
- }
+ initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -19063,9 +16731,16 @@ private UpdateFragmentRequestProto(
case 0:
done = true;
break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) != 0)) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -19077,9 +16752,8 @@ private UpdateFragmentRequestProto(
break;
}
case 18: {
- com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
- fragmentIdentifierString_ = bs;
+ fragmentIdentifierString_ = input.readBytes();
break;
}
case 24: {
@@ -19087,22 +16761,13 @@ private UpdateFragmentRequestProto(
isGuaranteed_ = input.readBool();
break;
}
- default: {
- if (!parseUnknownField(
- input, unknownFields, extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
- } catch (com.google.protobuf.UninitializedMessageException e) {
- throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
+ e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
@@ -19113,56 +16778,63 @@ private UpdateFragmentRequestProto(
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_descriptor;
}
- @java.lang.Override
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UpdateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto.Builder.class);
}
+ public static com.google.protobuf.Parser<UpdateFragmentRequestProto> PARSER =
+ new com.google.protobuf.AbstractParser<UpdateFragmentRequestProto>() {
+ public UpdateFragmentRequestProto parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new UpdateFragmentRequestProto(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<UpdateFragmentRequestProto>