| // Generated by the protocol buffer compiler. DO NOT EDIT! |
| // source: hdfs.proto |
| |
| package org.apache.hadoop.hdfs.protocol.proto; |
| |
| public final class HdfsProtos { |
| private HdfsProtos() {} |
  /**
   * Registers all protocol buffer extensions declared in hdfs.proto with the
   * given registry. This .proto file declares no extensions, so the method
   * body is intentionally empty; the protobuf compiler emits this hook for
   * every generated file regardless.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Accessor interface shared by the immutable {@code ExtendedBlockProto}
   * message and its {@code Builder}. All four fields are declared
   * {@code required} in hdfs.proto, so a message is only considered
   * initialized once each {@code has*()} method returns {@code true}.
   */
  public interface ExtendedBlockProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string poolId = 1;
    boolean hasPoolId();
    String getPoolId();

    // required uint64 blockId = 2;
    boolean hasBlockId();
    long getBlockId();

    // required uint64 numBytes = 3;
    boolean hasNumBytes();
    long getNumBytes();

    // required uint64 generationStamp = 4;
    boolean hasGenerationStamp();
    long getGenerationStamp();
  }
  /**
   * Immutable protocol buffer message {@code ExtendedBlockProto} with four
   * required fields: {@code poolId} (string, field 1), {@code blockId}
   * (uint64, field 2), {@code numBytes} (uint64, field 3) and
   * {@code generationStamp} (uint64, field 4). Instances are created only
   * through the nested {@link Builder} or the static {@code parseFrom}
   * methods; once built they are never mutated.
   */
  public static final class ExtendedBlockProto extends
      com.google.protobuf.GeneratedMessage
      implements ExtendedBlockProtoOrBuilder {
    // Use ExtendedBlockProto.newBuilder() to construct.
    private ExtendedBlockProto(Builder builder) {
      super(builder);
    }
    // Constructs the shared default instance without touching the builder
    // machinery; field defaults are filled in later via initFields().
    private ExtendedBlockProto(boolean noInit) {}

    // Singleton default instance, created in the static initializer at the
    // bottom of this class.
    private static final ExtendedBlockProto defaultInstance;
    public static ExtendedBlockProto getDefaultInstance() {
      return defaultInstance;
    }

    public ExtendedBlockProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_fieldAccessorTable;
    }

    // Bit mask recording which fields were explicitly set:
    // bit 0 = poolId, bit 1 = blockId, bit 2 = numBytes,
    // bit 3 = generationStamp.
    private int bitField0_;
    // required string poolId = 1;
    public static final int POOLID_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; parsed messages start with the
    // raw ByteString and getPoolId() converts (and may cache) the String.
    private java.lang.Object poolId_;
    public boolean hasPoolId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getPoolId() {
      java.lang.Object ref = poolId_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so a
        // malformed payload keeps round-tripping its original bytes.
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          poolId_ = s;
        }
        return s;
      }
    }
    // Inverse of getPoolId(): returns (and caches) the UTF-8 bytes for
    // serialization.
    private com.google.protobuf.ByteString getPoolIdBytes() {
      java.lang.Object ref = poolId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        poolId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // required uint64 blockId = 2;
    public static final int BLOCKID_FIELD_NUMBER = 2;
    private long blockId_;
    public boolean hasBlockId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getBlockId() {
      return blockId_;
    }

    // required uint64 numBytes = 3;
    public static final int NUMBYTES_FIELD_NUMBER = 3;
    private long numBytes_;
    public boolean hasNumBytes() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getNumBytes() {
      return numBytes_;
    }

    // required uint64 generationStamp = 4;
    public static final int GENERATIONSTAMP_FIELD_NUMBER = 4;
    private long generationStamp_;
    public boolean hasGenerationStamp() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public long getGenerationStamp() {
      return generationStamp_;
    }

    // Assigns proto-declared defaults; used only for the default instance.
    private void initFields() {
      poolId_ = "";
      blockId_ = 0L;
      numBytes_ = 0L;
      generationStamp_ = 0L;
    }
    // Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All four fields are required; missing any one makes the message
      // uninitialized.
      if (!hasPoolId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasBlockId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasNumBytes()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGenerationStamp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect: populates memoizedSerializedSize before
      // writing, as the protobuf runtime expects.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getPoolIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, blockId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, numBytes_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, generationStamp_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed. Safe to cache because
    // the message is immutable.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getPoolIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, blockId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, numBytes_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, generationStamp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto) obj;

      // Two messages are equal when each field's presence bit matches and,
      // where present, the values match; unknown fields must match too.
      boolean result = true;
      result = result && (hasPoolId() == other.hasPoolId());
      if (hasPoolId()) {
        result = result && getPoolId()
            .equals(other.getPoolId());
      }
      result = result && (hasBlockId() == other.hasBlockId());
      if (hasBlockId()) {
        result = result && (getBlockId()
            == other.getBlockId());
      }
      result = result && (hasNumBytes() == other.hasNumBytes());
      if (hasNumBytes()) {
        result = result && (getNumBytes()
            == other.getNumBytes());
      }
      result = result && (hasGenerationStamp() == other.hasGenerationStamp());
      if (hasGenerationStamp()) {
        result = result && (getGenerationStamp()
            == other.getGenerationStamp());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      // Mixing constants (41/19/37/53/29) are fixed by the protobuf code
      // generator; only fields that are present contribute to the hash,
      // keeping hashCode consistent with equals above.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPoolId()) {
        hash = (37 * hash) + POOLID_FIELD_NUMBER;
        hash = (53 * hash) + getPoolId().hashCode();
      }
      if (hasBlockId()) {
        hash = (37 * hash) + BLOCKID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlockId());
      }
      if (hasNumBytes()) {
        hash = (37 * hash) + NUMBYTES_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNumBytes());
      }
      if (hasGenerationStamp()) {
        hash = (37 * hash) + GENERATIONSTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getGenerationStamp());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    // parseFrom overloads: each delegates to a fresh Builder and finishes
    // with buildParsed(), which converts a missing-required-field error into
    // InvalidProtocolBufferException rather than an unchecked exception.
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants read a length prefix first and return null on a
    // clean end-of-stream (no message present).
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with the given prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Mutable builder for {@code ExtendedBlockProto}. Maintains its own
     * {@code bitField0_} presence mask (same bit layout as the message) and
     * copies field values plus presence bits into a new message in
     * {@link #buildPartial()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message fields, so there are no field builders to
        // force-initialize; the guard is emitted by the generator anyway.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its proto default and clears its presence bit.
      public Builder clear() {
        super.clear();
        poolId_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        blockId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        numBytes_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        generationStamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDescriptor();
      }

      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
      }

      // Builds the message, throwing UninitializedMessageException (unchecked)
      // if any required field is missing.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but used on the parse path: missing required fields
      // surface as a checked InvalidProtocolBufferException instead.
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      // Copies all field values unconditionally, but only transfers presence
      // bits that are set in the builder.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.poolId_ = poolId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.blockId_ = blockId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.numBytes_ = numBytes_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.generationStamp_ = generationStamp_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Standard proto merge: fields present in 'other' overwrite this
      // builder's values; merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance()) return this;
        if (other.hasPoolId()) {
          setPoolId(other.getPoolId());
        }
        if (other.hasBlockId()) {
          setBlockId(other.getBlockId());
        }
        if (other.hasNumBytes()) {
          setNumBytes(other.getNumBytes());
        }
        if (other.hasGenerationStamp()) {
          setGenerationStamp(other.getGenerationStamp());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Builder-side initialization check; unlike the message version this is
      // recomputed on every call (the builder is mutable, so no memoization).
      public final boolean isInitialized() {
        if (!hasPoolId()) {
          
          return false;
        }
        if (!hasBlockId()) {
          
          return false;
        }
        if (!hasNumBytes()) {
          
          return false;
        }
        if (!hasGenerationStamp()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          // Tags are (fieldNumber << 3) | wireType: 10 = poolId (bytes),
          // 16/24/32 = blockId/numBytes/generationStamp (varint).
          // Tag 0 means end of stream; unrecognized tags are preserved in
          // unknownFields.
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              poolId_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              blockId_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              numBytes_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              generationStamp_ = input.readUInt64();
              break;
            }
          }
        }
      }

      // Presence mask for this builder; same bit layout as the message.
      private int bitField0_;

      // required string poolId = 1;
      private java.lang.Object poolId_ = "";
      public boolean hasPoolId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getPoolId() {
        java.lang.Object ref = poolId_;
        if (!(ref instanceof String)) {
          // Unconditionally cache the decoded form (unlike the message-side
          // getter, the builder does not re-check UTF-8 validity).
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          poolId_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setPoolId(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        poolId_ = value;
        onChanged();
        return this;
      }
      public Builder clearPoolId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        poolId_ = getDefaultInstance().getPoolId();
        onChanged();
        return this;
      }
      // Package-private fast path used during parsing: stores the raw bytes
      // without UTF-8 decoding.
      void setPoolId(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        poolId_ = value;
        onChanged();
      }

      // required uint64 blockId = 2;
      private long blockId_ ;
      public boolean hasBlockId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getBlockId() {
        return blockId_;
      }
      public Builder setBlockId(long value) {
        bitField0_ |= 0x00000002;
        blockId_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        blockId_ = 0L;
        onChanged();
        return this;
      }

      // required uint64 numBytes = 3;
      private long numBytes_ ;
      public boolean hasNumBytes() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getNumBytes() {
        return numBytes_;
      }
      public Builder setNumBytes(long value) {
        bitField0_ |= 0x00000004;
        numBytes_ = value;
        onChanged();
        return this;
      }
      public Builder clearNumBytes() {
        bitField0_ = (bitField0_ & ~0x00000004);
        numBytes_ = 0L;
        onChanged();
        return this;
      }

      // required uint64 generationStamp = 4;
      private long generationStamp_ ;
      public boolean hasGenerationStamp() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public long getGenerationStamp() {
        return generationStamp_;
      }
      public Builder setGenerationStamp(long value) {
        bitField0_ |= 0x00000008;
        generationStamp_ = value;
        onChanged();
        return this;
      }
      public Builder clearGenerationStamp() {
        bitField0_ = (bitField0_ & ~0x00000008);
        generationStamp_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ExtendedBlockProto)
    }

    static {
      defaultInstance = new ExtendedBlockProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ExtendedBlockProto)
  }
| |
  /**
   * Accessor interface shared by the immutable
   * {@code BlockTokenIdentifierProto} message and its {@code Builder}.
   * All four fields are declared {@code required} in hdfs.proto, so a
   * message is only considered initialized once each {@code has*()} method
   * returns {@code true}.
   */
  public interface BlockTokenIdentifierProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes identifier = 1;
    boolean hasIdentifier();
    com.google.protobuf.ByteString getIdentifier();

    // required bytes password = 2;
    boolean hasPassword();
    com.google.protobuf.ByteString getPassword();

    // required string kind = 3;
    boolean hasKind();
    String getKind();

    // required string service = 4;
    boolean hasService();
    String getService();
  }
| public static final class BlockTokenIdentifierProto extends |
| com.google.protobuf.GeneratedMessage |
| implements BlockTokenIdentifierProtoOrBuilder { |
| // Use BlockTokenIdentifierProto.newBuilder() to construct. |
| private BlockTokenIdentifierProto(Builder builder) { |
| super(builder); |
| } |
| private BlockTokenIdentifierProto(boolean noInit) {} |
| |
| private static final BlockTokenIdentifierProto defaultInstance; |
| public static BlockTokenIdentifierProto getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public BlockTokenIdentifierProto getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_fieldAccessorTable; |
| } |
| |
| private int bitField0_; |
| // required bytes identifier = 1; |
| public static final int IDENTIFIER_FIELD_NUMBER = 1; |
| private com.google.protobuf.ByteString identifier_; |
| public boolean hasIdentifier() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| public com.google.protobuf.ByteString getIdentifier() { |
| return identifier_; |
| } |
| |
| // required bytes password = 2; |
| public static final int PASSWORD_FIELD_NUMBER = 2; |
| private com.google.protobuf.ByteString password_; |
| public boolean hasPassword() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| public com.google.protobuf.ByteString getPassword() { |
| return password_; |
| } |
| |
| // required string kind = 3; |
| public static final int KIND_FIELD_NUMBER = 3; |
| private java.lang.Object kind_; |
| public boolean hasKind() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| public String getKind() { |
| java.lang.Object ref = kind_; |
| if (ref instanceof String) { |
| return (String) ref; |
| } else { |
| com.google.protobuf.ByteString bs = |
| (com.google.protobuf.ByteString) ref; |
| String s = bs.toStringUtf8(); |
| if (com.google.protobuf.Internal.isValidUtf8(bs)) { |
| kind_ = s; |
| } |
| return s; |
| } |
| } |
| private com.google.protobuf.ByteString getKindBytes() { |
| java.lang.Object ref = kind_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8((String) ref); |
| kind_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| |
| // required string service = 4; |
| public static final int SERVICE_FIELD_NUMBER = 4; |
| private java.lang.Object service_; |
| public boolean hasService() { |
| return ((bitField0_ & 0x00000008) == 0x00000008); |
| } |
| public String getService() { |
| java.lang.Object ref = service_; |
| if (ref instanceof String) { |
| return (String) ref; |
| } else { |
| com.google.protobuf.ByteString bs = |
| (com.google.protobuf.ByteString) ref; |
| String s = bs.toStringUtf8(); |
| if (com.google.protobuf.Internal.isValidUtf8(bs)) { |
| service_ = s; |
| } |
| return s; |
| } |
| } |
| private com.google.protobuf.ByteString getServiceBytes() { |
| java.lang.Object ref = service_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8((String) ref); |
| service_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| |
| private void initFields() { |
| identifier_ = com.google.protobuf.ByteString.EMPTY; |
| password_ = com.google.protobuf.ByteString.EMPTY; |
| kind_ = ""; |
| service_ = ""; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| if (!hasIdentifier()) { |
| memoizedIsInitialized = 0; |
| return false; |
| } |
| if (!hasPassword()) { |
| memoizedIsInitialized = 0; |
| return false; |
| } |
| if (!hasKind()) { |
| memoizedIsInitialized = 0; |
| return false; |
| } |
| if (!hasService()) { |
| memoizedIsInitialized = 0; |
| return false; |
| } |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeBytes(1, identifier_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| output.writeBytes(2, password_); |
| } |
| if (((bitField0_ & 0x00000004) == 0x00000004)) { |
| output.writeBytes(3, getKindBytes()); |
| } |
| if (((bitField0_ & 0x00000008) == 0x00000008)) { |
| output.writeBytes(4, getServiceBytes()); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(1, identifier_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(2, password_); |
| } |
| if (((bitField0_ & 0x00000004) == 0x00000004)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(3, getKindBytes()); |
| } |
| if (((bitField0_ & 0x00000008) == 0x00000008)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(4, getServiceBytes()); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto) obj; |
| |
| boolean result = true; |
| result = result && (hasIdentifier() == other.hasIdentifier()); |
| if (hasIdentifier()) { |
| result = result && getIdentifier() |
| .equals(other.getIdentifier()); |
| } |
| result = result && (hasPassword() == other.hasPassword()); |
| if (hasPassword()) { |
| result = result && getPassword() |
| .equals(other.getPassword()); |
| } |
| result = result && (hasKind() == other.hasKind()); |
| if (hasKind()) { |
| result = result && getKind() |
| .equals(other.getKind()); |
| } |
| result = result && (hasService() == other.hasService()); |
| if (hasService()) { |
| result = result && getService() |
| .equals(other.getService()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| @java.lang.Override |
| public int hashCode() { |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasIdentifier()) { |
| hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; |
| hash = (53 * hash) + getIdentifier().hashCode(); |
| } |
| if (hasPassword()) { |
| hash = (37 * hash) + PASSWORD_FIELD_NUMBER; |
| hash = (53 * hash) + getPassword().hashCode(); |
| } |
| if (hasKind()) { |
| hash = (37 * hash) + KIND_FIELD_NUMBER; |
| hash = (53 * hash) + getKind().hashCode(); |
| } |
| if (hasService()) { |
| hash = (37 * hash) + SERVICE_FIELD_NUMBER; |
| hash = (53 * hash) + getService().hashCode(); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return newBuilder().mergeFrom(data).buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return newBuilder().mergeFrom(data, extensionRegistry) |
| .buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return newBuilder().mergeFrom(data).buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return newBuilder().mergeFrom(data, extensionRegistry) |
| .buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return newBuilder().mergeFrom(input).buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return newBuilder().mergeFrom(input, extensionRegistry) |
| .buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| Builder builder = newBuilder(); |
| if (builder.mergeDelimitedFrom(input)) { |
| return builder.buildParsed(); |
| } else { |
| return null; |
| } |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| Builder builder = newBuilder(); |
| if (builder.mergeDelimitedFrom(input, extensionRegistry)) { |
| return builder.buildParsed(); |
| } else { |
| return null; |
| } |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return newBuilder().mergeFrom(input).buildParsed(); |
| } |
| public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return newBuilder().mergeFrom(input, extensionRegistry) |
| .buildParsed(); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
    // Builder pre-populated by merging all set fields of the prototype.
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
| public Builder toBuilder() { return newBuilder(this); } |
| |
    // Framework hook: creates a builder wired to a parent so that nested-builder
    // changes propagate change notifications upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Mutable builder for BlockTokenIdentifierProto. Field presence is tracked
    // in bitField0_: bit 0x1 = identifier, 0x2 = password, 0x4 = kind,
    // 0x8 = service (all four are declared 'required' in hdfs.proto).
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-linked constructor: changes made here notify the parent builder.
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields, so there are no
      // field builders to force-initialize even when the framework asks for it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        identifier_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        password_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        kind_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        service_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDescriptor();
      }

      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
      }

      // Throws UninitializedMessageException if any required field is unset.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but surfaces missing required fields as an
      // InvalidProtocolBufferException so parse paths throw IOException types.
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      // Builds without the required-field check. Note: field values are copied
      // unconditionally; only the presence bits are gated on from_bitField0_.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.identifier_ = identifier_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.password_ = password_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.kind_ = kind_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.service_ = service_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic-dispatch merge: routes to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies each field that is set on 'other' over this builder's value.
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance()) return this;
        if (other.hasIdentifier()) {
          setIdentifier(other.getIdentifier());
        }
        if (other.hasPassword()) {
          setPassword(other.getPassword());
        }
        if (other.hasKind()) {
          setKind(other.getKind());
        }
        if (other.hasService()) {
          setService(other.getService());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // True only when all four required fields have been set.
      public final boolean isInitialized() {
        if (!hasIdentifier()) {
          
          return false;
        }
        if (!hasPassword()) {
          
          return false;
        }
        if (!hasKind()) {
          
          return false;
        }
        if (!hasService()) {
          
          return false;
        }
        return true;
      }

      // Wire-format parse loop. Tag layout: case 0 = end of input,
      // 10/18/26/34 = (field 1..4, wire type 2 = length-delimited). The
      // 'default' arm preceding 'case 10' is unusual but legal Java — switch
      // case order does not affect matching; it handles unrecognized tags by
      // stashing them in unknownFields (and returns if the tag is a group end).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              identifier_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              password_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              kind_ = input.readBytes();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000008;
              service_ = input.readBytes();
              break;
            }
          }
        }
      }

      // Presence bits for the four fields; see class comment for the mapping.
      private int bitField0_;

      // required bytes identifier = 1;
      private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasIdentifier() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public com.google.protobuf.ByteString getIdentifier() {
        return identifier_;
      }
      public Builder setIdentifier(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        identifier_ = value;
        onChanged();
        return this;
      }
      public Builder clearIdentifier() {
        bitField0_ = (bitField0_ & ~0x00000001);
        identifier_ = getDefaultInstance().getIdentifier();
        onChanged();
        return this;
      }

      // required bytes password = 2;
      private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasPassword() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public com.google.protobuf.ByteString getPassword() {
        return password_;
      }
      public Builder setPassword(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        password_ = value;
        onChanged();
        return this;
      }
      public Builder clearPassword() {
        bitField0_ = (bitField0_ & ~0x00000002);
        password_ = getDefaultInstance().getPassword();
        onChanged();
        return this;
      }

      // required string kind = 3;
      // kind_ holds either a String or a lazily-decoded ByteString from the wire.
      private java.lang.Object kind_ = "";
      public boolean hasKind() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      // Decodes and caches the String form on first access after a parse.
      public String getKind() {
        java.lang.Object ref = kind_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          kind_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setKind(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        kind_ = value;
        onChanged();
        return this;
      }
      public Builder clearKind() {
        bitField0_ = (bitField0_ & ~0x00000004);
        kind_ = getDefaultInstance().getKind();
        onChanged();
        return this;
      }
      // Package-private fast path used by generated code to store the raw
      // wire bytes without decoding them to a String.
      void setKind(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000004;
        kind_ = value;
        onChanged();
      }

      // required string service = 4;
      // service_ holds either a String or a lazily-decoded ByteString.
      private java.lang.Object service_ = "";
      public boolean hasService() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      // Decodes and caches the String form on first access after a parse.
      public String getService() {
        java.lang.Object ref = service_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          service_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setService(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        service_ = value;
        onChanged();
        return this;
      }
      public Builder clearService() {
        bitField0_ = (bitField0_ & ~0x00000008);
        service_ = getDefaultInstance().getService();
        onChanged();
        return this;
      }
      // Package-private fast path: stores raw wire bytes without decoding.
      void setService(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000008;
        service_ = value;
        onChanged();
      }

      // @@protoc_insertion_point(builder_scope:BlockTokenIdentifierProto)
    }
| |
    // Eagerly creates the shared default instance via the no-init constructor
    // (bypassing the Builder), then populates field defaults; initFields() is
    // defined earlier in this class.
    static {
      defaultInstance = new BlockTokenIdentifierProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:BlockTokenIdentifierProto) |
| } |
| |
  // Read-only view shared by DatanodeIDProto and its Builder: presence checks
  // plus getters for the three required fields of the DatanodeIDProto message.
  public interface DatanodeIDProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string name = 1;
    boolean hasName();
    String getName();
    
    // required string storageID = 2;
    boolean hasStorageID();
    String getStorageID();
    
    // required uint32 infoPort = 3;
    boolean hasInfoPort();
    int getInfoPort();
  }
  // Immutable message with three required fields: name (string, field 1),
  // storageID (string, field 2), infoPort (uint32, field 3). Presence is
  // tracked in bitField0_: 0x1 = name, 0x2 = storageID, 0x4 = infoPort.
  public static final class DatanodeIDProto extends
      com.google.protobuf.GeneratedMessage
      implements DatanodeIDProtoOrBuilder {
    // Use DatanodeIDProto.newBuilder() to construct.
    private DatanodeIDProto(Builder builder) {
      super(builder);
    }
    // Used only by the static initializer to make the default instance.
    private DatanodeIDProto(boolean noInit) {}
    
    private static final DatanodeIDProto defaultInstance;
    public static DatanodeIDProto getDefaultInstance() {
      return defaultInstance;
    }
    
    public DatanodeIDProto getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_fieldAccessorTable;
    }
    
    private int bitField0_;
    // required string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    // Holds either a String or the raw ByteString as read from the wire.
    private java.lang.Object name_;
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    // Lazily decodes the wire bytes; the decoded String is cached only when
    // the bytes are valid UTF-8, so invalid input is re-decoded on each call.
    public String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          name_ = s;
        }
        return s;
      }
    }
    // Serialization helper: UTF-8 encodes (and caches) when a String is held.
    private com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required string storageID = 2;
    public static final int STORAGEID_FIELD_NUMBER = 2;
    // Holds either a String or the raw ByteString as read from the wire.
    private java.lang.Object storageID_;
    public boolean hasStorageID() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    // Same lazy decode-and-cache pattern as getName().
    public String getStorageID() {
      java.lang.Object ref = storageID_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          storageID_ = s;
        }
        return s;
      }
    }
    private com.google.protobuf.ByteString getStorageIDBytes() {
      java.lang.Object ref = storageID_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        storageID_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    
    // required uint32 infoPort = 3;
    public static final int INFOPORT_FIELD_NUMBER = 3;
    private int infoPort_;
    public boolean hasInfoPort() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public int getInfoPort() {
      return infoPort_;
    }
    
    private void initFields() {
      name_ = "";
      storageID_ = "";
      infoPort_ = 0;
    }
    // Memoized tri-state: -1 = unknown, 0 = missing required fields, 1 = ok.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      
      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasStorageID()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasInfoPort()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    
    // Serializes set fields in field-number order, then any unknown fields.
    // getSerializedSize() is called first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getStorageIDBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, infoPort_);
      }
      getUnknownFields().writeTo(output);
    }
    
    // Computed once and cached; safe because the message is immutable.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
    
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getStorageIDBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, infoPort_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    
    // Field-by-field equality: each field participates only when its presence
    // flags agree; unknown fields are compared as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto) obj;
      
      boolean result = true;
      result = result && (hasName() == other.hasName());
      if (hasName()) {
        result = result && getName()
            .equals(other.getName());
      }
      result = result && (hasStorageID() == other.hasStorageID());
      if (hasStorageID()) {
        result = result && getStorageID()
            .equals(other.getStorageID());
      }
      result = result && (hasInfoPort() == other.hasInfoPort());
      if (hasInfoPort()) {
        result = result && (getInfoPort()
            == other.getInfoPort());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    
    // Hash mixes the descriptor, each set field (keyed by field number), and
    // the unknown fields — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasStorageID()) {
        hash = (37 * hash) + STORAGEID_FIELD_NUMBER;
        hash = (53 * hash) + getStorageID().hashCode();
      }
      if (hasInfoPort()) {
        hash = (37 * hash) + INFOPORT_FIELD_NUMBER;
        hash = (53 * hash) + getInfoPort();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
    
    // ---- Static parse entry points; buildParsed() converts missing required
    // fields into InvalidProtocolBufferException. ----
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
          .buildParsed();
    }
    // Length-delimited variants return null when no message could be read.
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
          .buildParsed();
    }
    
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    
    // Framework hook: builder wired to a parent for change propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    // Mutable builder for DatanodeIDProto; uses the same bitField0_ presence
    // layout as the message (0x1 name, 0x2 storageID, 0x4 infoPort).
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op: this message has no sub-message fields to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        storageID_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        infoPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDescriptor();
      }

      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
      }

      // Throws UninitializedMessageException if any required field is unset.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but surfaces missing required fields as an
      // InvalidProtocolBufferException for the parse entry points.
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      // Builds without the required-field check; values are copied
      // unconditionally, only presence bits are gated.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.name_ = name_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.storageID_ = storageID_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.infoPort_ = infoPort_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic-dispatch merge: routes to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies each field that is set on 'other' over this builder's value.
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance()) return this;
        if (other.hasName()) {
          setName(other.getName());
        }
        if (other.hasStorageID()) {
          setStorageID(other.getStorageID());
        }
        if (other.hasInfoPort()) {
          setInfoPort(other.getInfoPort());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // True only when all three required fields have been set.
      public final boolean isInitialized() {
        if (!hasName()) {
          
          return false;
        }
        if (!hasStorageID()) {
          
          return false;
        }
        if (!hasInfoPort()) {
          
          return false;
        }
        return true;
      }

      // Wire-format parse loop. Tags: 0 = end of input; 10/18 = fields 1-2
      // (length-delimited); 24 = field 3 (varint). The 'default' arm before
      // 'case 10' is legal — switch case order does not affect matching.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              name_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              storageID_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              infoPort_ = input.readUInt32();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required string name = 1;
      // Holds either a String or a lazily-decoded ByteString from the wire.
      private java.lang.Object name_ = "";
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      // Decodes and caches the String form on first access after a parse.
      public String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          name_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setName(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
      // Package-private fast path: stores raw wire bytes without decoding.
      void setName(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
      }

      // required string storageID = 2;
      // Holds either a String or a lazily-decoded ByteString from the wire.
      private java.lang.Object storageID_ = "";
      public boolean hasStorageID() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      // Decodes and caches the String form on first access after a parse.
      public String getStorageID() {
        java.lang.Object ref = storageID_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          storageID_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setStorageID(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        storageID_ = value;
        onChanged();
        return this;
      }
      public Builder clearStorageID() {
        bitField0_ = (bitField0_ & ~0x00000002);
        storageID_ = getDefaultInstance().getStorageID();
        onChanged();
        return this;
      }
      // Package-private fast path: stores raw wire bytes without decoding.
      void setStorageID(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        storageID_ = value;
        onChanged();
      }

      // required uint32 infoPort = 3;
      private int infoPort_ ;
      public boolean hasInfoPort() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public int getInfoPort() {
        return infoPort_;
      }
      public Builder setInfoPort(int value) {
        bitField0_ |= 0x00000004;
        infoPort_ = value;
        onChanged();
        return this;
      }
      public Builder clearInfoPort() {
        bitField0_ = (bitField0_ & ~0x00000004);
        infoPort_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DatanodeIDProto)
    }
    
    // Eagerly creates the shared default instance (bypassing the Builder via
    // the noInit constructor) and populates field defaults.
    static {
      defaultInstance = new DatanodeIDProto(true);
      defaultInstance.initFields();
    }
    
    // @@protoc_insertion_point(class_scope:DatanodeIDProto)
  }
| |
  // Read-only view shared by DatanodeInfoProto and its Builder. One required
  // DatanodeIDProto sub-message plus optional usage/liveness statistics.
  public interface DatanodeInfoProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required .DatanodeIDProto id = 1;
    boolean hasId();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder();
    
    // optional uint64 capacity = 2;
    boolean hasCapacity();
    long getCapacity();
    
    // optional uint64 dfsUsed = 3;
    boolean hasDfsUsed();
    long getDfsUsed();
    
    // optional uint64 remaining = 4;
    boolean hasRemaining();
    long getRemaining();
    
    // optional uint64 blockPoolUsed = 5;
    boolean hasBlockPoolUsed();
    long getBlockPoolUsed();
    
    // optional uint64 lastUpdate = 6;
    boolean hasLastUpdate();
    long getLastUpdate();
    
    // optional uint32 xceiverCount = 7;
    boolean hasXceiverCount();
    int getXceiverCount();
    
    // optional string location = 8;
    boolean hasLocation();
    String getLocation();
    
    // optional string hostName = 9;
    boolean hasHostName();
    String getHostName();
    
    // optional .DatanodeInfoProto.AdminState adminState = 10;
    boolean hasAdminState();
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState();
  }
| public static final class DatanodeInfoProto extends |
| com.google.protobuf.GeneratedMessage |
| implements DatanodeInfoProtoOrBuilder { |
    // Use DatanodeInfoProto.newBuilder() to construct.
    private DatanodeInfoProto(Builder builder) {
      super(builder);
    }
| private DatanodeInfoProto(boolean noInit) {} |
| |
    // Shared immutable singleton; created in the class's static initializer.
    private static final DatanodeInfoProto defaultInstance;
    public static DatanodeInfoProto getDefaultInstance() {
      return defaultInstance;
    }
| |
    // MessageLite contract: instance-level access to the shared default.
    public DatanodeInfoProto getDefaultInstanceForType() {
      return defaultInstance;
    }
| |
    // Reflection descriptor for this message type, initialized elsewhere in
    // the outer HdfsProtos class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_descriptor;
    }
| |
    // Field accessor table used by the reflection-based GeneratedMessage
    // machinery; initialized elsewhere in the outer HdfsProtos class.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_fieldAccessorTable;
    }
| |
    // Generated enum for DatanodeInfoProto.AdminState. Each constant carries
    // (descriptor index, wire value); here the two happen to coincide.
    public enum AdminState
        implements com.google.protobuf.ProtocolMessageEnum {
      NORMAL(0, 0),
      DECOMMISSION_INPROGRESS(1, 1),
      DECOMMISSIONED(2, 2),
      ;
      
      public static final int NORMAL_VALUE = 0;
      public static final int DECOMMISSION_INPROGRESS_VALUE = 1;
      public static final int DECOMMISSIONED_VALUE = 2;
      
      
      public final int getNumber() { return value; }
      
      // Maps a wire value back to its constant; returns null (not an
      // exception) for unknown values, per proto2 enum semantics.
      public static AdminState valueOf(int value) {
        switch (value) {
          case 0: return NORMAL;
          case 1: return DECOMMISSION_INPROGRESS;
          case 2: return DECOMMISSIONED;
          default: return null;
        }
      }
      
      public static com.google.protobuf.Internal.EnumLiteMap<AdminState>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<AdminState>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<AdminState>() {
              public AdminState findValueByNumber(int number) {
                return AdminState.valueOf(number);
              }
            };
      
      // Descriptor lookup uses the declaration index, not the wire value.
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDescriptor().getEnumTypes().get(0);
      }
      
      private static final AdminState[] VALUES = {
        NORMAL, DECOMMISSION_INPROGRESS, DECOMMISSIONED, 
      };
      
      // Reflection entry point: resolves a descriptor back to the constant.
      public static AdminState valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }
      
      private final int index;
      private final int value;
      
      private AdminState(int index, int value) {
        this.index = index;
        this.value = value;
      }
      
      // @@protoc_insertion_point(enum_scope:DatanodeInfoProto.AdminState)
    }
| |
    // Presence bitmap: one bit per optional/required field (bit 0 = id, ...).
    private int bitField0_;
    // required .DatanodeIDProto id = 1;
    public static final int ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto id_;
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId() {
      return id_;
    }
    // Message field: the stored message doubles as its own OrBuilder view.
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder() {
      return id_;
    }
| |
    // optional uint64 capacity = 2; presence tracked by bitField0_ bit 1.
    public static final int CAPACITY_FIELD_NUMBER = 2;
    private long capacity_;
    public boolean hasCapacity() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getCapacity() {
      return capacity_;
    }
| |
    // optional uint64 dfsUsed = 3; presence tracked by bitField0_ bit 2.
    public static final int DFSUSED_FIELD_NUMBER = 3;
    private long dfsUsed_;
    public boolean hasDfsUsed() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    public long getDfsUsed() {
      return dfsUsed_;
    }
| |
    // optional uint64 remaining = 4; presence tracked by bitField0_ bit 3.
    public static final int REMAINING_FIELD_NUMBER = 4;
    private long remaining_;
    public boolean hasRemaining() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    public long getRemaining() {
      return remaining_;
    }
| |
    // optional uint64 blockPoolUsed = 5; presence tracked by bitField0_ bit 4.
    public static final int BLOCKPOOLUSED_FIELD_NUMBER = 5;
    private long blockPoolUsed_;
    public boolean hasBlockPoolUsed() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    public long getBlockPoolUsed() {
      return blockPoolUsed_;
    }
| |
    // optional uint64 lastUpdate = 6; presence tracked by bitField0_ bit 5.
    public static final int LASTUPDATE_FIELD_NUMBER = 6;
    private long lastUpdate_;
    public boolean hasLastUpdate() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    public long getLastUpdate() {
      return lastUpdate_;
    }
| |
    // optional uint32 xceiverCount = 7; presence tracked by bitField0_ bit 6.
    public static final int XCEIVERCOUNT_FIELD_NUMBER = 7;
    private int xceiverCount_;
    public boolean hasXceiverCount() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    public int getXceiverCount() {
      return xceiverCount_;
    }
| |
    // optional string location = 8; presence tracked by bitField0_ bit 7.
    public static final int LOCATION_FIELD_NUMBER = 8;
    // Holds either a String or a ByteString; converted lazily on access.
    private java.lang.Object location_;
    public boolean hasLocation() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    public String getLocation() {
      java.lang.Object ref = location_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only if the bytes were valid UTF-8, so a
        // later getLocationBytes() can still return the original raw bytes.
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          location_ = s;
        }
        return s;
      }
    }
    // Returns (and caches) the UTF-8 bytes; used during serialization.
    private com.google.protobuf.ByteString getLocationBytes() {
      java.lang.Object ref = location_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        location_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional string hostName = 9; presence tracked by bitField0_ bit 8.
    public static final int HOSTNAME_FIELD_NUMBER = 9;
    // Holds either a String or a ByteString; converted lazily on access.
    private java.lang.Object hostName_;
    public boolean hasHostName() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    public String getHostName() {
      java.lang.Object ref = hostName_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only if the bytes were valid UTF-8.
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          hostName_ = s;
        }
        return s;
      }
    }
    // Returns (and caches) the UTF-8 bytes; used during serialization.
    private com.google.protobuf.ByteString getHostNameBytes() {
      java.lang.Object ref = hostName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        hostName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional .DatanodeInfoProto.AdminState adminState = 10;
    // Presence tracked by bitField0_ bit 9.
    public static final int ADMINSTATE_FIELD_NUMBER = 10;
    private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState adminState_;
    public boolean hasAdminState() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState() {
      return adminState_;
    }
| |
    // Resets every field to its proto default (0, "", NORMAL, default message).
    private void initFields() {
      id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
      capacity_ = 0L;
      dfsUsed_ = 0L;
      remaining_ = 0L;
      blockPoolUsed_ = 0L;
      lastUpdate_ = 0L;
      xceiverCount_ = 0;
      location_ = "";
      hostName_ = "";
      adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
    }
    // Memoized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // True when all required fields (id, transitively) are set.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getId().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes every present field (per bitField0_) in ascending field-number
    // order, then any unknown fields retained from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Compute (and memoize) the size first; nested-message writes rely on it.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, id_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, capacity_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, dfsUsed_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, remaining_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt64(5, blockPoolUsed_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt64(6, lastUpdate_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt32(7, xceiverCount_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeBytes(8, getLocationBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeBytes(9, getHostNameBytes());
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeEnum(10, adminState_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
| |
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of each present field plus unknown fields; the
    // computation mirrors writeTo() field-for-field.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, id_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, capacity_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, dfsUsed_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, remaining_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(5, blockPoolUsed_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(6, lastUpdate_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(7, xceiverCount_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(8, getLocationBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(9, getHostNameBytes());
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(10, adminState_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the superclass's replacement form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Field-by-field equality: each pair of fields must agree on presence and,
    // when present, on value; unknown fields must also match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto) obj;

      boolean result = true;
      result = result && (hasId() == other.hasId());
      if (hasId()) {
        result = result && getId()
            .equals(other.getId());
      }
      result = result && (hasCapacity() == other.hasCapacity());
      if (hasCapacity()) {
        result = result && (getCapacity()
            == other.getCapacity());
      }
      result = result && (hasDfsUsed() == other.hasDfsUsed());
      if (hasDfsUsed()) {
        result = result && (getDfsUsed()
            == other.getDfsUsed());
      }
      result = result && (hasRemaining() == other.hasRemaining());
      if (hasRemaining()) {
        result = result && (getRemaining()
            == other.getRemaining());
      }
      result = result && (hasBlockPoolUsed() == other.hasBlockPoolUsed());
      if (hasBlockPoolUsed()) {
        result = result && (getBlockPoolUsed()
            == other.getBlockPoolUsed());
      }
      result = result && (hasLastUpdate() == other.hasLastUpdate());
      if (hasLastUpdate()) {
        result = result && (getLastUpdate()
            == other.getLastUpdate());
      }
      result = result && (hasXceiverCount() == other.hasXceiverCount());
      if (hasXceiverCount()) {
        result = result && (getXceiverCount()
            == other.getXceiverCount());
      }
      result = result && (hasLocation() == other.hasLocation());
      if (hasLocation()) {
        result = result && getLocation()
            .equals(other.getLocation());
      }
      result = result && (hasHostName() == other.hasHostName());
      if (hasHostName()) {
        result = result && getHostName()
            .equals(other.getHostName());
      }
      result = result && (hasAdminState() == other.hasAdminState());
      if (hasAdminState()) {
        result = result &&
            (getAdminState() == other.getAdminState());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
| |
    // Hash mixes the descriptor, each present field (prefixed by its field
    // number), and unknown fields — consistent with equals() above.
    // hashLong/hashEnum are helpers inherited from the generated-message
    // superclass hierarchy (defined outside this view).
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId().hashCode();
      }
      if (hasCapacity()) {
        hash = (37 * hash) + CAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCapacity());
      }
      if (hasDfsUsed()) {
        hash = (37 * hash) + DFSUSED_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getDfsUsed());
      }
      if (hasRemaining()) {
        hash = (37 * hash) + REMAINING_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getRemaining());
      }
      if (hasBlockPoolUsed()) {
        hash = (37 * hash) + BLOCKPOOLUSED_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getBlockPoolUsed());
      }
      if (hasLastUpdate()) {
        hash = (37 * hash) + LASTUPDATE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLastUpdate());
      }
      if (hasXceiverCount()) {
        hash = (37 * hash) + XCEIVERCOUNT_FIELD_NUMBER;
        hash = (53 * hash) + getXceiverCount();
      }
      if (hasLocation()) {
        hash = (37 * hash) + LOCATION_FIELD_NUMBER;
        hash = (53 * hash) + getLocation().hashCode();
      }
      if (hasHostName()) {
        hash = (37 * hash) + HOSTNAME_FIELD_NUMBER;
        hash = (53 * hash) + getHostName().hashCode();
      }
      if (hasAdminState()) {
        hash = (37 * hash) + ADMINSTATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getAdminState());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
| |
    // Static parsing entry points. Each overload feeds a fresh Builder and then
    // buildParsed(), which converts a missing required field into an
    // InvalidProtocolBufferException rather than an uninitialized-message error.
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants read a varint length prefix; they return null when the
    // stream is already at EOF (mergeDelimitedFrom reports no message).
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
| |
    // Builder factories: fresh builder, builder pre-populated from a prototype,
    // and a builder seeded from this instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: creates a builder attached to a parent for change
    // notifications (used when this message is nested in another builder).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProtoOrBuilder { |
      // Same descriptor/accessor table as the message type the Builder builds.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_fieldAccessorTable;
      }
| |
      // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-attached variant: changes propagate to the enclosing builder.
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-field builder for `id` when the runtime is
      // configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getIdFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
| |
      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        // `id` lives either inline (id_) or in the nested builder, never both.
        if (idBuilder_ == null) {
          id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
        } else {
          idBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        capacity_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        dfsUsed_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        remaining_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        blockPoolUsed_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000010);
        lastUpdate_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        xceiverCount_ = 0;
        bitField0_ = (bitField0_ & ~0x00000040);
        location_ = "";
        bitField0_ = (bitField0_ & ~0x00000080);
        hostName_ = "";
        bitField0_ = (bitField0_ & ~0x00000100);
        adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
        bitField0_ = (bitField0_ & ~0x00000200);
        return this;
      }
| |
      // Deep-ish copy: snapshots current state via buildPartial into a new builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
| |
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDescriptor();
      }

      // Default instance of the message type this builder produces.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance();
      }
| |
      // Builds and verifies required fields; throws UninitializedMessageException
      // (unchecked) when `id` is missing.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto build() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Same check, but surfaces failures as InvalidProtocolBufferException —
      // the form the parseFrom/parseDelimitedFrom entry points expect.
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
| |
      // Copies builder state into a new message without checking required
      // fields. Values are copied unconditionally; only the presence bits are
      // gated on the builder's bitField0_.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto buildPartial() {
        org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // `id` comes from the nested builder when one exists, else the inline field.
        if (idBuilder_ == null) {
          result.id_ = id_;
        } else {
          result.id_ = idBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.capacity_ = capacity_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.dfsUsed_ = dfsUsed_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.remaining_ = remaining_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.blockPoolUsed_ = blockPoolUsed_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.lastUpdate_ = lastUpdate_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.xceiverCount_ = xceiverCount_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.location_ = location_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        result.hostName_ = hostName_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000200;
        }
        result.adminState_ = adminState_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
| |
      // Generic merge: dispatches to the typed overload for our own message
      // type; other message types fall back to reflection-based merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto) {
          return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
| |
      // Typed merge: copies only fields present in `other`; the default
      // instance is a no-op. `id` merges recursively, scalars overwrite.
      public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto other) {
        if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          mergeId(other.getId());
        }
        if (other.hasCapacity()) {
          setCapacity(other.getCapacity());
        }
        if (other.hasDfsUsed()) {
          setDfsUsed(other.getDfsUsed());
        }
        if (other.hasRemaining()) {
          setRemaining(other.getRemaining());
        }
        if (other.hasBlockPoolUsed()) {
          setBlockPoolUsed(other.getBlockPoolUsed());
        }
        if (other.hasLastUpdate()) {
          setLastUpdate(other.getLastUpdate());
        }
        if (other.hasXceiverCount()) {
          setXceiverCount(other.getXceiverCount());
        }
        if (other.hasLocation()) {
          setLocation(other.getLocation());
        }
        if (other.hasHostName()) {
          setHostName(other.getHostName());
        }
        if (other.hasAdminState()) {
          setAdminState(other.getAdminState());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
| |
      // Unlike the message's version this is not memoized: builder state mutates.
      // Requires the `id` field to be set and itself initialized.
      public final boolean isInitialized() {
        if (!hasId()) {
          
          return false;
        }
        if (!getId().isInitialized()) {
          
          return false;
        }
        return true;
      }
| |
      // Wire-format parse loop. Each case key is the full tag
      // ((fieldNumber << 3) | wireType): e.g. 10 = field 1 length-delimited,
      // 16 = field 2 varint, 80 = field 10 varint. Tag 0 means end of input;
      // unrecognized tags are preserved in the unknown-field set.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream: commit accumulated unknown fields and return.
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                // parseUnknownField returning false signals an end-group tag.
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              // required .DatanodeIDProto id = 1; merge into any existing value.
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder subBuilder = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder();
              if (hasId()) {
                subBuilder.mergeFrom(getId());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setId(subBuilder.buildPartial());
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              capacity_ = input.readUInt64();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              dfsUsed_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              remaining_ = input.readUInt64();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              blockPoolUsed_ = input.readUInt64();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              lastUpdate_ = input.readUInt64();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              xceiverCount_ = input.readUInt32();
              break;
            }
            case 66: {
              bitField0_ |= 0x00000080;
              location_ = input.readBytes();
              break;
            }
            case 74: {
              bitField0_ |= 0x00000100;
              hostName_ = input.readBytes();
              break;
            }
            case 80: {
              // Enum field: unknown numeric values are kept as unknown fields
              // rather than being coerced into a constant.
              int rawValue = input.readEnum();
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState value = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(10, rawValue);
              } else {
                bitField0_ |= 0x00000200;
                adminState_ = value;
              }
              break;
            }
          }
        }
      }
| |
      // Builder's own presence bitmap (mirrors the message's bit layout).
      private int bitField0_;

      // required .DatanodeIDProto id = 1;
      // `id_` holds the value until a SingleFieldBuilder is created; after
      // that, idBuilder_ owns the state and id_ is nulled out.
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder> idBuilder_;
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId() {
        if (idBuilder_ == null) {
          return id_;
        } else {
          return idBuilder_.getMessage();
        }
      }
      // Replaces the field with a complete message value.
      public Builder setId(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto value) {
        if (idBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          id_ = value;
          onChanged();
        } else {
          idBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      // Convenience overload taking a builder; builds it immediately.
      public Builder setId(
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder builderForValue) {
        if (idBuilder_ == null) {
          id_ = builderForValue.build();
          onChanged();
        } else {
          idBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      // Merges into the existing value when one is set; otherwise replaces it.
      public Builder mergeId(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto value) {
        if (idBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              id_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance()) {
            id_ =
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder(id_).mergeFrom(value).buildPartial();
          } else {
            id_ = value;
          }
          onChanged();
        } else {
          idBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      public Builder clearId() {
        if (idBuilder_ == null) {
          id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
          onChanged();
        } else {
          idBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      // Returns a mutable nested builder; marks the field present immediately.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder getIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getIdFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder() {
        if (idBuilder_ != null) {
          return idBuilder_.getMessageOrBuilder();
        } else {
          return id_;
        }
      }
      // Lazily creates the SingleFieldBuilder, transferring ownership of id_.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder> 
          getIdFieldBuilder() {
        if (idBuilder_ == null) {
          idBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder, org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder>(
                  id_,
                  getParentForChildren(),
                  isClean());
          id_ = null;
        }
        return idBuilder_;
      }
| |
      // optional uint64 capacity = 2; bit 1 of bitField0_.
      private long capacity_ ;
      public boolean hasCapacity() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getCapacity() {
        return capacity_;
      }
      public Builder setCapacity(long value) {
        bitField0_ |= 0x00000002;
        capacity_ = value;
        onChanged();
        return this;
      }
      public Builder clearCapacity() {
        bitField0_ = (bitField0_ & ~0x00000002);
        capacity_ = 0L;
        onChanged();
        return this;
      }
| |
      // optional uint64 dfsUsed = 3; bit 2 of bitField0_.
      private long dfsUsed_ ;
      public boolean hasDfsUsed() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getDfsUsed() {
        return dfsUsed_;
      }
      public Builder setDfsUsed(long value) {
        bitField0_ |= 0x00000004;
        dfsUsed_ = value;
        onChanged();
        return this;
      }
      public Builder clearDfsUsed() {
        bitField0_ = (bitField0_ & ~0x00000004);
        dfsUsed_ = 0L;
        onChanged();
        return this;
      }
| |
      // optional uint64 remaining = 4; bit 3 of bitField0_.
      private long remaining_ ;
      public boolean hasRemaining() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public long getRemaining() {
        return remaining_;
      }
      public Builder setRemaining(long value) {
        bitField0_ |= 0x00000008;
        remaining_ = value;
        onChanged();
        return this;
      }
      public Builder clearRemaining() {
        bitField0_ = (bitField0_ & ~0x00000008);
        remaining_ = 0L;
        onChanged();
        return this;
      }
| |
      // optional uint64 blockPoolUsed = 5; bit 4 of bitField0_.
      private long blockPoolUsed_ ;
      public boolean hasBlockPoolUsed() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public long getBlockPoolUsed() {
        return blockPoolUsed_;
      }
      public Builder setBlockPoolUsed(long value) {
        bitField0_ |= 0x00000010;
        blockPoolUsed_ = value;
        onChanged();
        return this;
      }
      public Builder clearBlockPoolUsed() {
        bitField0_ = (bitField0_ & ~0x00000010);
        blockPoolUsed_ = 0L;
        onChanged();
        return this;
      }
| |
      // optional uint64 lastUpdate = 6;
      // Builder storage; presence tracked by bit 0x00000020 of bitField0_.
      private long lastUpdate_ ;
      // True iff lastUpdate was explicitly set.
      public boolean hasLastUpdate() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      // Current value; 0L when unset.
      public long getLastUpdate() {
        return lastUpdate_;
      }
      // Sets the value, records presence, and notifies the base builder.
      public Builder setLastUpdate(long value) {
        bitField0_ |= 0x00000020;
        lastUpdate_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the proto default (0L).
      public Builder clearLastUpdate() {
        bitField0_ = (bitField0_ & ~0x00000020);
        lastUpdate_ = 0L;
        onChanged();
        return this;
      }
| |
      // optional uint32 xceiverCount = 7;
      // Builder storage (proto uint32 maps to Java int); presence bit 0x00000040.
      private int xceiverCount_ ;
      // True iff xceiverCount was explicitly set.
      public boolean hasXceiverCount() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      // Current value; 0 when unset.
      public int getXceiverCount() {
        return xceiverCount_;
      }
      // Sets the value, records presence, and notifies the base builder.
      public Builder setXceiverCount(int value) {
        bitField0_ |= 0x00000040;
        xceiverCount_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the proto default (0).
      public Builder clearXceiverCount() {
        bitField0_ = (bitField0_ & ~0x00000040);
        xceiverCount_ = 0;
        onChanged();
        return this;
      }
| |
      // optional string location = 8;
      // Holds either a String or a com.google.protobuf.ByteString; wire bytes
      // are stored raw and decoded to a String lazily on first read.
      private java.lang.Object location_ = "";
      // True iff location was explicitly set (presence bit 0x00000080).
      public boolean hasLocation() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      // Returns the value as a String, decoding and caching the UTF-8 bytes
      // if the field still holds a ByteString.
      public String getLocation() {
        java.lang.Object ref = location_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          location_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      // Sets the value (null is rejected), records presence, and notifies the
      // base builder.
      public Builder setLocation(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080;
        location_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default ("" via the
      // default instance).
      public Builder clearLocation() {
        bitField0_ = (bitField0_ & ~0x00000080);
        location_ = getDefaultInstance().getLocation();
        onChanged();
        return this;
      }
      // Package-private: stores the raw wire bytes without decoding; used by
      // the generated parsing code. getLocation() decodes lazily.
      void setLocation(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000080;
        location_ = value;
        onChanged();
      }
| |
      // optional string hostName = 9;
      // Holds either a String or a ByteString; decoded to String lazily.
      private java.lang.Object hostName_ = "";
      // True iff hostName was explicitly set (presence bit 0x00000100).
      public boolean hasHostName() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      // Returns the value as a String, decoding and caching the UTF-8 bytes
      // if the field still holds a ByteString.
      public String getHostName() {
        java.lang.Object ref = hostName_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          hostName_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      // Sets the value (null is rejected), records presence, and notifies the
      // base builder.
      public Builder setHostName(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000100;
        hostName_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default ("" via the
      // default instance).
      public Builder clearHostName() {
        bitField0_ = (bitField0_ & ~0x00000100);
        hostName_ = getDefaultInstance().getHostName();
        onChanged();
        return this;
      }
      // Package-private: stores the raw wire bytes without decoding; used by
      // the generated parsing code. getHostName() decodes lazily.
      void setHostName(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000100;
        hostName_ = value;
        onChanged();
      }
| |
      // optional .DatanodeInfoProto.AdminState adminState = 10;
      // Builder storage for the enum field; proto default is NORMAL.
      private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
      // True iff adminState was explicitly set (presence bit 0x00000200).
      public boolean hasAdminState() {
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      // Current value; NORMAL when unset.
      public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState() {
        return adminState_;
      }
      // Sets the value (null is rejected), records presence, and notifies the
      // base builder.
      public Builder setAdminState(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000200;
        adminState_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the proto default (NORMAL).
      public Builder clearAdminState() {
        bitField0_ = (bitField0_ & ~0x00000200);
        adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
        onChanged();
        return this;
      }
| |
| // @@protoc_insertion_point(builder_scope:DatanodeInfoProto) |
| } |
| |
    // Eagerly constructs the singleton returned by getDefaultInstance();
    // initFields() puts all fields into their default state.
    static {
      defaultInstance = new DatanodeInfoProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:DatanodeInfoProto) |
| } |
| |
  // Per-message descriptor / field-accessor-table pairs for every message in
  // hdfs.proto. They start null and are assigned by the
  // InternalDescriptorAssigner callback in this class's static initializer.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ExtendedBlockProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ExtendedBlockProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BlockTokenIdentifierProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DatanodeIDProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DatanodeIDProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DatanodeInfoProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DatanodeInfoProto_fieldAccessorTable;
| |
  // Returns the file-level descriptor for hdfs.proto; built exactly once by
  // the static initializer of this class.
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Builds the FileDescriptor from the serialized FileDescriptorProto bytes
  // below, then wires up each message's descriptor and reflection accessor
  // table via the assigner callback.
  static {
    // protoc-escaped serialized descriptor for hdfs.proto. Generated data —
    // must not be edited by hand.
    java.lang.String[] descriptorData = {
      "\n\nhdfs.proto\"`\n\022ExtendedBlockProto\022\016\n\006po" +
      "olId\030\001 \002(\t\022\017\n\007blockId\030\002 \002(\004\022\020\n\010numBytes\030" +
      "\003 \002(\004\022\027\n\017generationStamp\030\004 \002(\004\"`\n\031BlockT" +
      "okenIdentifierProto\022\022\n\nidentifier\030\001 \002(\014\022" +
      "\020\n\010password\030\002 \002(\014\022\014\n\004kind\030\003 \002(\t\022\017\n\007servi" +
      "ce\030\004 \002(\t\"D\n\017DatanodeIDProto\022\014\n\004name\030\001 \002(" +
      "\t\022\021\n\tstorageID\030\002 \002(\t\022\020\n\010infoPort\030\003 \002(\r\"\312" +
      "\002\n\021DatanodeInfoProto\022\034\n\002id\030\001 \002(\0132\020.Datan" +
      "odeIDProto\022\020\n\010capacity\030\002 \001(\004\022\017\n\007dfsUsed\030" +
      "\003 \001(\004\022\021\n\tremaining\030\004 \001(\004\022\025\n\rblockPoolUse",
      "d\030\005 \001(\004\022\022\n\nlastUpdate\030\006 \001(\004\022\024\n\014xceiverCo" +
      "unt\030\007 \001(\r\022\020\n\010location\030\010 \001(\t\022\020\n\010hostName\030" +
      "\t \001(\t\0221\n\nadminState\030\n \001(\0162\035.DatanodeInfo" +
      "Proto.AdminState\"I\n\nAdminState\022\n\n\006NORMAL" +
      "\020\000\022\033\n\027DECOMMISSION_INPROGRESS\020\001\022\022\n\016DECOM" +
      "MISSIONED\020\002B6\n%org.apache.hadoop.hdfs.pr" +
      "otocol.protoB\nHdfsProtos\240\001\001"
    };
    // Callback invoked once the FileDescriptor is built: caches it in
    // `descriptor` and populates each internal_static_* pair. Message indices
    // (0..3) and field-name arrays must match the declaration order in
    // hdfs.proto.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_ExtendedBlockProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_ExtendedBlockProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ExtendedBlockProto_descriptor,
              new java.lang.String[] { "PoolId", "BlockId", "NumBytes", "GenerationStamp", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.Builder.class);
          internal_static_BlockTokenIdentifierProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_BlockTokenIdentifierProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BlockTokenIdentifierProto_descriptor,
              new java.lang.String[] { "Identifier", "Password", "Kind", "Service", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.Builder.class);
          internal_static_DatanodeIDProto_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_DatanodeIDProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DatanodeIDProto_descriptor,
              new java.lang.String[] { "Name", "StorageID", "InfoPort", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.Builder.class);
          internal_static_DatanodeInfoProto_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_DatanodeInfoProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_DatanodeInfoProto_descriptor,
              new java.lang.String[] { "Id", "Capacity", "DfsUsed", "Remaining", "BlockPoolUsed", "LastUpdate", "XceiverCount", "Location", "HostName", "AdminState", },
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.class,
              org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.Builder.class);
          // No extensions registered for this file.
          return null;
        }
      };
    // hdfs.proto imports no other .proto files, hence the empty dependency
    // array.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
| |
| // @@protoc_insertion_point(outer_class_scope) |
| } |