// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/core/org/apache/hadoop/io/ProtoTest.proto
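//
// The .proto definition that produced this file, reconstructed from the
// serialized descriptor data embedded at the bottom of this class (a sketch;
// the original ProtoTest.proto may differ in formatting):
//
//   package org.apache.hadoop.io;
//   option optimize_for = SPEED;  // inferred from the trailing "H\001" file option
//
//   message ProtoKey {
//     required int32 value = 1;
//   }
//
//   message ProtoValue {
//     required string value = 2;
//   }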
package org.apache.hadoop.io;
public final class ProtoTest {
private ProtoTest() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
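    // ProtoTest.proto defines no extensions, so there is nothing to register.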
}
public static final class ProtoKey extends
com.google.protobuf.GeneratedMessage {
// Use ProtoKey.newBuilder() to construct.
private ProtoKey() {
initFields();
}
private ProtoKey(boolean noInit) {}
private static final ProtoKey defaultInstance;
public static ProtoKey getDefaultInstance() {
return defaultInstance;
}
public ProtoKey getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoKey_fieldAccessorTable;
}
// required int32 value = 1;
public static final int VALUE_FIELD_NUMBER = 1;
private boolean hasValue;
private int value_ = 0;
public boolean hasValue() { return hasValue; }
public int getValue() { return value_; }
private void initFields() {
}
public final boolean isInitialized() {
if (!hasValue) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasValue()) {
output.writeInt32(1, getValue());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasValue()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, getValue());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.io.ProtoTest.ProtoKey prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
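  // Example usage (a minimal sketch, not generated output): build a key,
  // serialize it to bytes, and parse it back. toByteArray() and parseFrom()
  // are standard protobuf message methods.
  //
  //   ProtoTest.ProtoKey key =
  //       ProtoTest.ProtoKey.newBuilder().setValue(42).build();
  //   byte[] bytes = key.toByteArray();
  //   ProtoTest.ProtoKey parsed = ProtoTest.ProtoKey.parseFrom(bytes);
  //   assert parsed.getValue() == 42;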
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.io.ProtoTest.ProtoKey result;
// Construct using org.apache.hadoop.io.ProtoTest.ProtoKey.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.io.ProtoTest.ProtoKey();
return builder;
}
protected org.apache.hadoop.io.ProtoTest.ProtoKey internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.io.ProtoTest.ProtoKey();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.io.ProtoTest.ProtoKey.getDescriptor();
}
public org.apache.hadoop.io.ProtoTest.ProtoKey getDefaultInstanceForType() {
return org.apache.hadoop.io.ProtoTest.ProtoKey.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.io.ProtoTest.ProtoKey build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
private org.apache.hadoop.io.ProtoTest.ProtoKey buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
public org.apache.hadoop.io.ProtoTest.ProtoKey buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder.");
}
org.apache.hadoop.io.ProtoTest.ProtoKey returnMe = result;
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.io.ProtoTest.ProtoKey) {
return mergeFrom((org.apache.hadoop.io.ProtoTest.ProtoKey)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.io.ProtoTest.ProtoKey other) {
if (other == org.apache.hadoop.io.ProtoTest.ProtoKey.getDefaultInstance()) return this;
if (other.hasValue()) {
setValue(other.getValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
return this;
}
break;
}
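          // Tag 8 = (field number 1 << 3) | wire type 0 (varint):
          // the required int32 "value" field.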
case 8: {
setValue(input.readInt32());
break;
}
}
}
}
// required int32 value = 1;
public boolean hasValue() {
return result.hasValue();
}
public int getValue() {
return result.getValue();
}
public Builder setValue(int value) {
result.hasValue = true;
result.value_ = value;
return this;
}
public Builder clearValue() {
result.hasValue = false;
result.value_ = 0;
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.io.ProtoKey)
}
static {
defaultInstance = new ProtoKey(true);
org.apache.hadoop.io.ProtoTest.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.io.ProtoKey)
}
public static final class ProtoValue extends
com.google.protobuf.GeneratedMessage {
// Use ProtoValue.newBuilder() to construct.
private ProtoValue() {
initFields();
}
private ProtoValue(boolean noInit) {}
private static final ProtoValue defaultInstance;
public static ProtoValue getDefaultInstance() {
return defaultInstance;
}
public ProtoValue getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoValue_fieldAccessorTable;
}
// required string value = 2;
public static final int VALUE_FIELD_NUMBER = 2;
private boolean hasValue;
private java.lang.String value_ = "";
public boolean hasValue() { return hasValue; }
public java.lang.String getValue() { return value_; }
private void initFields() {
}
public final boolean isInitialized() {
if (!hasValue) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (hasValue()) {
output.writeString(2, getValue());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasValue()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(2, getValue());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.io.ProtoTest.ProtoValue prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
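  // Example usage (a minimal sketch): length-delimited streaming, as used when
  // several messages share one stream. Assumes writeDelimitedTo(OutputStream)
  // is available in the protobuf runtime (protobuf-java 2.1+).
  //
  //   ProtoTest.ProtoValue v =
  //       ProtoTest.ProtoValue.newBuilder().setValue("hello").build();
  //   v.writeDelimitedTo(out);                        // java.io.OutputStream out
  //   ProtoTest.ProtoValue back =
  //       ProtoTest.ProtoValue.parseDelimitedFrom(in); // java.io.InputStream in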
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private org.apache.hadoop.io.ProtoTest.ProtoValue result;
// Construct using org.apache.hadoop.io.ProtoTest.ProtoValue.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new org.apache.hadoop.io.ProtoTest.ProtoValue();
return builder;
}
protected org.apache.hadoop.io.ProtoTest.ProtoValue internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new org.apache.hadoop.io.ProtoTest.ProtoValue();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.io.ProtoTest.ProtoValue.getDescriptor();
}
public org.apache.hadoop.io.ProtoTest.ProtoValue getDefaultInstanceForType() {
return org.apache.hadoop.io.ProtoTest.ProtoValue.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public org.apache.hadoop.io.ProtoTest.ProtoValue build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
private org.apache.hadoop.io.ProtoTest.ProtoValue buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
public org.apache.hadoop.io.ProtoTest.ProtoValue buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder.");
}
org.apache.hadoop.io.ProtoTest.ProtoValue returnMe = result;
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.io.ProtoTest.ProtoValue) {
return mergeFrom((org.apache.hadoop.io.ProtoTest.ProtoValue)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.io.ProtoTest.ProtoValue other) {
if (other == org.apache.hadoop.io.ProtoTest.ProtoValue.getDefaultInstance()) return this;
if (other.hasValue()) {
setValue(other.getValue());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
return this;
}
break;
}
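          // Tag 18 = (field number 2 << 3) | wire type 2 (length-delimited):
          // the required string "value" field.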
case 18: {
setValue(input.readString());
break;
}
}
}
}
// required string value = 2;
public boolean hasValue() {
return result.hasValue();
}
public java.lang.String getValue() {
return result.getValue();
}
public Builder setValue(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasValue = true;
result.value_ = value;
return this;
}
public Builder clearValue() {
result.hasValue = false;
result.value_ = getDefaultInstance().getValue();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.io.ProtoValue)
}
static {
defaultInstance = new ProtoValue(true);
org.apache.hadoop.io.ProtoTest.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.io.ProtoValue)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_io_ProtoKey_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_io_ProtoKey_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_io_ProtoValue_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_io_ProtoValue_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
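    // descriptorData is the FileDescriptorProto for ProtoTest.proto, serialized
    // and embedded as a Java string; it is parsed at class-load time to rebuild
    // the runtime descriptors backing reflection and the field accessor tables.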
java.lang.String[] descriptorData = {
"\n2src/test/core/org/apache/hadoop/io/Pro" +
"toTest.proto\022\024org.apache.hadoop.io\"\031\n\010Pr" +
"otoKey\022\r\n\005value\030\001 \002(\005\"\033\n\nProtoValue\022\r\n\005v" +
"alue\030\002 \002(\tB\002H\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_org_apache_hadoop_io_ProtoKey_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_hadoop_io_ProtoKey_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_io_ProtoKey_descriptor,
new java.lang.String[] { "Value", },
org.apache.hadoop.io.ProtoTest.ProtoKey.class,
org.apache.hadoop.io.ProtoTest.ProtoKey.Builder.class);
internal_static_org_apache_hadoop_io_ProtoValue_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_org_apache_hadoop_io_ProtoValue_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_io_ProtoValue_descriptor,
new java.lang.String[] { "Value", },
org.apache.hadoop.io.ProtoTest.ProtoValue.class,
org.apache.hadoop.io.ProtoTest.ProtoValue.Builder.class);
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope)
}