Overview
I have run into a problem and am looking for a solution.
Since I don't know what is causing it, I am posting everything in as much detail as possible; feel free to skip anything that turns out to be irrelevant.
The pipeline uses Spark to read data from Kafka and write it into HBase. The data is serialized with protobuf, so deserialization relies on protobuf's ListValue.
The job runs fine locally and the data lands in HBase, but after packaging it as a jar and running it on the cluster in standalone mode it throws an error.
The assembled jar ends up containing two protobuf packages: one shipped under HBase's third-party dependencies at version 2.1.0, and one pulled in through the pom at version 3.5.0, because only that version provides ListValue.
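To narrow down which of the two copies actually wins at runtime, a quick class-location check can help. This is only a minimal diagnostic sketch (not part of the original job; the object name is made up), and the same two lines can also be run inside a foreachPartition to inspect the executor classpath. If the printed location is not the protobuf-java 3.5.0 jar, the older copy is the one being loaded on the cluster.

import com.google.protobuf.ListValue

// Diagnostic sketch: print which jar the protobuf ListValue class is loaded from,
// to see whether the 2.x copy bundled with HBase or the 3.5.0 copy from the pom
// is the one actually on the classpath at runtime.
object ProtobufVersionCheck {
  def main(args: Array[String]): Unit = {
    val source = Option(classOf[ListValue].getProtectionDomain.getCodeSource)
      .map(_.getLocation.toString)
      .getOrElse("unknown (bootstrap or shaded)")
    println(s"com.google.protobuf.ListValue loaded from: $source")
  }
}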
Below is the demo I wrote.
import java.nio.charset.StandardCharsets
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.hadoop.hbase.io.compress.Compression
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
object Test extends Serializable {

  val HBASE_TABLE_NAME_PREFIX: String = "DR_RT_OPC_KAFKA_"
  val FORMATTER: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyyMM")

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("kafka2spark2hbase").master("local[2]").getOrCreate()
    //.master("local[2]")
    val batchDuration = Seconds(5) // batch interval in seconds
    val ssc = new StreamingContext(spark.sparkContext, batchDuration)
    //ssc.checkpoint("/Users/eric/SparkWorkspace/checkpoint")

    val topics = Array(PropertiesUtil.getProperty("topics")).toSet
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> PropertiesUtil.getProperty("bootstrap.servers"),
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[MyDeserializer], // custom protobuf value deserializer
      "group.id" -> PropertiesUtil.getProperty("group"),
      "auto.offset.reset" -> "latest", // on first consumption, start from the latest offset
      "enable.auto.commit" -> (true: java.lang.Boolean) // let Kafka commit the consumed offsets
    )

    // create the direct DStream
    val stream = KafkaUtils.createDirectStream(ssc, LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, Protoc.Bean](topics, kafkaParams))

    // HBase configuration (driver side, used for the table check below)
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.set("hbase.zookeeper.quorum", PropertiesUtil.getProperty("quorum")) // ZooKeeper quorum
    hbaseConf.set("hbase.zookeeper.property.clientPort", "2181")
    hbaseConf.set("hbase.master", PropertiesUtil.getProperty("hbase_master"))
    hbaseConf.set("hbase.defaults.for.version.skip", "true")

    // open a connection and create the table if it does not exist yet
    val conn = ConnectionFactory.createConnection(hbaseConf)
    val admin = conn.getAdmin
    import scala.collection.JavaConversions._
    val tableName = HBASE_TABLE_NAME_PREFIX + FORMATTER.format(LocalDateTime.now.plusDays(1)) + "--"
    val tn1 = TableName.valueOf(tableName)
    val tableExists = admin.listTables("^" + tableName + "$").nonEmpty
    if (!tableExists) {
      val table: HTableDescriptor = new HTableDescriptor(tn1)
      val family = new HColumnDescriptor(Bytes.toBytes("info"))
      family.setCompressionType(Compression.Algorithm.SNAPPY)
      table.addFamily(family)
      admin.createTable(table)
    }
    admin.close()
    conn.close()

    // process each RDD of the DStream
    if (stream != null) {
      stream.foreachRDD(rdd => {
        if (!rdd.isEmpty()) {
          rdd.foreachPartition(partition => {
            import scala.collection.mutable.ListBuffer
            val putList: ListBuffer[Put] = ListBuffer()
            // rebuild the HBase configuration on the executor side
            val hbaseConf = HBaseConfiguration.create()
            hbaseConf.set("hbase.zookeeper.quorum", PropertiesUtil.getProperty("quorum")) // ZooKeeper quorum
            hbaseConf.set("hbase.zookeeper.property.clientPort", "2181")
            hbaseConf.set("hbase.master", PropertiesUtil.getProperty("hbase_master"))
            hbaseConf.set("hbase.defaults.for.version.skip", "true")
            partition.foreach(record => {
              // row key = <name>_<kafka key>; one column per protobuf field
              val p = new Put(Bytes.toBytes(record.value().getName + "_" + record.key()))
              p.addColumn("info".getBytes(StandardCharsets.UTF_8), "name".getBytes(StandardCharsets.UTF_8), record.value().getName.getBytes(StandardCharsets.UTF_8))
              p.addColumn("info".getBytes(StandardCharsets.UTF_8), "value".getBytes(StandardCharsets.UTF_8), record.value().getValue.getBytes(StandardCharsets.UTF_8))
              p.addColumn("info".getBytes(StandardCharsets.UTF_8), "describe".getBytes(StandardCharsets.UTF_8), record.value().getDescribe.getBytes(StandardCharsets.UTF_8))
              putList += p
            })
            val conn = ConnectionFactory.createConnection(hbaseConf)
            val table = conn.getTable(TableName.valueOf(tableName))
            table.put(putList)
            table.close()
            conn.close()
          })
        } else {
          println("RDD is empty; all data has been consumed")
        }
      })
    } else {
      println("DStream is null")
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
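For completeness: MyDeserializer, referenced in kafkaParams above, is not shown in the post. The sketch below is only an assumption of its shape, a Kafka Deserializer that hands the raw record bytes to the generated protobuf parser; TagKafkaInfo stands in here for the Protoc.Bean type used in the demo.

import java.util
import org.apache.kafka.common.serialization.Deserializer

// Assumed shape of the custom value deserializer (illustrative only; the real
// MyDeserializer is not included in the post).
class MyDeserializer extends Deserializer[TagInfoProto.TagKafkaInfo] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}

  override def deserialize(topic: String, data: Array[Byte]): TagInfoProto.TagKafkaInfo = {
    if (data == null) null
    else TagInfoProto.TagKafkaInfo.parseFrom(data) // needs protobuf-java 3.x at runtime
  }

  override def close(): Unit = {}
}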
Below is the Java file generated by the protobuf compiler, used for deserialization. TagInfoProto is the outer class and TagKafkaInfo is the message (entity) class.
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: TagInfoProto.proto
public final class TagInfoProto {
private TagInfoProto() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface TagKafkaInfoOrBuilder extends
// @@protoc_insertion_point(interface_extends:TagKafkaInfo)
com.google.protobuf.MessageOrBuilder {
/**
* <code>string name = 1;</code>
*/
String getName();
/**
* <code>string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes();
/**
* <code>string describe = 2;</code>
*/
String getDescribe();
/**
* <code>string describe = 2;</code>
*/
com.google.protobuf.ByteString
getDescribeBytes();
/**
* <code>string value = 3;</code>
*/
String getValue();
/**
* <code>string value = 3;</code>
*/
com.google.protobuf.ByteString
getValueBytes();
/**
* <code>string time = 4;</code>
*/
String getTime();
/**
* <code>string time = 4;</code>
*/
com.google.protobuf.ByteString
getTimeBytes();
}
/**
* Protobuf type {@code TagKafkaInfo}
*/
public static final class TagKafkaInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:TagKafkaInfo)
TagKafkaInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use TagKafkaInfo.newBuilder() to construct.
private TagKafkaInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TagKafkaInfo() {
name_ = "";
describe_ = "";
value_ = "";
time_ = "";
}
@Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TagKafkaInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 18: {
String s = input.readStringRequireUtf8();
describe_ = s;
break;
}
case 26: {
String s = input.readStringRequireUtf8();
value_ = s;
break;
}
case 34: {
String s = input.readStringRequireUtf8();
time_ = s;
break;
}
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return TagInfoProto.internal_static_TagKafkaInfo_descriptor;
}
@Override
protected FieldAccessorTable
internalGetFieldAccessorTable() {
return TagInfoProto.internal_static_TagKafkaInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
TagInfoProto.TagKafkaInfo.class, TagInfoProto.TagKafkaInfo.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile Object name_;
/**
* <code>string name = 1;</code>
*/
public String getName() {
Object ref = name_;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DESCRIBE_FIELD_NUMBER = 2;
private volatile Object describe_;
/**
* <code>string describe = 2;</code>
*/
public String getDescribe() {
Object ref = describe_;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
describe_ = s;
return s;
}
}
/**
* <code>string describe = 2;</code>
*/
public com.google.protobuf.ByteString
getDescribeBytes() {
Object ref = describe_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
describe_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VALUE_FIELD_NUMBER = 3;
private volatile Object value_;
/**
* <code>string value = 3;</code>
*/
public String getValue() {
Object ref = value_;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
value_ = s;
return s;
}
}
/**
* <code>string value = 3;</code>
*/
public com.google.protobuf.ByteString
getValueBytes() {
Object ref = value_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TIME_FIELD_NUMBER = 4;
private volatile Object time_;
/**
* <code>string time = 4;</code>
*/
public String getTime() {
Object ref = time_;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
time_ = s;
return s;
}
}
/**
* <code>string time = 4;</code>
*/
public com.google.protobuf.ByteString
getTimeBytes() {
Object ref = time_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
time_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!getDescribeBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, describe_);
}
if (!getValueBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, value_);
}
if (!getTimeBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, time_);
}
unknownFields.writeTo(output);
}
@Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!getDescribeBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, describe_);
}
if (!getValueBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, value_);
}
if (!getTimeBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, time_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@Override
public boolean equals(final Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof TagInfoProto.TagKafkaInfo)) {
return super.equals(obj);
}
TagInfoProto.TagKafkaInfo other = ( TagInfoProto.TagKafkaInfo) obj;
boolean result = true;
result = result && getName()
.equals(other.getName());
result = result && getDescribe()
.equals(other.getDescribe());
result = result && getValue()
.equals(other.getValue());
result = result && getTime()
.equals(other.getTime());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + DESCRIBE_FIELD_NUMBER;
hash = (53 * hash) + getDescribe().hashCode();
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
hash = (37 * hash) + TIME_FIELD_NUMBER;
hash = (53 * hash) + getTime().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static TagInfoProto.TagKafkaInfo parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static TagInfoProto.TagKafkaInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static TagInfoProto.TagKafkaInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static TagInfoProto.TagKafkaInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static TagInfoProto.TagKafkaInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static TagInfoProto.TagKafkaInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder( TagInfoProto.TagKafkaInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@Override
protected Builder newBuilderForType(
BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code TagKafkaInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:TagKafkaInfo)
TagInfoProto.TagKafkaInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return TagInfoProto.internal_static_TagKafkaInfo_descriptor;
}
@Override
protected FieldAccessorTable
internalGetFieldAccessorTable() {
return TagInfoProto.internal_static_TagKafkaInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
TagInfoProto.TagKafkaInfo.class, TagInfoProto.TagKafkaInfo.Builder.class);
}
// Construct using TagInfoProto.TagKafkaInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@Override
public Builder clear() {
super.clear();
name_ = "";
describe_ = "";
value_ = "";
time_ = "";
return this;
}
@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return TagInfoProto.internal_static_TagKafkaInfo_descriptor;
}
@Override
public TagInfoProto.TagKafkaInfo getDefaultInstanceForType() {
return TagInfoProto.TagKafkaInfo.getDefaultInstance();
}
@Override
public TagInfoProto.TagKafkaInfo build() {
TagInfoProto.TagKafkaInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@Override
public TagInfoProto.TagKafkaInfo buildPartial() {
TagInfoProto.TagKafkaInfo result = new TagInfoProto.TagKafkaInfo(this);
result.name_ = name_;
result.describe_ = describe_;
result.value_ = value_;
result.time_ = time_;
onBuilt();
return result;
}
@Override
public Builder clone() {
return (Builder) super.clone();
}
@Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
@Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof TagInfoProto.TagKafkaInfo) {
return mergeFrom(( TagInfoProto.TagKafkaInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom( TagInfoProto.TagKafkaInfo other) {
if (other == TagInfoProto.TagKafkaInfo.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getDescribe().isEmpty()) {
describe_ = other.describe_;
onChanged();
}
if (!other.getValue().isEmpty()) {
value_ = other.value_;
onChanged();
}
if (!other.getTime().isEmpty()) {
time_ = other.time_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@Override
public final boolean isInitialized() {
return true;
}
@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
TagInfoProto.TagKafkaInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = ( TagInfoProto.TagKafkaInfo) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private Object name_ = "";
/**
* <code>string name = 1;</code>
*/
public String getName() {
Object ref = name_;
if (!(ref instanceof String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (String) ref;
}
}
/**
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string name = 1;</code>
*/
public Builder setName(
String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* <code>string name = 1;</code>
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <code>string name = 1;</code>
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private Object describe_ = "";
/**
* <code>string describe = 2;</code>
*/
public String getDescribe() {
Object ref = describe_;
if (!(ref instanceof String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
describe_ = s;
return s;
} else {
return (String) ref;
}
}
/**
* <code>string describe = 2;</code>
*/
public com.google.protobuf.ByteString
getDescribeBytes() {
Object ref = describe_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
describe_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string describe = 2;</code>
*/
public Builder setDescribe(
String value) {
if (value == null) {
throw new NullPointerException();
}
describe_ = value;
onChanged();
return this;
}
/**
* <code>string describe = 2;</code>
*/
public Builder clearDescribe() {
describe_ = getDefaultInstance().getDescribe();
onChanged();
return this;
}
/**
* <code>string describe = 2;</code>
*/
public Builder setDescribeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
describe_ = value;
onChanged();
return this;
}
private Object value_ = "";
/**
* <code>string value = 3;</code>
*/
public String getValue() {
Object ref = value_;
if (!(ref instanceof String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
value_ = s;
return s;
} else {
return (String) ref;
}
}
/**
* <code>string value = 3;</code>
*/
public com.google.protobuf.ByteString
getValueBytes() {
Object ref = value_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string value = 3;</code>
*/
public Builder setValue(
String value) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
onChanged();
return this;
}
/**
* <code>string value = 3;</code>
*/
public Builder clearValue() {
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
/**
* <code>string value = 3;</code>
*/
public Builder setValueBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
value_ = value;
onChanged();
return this;
}
private Object time_ = "";
/**
* <code>string time = 4;</code>
*/
public String getTime() {
Object ref = time_;
if (!(ref instanceof String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
time_ = s;
return s;
} else {
return (String) ref;
}
}
/**
* <code>string time = 4;</code>
*/
public com.google.protobuf.ByteString
getTimeBytes() {
Object ref = time_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
time_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string time = 4;</code>
*/
public Builder setTime(
String value) {
if (value == null) {
throw new NullPointerException();
}
time_ = value;
onChanged();
return this;
}
/**
* <code>string time = 4;</code>
*/
public Builder clearTime() {
time_ = getDefaultInstance().getTime();
onChanged();
return this;
}
/**
* <code>string time = 4;</code>
*/
public Builder setTimeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
time_ = value;
onChanged();
return this;
}
@Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
@Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:TagKafkaInfo)
}
// @@protoc_insertion_point(class_scope:TagKafkaInfo)
private static final TagInfoProto.TagKafkaInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new TagInfoProto.TagKafkaInfo();
}
public static TagInfoProto.TagKafkaInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TagKafkaInfo>
PARSER = new com.google.protobuf.AbstractParser<TagKafkaInfo>() {
@Override
public TagKafkaInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TagKafkaInfo(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<TagKafkaInfo> parser() {
return PARSER;
}
@Override
public com.google.protobuf.Parser<TagKafkaInfo> getParserForType() {
return PARSER;
}
@Override
public TagInfoProto.TagKafkaInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_TagKafkaInfo_descriptor;
private static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_TagKafkaInfo_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
String[] descriptorData = {
"n 22TagInfoProto.proto"Kn 14TagKafkaInfo 22 14n 04" +
"name 30 01 01(t 22 20n 10describe 30 02 01(t 22rn 05value 30 03 " +
"