老师,你好。我在学习课程后将其应用到实际生产环境中,遇到以下问题:
Flume 向 Kafka 写数据时会发生阻塞,导致新数据插入不进去;阻塞一两个小时后又自行恢复正常。
以下是 Flume 的配置。
不知道是哪里出了问题,请帮忙看看有没有不合适的配置项,并给出建议。
# Flume agent "a1": Thrift source (r1) -> memory channel (c1) -> Kafka sink (k1).
a1.sources = r1
a1.channels = c1
a1.sinks = k1

# --- Memory channel ---
a1.channels.c1.type = memory
# Maximum number of events held in the channel.
a1.channels.c1.capacity = 500000
# Max events per put/take transaction; must be <= capacity.
a1.channels.c1.transactionCapacity = 100000
a1.channels.c1.byteCapacityBufferPercentage = 10
# FIX: byteCapacity was 3000000 (~3 MB). With capacity = 500000 events the
# byte limit fills long before the event limit, so source puts start failing
# (ChannelException: "Space for commit to queue couldn't be acquired") and
# clients appear blocked until the Kafka sink drains the channel — this
# matches the reported intermittent stall. Raised to 512 MB (value is in
# bytes and counts only event bodies).
a1.channels.c1.byteCapacity = 536870912
# Seconds a put/take waits for free space / data before failing.
a1.channels.c1.keep-alive = 5

# --- Thrift source ---
a1.sources.r1.type = thrift
a1.sources.r1.bind = hadoop11
a1.sources.r1.port = 9090
a1.sources.r1.batchTimeout = 3000
a1.sources.r1.threads = 100
# NOTE(review): kafka.consumer.timeout.ms is a Kafka *source* property and has
# no effect on a thrift source; it stays disabled.
#a1.sources.r1.kafka.consumer.timeout.ms = 100

# --- Kafka sink ---
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.topic = streaming_spark230_topic
a1.sinks.k1.brokerList = hadoop10:9092,hadoop11:9092,hadoop12:9092
# acks=1: leader-only acknowledgement; fast, but events can be lost on a
# broker leader failover.
a1.sinks.k1.requiredAcks = 1
a1.sinks.k1.batchSize = 1000

# --- Wiring ---
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1