Spark Streaming (Kafka 0.10): Managing Consumer Offsets in Redis
Implemented functionality:
- Read the saved offsets for a given topic and consumer group from Redis
- Create the Kafka direct stream starting from those offsets
- After each batch, save each partition's offset back to Redis
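Because the offsets are written to Redis only after a batch has been processed, the job gives at-least-once semantics: if it fails between processing and the Redis write, that batch is replayed on restart. The Redis layout used throughout is one key per topic and group, for example `kafka:datacollection:SparkKafka010 -> "0:120;1:95"`, where each `partition:offset` pair records the next offset to read for that partition (the values here are illustrative).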
```scala
import Kafka010.Utils.{MyKafkaUtils, RedisUtilsDemo}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

object Test {
  def main(args: Array[String]): Unit = {
    // Spark configuration
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(s"${this.getClass.getCanonicalName}")

    // StreamingContext with a 2-second batch interval
    val ssc = new StreamingContext(conf, Seconds(2))

    // Kafka consumer group and topic(s)
    val groupId = "SparkKafka010"
    val topics = List("datacollection")

    // Consumer parameters from MyKafkaUtils; auto-commit is disabled ("false")
    // because offsets are committed to Redis manually
    val kafkaParams = MyKafkaUtils.getKafkaConsumerParams(groupId, "false")

    // Read the offsets saved in Redis for this topic and group
    val offsets: Map[TopicPartition, Long] =
      RedisUtilsDemo.getOffsetFromRedis("datacollection", groupId)

    // Create the direct stream, starting from the saved offsets
    val ds: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream[String, String](ssc,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, offsets)
      )

    ds.foreachRDD(rdd => {
      // Offset ranges covered by this batch
      val ranges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      // Process the batch (here simply count the records)
      if (!rdd.isEmpty())
        println(rdd.count)

      // Log the per-partition offset ranges
      ranges.foreach(offset =>
        println(s"${offset.partition}, ${offset.fromOffset}, ${offset.untilOffset}")
      )

      // Save the end offsets of this batch back to Redis
      RedisUtilsDemo.saveOffsetToRedis(ranges, groupId)
    })

    ssc.start()
    ssc.awaitTermination()
  }
}
```
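On the very first run there are no offsets in Redis. With the empty-map guard in `getOffsetFromRedis` (see RedisUtilsDemo below), `Subscribe` receives an empty map and falls back to the consumer's `auto.offset.reset` setting. A minimal sketch of an alternative wiring that makes this case explicit, using the Option-returning `getOffsetFromRedis1` defined below (variable names are illustrative, and it assumes the same `ssc`, `topics`, and `kafkaParams` as above):

```scala
// Sketch: choose the ConsumerStrategy explicitly depending on whether
// offsets were found in Redis
val strategy = RedisUtilsDemo.getOffsetFromRedis1("datacollection", groupId) match {
  case Some(offsets) =>
    // Resume exactly where the last run left off
    ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, offsets)
  case None =>
    // First run: no stored offsets, start according to auto.offset.reset
    ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
}
val stream = KafkaUtils.createDirectStream[String, String](
  ssc, LocationStrategies.PreferConsistent, strategy)
```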
MyKafkaUtils
```scala
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer

object MyKafkaUtils {
  // Builds the consumer parameter map; pass autoCommit = "false" when
  // offsets are managed externally, as in this job
  def getKafkaConsumerParams(groupId: String = "SparkStreaming010",
                             autoCommit: String = "true"): Map[String, String] = {
    val kafkaParams = Map[String, String](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "mini1:9092,mini2:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer].getName,
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer].getName,
      ConsumerConfig.GROUP_ID_CONFIG -> groupId,
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> autoCommit)
    kafkaParams
  }

  def main(args: Array[String]): Unit = {
  }
}
```
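The parameter map stops at the group id and the auto-commit flag. For the first-run case discussed above it can help to also pin `auto.offset.reset`; a hedged example, where the `"earliest"` choice is an assumption rather than part of the original code:

```scala
// Build the params with auto-commit off, then add a reset policy.
// "earliest" is an illustrative choice; pick what fits the job.
val params = MyKafkaUtils.getKafkaConsumerParams("SparkKafka010", "false") +
  (ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest")
```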
RedisUtilsDemo
```scala
import cn.bigdata.antispider.common.util.jedis.JedisConUtil
import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.OffsetRange

object RedisUtilsDemo {
  // Shared Jedis client
  private val jedis = JedisConUtil.getJedisClient()

  // Read offsets from Redis.
  // Key:   kafka:<topic>:<groupid>
  // Value: <partition>:<offset>;<partition>:<offset>;...
  // Returns an empty map when no offsets have been stored yet (this guard
  // also avoids an NPE on the very first run).
  def getOffsetFromRedis(topic: String, groupid: String): Map[TopicPartition, Long] = {
    val key = s"kafka:$topic:$groupid"
    val offsetStr: String = jedis.get(key)

    if (offsetStr == null || offsetStr.trim.isEmpty) Map.empty
    else offsetStr.split(";").map { str =>
      val fields = str.split(":")
      val partition: Int = fields.head.toInt
      val offset: Long = fields.last.toLong
      new TopicPartition(topic, partition) -> offset
    }.toMap
  }

  // Variant for several topics: returns the stored offsets per topic,
  // or None for topics that have no entry in Redis yet
  def getOffsetFromRedis2(topics: Iterator[String], groupid: String): Iterator[Option[Map[TopicPartition, Long]]] = {
    topics.map { topic =>
      val key = s"kafka:$topic:$groupid"
      val offsetStr: String = jedis.get(key)

      if (offsetStr != null && offsetStr.trim.nonEmpty) {
        val offsets = offsetStr.split(";").map { str =>
          val fields = str.split(":")
          val partition: Int = fields.head.toInt
          val offset: Long = fields.last.toLong
          new TopicPartition(topic, partition) -> offset
        }.toMap
        Some(offsets)
      } else None
    }
  }

  // Single-topic variant returning None when no offsets are stored
  def getOffsetFromRedis1(topic: String, groupid: String): Option[Map[TopicPartition, Long]] = {
    val key = s"kafka:$topic:$groupid"
    val offsetStr: String = jedis.get(key)

    if (offsetStr != null && offsetStr.trim.nonEmpty) {
      val offsets = offsetStr.split(";").map { str =>
        val fields = str.split(":")
        val partition: Int = fields.head.toInt
        val offset: Long = fields.last.toLong
        new TopicPartition(topic, partition) -> offset
      }.toMap
      Some(offsets)
    } else None
  }

  // Save offsets to Redis.
  // Key:   kafka:<topic>:<groupId>
  // Value: <partition>:<untilOffset>;<partition>:<untilOffset>;...
  def saveOffsetToRedis(ranges: Array[OffsetRange], groupId: String): Unit = {
    ranges.map(offsets => (offsets.topic, (offsets.partition, offsets.untilOffset)))
      .groupBy(_._1)
      .foreach { case (topic, buffer) =>
        val key = s"kafka:$topic:$groupId"
        val value = buffer
          .map { case (_, (partition, untilOffset)) => s"$partition:$untilOffset" }
          .mkString(";")
        jedis.set(key, value)
      }
  }
}
```
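To see the encoding end to end, a minimal round-trip sketch (assumes a reachable Redis behind `JedisConUtil`; the offset values are made up for illustration):

```scala
// Save two partitions' end offsets, then read them back
val ranges = Array(
  OffsetRange("datacollection", 0, fromOffset = 0L, untilOffset = 120L),
  OffsetRange("datacollection", 1, fromOffset = 0L, untilOffset = 95L)
)
RedisUtilsDemo.saveOffsetToRedis(ranges, "SparkKafka010")
// Redis now holds kafka:datacollection:SparkKafka010 -> "0:120;1:95"
println(RedisUtilsDemo.getOffsetFromRedis("datacollection", "SparkKafka010"))
```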