golang连接kafka的例子很少，而支持offset追踪的更少，但这是基础的要求。"github.com/bsm/sarama-cluster"结合"github.com/Shopify/sarama"满足了此要求。
环境:
golang 1.7
kafka 0.10
centos7.2
package main
import (
	"fmt"
	"os"
	"os/signal"
	"strings"
	"syscall"
	"time"

	"github.com/Shopify/sarama"
	"github.com/bsm/sarama-cluster" // support automatic consumer-group rebalancing and offset tracking
	"github.com/golang/glog"
)
// main consumes messages from a Kafka topic as part of a consumer group,
// printing each message and tracking offsets via sarama-cluster. It traps
// SIGINT/SIGTERM so the consumer is closed cleanly and marked-but-uncommitted
// offsets are flushed before exit.
func main() {
	groupID := "group-1"
	topicList := "topic_1"

	config := cluster.NewConfig()
	config.Consumer.Return.Errors = true
	config.Group.Return.Notifications = true
	// Commit tracked offsets back to Kafka once per second.
	config.Consumer.Offsets.CommitInterval = 1 * time.Second
	config.Consumer.Offsets.Initial = sarama.OffsetNewest // start from the newest offset

	c, err := cluster.NewConsumer(strings.Split("localhost:9092", ","), groupID, strings.Split(topicList, ","), config)
	if err != nil {
		glog.Errorf("Failed open consumer: %v", err)
		return
	}
	// Close commits any outstanding marked offsets and leaves the group.
	defer c.Close()

	// Drain consumer errors (enabled via Consumer.Return.Errors above).
	go func() {
		for err := range c.Errors() {
			glog.Errorf("Error: %s\n", err.Error())
		}
	}()

	// Log consumer-group rebalance notifications
	// (enabled via Group.Return.Notifications above).
	go func() {
		for note := range c.Notifications() {
			glog.Infof("Rebalanced: %+v\n", note)
		}
	}()

	// Trap SIGINT/SIGTERM so we leave the loop and run the deferred Close;
	// without this, killing the process loses offsets that were marked but
	// not yet committed within the 1-second CommitInterval window.
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)

	for {
		select {
		case msg, ok := <-c.Messages():
			if !ok {
				return // message channel closed by the consumer
			}
			fmt.Fprintf(os.Stdout, "%s/%d/%d\t%s\n", msg.Topic, msg.Partition, msg.Offset, msg.Value)
			// MarkOffset only records the offset in memory; it is committed
			// asynchronously on CommitInterval, so an offset marked just
			// before a crash may still be lost.
			c.MarkOffset(msg, "")
		case <-signals:
			return // graceful shutdown: deferred c.Close() commits offsets
		}
	}
}
参考:
http://pastebin.com/9ZsnP2eU
https://github.com/Shopify/sarama
https://github.com/bsm/sarama-cluster
有疑问加站长微信联系(非本文作者)