Connecting to Kafka from Go

There are few examples of connecting to Kafka from Go, and even fewer that support offset tracking, even though that is a basic requirement. Combining "github.com/bsm/sarama-cluster" with "github.com/Shopify/sarama" covers it: sarama-cluster adds automatic consumer-group rebalancing and offset tracking on top of sarama.

When the write volume is low, a synchronous producer is fine; under high concurrency you must use an asynchronous producer. Both variants appear in the full example below.

Environment:

Go 1.8

Kafka 0.10

CentOS 7.2
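
With Go 1.8 (pre-modules), the dependencies are normally pulled into the GOPATH with go get. The commands below are an assumed setup step, using the three import paths from the code that follows:

go get github.com/Shopify/sarama
go get github.com/bsm/sarama-cluster
go get github.com/sdbaiguanghe/glog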

package main

import (
	"fmt"
	"math/rand"
	"os"
	"strconv"
	"strings"
	"time"

	"github.com/Shopify/sarama"
	"github.com/bsm/sarama-cluster" // supports automatic consumer-group rebalancing and offset tracking
	"github.com/sdbaiguanghe/glog"
)

var (
	topics = "topics_1"
)

// consumer consumes messages as part of a consumer group.
func consumer() {
	groupID := "group-1"
	config := cluster.NewConfig()
	config.Group.Return.Notifications = true
	config.Consumer.Offsets.CommitInterval = 1 * time.Second
	config.Consumer.Offsets.Initial = sarama.OffsetNewest // start from the newest offset

	c, err := cluster.NewConsumer(strings.Split("localhost:9092", ","), groupID, strings.Split(topics, ","), config)
	if err != nil {
		glog.Errorf("Failed to open consumer: %v", err)
		return
	}
	defer c.Close()

	// Drain the error and notification channels so they never block.
	go func(c *cluster.Consumer) {
		errors := c.Errors()
		noti := c.Notifications()
		for {
			select {
			case err := <-errors:
				glog.Errorln(err)
			case <-noti:
			}
		}
	}(c)

	for msg := range c.Messages() {
		fmt.Fprintf(os.Stdout, "%s/%d/%d\t%s\n", msg.Topic, msg.Partition, msg.Offset, msg.Value)
		// MarkOffset does not write to Kafka immediately; offsets that have not
		// been committed yet can be lost if the program crashes.
		c.MarkOffset(msg, "")
	}
}

// syncProducer is a synchronous producer.
// Suitable when the write volume is low.
func syncProducer() {
	config := sarama.NewConfig()
	// config.Producer.RequiredAcks = sarama.WaitForAll
	// config.Producer.Partitioner = sarama.NewRandomPartitioner
	config.Producer.Return.Successes = true
	config.Producer.Timeout = 5 * time.Second

	p, err := sarama.NewSyncProducer(strings.Split("localhost:9092", ","), config)
	if err != nil {
		glog.Errorln(err)
		return
	}
	defer p.Close()

	v := "sync: " + strconv.Itoa(rand.New(rand.NewSource(time.Now().UnixNano())).Intn(10000))
	fmt.Fprintln(os.Stdout, v)
	msg := &sarama.ProducerMessage{
		Topic: topics,
		Value: sarama.ByteEncoder(v),
	}
	if _, _, err := p.SendMessage(msg); err != nil {
		glog.Errorln(err)
		return
	}
}

// asyncProducer is an asynchronous producer.
// Use this when the write volume is high.
func asyncProducer() {
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true // this option is required
	config.Producer.Timeout = 5 * time.Second

	p, err := sarama.NewAsyncProducer(strings.Split("localhost:9092", ","), config)
	if err != nil {
		glog.Errorln(err)
		return
	}
	defer p.Close()

	// This goroutine is required: it drains the Successes and Errors channels.
	go func(p sarama.AsyncProducer) {
		errors := p.Errors()
		success := p.Successes()
		for {
			select {
			case err := <-errors:
				if err != nil {
					glog.Errorln(err)
				}
			case <-success:
			}
		}
	}(p)

	v := "async: " + strconv.Itoa(rand.New(rand.NewSource(time.Now().UnixNano())).Intn(10000))
	fmt.Fprintln(os.Stdout, v)
	msg := &sarama.ProducerMessage{
		Topic: topics,
		Value: sarama.ByteEncoder(v),
	}
	p.Input() <- msg
}
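
The snippet above declares package main but does not include an entry point. Below is a minimal sketch of a main function that sends one message each way and then consumes; the ordering and the short sleep are assumptions, not part of the original post.

// main is not in the original snippet; it is an assumed entry point that
// sends one message synchronously, one asynchronously, then starts the
// blocking consumer loop.
func main() {
	syncProducer()
	asyncProducer()
	// give the async producer a moment to deliver before consuming
	time.Sleep(1 * time.Second)
	consumer() // blocks, printing messages from topics_1
}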

References:

http://pastebin.com/9ZsnP2eU 

https://github.com/Shopify/sarama 

https://github.com/bsm/sarama-cluster


Reposted from blog.csdn.net/shangsongwww/article/details/88758601