天天看点

【GO】14. sarama库实现kafka实例:Mac安装kafka、sarama使用kafka实例

  • Mac安装kafka

Homebrew安装

brew install kafka
安装会依赖zookeeper。 
注意:安装目录:/usr/local/Cellar/kafka/2.2.1
           

安装的配置文件路径

/usr/local/etc/kafka/server.properties
/usr/local/etc/kafka/zookeeper.properties
           

启动zookeeper

zookeeper-server-start /usr/local/etc/kafka/zookeeper.properties &
           

启动kafka

kafka-server-start /usr/local/etc/kafka/server.properties &
           

另外打开一个terminal

创建topic

kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
           

查看创建的topic

kafka-topics --list --zookeeper localhost:2181
           

发送消息

Kafka提供了一个命令行客户端,它将从文件或标准输入接收输入,并将其作为消息发送到Kafka集群。默认情况下,每行都将作为单独的消息发送。

运行生产者,然后在控制台中键入一些消息发送到服务器。

kafka-console-producer --broker-list localhost:9092 --topic test
>first message
>second message
>exit
           

消费消息

kafka-console-consumer --bootstrap-server localhost:9092 --topic test --from-beginning
first message
second message
exit
           
  • sarama使用kafka实例

安装sarama库

go get github.com/Shopify/sarama

go get github.com/bsm/sarama-cluster

生产者

package main

import (
	"bufio"
	"fmt"
	"github.com/Shopify/sarama"
	"os"
	"strings"
)

// main reads lines from stdin and publishes each one as a message to the
// "testgo" topic via a synchronous sarama producer. Typing "exit" quits.
func main() {
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true          // required for SyncProducer
	config.Producer.RequiredAcks = sarama.WaitForAll // wait for all in-sync replicas to ack
	config.Producer.Partitioner = sarama.NewRandomPartitioner

	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, config)
	if err != nil {
		panic(err)
	}
	defer producer.Close()

	msg := &sarama.ProducerMessage{
		Topic: "testgo",
		// Partition is ignored when a partitioner is configured.
		Partition: int32(-1),
		Key:       sarama.StringEncoder("key"),
	}

	// Create the reader once, outside the loop, instead of per iteration.
	inputReader := bufio.NewReader(os.Stdin)
	for {
		value, err := inputReader.ReadString('\n')
		if err != nil {
			panic(err)
		}

		value = strings.Replace(value, "\n", "", -1)
		// Let the user quit interactively instead of requiring Ctrl+C.
		if value == "exit" {
			return
		}

		msg.Value = sarama.ByteEncoder(value)
		partition, offset, err := producer.SendMessage(msg)
		if err != nil {
			// Include the error and skip the stale partition/offset print.
			fmt.Println("Send Message Fail!", err)
			continue
		}

		fmt.Printf("Partition = %d, offset = %d\n", partition, offset)
	}
}
           

消费者

package main

import (
	"fmt"
	"github.com/Shopify/sarama"
	"github.com/bsm/sarama-cluster"
	"os"
	"strings"
	"time"
)

var (
	// topics is a comma-separated list of kafka topics to consume.
	topics = "testgo"
)

// main joins consumer group "group-1" via sarama-cluster, prints every
// message from the configured topics to stdout, and marks offsets as consumed.
func main() {
	groupID := "group-1"
	config := cluster.NewConfig()
	config.Group.Return.Notifications = true
	config.Consumer.Offsets.CommitInterval = 1 * time.Second
	config.Consumer.Offsets.Initial = sarama.OffsetNewest // start from the newest offset

	// Create the consumer group member.
	c, err := cluster.NewConsumer(strings.Split("localhost:9092", ","), groupID, strings.Split(topics, ","), config)
	if err != nil {
		// Bug fix: the original used Println with a %s verb, which is never formatted.
		fmt.Printf("Failed to start consumer: %s\n", err)
		return
	}
	defer c.Close()

	// Drain errors and rebalance notifications so their channels never block.
	// The ok checks let the goroutine exit once the consumer is closed instead
	// of busy-looping on the closed channels' zero values.
	go func(c *cluster.Consumer) {
		for {
			select {
			case err, ok := <-c.Errors():
				if !ok {
					return
				}
				fmt.Println(err)
			case _, ok := <-c.Notifications():
				if !ok {
					return
				}
			}
		}
	}(c)

	for msg := range c.Messages() {
		fmt.Fprintf(os.Stdout, "%s/%d/%d\t%s\n", msg.Topic, msg.Partition, msg.Offset, msg.Value)
		// MarkOffset is not committed to kafka immediately; un-flushed offsets
		// can be lost if the process crashes before the next commit interval.
		c.MarkOffset(msg, "")
	}
}
           

测试如下

【GO】14. sarama库实现kafka实例:Mac安装kafka、sarama使用kafka实例