feat kafka

2026-01-24 22:46:15 +08:00
parent b45df9c4ee
commit 81e422d304
4 changed files with 9 additions and 15 deletions

View File

@@ -9,12 +9,11 @@ import (
 var client *Client
 
 type Client struct {
-    producer   sarama.AsyncProducer
-    consumer   sarama.ConsumerGroup
-    serverName string
+    producer sarama.AsyncProducer
+    consumer sarama.ConsumerGroup
 }
 
-func Init(cfg *config.KafkaConfig, serverName string) error {
+func Init(cfg *config.KafkaConfig) error {
     producer, err := getAsyncProducer(cfg)
     if err != nil {
         return err
@@ -24,9 +23,8 @@ func Init(cfg *config.KafkaConfig, serverName string) error {
         return err
     }
     client = &Client{
-        producer:   producer,
-        consumer:   consumer,
-        serverName: serverName,
+        producer: producer,
+        consumer: consumer,
     }
     go producerError()
     go consumerError()

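For reference, a caller now bootstraps the client without passing its service name. A minimal sketch of the new call site, assuming hypothetical import paths and a config populated elsewhere (neither is shown in this commit):

package main

import (
    "log"

    "example.com/project/config"          // hypothetical import path
    "example.com/project/common/db/kafka" // hypothetical import path
)

func main() {
    cfg := &config.KafkaConfig{} // fields omitted; assume it is filled by your config loader
    // serverName is no longer an argument; the tracer name is fixed inside the package.
    if err := kafka.Init(cfg); err != nil {
        log.Fatalf("init kafka: %v", err)
    }
}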
View File

@@ -63,7 +63,7 @@ func (h *handler) Cleanup(sess sarama.ConsumerGroupSession) error {
 func (h *handler) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
     for message := range claim.Messages() {
         ctx := NewCarrier().ExtractConsumer(message.Headers)
-        _, span := otel.Tracer(client.serverName).Start(ctx, "kafka.consume")
+        _, span := otel.Tracer("common.db.kafka").Start(ctx, "kafka.consume")
         if err := h.handler(ctx, message); err != nil {
             if stack, ok := err.(interface{ StackTrace() string }); ok {
                 span.AddEvent("Stack Trace", trace.WithAttributes(

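With the tracer name hard-coded to "common.db.kafka", the consume spans come from whichever TracerProvider is registered globally rather than from a per-service tracer. A rough sketch of the startup wiring this assumes, with exporter and resource setup omitted (not part of this commit):

package main

import (
    "go.opentelemetry.io/otel"
    sdktrace "go.opentelemetry.io/otel/sdk/trace"
)

// setupTracing installs a global provider so otel.Tracer("common.db.kafka")
// hands back a real tracer; without one the spans above are non-recording.
func setupTracing() *sdktrace.TracerProvider {
    tp := sdktrace.NewTracerProvider() // add exporters/resource as needed
    otel.SetTracerProvider(tp)
    return tp
}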
View File

@@ -26,7 +26,7 @@ func (c *Producer) Produce(ctx context.Context, topic, value string) {
 func producerError() {
     for err := range client.producer.Errors() {
         ctx := NewCarrier().ExtractProducer(err.Msg.Headers)
-        _, span := otel.Tracer(client.serverName).Start(ctx, "kafka.producer")
+        _, span := otel.Tracer("common.db.kafka").Start(ctx, "kafka.producer.error")
         span.SetStatus(otelcodes.Error, err.Error())
         span.End()
     }
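
One operational note: the producerError loop only receives anything if the async producer keeps error returns enabled. A hedged sketch of that sarama setting (the import path and helper name are assumptions; error returns are sarama's default, shown explicitly because this loop depends on it):

package kafka // illustrative only

import "github.com/IBM/sarama" // import path is an assumption; older code uses github.com/Shopify/sarama

// newAsyncProducer is a hypothetical helper; it shows the setting that keeps
// delivery failures flowing to the Errors() channel drained by producerError.
func newAsyncProducer(brokers []string) (sarama.AsyncProducer, error) {
    conf := sarama.NewConfig()
    conf.Producer.Return.Errors = true // sarama's default, stated explicitly
    return sarama.NewAsyncProducer(brokers, conf)
}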