5

I am trying to use zap logger package to create a core with file, console and Kafka sinks. I have some really specific INFO level logs that I want to send to a Kafka topic for processing by a downstream consumer. However, with the current implementation I am getting all INFO level logs in the Kafka topic, even the ones I do not want.

Is there a way to prevent unwanted logs of the same level from reaching one specific sink, while still using a common zap logger object?

Below is the function that I am using to create the single logger object.

// newZapLogger builds a single zap SugaredLogger that tees entries to up
// to three sinks — rolling file, console stream, and Kafka — according to
// the Enable* flags in config. It returns an error only when the Kafka
// sink is requested but cannot be set up.
//
// NOTE(review): the file and console cores both run at the shared
// package-level atomLevel; the values produced by getLogLevel are
// currently discarded, so every non-Kafka sink sees the same levels.
// To keep specific INFO entries out of Kafka, give the Kafka core its
// own zapcore.LevelEnablerFunc (as done below for >= Warn), or filter
// on a structured field inside LogKafka's Write method.
func newZapLogger(config Configuration) (Logger, error) {
	var writer zapcore.WriteSyncer
	cores := []zapcore.Core{}

	if config.EnableFile {
		// TODO(review): return value of getLogLevel is ignored here; the
		// file core uses atomLevel rather than config.FileLevel.
		getLogLevel(config.FileLevel)
		if config.LogConfig {
			// Rolling file with caller-supplied rotation settings.
			writer = zapcore.Lock(zapcore.AddSync(&lj.Logger{
				Filename: config.FileLocation,
				MaxSize:  config.LogMaxSize,
				Compress: config.LogCompression,
				MaxAge:   config.LogMaxAge,
			}))
		} else {
			// Fall back to lumberjack's defaults for size/age/compression.
			writer = zapcore.Lock(zapcore.AddSync(&lj.Logger{
				Filename: config.FileLocation,
			}))
		}
		cores = append(cores, zapcore.NewCore(getEncoder(config.FileJSONFormat, config.IsColour), writer, atomLevel))
	}

	if config.EnableConsole {
		// TODO(review): return value of getLogLevel is ignored here too.
		getLogLevel(config.ConsoleLevel)
		switch config.Stream {
		case 1:
			writer = zapcore.Lock(os.Stdout)
		case 2:
			writer = zapcore.Lock(os.Stderr)
		case 3:
			// Stream 3 deliberately discards console output.
			writer = zapcore.Lock(zapcore.AddSync(ioutil.Discard))
		default:
			writer = zapcore.Lock(os.Stdout)
		}
		cores = append(cores, zapcore.NewCore(getEncoder(config.ConsoleJSONFormat, config.IsColour), writer, atomLevel))
	}

	if config.EnableKafka {
		// Guard clause first: no brokers means no Kafka sink.
		if len(brokerConn) == 0 {
			return nil, fmt.Errorf("failed to initialise kafka logger: no broker specified")
		}

		// Only Warn and above reach Kafka. Replace or parameterize this
		// enabler to route a different set of levels to the topic.
		highPriority := zap.LevelEnablerFunc(func(lvl zapcore.Level) bool {
			return lvl >= zapcore.WarnLevel
		})

		var kl LogKafka
		kl.Topic = config.KafkaTopic

		// Named saramaCfg so it no longer shadows the Configuration
		// parameter (the original reused the name "config" here).
		saramaCfg := sarama.NewConfig()
		saramaCfg.Producer.RequiredAcks = sarama.WaitForAll
		saramaCfg.Producer.Partitioner = sarama.NewRandomPartitioner
		saramaCfg.Producer.Return.Successes = true
		saramaCfg.Producer.Return.Errors = true

		producer, err := sarama.NewSyncProducer(brokerConn, saramaCfg)
		if err != nil {
			// %w keeps the underlying sarama error inspectable via errors.Is/As.
			return nil, fmt.Errorf("failed to initialise kafka logger, connect to kafka failed: %w", err)
		}
		kl.Producer = producer

		kafkaEncoder := zapcore.NewJSONEncoder(zap.NewDevelopmentEncoderConfig())
		cores = append(cores, zapcore.NewCore(kafkaEncoder, zapcore.AddSync(&kl), highPriority))
	}

	logger := zap.New(zapcore.NewTee(cores...), zap.AddCaller(), zap.AddCallerSkip(1)).Sugar()
	// The original deferred logger.Sync() here, which flushed a brand-new,
	// empty logger on return and discarded the error. Callers should
	// `defer logger.Sync()` on the returned logger instead.
	return logger, nil
}

I am using Sarama package to implement Kafka producer. I have also thought of using a custom logging level. However, it is not supported in zap v1.0.

blackgreen
  • 34,072
  • 23
  • 111
  • 129
aru_sha4
  • 368
  • 2
  • 11

0 Answers