feat: optimize log warning output
parent 3094ba679e
commit 04546ff646
@@ -74,11 +74,13 @@ func ParseComtradeFile(ctx context.Context, monitorDir string, dbName string, ad
             }

             err := processComtradeFile(ctx, addFilePath, monitorDir, dbName, comtradeMap, addChan, delChan, pool, logger)
-            if errors.Is(err, nil) {
-                // TODO log the error and raise an alert
-                logger.Error("process comtrade file failed", zap.Error(err))
-                continue
-            }
+            if err != nil {
+                if errors.Is(err, constant.ErrNotFindDatFile) {
+                    logger.Info("process comtrade file failed", zap.Error(err))
+                    continue
+                }
+                logger.Error("process comtrade file failed", zap.Error(err))
+            }
         }
     }

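The new branch separates the expected "no .dat file yet" case from real failures: the sentinel error is matched with errors.Is and logged at Info, everything else stays at Error. A minimal standalone sketch of that pattern (my own illustration, not the project's code; fmt.Println stands in for the zap logger and errNotFindDatFile for constant.ErrNotFindDatFile):

package main

import (
	"errors"
	"fmt"
)

// errNotFindDatFile stands in for constant.ErrNotFindDatFile.
var errNotFindDatFile = errors.New("can not find dat file in map")

// handle mirrors the new branch: sentinel -> Info and skip, anything else -> Error.
func handle(err error) {
	if err != nil {
		if errors.Is(err, errNotFindDatFile) {
			fmt.Println("INFO: process comtrade file failed:", err) // expected, .dat not paired yet
			return                                                  // plays the role of `continue` in the loop
		}
		fmt.Println("ERROR: process comtrade file failed:", err) // unexpected failure
	}
}

func main() {
	handle(fmt.Errorf("process comtrade failed:%w", errNotFindDatFile)) // reported at INFO
	handle(errors.New("disk read failure"))                             // reported at ERROR
}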
@@ -94,13 +96,13 @@ func processComtradeFile(ctx context.Context, addFilePath string, monitorDir str
         err := processConfigFile(ctx, dbName, addFilePath, delChan, comtradeMap, pool)
         if err != nil {
             // TODO wrap the error before returning it
-            return fmt.Errorf("%w", err)
+            return fmt.Errorf("process comtrade failed:%w", err)
         }
     case constant.DataFileSuffix:
         err := processDataFile(monitorDir, fileName, addFilePath, comtradeMap, addChan, logger)
         if err != nil {
             // TODO wrap the error before returning it
-            return fmt.Errorf("%w", err)
+            return fmt.Errorf("process comtrade failed:%w", err)
         }
     default:
         logger.Warn("no support file style", zap.String("file_style", fileExtension))
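The added "process comtrade failed:" prefix keeps the %w verb, so the sentinel stays reachable through the wrap and the errors.Is check in ParseComtradeFile still matches after processComtradeFile re-wraps the error. Note also that the sentinel has to be the same variable on both sides; two errors.New calls with identical text never compare equal under errors.Is. A short hedged sketch of both points:

package main

import (
	"errors"
	"fmt"
)

var errNotFindDatFile = errors.New("can not find dat file in map")

func main() {
	wrapped := fmt.Errorf("process comtrade failed:%w", errNotFindDatFile)
	fmt.Println(errors.Is(wrapped, errNotFindDatFile)) // true: %w keeps the error chain

	flattened := fmt.Errorf("process comtrade failed:%v", errNotFindDatFile)
	fmt.Println(errors.Is(flattened, errNotFindDatFile)) // false: %v only copies the text

	lookalike := errors.New("can not find dat file in map")
	fmt.Println(errors.Is(lookalike, errNotFindDatFile)) // false: same text, different value
}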
@@ -113,7 +115,8 @@ func processConfigFile(ctx context.Context, dbName string, configFilePath string
     dataFilePath, exist := comtradeMap.Load(configFilePath)
     if exist {
         if dataFilePath == "" {
-            return errors.New("can not find dat file in map")
+            fmt.Println(11111)
+            return constant.ErrNotFindDatFile
         }

         pool.Invoke(config.ComtradeDataStorageConfig{
@@ -126,7 +129,8 @@ func processConfigFile(ctx context.Context, dbName string, configFilePath string
         return nil
     }
     comtradeMap.Store(configFilePath, "")
-    return nil
+    fmt.Println(22222)
+    return constant.ErrNotFindDatFile
 }

 func processDataFile(monitorDir string, fileName string, dataFilePath string, comtradeMap *sync.Map, addChan chan string, logger *zap.Logger) error {
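As I read these two hunks, the sync.Map keeps the .cfg path as key and the matching .dat path as value, with "" meaning "cfg seen, dat not yet", and both not-yet-paired branches now surface constant.ErrNotFindDatFile instead of an inline error or nil. A simplified sketch under those assumptions (the real code also dispatches to a worker pool, and the fmt.Println(11111)/(22222) debug prints are left out here):

package main

import (
	"errors"
	"fmt"
	"sync"
)

var errNotFindDatFile = errors.New("can not find dat file in map")

// onConfigFile is a simplified stand-in for processConfigFile: key = .cfg path,
// value = matching .dat path, "" = cfg seen but dat not yet.
func onConfigFile(m *sync.Map, cfgPath string) error {
	if datPath, ok := m.Load(cfgPath); ok {
		if datPath == "" {
			return errNotFindDatFile // cfg recorded earlier, dat still missing
		}
		fmt.Println("pair complete:", cfgPath, "+", datPath) // real code: pool.Invoke(...)
		return nil
	}
	m.Store(cfgPath, "") // remember the cfg and wait for its dat file
	return errNotFindDatFile
}

func main() {
	var m sync.Map
	fmt.Println(onConfigFile(&m, "a.cfg")) // dat not seen yet -> sentinel error
	m.Store("a.cfg", "a.dat")              // assumption: the .dat handler fills this in
	fmt.Println(onConfigFile(&m, "a.cfg")) // pair found -> nil
}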
@@ -3,8 +3,11 @@ package config
 import (
     "context"
+    "fmt"
     "strings"
+    "time"

+    "wave_record/constant"
     "wave_record/log"

     "github.com/spf13/viper"
@@ -17,7 +20,7 @@ type WaveRecordConfig struct
     ParseConcurrentQuantity int // parse comtrade file concurrent quantity
     MongoDBURI              string
     MongoDBDataBase         string
-    LCfg                    log.LogConfig // log config
+    LCfg                    log.CutLogConfig // log config
 }

 // ComtradeDataStorageConfig define config struct of storage comtrade data
@@ -51,7 +54,7 @@ func ReadAndInitConfig(configDir, configName, configType string) (waveRecordConf
     waveRecordConfig.MongoDBDataBase = config.GetString("mongodb_database")
     // init zap log config from config.yaml
     waveRecordConfig.LCfg.Level = config.GetString("log_level")
-    waveRecordConfig.LCfg.FileName = config.GetString("log_filename")
+    waveRecordConfig.LCfg.FileName = fmt.Sprintf(config.GetString("log_filepath"), time.Now().Format(constant.LogTimeFormate))
     waveRecordConfig.LCfg.MaxSize = config.GetInt("log_maxsize")
     waveRecordConfig.LCfg.MaxBackups = config.GetInt("log_maxbackups")
     waveRecordConfig.LCfg.MaxAge = config.GetInt("log_maxage")
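The log file name is now built from the "log_filepath" pattern plus the current time instead of the fixed "log_filename". A small self-contained sketch with the values hard-coded rather than read through viper:

package main

import (
	"fmt"
	"time"
)

// logTimeFormate mirrors constant.LogTimeFormate; it is Go's reference time
// (Mon Jan 2 15:04:05 2006), not a strftime-style pattern.
const logTimeFormate = "2006-01-02 15-04-05"

func main() {
	pattern := "/home/douxu/log/wave_record-%s.log" // stands in for config.GetString("log_filepath")
	fileName := fmt.Sprintf(pattern, time.Now().Format(logTimeFormate))
	fmt.Println(fileName) // e.g. /home/douxu/log/wave_record-2024-05-01 13-30-05.log
}

Because the layout keeps the space between date and time, the resulting file name contains a space as well.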
@@ -6,7 +6,7 @@ mongodb_port: "27017"
 mongodb_database: "wave_record"

 log_level: "debug"
-log_filename: "/home/douxu/log/wave_record.log"
+log_filepath: "/home/douxu/log/wave_record-%s.log"
 log_maxsize: 1
 log_maxbackups: 5
 log_maxage: 30
@@ -0,0 +1,7 @@
+// Package constant define constant value
+package constant
+
+import "errors"
+
+// ErrNotFindDatFile define error for not find comtrade data file
+var ErrNotFindDatFile = errors.New("can not find dat file in map")
@@ -0,0 +1,7 @@
+// Package constant define constant value
+package constant
+
+const (
+	// LogTimeFormate define time format for log file name
+	LogTimeFormate = "2006-01-02 15-04-05"
+)
@@ -15,8 +15,8 @@ var (
     once sync.Once
 )

-// LogConfig define log config of wave record project
-type LogConfig struct {
+// CutLogConfig define log config of wave record project
+type CutLogConfig struct {
     Level    string `json:"level"`     // Level: minimum log level, DEBUG<INFO<WARN<ERROR<FATAL; INFO collects logs at INFO and above
     FileName string `json:"file_name"` // FileName: path of the log file
     MaxSize  int    `json:"max_size"`  // MaxSize: maximum size in MB before the log file is rotated (default 100 MB)
@@ -51,7 +51,7 @@ func getLogWriter(filename string, maxsize, maxBackup, maxAge int) zapcore.Write
 }

 // initLogger return successfully initialized zap logger
-func initLogger(lCfg LogConfig) *zap.Logger {
+func initLogger(lCfg CutLogConfig) *zap.Logger {
     writeSyncer := getLogWriter(lCfg.FileName, lCfg.MaxSize, lCfg.MaxBackups, lCfg.MaxAge)
     encoder := getEncoder()

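getLogWriter itself is unchanged here, only its config type is renamed. Its signature (file name, max size, max backups, max age returning a zapcore.WriteSyncer) suggests a size-based rotating writer such as lumberjack, though that is an assumption on my part; only the signature appears in this diff. A sketch under that assumption:

package main

import (
	"go.uber.org/zap/zapcore"
	"gopkg.in/natefinch/lumberjack.v2"
)

// getLogWriter copies the signature from the diff; the lumberjack body is an assumption.
func getLogWriter(filename string, maxsize, maxBackup, maxAge int) zapcore.WriteSyncer {
	return zapcore.AddSync(&lumberjack.Logger{
		Filename:   filename,  // e.g. /home/douxu/log/wave_record-<timestamp>.log
		MaxSize:    maxsize,   // megabytes before the file is rotated
		MaxBackups: maxBackup, // number of rotated files to keep
		MaxAge:     maxAge,    // days to keep rotated files
	})
}

func main() {
	ws := getLogWriter("/tmp/wave_record.log", 1, 5, 30)
	ws.Write([]byte("rotation sketch\n"))
}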
@@ -69,7 +69,7 @@ func initLogger(lCfg LogConfig) *zap.Logger {
 }

 // GetLoggerInstance return instance of zap logger
-func GetLoggerInstance(lCfg LogConfig) *zap.Logger {
+func GetLoggerInstance(lCfg CutLogConfig) *zap.Logger {
     once.Do(func() {
         logger = initLogger(lCfg)
     })
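GetLoggerInstance keeps the sync.Once singleton: the first caller's CutLogConfig wins and every later call returns the same *zap.Logger. A minimal sketch of that pattern, using zap.NewProduction so it stays self-contained (the real code builds the logger from CutLogConfig via initLogger):

package main

import (
	"sync"

	"go.uber.org/zap"
)

var (
	logger *zap.Logger
	once   sync.Once
)

// getLoggerInstance mirrors GetLoggerInstance: only the first call builds the logger.
func getLoggerInstance() *zap.Logger {
	once.Do(func() {
		logger, _ = zap.NewProduction() // real code: logger = initLogger(lCfg)
	})
	return logger
}

func main() {
	a := getLoggerInstance()
	b := getLoggerInstance()
	a.Info("same logger returned", zap.Bool("identical", a == b)) // identical=true
}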