diff --git a/cmd/humanlog/main.go b/cmd/humanlog/main.go index ded03aa..f04395c 100644 --- a/cmd/humanlog/main.go +++ b/cmd/humanlog/main.go @@ -10,6 +10,7 @@ import ( "github.com/fatih/color" "github.com/humanlogio/humanlog" "github.com/humanlogio/humanlog/internal/pkg/config" + "github.com/humanlogio/humanlog/internal/pkg/sink" "github.com/mattn/go-colorable" "github.com/urfave/cli" ) @@ -92,7 +93,7 @@ func newApp() *cli.App { timeFormat := cli.StringFlag{ Name: "time-format", Usage: "output time format, see https://golang.org/pkg/time/ for details", - Value: humanlog.DefaultOptions.TimeFormat, + Value: sink.DefaultStdioOpts.TimeFormat, } ignoreInterrupts := cli.BoolFlag{ @@ -224,10 +225,17 @@ func newApp() *cli.App { fatalf(c, "can only use one of %q and %q", skipFlag.Name, keepFlag.Name) } - opts := humanlog.HandlerOptionsFrom(*cfg) + sinkOpts, errs := sink.StdioOptsFrom(*cfg) + if len(errs) > 0 { + for _, err := range errs { + log.Printf("config error: %v", err) + } + } + sink := sink.NewStdio(colorable.NewColorableStdout(), sinkOpts) + handlerOpts := humanlog.HandlerOptionsFrom(*cfg) log.Print("reading stdin...") - if err := humanlog.Scanner(os.Stdin, colorable.NewColorableStdout(), opts); err != nil { + if err := humanlog.Scanner(os.Stdin, sink, handlerOpts); err != nil { log.Fatalf("scanning caught an error: %v", err) } return nil diff --git a/docker_compose_handler.go b/docker_compose_handler.go index f88d8d3..43bbfdc 100644 --- a/docker_compose_handler.go +++ b/docker_compose_handler.go @@ -2,6 +2,8 @@ package humanlog import ( "regexp" + + "github.com/humanlogio/humanlog/internal/pkg/model" ) // dcLogsPrefixRe parses out a prefix like 'web_1 | ' from docker-compose @@ -14,23 +16,26 @@ import ( var dcLogsPrefixRe = regexp.MustCompile("^(?:\x1b\\[\\d+m)?(?P[a-zA-Z0-9._-]+)\\s+\\|(?:\x1b\\[0m)? 
(?P.*)$") type handler interface { - TryHandle([]byte) bool - setField(key, val []byte) + TryHandle([]byte, *model.Structured) bool } -func tryDockerComposePrefix(d []byte, nextHandler handler) bool { +func tryDockerComposePrefix(d []byte, ev *model.Structured, nextHandler handler) bool { matches := dcLogsPrefixRe.FindSubmatch(d) if matches != nil { - if nextHandler.TryHandle(matches[2]) { - nextHandler.setField([]byte(`service`), matches[1]) + if nextHandler.TryHandle(matches[2], ev) { + ev.KVs = append(ev.KVs, model.KV{ + Key: "service", Value: string(matches[1]), + }) return true } // The Zap Development handler is only built for `JSONHandler`s so // short-circuit calls for LogFmtHandlers switch h := nextHandler.(type) { case *JSONHandler: - if tryZapDevDCPrefix(matches[2], h) { - h.setField([]byte(`service`), matches[1]) + if tryZapDevDCPrefix(matches[2], ev, h) { + ev.KVs = append(ev.KVs, model.KV{ + Key: "service", Value: string(matches[1]), + }) return true } } diff --git a/e2e_test.go b/e2e_test.go index 672baaf..4f9fe8a 100644 --- a/e2e_test.go +++ b/e2e_test.go @@ -9,6 +9,7 @@ import ( "testing" "github.com/humanlogio/humanlog/internal/pkg/config" + "github.com/humanlogio/humanlog/internal/pkg/sink" ) func TestHarness(t *testing.T) { @@ -40,9 +41,13 @@ func TestHarness(t *testing.T) { if err := json.Unmarshal(cfgjson, &cfg); err != nil { t.Fatalf("unmarshaling config: %v", err) } - opts := HandlerOptionsFrom(cfg) gotw := bytes.NewBuffer(nil) - err = Scanner(bytes.NewReader(input), gotw, opts) + sinkOpts, errs := sink.StdioOptsFrom(cfg) + if len(errs) > 1 { + t.Fatalf("errs=%v", errs) + } + s := sink.NewStdio(gotw, sinkOpts) + err = Scanner(bytes.NewReader(input), s, HandlerOptionsFrom(cfg)) if err != nil { t.Fatalf("scanning input: %v", err) } diff --git a/handler.go b/handler.go index d3adfdf..62010bf 100644 --- a/handler.go +++ b/handler.go @@ -1,9 +1,6 @@ package humanlog import ( - "log" - "time" - "github.com/humanlogio/humanlog/internal/pkg/config" "github.com/kr/logfmt" ) @@ -16,47 +13,21 @@ type Handler interface { } var DefaultOptions = &HandlerOptions{ - SortLongest: true, - SkipUnchanged: true, - Truncates: true, - LightBg: false, - TruncateLength: 15, - TimeFormat: time.Stamp, - TimeFields: []string{"time", "ts", "@timestamp", "timestamp"}, MessageFields: []string{"message", "msg"}, LevelFields: []string{"level", "lvl", "loglevel", "severity"}, - - Palette: DefaultPalette, } type HandlerOptions struct { - Skip map[string]struct{} - Keep map[string]struct{} - TimeFields []string MessageFields []string LevelFields []string - - SortLongest bool - SkipUnchanged bool - Truncates bool - LightBg bool - TruncateLength int - TimeFormat string - Palette Palette } var _ = HandlerOptionsFrom(config.DefaultConfig) // ensure it's valid func HandlerOptionsFrom(cfg config.Config) *HandlerOptions { opts := DefaultOptions - if cfg.Skip != nil { - opts.Skip = sliceToSet(cfg.Skip) - } - if cfg.Keep != nil { - opts.Keep = sliceToSet(cfg.Keep) - } if cfg.TimeFields != nil { opts.TimeFields = *cfg.TimeFields } @@ -66,65 +37,5 @@ func HandlerOptionsFrom(cfg config.Config) *HandlerOptions { if cfg.LevelFields != nil { opts.LevelFields = *cfg.LevelFields } - if cfg.SortLongest != nil { - opts.SortLongest = *cfg.SortLongest - } - if cfg.SkipUnchanged != nil { - opts.SkipUnchanged = *cfg.SkipUnchanged - } - if cfg.Truncates != nil { - opts.Truncates = *cfg.Truncates - } - if cfg.LightBg != nil { - opts.LightBg = *cfg.LightBg - } - if cfg.TruncateLength != nil { - opts.TruncateLength = 
*cfg.TruncateLength - } - if cfg.TimeFormat != nil { - opts.TimeFormat = *cfg.TimeFormat - } - if cfg.Palette != nil { - pl, err := PaletteFrom(*cfg.Palette) - if err != nil { - log.Printf("invalid palette, using default one: %v", err) - } else { - opts.Palette = *pl - } - } return opts } - -func (h *HandlerOptions) shouldShowKey(key string) bool { - if len(h.Keep) != 0 { - if _, keep := h.Keep[key]; keep { - return true - } - } - if len(h.Skip) != 0 { - if _, skip := h.Skip[key]; skip { - return false - } - } - return true -} - -func (h *HandlerOptions) shouldShowUnchanged(key string) bool { - if len(h.Keep) != 0 { - if _, keep := h.Keep[key]; keep { - return true - } - } - return false -} - -func sliceToSet(arr *[]string) map[string]struct{} { - if arr == nil { - return nil - } - out := make(map[string]struct{}) - for _, key := range *arr { - out[key] = struct{}{} - } - return out -} diff --git a/internal/pkg/model/types.go b/internal/pkg/model/types.go new file mode 100644 index 0000000..5e6367d --- /dev/null +++ b/internal/pkg/model/types.go @@ -0,0 +1,20 @@ +package model + +import "time" + +type KV struct { + Key string + Value string +} + +type Structured struct { + Time time.Time + Level string + Msg string + KVs []KV +} + +type Event struct { + Raw []byte + Structured *Structured +} diff --git a/internal/pkg/sink/sink.go b/internal/pkg/sink/sink.go new file mode 100644 index 0000000..c57ae54 --- /dev/null +++ b/internal/pkg/sink/sink.go @@ -0,0 +1,7 @@ +package sink + +import "github.com/humanlogio/humanlog/internal/pkg/model" + +type Sink interface { + Receive(*model.Event) error +} diff --git a/color.go b/internal/pkg/sink/stdio.go similarity index 50% rename from color.go rename to internal/pkg/sink/stdio.go index 311bd7b..c90c8fc 100644 --- a/color.go +++ b/internal/pkg/sink/stdio.go @@ -1,12 +1,240 @@ -package humanlog +package sink import ( + "bytes" "fmt" + "io" + "sort" + "strings" + "text/tabwriter" + "time" "github.com/fatih/color" "github.com/humanlogio/humanlog/internal/pkg/config" + "github.com/humanlogio/humanlog/internal/pkg/model" ) +var ( + eol = [...]byte{'\n'} +) + +type Stdio struct { + w io.Writer + opts StdioOpts + + lastEvent *model.Event + lastKVs map[string]string +} + +type StdioOpts struct { + Skip map[string]struct{} + Keep map[string]struct{} + SkipUnchanged bool + SortLongest bool + TimeFormat string + Truncates bool + TruncateLength int + + LightBg bool + Palette Palette +} + +var DefaultStdioOpts = StdioOpts{ + SkipUnchanged: true, + SortLongest: true, + Truncates: true, + LightBg: false, + TruncateLength: 15, + TimeFormat: time.Stamp, + + Palette: DefaultPalette, +} + +func StdioOptsFrom(cfg config.Config) (StdioOpts, []error) { + var errs []error + opts := DefaultStdioOpts + if cfg.Skip != nil { + opts.Skip = sliceToSet(cfg.Skip) + } + if cfg.Keep != nil { + opts.Keep = sliceToSet(cfg.Keep) + } + if cfg.SortLongest != nil { + opts.SortLongest = *cfg.SortLongest + } + if cfg.SkipUnchanged != nil { + opts.SkipUnchanged = *cfg.SkipUnchanged + } + if cfg.Truncates != nil { + opts.Truncates = *cfg.Truncates + } + if cfg.LightBg != nil { + opts.LightBg = *cfg.LightBg + } + if cfg.TruncateLength != nil { + opts.TruncateLength = *cfg.TruncateLength + } + if cfg.TimeFormat != nil { + opts.TimeFormat = *cfg.TimeFormat + } + if cfg.Palette != nil { + pl, err := PaletteFrom(*cfg.Palette) + if err != nil { + errs = append(errs, fmt.Errorf("invalid palette, using default one: %v", err)) + } else { + opts.Palette = *pl + } + } + return opts, errs +} + +var _ 
Sink = (*Stdio)(nil) + +func NewStdio(w io.Writer, opts StdioOpts) *Stdio { + return &Stdio{ + w: w, + opts: opts, + } +} + +func (std *Stdio) Receive(ev *model.Event) error { + if ev.Structured == nil { + if _, err := std.w.Write(ev.Raw); err != nil { + return err + } + return nil + } + data := ev.Structured + + buf := bytes.NewBuffer(nil) + out := tabwriter.NewWriter(buf, 0, 1, 0, '\t', 0) + + var ( + msgColor *color.Color + msgAbsentColor *color.Color + ) + if std.opts.LightBg { + msgColor = std.opts.Palette.MsgLightBgColor + msgAbsentColor = std.opts.Palette.MsgAbsentLightBgColor + } else { + msgColor = std.opts.Palette.MsgDarkBgColor + msgAbsentColor = std.opts.Palette.MsgAbsentDarkBgColor + } + var msg string + if data.Msg == "" { + msg = msgAbsentColor.Sprint("") + } else { + msg = msgColor.Sprint(data.Msg) + } + + lvl := strings.ToUpper(data.Level)[:imin(4, len(data.Level))] + var level string + switch data.Level { + case "debug": + level = std.opts.Palette.DebugLevelColor.Sprint(lvl) + case "info": + level = std.opts.Palette.InfoLevelColor.Sprint(lvl) + case "warn", "warning": + level = std.opts.Palette.WarnLevelColor.Sprint(lvl) + case "error": + level = std.opts.Palette.ErrorLevelColor.Sprint(lvl) + case "fatal", "panic": + level = std.opts.Palette.FatalLevelColor.Sprint(lvl) + default: + level = std.opts.Palette.UnknownLevelColor.Sprint(lvl) + } + + var timeColor *color.Color + if std.opts.LightBg { + timeColor = std.opts.Palette.TimeLightBgColor + } else { + timeColor = std.opts.Palette.TimeDarkBgColor + } + + _, _ = fmt.Fprintf(out, "%s |%s| %s\t %s", + timeColor.Sprint(data.Time.Format(std.opts.TimeFormat)), + level, + msg, + strings.Join(std.joinKVs(data, "="), "\t "), + ) + + if err := out.Flush(); err != nil { + return err + } + + buf.Write(eol[:]) + + if _, err := buf.WriteTo(std.w); err != nil { + return err + } + + kvs := make(map[string]string, len(data.KVs)) + for _, kv := range data.KVs { + kvs[kv.Key] = kv.Value + } + std.lastEvent = ev + std.lastKVs = kvs + return nil +} + +func (std *Stdio) joinKVs(data *model.Structured, sep string) []string { + + kv := make([]string, 0, len(data.KVs)) + for _, pair := range data.KVs { + k, v := pair.Key, pair.Value + if !std.opts.shouldShowKey(k) { + continue + } + + if std.opts.SkipUnchanged { + if lastV, ok := std.lastKVs[k]; ok && lastV == v && !std.opts.shouldShowUnchanged(k) { + continue + } + } + kstr := std.opts.Palette.KeyColor.Sprint(k) + + var vstr string + if std.opts.Truncates && len(v) > std.opts.TruncateLength { + vstr = v[:std.opts.TruncateLength] + "..." 
+ } else { + vstr = v + } + vstr = std.opts.Palette.ValColor.Sprint(vstr) + kv = append(kv, kstr+sep+vstr) + } + + sort.Strings(kv) + + if std.opts.SortLongest { + sort.Stable(byLongest(kv)) + } + + return kv +} + +func (opts *StdioOpts) shouldShowKey(key string) bool { + if len(opts.Keep) != 0 { + if _, keep := opts.Keep[key]; keep { + return true + } + } + if len(opts.Skip) != 0 { + if _, skip := opts.Skip[key]; skip { + return false + } + } + return true +} + +func (opts *StdioOpts) shouldShowUnchanged(key string) bool { + if len(opts.Keep) != 0 { + if _, keep := opts.Keep[key]; keep { + return true + } + } + return false +} + var DefaultPalette = Palette{ KeyColor: color.New(color.FgGreen), ValColor: color.New(color.FgHiWhite), @@ -155,3 +383,27 @@ var colorAttributeIndex = map[string]color.Attribute{ "bg_hi_cyan": color.BgHiCyan, "bg_hi_white": color.BgHiWhite, } + +type byLongest []string + +func (s byLongest) Len() int { return len(s) } +func (s byLongest) Less(i, j int) bool { return len(s[i]) < len(s[j]) } +func (s byLongest) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func imin(a, b int) int { + if a < b { + return a + } + return b +} + +func sliceToSet(arr *[]string) map[string]struct{} { + if arr == nil { + return nil + } + out := make(map[string]struct{}) + for _, key := range *arr { + out[key] = struct{}{} + } + return out +} diff --git a/json_handler.go b/json_handler.go index fa5d2b1..3b21fb1 100644 --- a/json_handler.go +++ b/json_handler.go @@ -1,32 +1,23 @@ package humanlog import ( - "bytes" "encoding/json" "fmt" "math" - "sort" "strings" - "text/tabwriter" "time" - "github.com/fatih/color" + "github.com/humanlogio/humanlog/internal/pkg/model" ) // JSONHandler can handle logs emitted by logrus.TextFormatter loggers. type JSONHandler struct { - buf *bytes.Buffer - out *tabwriter.Writer - truncKV int - Opts *HandlerOptions Level string Time time.Time Message string Fields map[string]string - - last map[string]string } // searchJSON searches a document for a key using the found func to determine if the value is accepted. @@ -71,19 +62,21 @@ func (h *JSONHandler) clear() { h.Level = "" h.Time = time.Time{} h.Message = "" - h.last = h.Fields h.Fields = make(map[string]string) - if h.buf != nil { - h.buf.Reset() - } } // TryHandle tells if this line was handled by this handler. -func (h *JSONHandler) TryHandle(d []byte) bool { +func (h *JSONHandler) TryHandle(d []byte, out *model.Structured) bool { if !h.UnmarshalJSON(d) { h.clear() return false } + out.Time = h.Time + out.Msg = h.Message + out.Level = h.Level + for k, v := range h.Fields { + out.KVs = append(out.KVs, model.KV{Key: k, Value: v}) + } return true } @@ -174,111 +167,6 @@ func (h *JSONHandler) UnmarshalJSON(data []byte) bool { return true } -func (h *JSONHandler) setField(key, val []byte) { - if h.Fields == nil { - h.Fields = make(map[string]string) - } - h.Fields[string(key)] = string(val) -} - -// Prettify the output in a logrus like fashion. 
-func (h *JSONHandler) Prettify(skipUnchanged bool) []byte { - defer h.clear() - if h.out == nil { - if h.Opts == nil { - h.Opts = DefaultOptions - } - h.buf = bytes.NewBuffer(nil) - h.out = tabwriter.NewWriter(h.buf, 0, 1, 0, '\t', 0) - } - - var ( - msgColor *color.Color - msgAbsentColor *color.Color - ) - if h.Opts.LightBg { - msgColor = h.Opts.Palette.MsgLightBgColor - msgAbsentColor = h.Opts.Palette.MsgAbsentLightBgColor - } else { - msgColor = h.Opts.Palette.MsgDarkBgColor - msgAbsentColor = h.Opts.Palette.MsgAbsentDarkBgColor - } - - var msg string - if h.Message == "" { - msg = msgAbsentColor.Sprint("") - } else { - msg = msgColor.Sprint(h.Message) - } - - lvl := strings.ToUpper(h.Level)[:imin(4, len(h.Level))] - var level string - switch h.Level { - case "debug": - level = h.Opts.Palette.DebugLevelColor.Sprint(lvl) - case "info": - level = h.Opts.Palette.InfoLevelColor.Sprint(lvl) - case "warn", "warning": - level = h.Opts.Palette.WarnLevelColor.Sprint(lvl) - case "error": - level = h.Opts.Palette.ErrorLevelColor.Sprint(lvl) - case "fatal", "panic": - level = h.Opts.Palette.FatalLevelColor.Sprint(lvl) - default: - level = h.Opts.Palette.UnknownLevelColor.Sprint(lvl) - } - - var timeColor *color.Color - if h.Opts.LightBg { - timeColor = h.Opts.Palette.TimeLightBgColor - } else { - timeColor = h.Opts.Palette.TimeDarkBgColor - } - _, _ = fmt.Fprintf(h.out, "%s |%s| %s\t %s", - timeColor.Sprint(h.Time.Format(h.Opts.TimeFormat)), - level, - msg, - strings.Join(h.joinKVs(skipUnchanged, "="), "\t "), - ) - - _ = h.out.Flush() - - return h.buf.Bytes() -} - -func (h *JSONHandler) joinKVs(skipUnchanged bool, sep string) []string { - - kv := make([]string, 0, len(h.Fields)) - for k, v := range h.Fields { - if !h.Opts.shouldShowKey(k) { - continue - } - - if skipUnchanged { - if lastV, ok := h.last[k]; ok && lastV == v && !h.Opts.shouldShowUnchanged(k) { - continue - } - } - kstr := h.Opts.Palette.KeyColor.Sprint(k) - - var vstr string - if h.Opts.Truncates && len(v) > h.Opts.TruncateLength { - vstr = v[:h.Opts.TruncateLength] + "..." 
- } else { - vstr = v - } - vstr = h.Opts.Palette.ValColor.Sprint(vstr) - kv = append(kv, kstr+sep+vstr) - } - - sort.Strings(kv) - - if h.Opts.SortLongest { - sort.Stable(byLongest(kv)) - } - - return kv -} // convertBunyanLogLevel returns a human readable log level given a numerical bunyan level // https://github.com/trentm/node-bunyan#levels diff --git a/json_handler_test.go b/json_handler_test.go index 84c98e6..e24a953 100644 --- a/json_handler_test.go +++ b/json_handler_test.go @@ -6,6 +6,7 @@ import ( "time" "github.com/humanlogio/humanlog" + "github.com/humanlogio/humanlog/internal/pkg/model" ) func TestJSONHandler_UnmarshalJSON_ParsesFields(t *testing.T) { @@ -22,8 +23,8 @@ func TestJSONHandler_UnmarshalJSON_ParsesFields(t *testing.T) { opts := *humanlog.DefaultOptions h := humanlog.JSONHandler{Opts: &opts} - - if !h.TryHandle(raw) { + ev := new(model.Structured) + if !h.TryHandle(raw, ev) { t.Fatalf("failed to parse log level") } @@ -58,7 +59,8 @@ func TestJSONHandler_UnmarshalJSON_ParsesCustomFields(t *testing.T) { h := humanlog.JSONHandler{Opts: &opts} - if !h.TryHandle(raw) { + ev := new(model.Structured) + if !h.TryHandle(raw, ev) { t.Fatalf("failed to parse log level") } @@ -91,8 +93,8 @@ func TestJSONHandler_UnmarshalJSON_ParsesCustomNestedFields(t *testing.T) { opts.TimeFields = []string{"data.time"} h := humanlog.JSONHandler{Opts: &opts} - - if !h.TryHandle(raw) { + ev := new(model.Structured) + if !h.TryHandle(raw, ev) { t.Fatalf("failed to handle log") } @@ -134,8 +136,8 @@ func TestJSONHandler_UnmarshalJSON_ParsesCustomMultiNestedFields(t *testing.T) { opts.TimeFields = []string{"data.l2.time"} h := humanlog.JSONHandler{Opts: &opts} - - if !h.TryHandle(raw) { + ev := new(model.Structured) + if !h.TryHandle(raw, ev) { t.Fatalf("failed to handle log") } diff --git a/logfmt_handler.go b/logfmt_handler.go index 61fbe69..24ae7c7 100644 --- a/logfmt_handler.go +++ b/logfmt_handler.go @@ -2,59 +2,53 @@ package humanlog import ( "bytes" - "fmt" - "sort" "strconv" - "strings" - "text/tabwriter" "time" - "github.com/fatih/color" "github.com/go-logfmt/logfmt" + "github.com/humanlogio/humanlog/internal/pkg/model" ) // LogfmtHandler can handle logs emmited by logrus.TextFormatter loggers. type LogfmtHandler struct { - buf *bytes.Buffer - out *tabwriter.Writer - truncKV int - Opts *HandlerOptions Level string Time time.Time Message string Fields map[string]string - - last map[string]string } func (h *LogfmtHandler) clear() { h.Level = "" h.Time = time.Time{} h.Message = "" - h.last = h.Fields h.Fields = make(map[string]string) - if h.buf != nil { - h.buf.Reset() - } } // CanHandle tells if this line can be handled by this handler. -func (h *LogfmtHandler) TryHandle(d []byte) bool { +func (h *LogfmtHandler) TryHandle(d []byte, out *model.Structured) bool { if !bytes.ContainsRune(d, '=') { return false } - if !h.UnmarshalLogfmt(d) { h.clear() return false } + out.Time = h.Time + out.Msg = h.Message + out.Level = h.Level + for k, v := range h.Fields { + out.KVs = append(out.KVs, model.KV{Key: k, Value: v}) + } return true } // HandleLogfmt sets the fields of the handler. 
func (h *LogfmtHandler) UnmarshalLogfmt(data []byte) bool { + if h.Fields == nil { + h.Fields = make(map[string]string) + } dec := logfmt.NewDecoder(bytes.NewReader(data)) for dec.ScanRecord() { next_kv: @@ -100,77 +94,12 @@ func (h *LogfmtHandler) UnmarshalLogfmt(data []byte) bool { } } - h.setField(key, val) + h.Fields[string(key)] = string(val) } } return dec.Err() == nil } -// Prettify the output in a logrus like fashion. -func (h *LogfmtHandler) Prettify(skipUnchanged bool) []byte { - defer h.clear() - if h.out == nil { - if h.Opts == nil { - h.Opts = DefaultOptions - } - h.buf = bytes.NewBuffer(nil) - h.out = tabwriter.NewWriter(h.buf, 0, 1, 0, '\t', 0) - } - - var ( - msgColor *color.Color - msgAbsentColor *color.Color - ) - if h.Opts.LightBg { - msgColor = h.Opts.Palette.MsgLightBgColor - msgAbsentColor = h.Opts.Palette.MsgAbsentLightBgColor - } else { - msgColor = h.Opts.Palette.MsgDarkBgColor - msgAbsentColor = h.Opts.Palette.MsgAbsentDarkBgColor - } - - var msg string - if h.Message == "" { - msg = msgAbsentColor.Sprint("") - } else { - msg = msgColor.Sprint(h.Message) - } - - lvl := strings.ToUpper(h.Level)[:imin(4, len(h.Level))] - var level string - switch h.Level { - case "debug": - level = h.Opts.Palette.DebugLevelColor.Sprint(lvl) - case "info": - level = h.Opts.Palette.InfoLevelColor.Sprint(lvl) - case "warn", "warning": - level = h.Opts.Palette.WarnLevelColor.Sprint(lvl) - case "error": - level = h.Opts.Palette.ErrorLevelColor.Sprint(lvl) - case "fatal", "panic": - level = h.Opts.Palette.FatalLevelColor.Sprint(lvl) - default: - level = h.Opts.Palette.UnknownLevelColor.Sprint(lvl) - } - - var timeColor *color.Color - if h.Opts.LightBg { - timeColor = h.Opts.Palette.TimeLightBgColor - } else { - timeColor = h.Opts.Palette.TimeDarkBgColor - } - _, _ = fmt.Fprintf(h.out, "%s |%s| %s\t %s", - timeColor.Sprint(h.Time.Format(h.Opts.TimeFormat)), - level, - msg, - strings.Join(h.joinKVs(skipUnchanged, "="), "\t "), - ) - - _ = h.out.Flush() - - return h.buf.Bytes() -} - func (h *LogfmtHandler) setLevel(val []byte) { h.Level = string(val) } func (h *LogfmtHandler) setMessage(val []byte) { h.Message = string(val) } func (h *LogfmtHandler) setTime(val []byte) (parsed bool) { @@ -182,58 +111,3 @@ func (h *LogfmtHandler) setTime(val []byte) (parsed bool) { } return } - -func (h *LogfmtHandler) setField(key, val []byte) { - if h.Fields == nil { - h.Fields = make(map[string]string) - } - h.Fields[string(key)] = string(val) -} - -func (h *LogfmtHandler) joinKVs(skipUnchanged bool, sep string) []string { - - kv := make([]string, 0, len(h.Fields)) - for k, v := range h.Fields { - if !h.Opts.shouldShowKey(k) { - continue - } - - if skipUnchanged { - if lastV, ok := h.last[k]; ok && lastV == v && !h.Opts.shouldShowUnchanged(k) { - continue - } - } - - kstr := h.Opts.Palette.KeyColor.Sprint(k) - - var vstr string - if h.Opts.Truncates && len(v) > h.Opts.TruncateLength { - vstr = v[:h.Opts.TruncateLength] + "..." 
- } else { - vstr = v - } - vstr = h.Opts.Palette.ValColor.Sprint(vstr) - kv = append(kv, kstr+sep+vstr) - } - - sort.Strings(kv) - - if h.Opts.SortLongest { - sort.Stable(byLongest(kv)) - } - - return kv -} - -type byLongest []string - -func (s byLongest) Len() int { return len(s) } -func (s byLongest) Less(i, j int) bool { return len(s[i]) < len(s[j]) } -func (s byLongest) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -func imin(a, b int) int { - if a < b { - return a - } - return b -} diff --git a/scanner.go b/scanner.go index 2b26c33..3a1a501 100644 --- a/scanner.go +++ b/scanner.go @@ -4,62 +4,62 @@ import ( "bufio" "bytes" "io" -) + "time" -var ( - eol = [...]byte{'\n'} + "github.com/humanlogio/humanlog/internal/pkg/model" + "github.com/humanlogio/humanlog/internal/pkg/sink" ) // Scanner reads JSON-structured lines from src and prettify them onto dst. If // the lines aren't JSON-structured, it will simply write them out with no // prettification. -func Scanner(src io.Reader, dst io.Writer, opts *HandlerOptions) error { +func Scanner(src io.Reader, sink sink.Sink, opts *HandlerOptions) error { in := bufio.NewScanner(src) in.Split(bufio.ScanLines) var line uint64 - var lastLogfmt bool - var lastJSON bool - logfmtEntry := LogfmtHandler{Opts: opts} jsonEntry := JSONHandler{Opts: opts} + ev := new(model.Event) + data := new(model.Structured) + ev.Structured = data + for in.Scan() { line++ lineData := in.Bytes() + if ev.Structured == nil { + ev.Structured = data + } else { + data.Time = time.Time{} + data.Msg = "" + data.Level = "" + data.KVs = data.KVs[:0] + } + ev.Raw = lineData + // remove that pesky syslog crap lineData = bytes.TrimPrefix(lineData, []byte("@cee: ")) - switch { - case jsonEntry.TryHandle(lineData): - dst.Write(jsonEntry.Prettify(opts.SkipUnchanged && lastJSON)) - lastJSON = true + case jsonEntry.TryHandle(lineData, data): - case logfmtEntry.TryHandle(lineData): - dst.Write(logfmtEntry.Prettify(opts.SkipUnchanged && lastLogfmt)) - lastLogfmt = true + case logfmtEntry.TryHandle(lineData, data): - case tryDockerComposePrefix(lineData, &jsonEntry): - dst.Write(jsonEntry.Prettify(opts.SkipUnchanged && lastJSON)) - lastJSON = true + case tryDockerComposePrefix(lineData, data, &jsonEntry): - case tryDockerComposePrefix(lineData, &logfmtEntry): - dst.Write(logfmtEntry.Prettify(opts.SkipUnchanged && lastLogfmt)) - lastLogfmt = true + case tryDockerComposePrefix(lineData, data, &logfmtEntry): - case tryZapDevPrefix(lineData, &jsonEntry): - dst.Write(jsonEntry.Prettify(opts.SkipUnchanged && lastJSON)) - lastJSON = true + case tryZapDevPrefix(lineData, data, &jsonEntry): default: - lastLogfmt = false - lastJSON = false - dst.Write(lineData) + ev.Structured = nil + } + if err := sink.Receive(ev); err != nil { + return err } - dst.Write(eol[:]) } diff --git a/scripts/README.tmpl.md b/script/README.tmpl.md similarity index 100% rename from scripts/README.tmpl.md rename to script/README.tmpl.md diff --git a/scripts/release.sh b/script/release.sh similarity index 100% rename from scripts/release.sh rename to script/release.sh diff --git a/test_inputs b/test_inputs deleted file mode 100644 index 712901d..0000000 --- a/test_inputs +++ /dev/null @@ -1,4 +0,0 @@ -time=2022-08-10T03:03:24.259999353Z level=INFO msg="Started Worker" Namespace=runtime TaskQueue=runtimed -time=2022-08-10T03:05:19.289159884Z level=ERROR msg="processing job" error="worker error from receive: twirp error internal: failed to do request: Post \"http://localhost:18081/twirp/aqueduct.api.v1.JobQueueService/Receive\": 
context deadline exceeded (Client.Timeout exceeded while awaiting headers)" -time=2022-08-10T03:06:20.422461071Z level=DEBUG msg="inbound request" twirp_svc=ManagementsAPI twirp_method=GetEnableStatus twirp_req=*managements.GetEnableStatusRequest -{"level":"info","ts":"2022-08-10T03:03:18.312Z","msg":"temporal-sys-tq-scanner-workflow workflow successfully started","service":"worker","logging-call-at":"scanner.go:202"} diff --git a/zap_development_handler.go b/zap_development_handler.go index 4823bf0..8456f52 100644 --- a/zap_development_handler.go +++ b/zap_development_handler.go @@ -4,16 +4,18 @@ import ( "regexp" "strings" "time" + + "github.com/humanlogio/humanlog/internal/pkg/model" ) // Zap Development logs are made up of the following separated by whitespace -// 1. timestamp in ISO-8601 (??) -// 2. Log Level (one of DEBUG ERROR INFO WARN FATAL) -// 3. Caller Location in the source -// 4. The main logged message -// 5. a JSON object containing the structured k/v pairs -// 6. optional context lines - but since they are on a separate line the main -// scanner loop will never capture them +// 1. timestamp in ISO-8601 (??) +// 2. Log Level (one of DEBUG ERROR INFO WARN FATAL) +// 3. Caller Location in the source +// 4. The main logged message +// 5. a JSON object containing the structured k/v pairs +// 6. optional context lines - but since they are on a separate line the main +// scanner loop will never capture them var zapDevLogsPrefixRe = regexp.MustCompile("^(?P\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d{3}-\\d{4})\\s+(?P\\w{4,5})\\s+(?P\\S+)\\s+(?P[^{]+?)\\s+(?P{.+})$") // Zap Development Logs when run in Docker-Compose are nearly identical to before @@ -26,18 +28,19 @@ var zapDevDCLogsPrefixRe = regexp.MustCompile("^(?P\\d{4}-\\d{2}-\\d{ // time package which is worrisome but this pattern does work. const someRFC = "2006-01-02T15:04:05.000-0700" -func tryZapDevPrefix(d []byte, handler *JSONHandler) bool { +func tryZapDevPrefix(d []byte, ev *model.Structured, handler *JSONHandler) bool { if matches := zapDevLogsPrefixRe.FindSubmatch(d); matches != nil { - if handler.TryHandle(matches[5]) { + if handler.TryHandle(matches[5], ev) { t, err := time.Parse(someRFC, string(matches[1])) if err != nil { return false } - handler.Time = t - - handler.Level = strings.ToLower(string(matches[2])) - handler.setField([]byte("caller"), matches[3]) - handler.Message = string(matches[4]) + ev.Time = t + ev.Level = strings.ToLower(string(matches[2])) + ev.Msg = string(matches[4]) + ev.KVs = append(ev.KVs, model.KV{ + Key: "caller", Value: string(matches[3]), + }) return true } } @@ -48,18 +51,20 @@ func tryZapDevPrefix(d []byte, handler *JSONHandler) bool { // time package which is worrisome but this pattern does work. 
const someOtherRFC = "2006-01-02T15:04:05.000Z" -func tryZapDevDCPrefix(d []byte, handler *JSONHandler) bool { +func tryZapDevDCPrefix(d []byte, ev *model.Structured, handler *JSONHandler) bool { if matches := zapDevDCLogsPrefixRe.FindSubmatch(d); matches != nil { - if handler.TryHandle(matches[5]) { + if handler.TryHandle(matches[5], ev) { t, err := time.Parse(someOtherRFC, string(matches[1])) if err != nil { return false } - handler.Time = t - - handler.Level = strings.ToLower(string(matches[2])) - handler.setField([]byte("caller"), matches[3]) - handler.Message = string(matches[4]) + ev.Time = t + ev.Level = strings.ToLower(string(matches[2])) + ev.Msg = string(matches[4]) + ev.KVs = append( + ev.KVs, + model.KV{Key: "caller", Value: string(matches[3])}, + ) return true } } diff --git a/zap_development_handler_test.go b/zap_development_handler_test.go index 9b5bdba..b1c0e54 100644 --- a/zap_development_handler_test.go +++ b/zap_development_handler_test.go @@ -4,6 +4,8 @@ import ( "reflect" "testing" "time" + + "github.com/humanlogio/humanlog/internal/pkg/model" ) var logLinesByLevel = map[string][]byte{ @@ -193,7 +195,8 @@ func Test_tryZapDevPrefix(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { h := &JSONHandler{} - m := tryZapDevPrefix(test.logLine, h) + ev := new(model.Structured) + m := tryZapDevPrefix(test.logLine, ev, h) if m != test.wantMatch { t.Error("expected the prefix to match, it did not") @@ -407,7 +410,8 @@ func Test_tryZapDevDCPrefix(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { h := &JSONHandler{} - m := tryZapDevDCPrefix(test.logLine, h) + ev := new(model.Structured) + m := tryZapDevDCPrefix(test.logLine, ev, h) if m != test.wantMatch { t.Error("expected the prefix to match, it did not")
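For reviewers, here is a minimal sketch of how the pieces above fit together after this refactor: handlers now fill a `*model.Structured`, `Scanner` hands each `*model.Event` to a `sink.Sink`, and the old `Prettify` logic lives in `sink.Stdio`. The `countingSink` type, the `prettifyStdin` function, and the file placement are hypothetical illustrations only (the `sink` and `model` packages are `internal`, so such code would have to live inside the humanlog module, e.g. in a `_test.go` file); the `humanlog`, `sink`, `model`, and `config` identifiers are taken from the diff itself.

```go
package humanlog_test

import (
	"log"
	"os"

	"github.com/humanlogio/humanlog"
	"github.com/humanlogio/humanlog/internal/pkg/config"
	"github.com/humanlogio/humanlog/internal/pkg/model"
	"github.com/humanlogio/humanlog/internal/pkg/sink"
	"github.com/mattn/go-colorable"
)

// countingSink is a hypothetical alternative sink: it only counts events,
// illustrating that parsing (Scanner + handlers) is now decoupled from rendering.
type countingSink struct{ structured, raw int }

var _ sink.Sink = (*countingSink)(nil)

func (c *countingSink) Receive(ev *model.Event) error {
	if ev.Structured != nil {
		c.structured++ // line was parsed by one of the handlers
	} else {
		c.raw++ // line is passed through as-is
	}
	return nil
}

// prettifyStdin wires things up the way cmd/humanlog/main.go does after this
// change: the Stdio sink renders whatever the handlers put into model.Structured.
func prettifyStdin() {
	cfg := config.DefaultConfig

	opts, errs := sink.StdioOptsFrom(cfg)
	for _, err := range errs {
		log.Printf("config error: %v", err)
	}
	std := sink.NewStdio(colorable.NewColorableStdout(), opts)

	// Any sink.Sink works here; &countingSink{} could be swapped in for std.
	if err := humanlog.Scanner(os.Stdin, std, humanlog.HandlerOptionsFrom(cfg)); err != nil {
		log.Fatalf("scanning: %v", err)
	}
}
```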