handle split logs when data stream is large

pull/16/head
Eoin McAfee 3 years ago
parent 0c2b0a9c62
commit e6109dd869

@ -7,41 +7,49 @@ import (
"regexp" "regexp"
) )
// Esc is the ANSI escape character that delimits embedded card data.
const Esc = "\u001B"

var (
	// prefix marks the start of an embedded card payload in the log stream.
	prefix = []byte("\u001B]1338;")
	// suffix marks the end of an embedded card payload.
	suffix = []byte("\u001B]0m")
	// re matches a complete card payload between prefix and suffix.
	//
	// Compiled with Perl syntax: the non-greedy quantifier `.*?` is not
	// valid POSIX ERE, so MustCompilePOSIX would panic at package init
	// with "invalid nested repetition operator".
	re = regexp.MustCompile("\u001B]1338;((.*?)\u001B]0m)")
)
// Writer filters a log stream, extracting base64-encoded card data framed
// by ANSI escape markers while forwarding everything else to the wrapped
// io.Writer.
type Writer struct {
	base    io.Writer // destination for ordinary (non-card) log data
	file    []byte    // accumulated, still base64-encoded card payload
	chunked bool      // true while a payload's closing suffix has not yet arrived
}
func New(w io.Writer) *Writer { func New(w io.Writer) *Writer {
return &Writer{w, nil} return &Writer{w, nil, false}
} }
func (e *Writer) Write(p []byte) (n int, err error) { func (e *Writer) Write(p []byte) (n int, err error) {
if bytes.HasPrefix(p, []byte(prefix)) == false { if bytes.HasPrefix(p, prefix) == false && e.chunked == false {
return e.base.Write(p) return e.base.Write(p)
} }
card := re.FindStringSubmatch(string(p))
if len(card) != 0 { // if the data does not include the ansi suffix,
data, err := base64.StdEncoding.DecodeString(card[len(card)-1:][0]) // it exceeds the size of the buffer and is chunked.
if err == nil { e.chunked = !bytes.Contains(p, suffix)
e.file = data
} // trim the ansi prefix and suffix from the data,
return e.base.Write([]byte("")) // and also trim any spacing or newlines that could
} // cause confusion.
return e.base.Write(p) p = bytes.TrimSpace(p)
p = bytes.TrimPrefix(p, prefix)
p = bytes.TrimSuffix(p, suffix)
e.file = append(e.file, p...)
return n, nil
} }
func (e *Writer) File() ([]byte, bool) { func (e *Writer) File() ([]byte, bool) {
if len(e.file) > 0 { if len(e.file) == 0 {
return e.file, true return nil, false
} else { }
data, err := base64.StdEncoding.DecodeString(string(e.file))
if err != nil {
return nil, false return nil, false
} }
return data, true
} }

Loading…
Cancel
Save