From d5127b7d974e367137048f002257ab2532749fa9 Mon Sep 17 00:00:00 2001
From: Adam Medziński
Date: Thu, 26 Apr 2018 07:30:15 +0200
Subject: [PATCH] Increase JSON scraper max buffer to 1MB (#112)

Sometimes applications, especially Java-based ones, can produce huge log
lines. The executor does not use much memory, so an extra megabyte should
not be a problem, and it gives us some memory headroom (allowing us to
avoid bufio.ErrTooLong errors). The default max buffer for bufio.Scanner
is 64 kilobytes.
---
 servicelog/scraper/json.go | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/servicelog/scraper/json.go b/servicelog/scraper/json.go
index 370b2c91..8d146b9e 100644
--- a/servicelog/scraper/json.go
+++ b/servicelog/scraper/json.go
@@ -11,6 +11,11 @@ import (
 	"github.com/allegro/mesos-executor/servicelog"
 )
 
+const (
+	kilobyte = 1024
+	megabyte = 1024 * kilobyte
+)
+
 // JSON is a scraper for logs represented as JSON objects.
 type JSON struct {
 	KeyFilter Filter
@@ -21,6 +26,7 @@ type JSON struct {
 // as the passed reader does not return an io.EOF error.
 func (j *JSON) StartScraping(reader io.Reader) <-chan servicelog.Entry {
 	scanner := bufio.NewScanner(reader)
+	scanner.Buffer(make([]byte, 64*kilobyte), megabyte)
 	logEntries := make(chan servicelog.Entry)
 	go func() {
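
For context, a minimal standalone Go sketch (not part of the patch) illustrating
the behaviour this change addresses: bufio.Scanner caps tokens at 64 KB by
default (bufio.MaxScanTokenSize), so a longer log line makes Scan fail with
bufio.ErrTooLong unless Buffer is called with a larger maximum. The constants
mirror the ones introduced by the diff; the synthetic input line is made up for
illustration.

package main

import (
	"bufio"
	"fmt"
	"strings"
)

const (
	kilobyte = 1024
	megabyte = 1024 * kilobyte
)

func main() {
	// A single synthetic log line larger than the scanner's default 64 KB token limit.
	longLine := strings.Repeat("x", 100*kilobyte) + "\n"

	// Default buffer: Scan stops and Err reports bufio.ErrTooLong.
	s := bufio.NewScanner(strings.NewReader(longLine))
	for s.Scan() {
	}
	fmt.Println("default limit:", s.Err()) // bufio.Scanner: token too long

	// Enlarged buffer, as in the patch: the same line now fits within the 1 MB cap.
	s = bufio.NewScanner(strings.NewReader(longLine))
	s.Buffer(make([]byte, 64*kilobyte), megabyte)
	for s.Scan() {
	}
	fmt.Println("1 MB limit:", s.Err()) // <nil>
}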