From 02b5b3fd74f4e1982d82cef8dc7e856410d6a989 Mon Sep 17 00:00:00 2001 From: KN4CK3R Date: Mon, 25 Oct 2021 00:42:32 +0200 Subject: [PATCH] Fix CSV render error (#17406) closed #17378 Both errors from #17378 were caused by #15175. Problem 1 (error with added file): `ToUTF8WithFallbackReader` creates a `MultiReader` from a `byte[2048]` and the remaining reader. `CreateReaderAndGuessDelimiter` tries to read 10000 bytes from this reader but only gets 2048 because that's the first reader in the `MultiReader`. Then the `if size < 1e4` thinks the input is at EOF and just returns that. Problem 2 (error with changed file): The blob reader gets defer closed. That was fine because the old version read the whole file into memory. Now with the streaming version, the Close must be deferred by the caller until after the reader has been fully consumed. --- modules/csv/csv.go | 20 ++++++-------------- routers/web/repo/compare.go | 23 +++++++++++++++-------- 2 files changed, 21 insertions(+), 22 deletions(-) diff --git a/modules/csv/csv.go b/modules/csv/csv.go index ee54452891923..202d7a07342ee 100644 --- a/modules/csv/csv.go +++ b/modules/csv/csv.go @@ -28,6 +28,7 @@ func CreateReader(input io.Reader, delimiter rune) *stdcsv.Reader { } // CreateReaderAndGuessDelimiter tries to guess the field delimiter from the content and creates a csv.Reader. +// Reads at most 10k bytes. 
func CreateReaderAndGuessDelimiter(rd io.Reader) (*stdcsv.Reader, error) { var data = make([]byte, 1e4) size, err := rd.Read(data) @@ -38,25 +39,16 @@ func CreateReaderAndGuessDelimiter(rd io.Reader) (*stdcsv.Reader, error) { return nil, err } - delimiter := guessDelimiter(data[:size]) - - var newInput io.Reader - if size < 1e4 { - newInput = bytes.NewReader(data[:size]) - } else { - newInput = io.MultiReader(bytes.NewReader(data), rd) - } - - return CreateReader(newInput, delimiter), nil + return CreateReader( + io.MultiReader(bytes.NewReader(data[:size]), rd), + guessDelimiter(data[:size]), + ), nil } // guessDelimiter scores the input CSV data against delimiters, and returns the best match. -// Reads at most 10k bytes & 10 lines. func guessDelimiter(data []byte) rune { maxLines := 10 - maxBytes := util.Min(len(data), 1e4) - text := string(data[:maxBytes]) - text = quoteRegexp.ReplaceAllLiteralString(text, "") + text := quoteRegexp.ReplaceAllLiteralString(string(data), "") lines := strings.SplitN(text, "\n", maxLines+1) lines = lines[:util.Min(maxLines, len(lines))] diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go index 2d9670d36a877..982a1b000861d 100644 --- a/routers/web/repo/compare.go +++ b/routers/web/repo/compare.go @@ -10,6 +10,7 @@ import ( "errors" "fmt" "html" + "io" "net/http" "path" "path/filepath" @@ -104,30 +105,36 @@ func setCsvCompareContext(ctx *context.Context) { errTooLarge := errors.New(ctx.Locale.Tr("repo.error.csv.too_large")) - csvReaderFromCommit := func(c *git.Commit) (*csv.Reader, error) { + csvReaderFromCommit := func(c *git.Commit) (*csv.Reader, io.Closer, error) { blob, err := c.GetBlobByPath(diffFile.Name) if err != nil { - return nil, err + return nil, nil, err } if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < blob.Size() { - return nil, errTooLarge + return nil, nil, errTooLarge } reader, err := blob.DataAsync() if err != nil { - return nil, err + return nil, nil, err } - defer 
reader.Close() - return csv_module.CreateReaderAndGuessDelimiter(charset.ToUTF8WithFallbackReader(reader)) + csvReader, err := csv_module.CreateReaderAndGuessDelimiter(charset.ToUTF8WithFallbackReader(reader)) + return csvReader, reader, err } - baseReader, err := csvReaderFromCommit(baseCommit) + baseReader, baseBlobCloser, err := csvReaderFromCommit(baseCommit) + if baseBlobCloser != nil { + defer baseBlobCloser.Close() + } if err == errTooLarge { return CsvDiffResult{nil, err.Error()} } - headReader, err := csvReaderFromCommit(headCommit) + headReader, headBlobCloser, err := csvReaderFromCommit(headCommit) + if headBlobCloser != nil { + defer headBlobCloser.Close() + } if err == errTooLarge { return CsvDiffResult{nil, err.Error()} }