
Convert files to utf-8 for indexing (#7814)

* Convert files to utf-8 for indexing

* Move utf8 functions to modules/base

* Bump repoIndexerLatestVersion to 3

* Add tests for base/encoding.go

* Changes to pass gosimple

* Move UTF8 funcs into new modules/charset package
guillep2k 2019-08-15 09:07:28 -03:00 committed by Lunny Xiao
parent c2c35d169c
commit 5a44be627c
13 changed files with 371 additions and 166 deletions
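
Taken together, the change makes the code indexer normalize file contents to UTF-8 before storing them. Below is a minimal sketch of that flow, assuming a standalone program that depends on the Gitea module; only DetectEncoding and ToUTF8DropErrors come from the new modules/charset package introduced in this commit, and the sample bytes are purely illustrative.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/charset"
)

func main() {
	// Illustrative input: ISO-8859-1 bytes for "Décor\n", which are not valid UTF-8.
	fileContents := []byte{0x44, 0xe9, 0x63, 0x6f, 0x72, 0x0a}

	// DetectEncoding takes a fast path and reports "UTF-8" when the bytes are
	// already valid UTF-8; otherwise it asks chardet, padding short inputs first.
	label, err := charset.DetectEncoding(fileContents)
	fmt.Println(label, err)

	// ToUTF8DropErrors yields content that is safe to index: decodable runs are
	// converted, each undecodable byte is replaced by a space, and a leading
	// UTF-8 BOM is removed. This is the value the indexer stores as Content.
	fmt.Println(string(charset.ToUTF8DropErrors(fileContents)))
}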


@ -20,6 +20,7 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/models/migrations"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/setting"
"github.com/go-xorm/xorm"
@ -106,7 +107,7 @@ func readSQLFromFile(version string) (string, error) {
if err != nil {
return "", err
}
return string(base.RemoveBOMIfPresent(bytes)), nil
return string(charset.RemoveBOMIfPresent(bytes)), nil
}
func restoreOldDB(t *testing.T, version string) bool {


@ -19,7 +19,7 @@ import (
"strconv"
"strings"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/log"
@ -27,7 +27,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"github.com/Unknwon/com"
"github.com/sergi/go-diff/diffmatchpatch"
"golang.org/x/net/html/charset"
stdcharset "golang.org/x/net/html/charset"
"golang.org/x/text/transform"
)
@ -641,9 +641,9 @@ func ParsePatch(maxLines, maxLineCharacters, maxFiles int, reader io.Reader) (*D
buf.WriteString("\n")
}
}
charsetLabel, err := base.DetectEncoding(buf.Bytes())
charsetLabel, err := charset.DetectEncoding(buf.Bytes())
if charsetLabel != "UTF-8" && err == nil {
encoding, _ := charset.Lookup(charsetLabel)
encoding, _ := stdcharset.Lookup(charsetLabel)
if encoding != nil {
d := encoding.NewDecoder()
for _, sec := range f.Sections {


@ -10,6 +10,7 @@ import (
"strings"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/indexer"
"code.gitea.io/gitea/modules/log"
@ -207,6 +208,7 @@ func addUpdate(update fileUpdate, repo *Repository, batch rupture.FlushingBatch)
if err != nil {
return err
} else if !base.IsTextFile(fileContents) {
// FIXME: UTF-16 files will probably fail here
return nil
}
indexerUpdate := indexer.RepoIndexerUpdate{
@ -214,7 +216,7 @@ func addUpdate(update fileUpdate, repo *Repository, batch rupture.FlushingBatch)
Op: indexer.RepoIndexerOpUpdate,
Data: &indexer.RepoIndexerData{
RepoID: repo.ID,
Content: string(fileContents),
Content: string(charset.ToUTF8DropErrors(fileContents)),
},
}
return indexerUpdate.AddToFlushingBatch(batch)


@ -5,7 +5,6 @@
package base
import (
"bytes"
"crypto/md5"
"crypto/rand"
"crypto/sha1"
@ -26,7 +25,6 @@ import (
"strings"
"time"
"unicode"
"unicode/utf8"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
@ -35,12 +33,8 @@ import (
"github.com/Unknwon/com"
"github.com/Unknwon/i18n"
"github.com/gogits/chardet"
)
// UTF8BOM is the utf-8 byte-order marker
var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}
// EncodeMD5 encodes string to md5 hex value.
func EncodeMD5(str string) string {
m := md5.New()
@ -68,49 +62,6 @@ func ShortSha(sha1 string) string {
return TruncateString(sha1, 10)
}
// DetectEncoding detect the encoding of content
func DetectEncoding(content []byte) (string, error) {
if utf8.Valid(content) {
log.Debug("Detected encoding: utf-8 (fast)")
return "UTF-8", nil
}
textDetector := chardet.NewTextDetector()
var detectContent []byte
if len(content) < 1024 {
// Check if original content is valid
if _, err := textDetector.DetectBest(content); err != nil {
return "", err
}
times := 1024 / len(content)
detectContent = make([]byte, 0, times*len(content))
for i := 0; i < times; i++ {
detectContent = append(detectContent, content...)
}
} else {
detectContent = content
}
result, err := textDetector.DetectBest(detectContent)
if err != nil {
return "", err
}
if result.Charset != "UTF-8" && len(setting.Repository.AnsiCharset) > 0 {
log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
return setting.Repository.AnsiCharset, err
}
log.Debug("Detected encoding: %s", result.Charset)
return result.Charset, err
}
// RemoveBOMIfPresent removes a UTF-8 BOM from a []byte
func RemoveBOMIfPresent(content []byte) []byte {
if len(content) > 2 && bytes.Equal(content[0:3], UTF8BOM) {
return content[3:]
}
return content
}
// BasicAuthDecode decode basic auth string
func BasicAuthDecode(encoded string) (string, string, error) {
s, err := base64.StdEncoding.DecodeString(encoded)


@ -64,42 +64,6 @@ func TestShortSha(t *testing.T) {
assert.Equal(t, "veryverylo", ShortSha("veryverylong"))
}
func TestDetectEncoding(t *testing.T) {
testSuccess := func(b []byte, expected string) {
encoding, err := DetectEncoding(b)
assert.NoError(t, err)
assert.Equal(t, expected, encoding)
}
// utf-8
b := []byte("just some ascii")
testSuccess(b, "UTF-8")
// utf-8-sig: "hey" (with BOM)
b = []byte{0xef, 0xbb, 0xbf, 0x68, 0x65, 0x79}
testSuccess(b, "UTF-8")
// utf-16: "hey<accented G>"
b = []byte{0xff, 0xfe, 0x68, 0x00, 0x65, 0x00, 0x79, 0x00, 0xf4, 0x01}
testSuccess(b, "UTF-16LE")
// iso-8859-1: d<accented e>cor<newline>
b = []byte{0x44, 0xe9, 0x63, 0x6f, 0x72, 0x0a}
encoding, err := DetectEncoding(b)
assert.NoError(t, err)
// due to a race condition in `chardet` library, it could either detect
// "ISO-8859-1" or "IS0-8859-2" here. Technically either is correct, so
// we accept either.
assert.Contains(t, encoding, "ISO-8859")
setting.Repository.AnsiCharset = "placeholder"
testSuccess(b, "placeholder")
// invalid bytes
b = []byte{0xfa}
_, err = DetectEncoding(b)
assert.Error(t, err)
}
func TestBasicAuthDecode(t *testing.T) {
_, _, err := BasicAuthDecode("?")
assert.Equal(t, "illegal base64 data at input byte 0", err.Error())

modules/charset/charset.go Normal file (152 additions)

@ -0,0 +1,152 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package charset
import (
"bytes"
"fmt"
"unicode/utf8"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/gogits/chardet"
"golang.org/x/net/html/charset"
"golang.org/x/text/transform"
)
// UTF8BOM is the utf-8 byte-order marker
var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}
// ToUTF8WithErr converts content to UTF8 encoding
func ToUTF8WithErr(content []byte) (string, error) {
charsetLabel, err := DetectEncoding(content)
if err != nil {
return "", err
} else if charsetLabel == "UTF-8" {
return string(RemoveBOMIfPresent(content)), nil
}
encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return string(content), fmt.Errorf("Unknown encoding: %s", charsetLabel)
}
// If there is an error, we concatenate the nicely decoded part and the
// original left over. This way we won't lose data.
result, n, err := transform.Bytes(encoding.NewDecoder(), content)
if err != nil {
result = append(result, content[n:]...)
}
result = RemoveBOMIfPresent(result)
return string(result), err
}
// ToUTF8WithFallback detects the encoding of content and converts to UTF-8 if possible
func ToUTF8WithFallback(content []byte) []byte {
charsetLabel, err := DetectEncoding(content)
if err != nil || charsetLabel == "UTF-8" {
return RemoveBOMIfPresent(content)
}
encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return content
}
// If there is an error, we concatenate the nicely decoded part and the
// original left over. This way we won't lose data.
result, n, err := transform.Bytes(encoding.NewDecoder(), content)
if err != nil {
return append(result, content[n:]...)
}
return RemoveBOMIfPresent(result)
}
// ToUTF8 converts content to UTF8 encoding and ignores errors
func ToUTF8(content string) string {
res, _ := ToUTF8WithErr([]byte(content))
return res
}
// ToUTF8DropErrors makes sure the returned content is valid UTF-8; attempts conversion if possible
func ToUTF8DropErrors(content []byte) []byte {
charsetLabel, err := DetectEncoding(content)
if err != nil || charsetLabel == "UTF-8" {
return RemoveBOMIfPresent(content)
}
encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return content
}
// We ignore any non-decodable parts from the file.
// Some parts might be lost
var decoded []byte
decoder := encoding.NewDecoder()
idx := 0
for {
result, n, err := transform.Bytes(decoder, content[idx:])
decoded = append(decoded, result...)
if err == nil {
break
}
decoded = append(decoded, ' ')
idx = idx + n + 1
if idx >= len(content) {
break
}
}
return RemoveBOMIfPresent(decoded)
}
// RemoveBOMIfPresent removes a UTF-8 BOM from a []byte
func RemoveBOMIfPresent(content []byte) []byte {
if len(content) > 2 && bytes.Equal(content[0:3], UTF8BOM) {
return content[3:]
}
return content
}
// DetectEncoding detects the encoding of content
func DetectEncoding(content []byte) (string, error) {
if utf8.Valid(content) {
log.Debug("Detected encoding: utf-8 (fast)")
return "UTF-8", nil
}
textDetector := chardet.NewTextDetector()
var detectContent []byte
if len(content) < 1024 {
// Check if original content is valid
if _, err := textDetector.DetectBest(content); err != nil {
return "", err
}
times := 1024 / len(content)
detectContent = make([]byte, 0, times*len(content))
for i := 0; i < times; i++ {
detectContent = append(detectContent, content...)
}
} else {
detectContent = content
}
result, err := textDetector.DetectBest(detectContent)
if err != nil {
return "", err
}
// FIXME: to properly decouple this function the fallback ANSI charset should be passed as an argument
if result.Charset != "UTF-8" && len(setting.Repository.AnsiCharset) > 0 {
log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
return setting.Repository.AnsiCharset, err
}
log.Debug("Detected encoding: %s", result.Charset)
return result.Charset, err
}
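
As a usage note, the three converters above differ mainly in how they handle decoding problems. The snippet below is an illustrative helper, not part of this commit; the byte literal is borrowed from the tests that follow.

// exampleConversions contrasts the converters on the same problematic input.
func exampleConversions() {
	// Single-byte-encoded Spanish text containing bytes (0x07, 0xA4) that the
	// detected decoder may not map cleanly.
	bad := []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20,
		0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73}

	// ToUTF8WithErr: on a decoding error the already-decoded prefix plus the raw
	// remainder is returned along with the error, so callers can surface it.
	s, err := ToUTF8WithErr(bad)
	fmt.Println(s, err)

	// ToUTF8WithFallback: same concatenation, but any error is swallowed; if
	// detection itself fails, the original (BOM-stripped) bytes come back as-is.
	fmt.Println(string(ToUTF8WithFallback(bad)))

	// ToUTF8DropErrors: keeps decoding past failures, substituting a space for
	// each skipped byte, so the result is always usable; the indexer relies on this.
	fmt.Println(string(ToUTF8DropErrors(bad)))
}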


@ -0,0 +1,191 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package charset
import (
"testing"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
)
func TestRemoveBOMIfPresent(t *testing.T) {
res := RemoveBOMIfPresent([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
res = RemoveBOMIfPresent([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
}
func TestToUTF8WithErr(t *testing.T) {
var res string
var err error
res, err = ToUTF8WithErr([]byte{0x41, 0x42, 0x43})
assert.Equal(t, "ABC", res)
assert.NoError(t, err)
res, err = ToUTF8WithErr([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, "áéíóú", res)
assert.NoError(t, err)
res, err = ToUTF8WithErr([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, "áéíóú", res)
assert.NoError(t, err)
// This test FAILS
res, err = ToUTF8WithErr([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73})
assert.Equal(t, "Hola, así cómo ños", res)
assert.NoError(t, err)
res, err = ToUTF8WithErr([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73})
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Regexp(t, "^Hola, así cómo", res)
assert.NoError(t, err)
res, err = ToUTF8WithErr([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73})
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Regexp(t, "^Hola, así cómo", res)
assert.NoError(t, err)
// Japanese (Shift-JIS)
res, err = ToUTF8WithErr([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42})
assert.Equal(t, "日属秘ぞしちゅ。", res)
assert.NoError(t, err)
res, err = ToUTF8WithErr([]byte{0x00, 0x00, 0x00, 0x00})
assert.Equal(t, "\x00\x00\x00\x00", res)
assert.NoError(t, err)
}
func TestToUTF8WithFallback(t *testing.T) {
res := ToUTF8WithFallback([]byte{0x41, 0x42, 0x43})
assert.Equal(t, []byte("ABC"), res)
res = ToUTF8WithFallback([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, []byte("áéíóú"), res)
res = ToUTF8WithFallback([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, []byte("áéíóú"), res)
res = ToUTF8WithFallback([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73})
assert.Equal(t, []byte("Hola, así cómo ños"), res)
minmatch := []byte("Hola, así cómo ")
res = ToUTF8WithFallback([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73})
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Equal(t, minmatch, res[0:len(minmatch)])
res = ToUTF8WithFallback([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73})
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Equal(t, minmatch, res[0:len(minmatch)])
// Japanese (Shift-JIS)
res = ToUTF8WithFallback([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42})
assert.Equal(t, []byte("日属秘ぞしちゅ。"), res)
res = ToUTF8WithFallback([]byte{0x00, 0x00, 0x00, 0x00})
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
}
func TestToUTF8(t *testing.T) {
res := ToUTF8("ABC")
assert.Equal(t, "ABC", res)
res = ToUTF8("áéíóú")
assert.Equal(t, "áéíóú", res)
// With utf-8 BOM
res = ToUTF8("\ufeffáéíóú")
assert.Equal(t, "áéíóú", res)
res = ToUTF8("Hola, así cómo ños")
assert.Equal(t, "Hola, así cómo ños", res)
res = ToUTF8("Hola, así cómo \x07ños")
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Regexp(t, "^Hola, así cómo", res)
// This test FAILS
// res = ToUTF8("Hola, así cómo \x81ños")
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
// assert.Regexp(t, "^Hola, así cómo", res)
// Japanese (Shift-JIS)
res = ToUTF8("\x93\xFA\x91\xAE\x94\xE9\x82\xBC\x82\xB5\x82\xBF\x82\xE3\x81\x42")
assert.Equal(t, "日属秘ぞしちゅ。", res)
res = ToUTF8("\x00\x00\x00\x00")
assert.Equal(t, "\x00\x00\x00\x00", res)
}
func TestToUTF8DropErrors(t *testing.T) {
res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43})
assert.Equal(t, []byte("ABC"), res)
res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, []byte("áéíóú"), res)
res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
assert.Equal(t, []byte("áéíóú"), res)
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73})
assert.Equal(t, []byte("Hola, así cómo ños"), res)
minmatch := []byte("Hola, así cómo ")
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73})
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Equal(t, minmatch, res[0:len(minmatch)])
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73})
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
assert.Equal(t, minmatch, res[0:len(minmatch)])
// Japanese (Shift-JIS)
res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42})
assert.Equal(t, []byte("日属秘ぞしちゅ。"), res)
res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00})
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
}
func TestDetectEncoding(t *testing.T) {
testSuccess := func(b []byte, expected string) {
encoding, err := DetectEncoding(b)
assert.NoError(t, err)
assert.Equal(t, expected, encoding)
}
// utf-8
b := []byte("just some ascii")
testSuccess(b, "UTF-8")
// utf-8-sig: "hey" (with BOM)
b = []byte{0xef, 0xbb, 0xbf, 0x68, 0x65, 0x79}
testSuccess(b, "UTF-8")
// utf-16: "hey<accented G>"
b = []byte{0xff, 0xfe, 0x68, 0x00, 0x65, 0x00, 0x79, 0x00, 0xf4, 0x01}
testSuccess(b, "UTF-16LE")
// iso-8859-1: d<accented e>cor<newline>
b = []byte{0x44, 0xe9, 0x63, 0x6f, 0x72, 0x0a}
encoding, err := DetectEncoding(b)
assert.NoError(t, err)
// due to a race condition in `chardet` library, it could either detect
// "ISO-8859-1" or "IS0-8859-2" here. Technically either is correct, so
// we accept either.
assert.Contains(t, encoding, "ISO-8859")
setting.Repository.AnsiCharset = "placeholder"
testSuccess(b, "placeholder")
// invalid bytes
b = []byte{0xfa}
_, err = DetectEncoding(b)
assert.Error(t, err)
}


@ -23,7 +23,7 @@ const (
repoIndexerAnalyzer = "repoIndexerAnalyzer"
repoIndexerDocType = "repoIndexerDocType"
repoIndexerLatestVersion = 2
repoIndexerLatestVersion = 3
)
// repoIndexer (thread-safe) index for repository contents


@ -11,17 +11,17 @@ import (
"path"
"strings"
"golang.org/x/net/html/charset"
"golang.org/x/text/transform"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
stdcharset "golang.org/x/net/html/charset"
"golang.org/x/text/transform"
)
// IdentityOptions for a person's identity like an author or committer
@ -87,15 +87,15 @@ func detectEncodingAndBOM(entry *git.TreeEntry, repo *models.Repository) (string
}
encoding, err := base.DetectEncoding(buf)
encoding, err := charset.DetectEncoding(buf)
if err != nil {
// just default to utf-8 and no bom
return "UTF-8", false
}
if encoding == "UTF-8" {
return encoding, bytes.Equal(buf[0:3], base.UTF8BOM)
return encoding, bytes.Equal(buf[0:3], charset.UTF8BOM)
}
charsetEncoding, _ := charset.Lookup(encoding)
charsetEncoding, _ := stdcharset.Lookup(encoding)
if charsetEncoding == nil {
return "UTF-8", false
}
@ -107,7 +107,7 @@ func detectEncodingAndBOM(entry *git.TreeEntry, repo *models.Repository) (string
}
if n > 2 {
return encoding, bytes.Equal([]byte(result)[0:3], base.UTF8BOM)
return encoding, bytes.Equal([]byte(result)[0:3], charset.UTF8BOM)
}
return encoding, false
@ -321,10 +321,10 @@ func CreateOrUpdateRepoFile(repo *models.Repository, doer *models.User, opts *Up
content := opts.Content
if bom {
content = string(base.UTF8BOM) + content
content = string(charset.UTF8BOM) + content
}
if encoding != "UTF-8" {
charsetEncoding, _ := charset.Lookup(encoding)
charsetEncoding, _ := stdcharset.Lookup(encoding)
if charsetEncoding != nil {
result, _, err := transform.String(charsetEncoding.NewEncoder(), content)
if err != nil {


@ -28,8 +28,6 @@ import (
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
"golang.org/x/net/html/charset"
"golang.org/x/text/transform"
"gopkg.in/editorconfig/editorconfig-core-go.v1"
)
@ -274,60 +272,6 @@ func Sha1(str string) string {
return base.EncodeSha1(str)
}
// ToUTF8WithErr converts content to UTF8 encoding
func ToUTF8WithErr(content []byte) (string, error) {
charsetLabel, err := base.DetectEncoding(content)
if err != nil {
return "", err
} else if charsetLabel == "UTF-8" {
return string(base.RemoveBOMIfPresent(content)), nil
}
encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return string(content), fmt.Errorf("Unknown encoding: %s", charsetLabel)
}
// If there is an error, we concatenate the nicely decoded part and the
// original left over. This way we won't lose data.
result, n, err := transform.Bytes(encoding.NewDecoder(), content)
if err != nil {
result = append(result, content[n:]...)
}
result = base.RemoveBOMIfPresent(result)
return string(result), err
}
// ToUTF8WithFallback detects the encoding of content and coverts to UTF-8 if possible
func ToUTF8WithFallback(content []byte) []byte {
charsetLabel, err := base.DetectEncoding(content)
if err != nil || charsetLabel == "UTF-8" {
return base.RemoveBOMIfPresent(content)
}
encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return content
}
// If there is an error, we concatenate the nicely decoded part and the
// original left over. This way we won't lose data.
result, n, err := transform.Bytes(encoding.NewDecoder(), content)
if err != nil {
return append(result, content[n:]...)
}
return base.RemoveBOMIfPresent(result)
}
// ToUTF8 converts content to UTF8 encoding and ignore error
func ToUTF8(content string) string {
res, _ := ToUTF8WithErr([]byte(content))
return res
}
// ReplaceLeft replaces all prefixes 'oldS' in 's' with 'newS'.
func ReplaceLeft(s, oldS, newS string) string {
oldLen, newLen, i, n := len(oldS), len(newS), 0, 0


@ -11,11 +11,11 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
)
const (
@ -251,7 +251,7 @@ func Diff(ctx *context.Context) {
note := &git.Note{}
err = git.GetNote(ctx.Repo.GitRepo, commitID, note)
if err == nil {
ctx.Data["Note"] = string(templates.ToUTF8WithFallback(note.Message))
ctx.Data["Note"] = string(charset.ToUTF8WithFallback(note.Message))
ctx.Data["NoteCommit"] = note.Commit
ctx.Data["NoteAuthor"] = models.ValidateCommitWithEmail(note.Commit)
}


@ -14,12 +14,12 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repofiles"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/upload"
"code.gitea.io/gitea/modules/util"
)
@ -118,7 +118,7 @@ func editFile(ctx *context.Context, isNewFile bool) {
d, _ := ioutil.ReadAll(dataRc)
buf = append(buf, d...)
if content, err := templates.ToUTF8WithErr(buf); err != nil {
if content, err := charset.ToUTF8WithErr(buf); err != nil {
log.Error("ToUTF8WithErr: %v", err)
ctx.Data["FileContent"] = string(buf)
} else {


@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/highlight"
@ -23,7 +24,6 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
)
const (
@ -160,7 +160,7 @@ func renderDirectory(ctx *context.Context, treeLink string) {
ctx.Data["FileSize"] = fileSize
} else {
d, _ := ioutil.ReadAll(dataRc)
buf = templates.ToUTF8WithFallback(append(buf, d...))
buf = charset.ToUTF8WithFallback(append(buf, d...))
if markup.Type(readmeFile.Name()) != "" {
ctx.Data["IsMarkup"] = true
@ -278,7 +278,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st
}
d, _ := ioutil.ReadAll(dataRc)
buf = templates.ToUTF8WithFallback(append(buf, d...))
buf = charset.ToUTF8WithFallback(append(buf, d...))
readmeExist := markup.IsReadmeFile(blob.Name())
ctx.Data["ReadmeExist"] = readmeExist
@ -293,7 +293,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st
} else {
// Building code view blocks with line number on server side.
var fileContent string
if content, err := templates.ToUTF8WithErr(buf); err != nil {
if content, err := charset.ToUTF8WithErr(buf); err != nil {
log.Error("ToUTF8WithErr: %v", err)
fileContent = string(buf)
} else {