Mirror of https://github.com/go-gitea/gitea.git (synced 2025-12-14 21:15:18 +08:00)

Compare commits: 10 commits, 02c11993e3 ... 4fb4211e3d
- 4fb4211e3d
- 7190519fb3
- 1f5237e0d7
- 9ab4910c37
- 29057ea55f
- ac8308b5cb
- fb475470f0
- e2870aaa88
- 7ebcb24131
- dec3c8cd38
@@ -5,12 +5,10 @@ package charset

import (
    "bytes"
    "fmt"
    "io"
    "strings"
    "unicode/utf8"

    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/util"
@@ -23,60 +21,39 @@ import (

var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}

type ConvertOpts struct {
    KeepBOM bool
    ErrorReplacement []byte
    ErrorReturnOrigin bool
}

var ToUTF8WithFallbackReaderPrefetchSize = 16 * 1024

// ToUTF8WithFallbackReader detects the encoding of content and converts to UTF-8 reader if possible
func ToUTF8WithFallbackReader(rd io.Reader, opts ConvertOpts) io.Reader {
    buf := make([]byte, 2048)
    buf := make([]byte, ToUTF8WithFallbackReaderPrefetchSize)
    n, err := util.ReadAtMost(rd, buf)
    if err != nil {
        return io.MultiReader(bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)), rd)
    }

    charsetLabel, err := DetectEncoding(buf[:n])
    if err != nil || charsetLabel == "UTF-8" {
        return io.MultiReader(bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)), rd)
    }

    encoding, _ := charset.Lookup(charsetLabel)
    if encoding == nil {
        // read error occurs, don't do any processing
        return io.MultiReader(bytes.NewReader(buf[:n]), rd)
    }

    return transform.NewReader(
        io.MultiReader(
            bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)),
            rd,
        ),
        encoding.NewDecoder(),
    )
}
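
As a quick orientation for the reader-based API above, here is a minimal usage sketch. It assumes the gitea module path code.gitea.io/gitea/modules/charset and a hypothetical input file name; the error handling is only illustrative.

```go
package main

import (
    "io"
    "os"

    "code.gitea.io/gitea/modules/charset"
)

func main() {
    f, err := os.Open("legacy-encoded.txt") // hypothetical input file
    if err != nil {
        panic(err)
    }
    defer f.Close()

    // Only the prefetched head of the stream is used for encoding detection;
    // the rest of the file is streamed through the decoder unchanged.
    rd := charset.ToUTF8WithFallbackReader(f, charset.ConvertOpts{})
    utf8Bytes, err := io.ReadAll(rd)
    if err != nil {
        panic(err)
    }
    os.Stdout.Write(utf8Bytes)
}
```
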

// ToUTF8 converts content to UTF8 encoding
func ToUTF8(content []byte, opts ConvertOpts) (string, error) {
    charsetLabel, err := DetectEncoding(content)
    if err != nil {
        return "", err
    } else if charsetLabel == "UTF-8" {
        return string(MaybeRemoveBOM(content, opts)), nil
    charsetLabel, _ := DetectEncoding(buf[:n])
    if charsetLabel == "UTF-8" {
        // is utf-8, try to remove BOM and read it as-is
        return io.MultiReader(bytes.NewReader(maybeRemoveBOM(buf[:n], opts)), rd)
    }

    encoding, _ := charset.Lookup(charsetLabel)
    if encoding == nil {
        return string(content), fmt.Errorf("Unknown encoding: %s", charsetLabel)
        // unknown charset, don't do any processing
        return io.MultiReader(bytes.NewReader(buf[:n]), rd)
    }

    // If there is an error, we concatenate the nicely decoded part and the
    // original left over. This way we won't lose much data.
    result, n, err := transform.Bytes(encoding.NewDecoder(), content)
    if err != nil {
        result = append(result, content[n:]...)
    }

    result = MaybeRemoveBOM(result, opts)

    return string(result), err
    // convert from charset to utf-8
    return transform.NewReader(
        io.MultiReader(bytes.NewReader(buf[:n]), rd),
        encoding.NewDecoder(),
    )
}

// ToUTF8WithFallback detects the encoding of content and converts to UTF-8 if possible
@@ -85,73 +62,84 @@ func ToUTF8WithFallback(content []byte, opts ConvertOpts) []byte {
    return bs
}

// ToUTF8DropErrors makes sure the return string is valid utf-8; attempts conversion if possible
func ToUTF8DropErrors(content []byte, opts ConvertOpts) []byte {
    charsetLabel, err := DetectEncoding(content)
    if err != nil || charsetLabel == "UTF-8" {
        return MaybeRemoveBOM(content, opts)
func ToUTF8DropErrors(content []byte) []byte {
    return ToUTF8(content, ConvertOpts{ErrorReplacement: []byte{' '}})
}

func ToUTF8(content []byte, opts ConvertOpts) []byte {
    charsetLabel, _ := DetectEncoding(content)
    if charsetLabel == "UTF-8" {
        return maybeRemoveBOM(content, opts)
    }

    encoding, _ := charset.Lookup(charsetLabel)
    if encoding == nil {
        setting.PanicInDevOrTesting("unsupported detected charset %q, it shouldn't happen", charsetLabel)
        return content
    }

    // We ignore any non-decodable parts from the file.
    // Some parts might be lost
    var decoded []byte
    decoder := encoding.NewDecoder()
    idx := 0
    for {
    for idx < len(content) {
        result, n, err := transform.Bytes(decoder, content[idx:])
        decoded = append(decoded, result...)
        if err == nil {
            break
        }
        decoded = append(decoded, ' ')
        idx = idx + n + 1
        if idx >= len(content) {
            break
        if opts.ErrorReturnOrigin {
            return content
        }
        if opts.ErrorReplacement == nil {
            decoded = append(decoded, content[idx+n])
        } else {
            decoded = append(decoded, opts.ErrorReplacement...)
        }
        idx += n + 1
    }

    return MaybeRemoveBOM(decoded, opts)
    return maybeRemoveBOM(decoded, opts)
}
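
The decode step above relies on transform.Bytes from golang.org/x/text, which returns both the decoded output and the number of input bytes consumed; the replacement loop in ToUTF8 builds on that. A small self-contained sketch of that call (the Shift-JIS bytes are taken from the test data further down; the japanese.ShiftJIS decoder stands in for whatever charset.Lookup returns):

```go
package main

import (
    "fmt"

    "golang.org/x/text/encoding/japanese"
    "golang.org/x/text/transform"
)

func main() {
    decoder := japanese.ShiftJIS.NewDecoder()
    shiftJIS := []byte{0x93, 0xFA, 0x91, 0xAE} // "日属" encoded as Shift-JIS
    utf8Bytes, n, err := transform.Bytes(decoder, shiftJIS)
    fmt.Println(string(utf8Bytes), n, err) // 日属 4 <nil>
}
```
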

// MaybeRemoveBOM removes a UTF-8 BOM from a []byte when opts.KeepBOM is false
func MaybeRemoveBOM(content []byte, opts ConvertOpts) []byte {
// maybeRemoveBOM removes a UTF-8 BOM from a []byte when opts.KeepBOM is false
func maybeRemoveBOM(content []byte, opts ConvertOpts) []byte {
    if opts.KeepBOM {
        return content
    }
    if len(content) > 2 && bytes.Equal(content[0:3], UTF8BOM) {
        return content[3:]
    }
    return content
    return bytes.TrimPrefix(content, UTF8BOM)
}

// DetectEncoding detect the encoding of content
func DetectEncoding(content []byte) (string, error) {
// it always returns a detected or guessed "encoding" string, no matter error happens or not
func DetectEncoding(content []byte) (encoding string, _ error) {
    // First we check if the content represents valid utf8 content excepting a truncated character at the end.

    // Now we could decode all the runes in turn but this is not necessarily the cheapest thing to do
    // instead we walk backwards from the end to trim off a the incomplete character
    // instead we walk backwards from the end to trim off the incomplete character
    toValidate := content
    end := len(toValidate) - 1

    if end < 0 {
        // no-op
    } else if toValidate[end]>>5 == 0b110 {
        // Incomplete 1 byte extension e.g. © <c2><a9> which has been truncated to <c2>
        toValidate = toValidate[:end]
    } else if end > 0 && toValidate[end]>>6 == 0b10 && toValidate[end-1]>>4 == 0b1110 {
        // Incomplete 2 byte extension e.g. ⛔ <e2><9b><94> which has been truncated to <e2><9b>
        toValidate = toValidate[:end-1]
    } else if end > 1 && toValidate[end]>>6 == 0b10 && toValidate[end-1]>>6 == 0b10 && toValidate[end-2]>>3 == 0b11110 {
        // Incomplete 3 byte extension e.g. 💩 <f0><9f><92><a9> which has been truncated to <f0><9f><92>
        toValidate = toValidate[:end-2]
    // U+0000   U+007F   0yyyzzzz
    // U+0080   U+07FF   110xxxyy 10yyzzzz
    // U+0800   U+FFFF   1110wwww 10xxxxyy 10yyzzzz
    // U+010000 U+10FFFF 11110uvv 10vvwwww 10xxxxyy 10yyzzzz
    cnt := 0
    for end >= 0 && cnt < 4 {
        c := toValidate[end]
        if c>>5 == 0b110 || c>>4 == 0b1110 || c>>3 == 0b11110 {
            // a leading byte
            toValidate = toValidate[:end]
            break
        } else if c>>6 == 0b10 {
            // a continuation byte
            end--
        } else {
            // not an utf-8 byte
            break
        }
        cnt++
    }

    if utf8.Valid(toValidate) {
        log.Debug("Detected encoding: utf-8 (fast)")
        return "UTF-8", nil
    }
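
A standalone sketch of the fast path described by the comments above: a valid UTF-8 buffer whose last character was cut in half still counts as UTF-8 once the dangling bytes of the incomplete rune are trimmed. This is illustrative code, not the gitea implementation.

```go
package main

import (
    "fmt"
    "unicode/utf8"
)

func main() {
    full := []byte("abc💩")          // 💩 is <f0><9f><92><a9>
    truncated := full[:len(full)-1] // drop the last byte of the 4-byte rune

    fmt.Println(utf8.Valid(truncated)) // false: the tail is an incomplete sequence

    // walk backwards until a leading byte is found, then trim it and everything after
    end := len(truncated)
    for end > 0 {
        c := truncated[end-1]
        if c>>5 == 0b110 || c>>4 == 0b1110 || c>>3 == 0b11110 {
            end-- // leading byte of the partial rune: cut it off too
            break
        }
        if c>>6 != 0b10 {
            break // not a UTF-8 continuation byte at all
        }
        end--
    }
    fmt.Println(utf8.Valid(truncated[:end])) // true after trimming the partial rune
}
```
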
@@ -160,7 +148,7 @@ func DetectEncoding(content []byte) (string, error) {
    if len(content) < 1024 {
        // Check if original content is valid
        if _, err := textDetector.DetectBest(content); err != nil {
            return "", err
            return util.IfZero(setting.Repository.AnsiCharset, "UTF-8"), err
        }
        times := 1024 / len(content)
        detectContent = make([]byte, 0, times*len(content))
@@ -171,14 +159,10 @@ func DetectEncoding(content []byte) (string, error) {
        detectContent = content
    }

    // Now we can't use DetectBest or just results[0] because the result isn't stable - so we need a tie break
    // Now we can't use DetectBest or just results[0] because the result isn't stable - so we need a tie-break
    results, err := textDetector.DetectAll(detectContent)
    if err != nil {
        if err == chardet.NotDetectedError && len(setting.Repository.AnsiCharset) > 0 {
            log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
            return setting.Repository.AnsiCharset, nil
        }
        return "", err
        return util.IfZero(setting.Repository.AnsiCharset, "UTF-8"), err
    }

    topConfidence := results[0].Confidence
@@ -201,11 +185,9 @@ func DetectEncoding(content []byte) (string, error) {
    }

    // FIXME: to properly decouple this function the fallback ANSI charset should be passed as an argument
    if topResult.Charset != "UTF-8" && len(setting.Repository.AnsiCharset) > 0 {
        log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
    if topResult.Charset != "UTF-8" && setting.Repository.AnsiCharset != "" {
        return setting.Repository.AnsiCharset, err
    }

    log.Debug("Detected encoding: %s", topResult.Charset)
    return topResult.Charset, err
    return topResult.Charset, nil
}
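
The util.IfZero calls above fall back to "UTF-8" whenever no AnsiCharset is configured. A hedged re-implementation of that "value or default" idea (hypothetical helper for illustration, not the gitea one):

```go
package main

import "fmt"

// ifZero returns the configured value when it is set, otherwise the default.
func ifZero[T comparable](v, def T) T {
    var zero T
    if v == zero {
        return def
    }
    return v
}

func main() {
    fmt.Println(ifZero("", "UTF-8"))    // UTF-8
    fmt.Println(ifZero("GBK", "UTF-8")) // GBK
}
```
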
@ -4,108 +4,89 @@
|
||||
package charset
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/test"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func resetDefaultCharsetsOrder() {
|
||||
defaultDetectedCharsetsOrder := make([]string, 0, len(setting.Repository.DetectedCharsetsOrder))
|
||||
for _, charset := range setting.Repository.DetectedCharsetsOrder {
|
||||
defaultDetectedCharsetsOrder = append(defaultDetectedCharsetsOrder, strings.ToLower(strings.TrimSpace(charset)))
|
||||
}
|
||||
func TestMain(m *testing.M) {
|
||||
setting.Repository.DetectedCharsetScore = map[string]int{}
|
||||
i := 0
|
||||
for _, charset := range defaultDetectedCharsetsOrder {
|
||||
canonicalCharset := strings.ToLower(strings.TrimSpace(charset))
|
||||
if _, has := setting.Repository.DetectedCharsetScore[canonicalCharset]; !has {
|
||||
setting.Repository.DetectedCharsetScore[canonicalCharset] = i
|
||||
i++
|
||||
}
|
||||
for i, charset := range setting.Repository.DetectedCharsetsOrder {
|
||||
setting.Repository.DetectedCharsetScore[strings.ToLower(charset)] = i
|
||||
}
|
||||
os.Exit(m.Run())
|
||||
}
|
||||
|
||||
func TestMaybeRemoveBOM(t *testing.T) {
|
||||
res := MaybeRemoveBOM([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res := maybeRemoveBOM([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
res = MaybeRemoveBOM([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res = maybeRemoveBOM([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
}
|
||||
|
||||
func TestToUTF8(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
|
||||
// Note: golang compiler seems so behave differently depending on the current
|
||||
// locale, so some conversions might behave differently. For that reason, we don't
|
||||
// depend on particular conversions but in expected behaviors.
|
||||
|
||||
res, err := ToUTF8([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "ABC", res)
|
||||
res := ToUTF8([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
assert.Equal(t, "ABC", string(res))
|
||||
|
||||
// "áéíóú"
|
||||
res, err = ToUTF8([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, []byte(res))
|
||||
res = ToUTF8([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
// "áéíóú"
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3,
|
||||
0xc3, 0xba,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, []byte(res))
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
|
||||
0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
stringMustStartWith(t, "Hola,", res)
|
||||
stringMustEndWith(t, "AAA.", res)
|
||||
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
|
||||
0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
stringMustStartWith(t, "Hola,", res)
|
||||
stringMustEndWith(t, "AAA.", res)
|
||||
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
|
||||
0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
stringMustStartWith(t, "Hola,", res)
|
||||
stringMustEndWith(t, "AAA.", res)
|
||||
|
||||
// Japanese (Shift-JIS)
|
||||
// 日属秘ぞしちゅ。
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82,
|
||||
0xBF, 0x82, 0xE3, 0x81, 0x42,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{
|
||||
0xE6, 0x97, 0xA5, 0xE5, 0xB1, 0x9E, 0xE7, 0xA7, 0x98, 0xE3,
|
||||
0x81, 0x9E, 0xE3, 0x81, 0x97, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x85, 0xE3, 0x80, 0x82,
|
||||
},
|
||||
[]byte(res))
|
||||
}, res)
|
||||
|
||||
res, err = ToUTF8([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, []byte(res))
|
||||
res = ToUTF8([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
|
||||
}
|
||||
|
||||
func TestToUTF8WithFallback(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
// "ABC"
|
||||
res := ToUTF8WithFallback([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0x41, 0x42, 0x43}, res)
|
||||
@ -152,54 +133,58 @@ func TestToUTF8WithFallback(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestToUTF8DropErrors(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
// "ABC"
|
||||
res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43})
|
||||
assert.Equal(t, []byte{0x41, 0x42, 0x43}, res)
|
||||
|
||||
// "áéíóú"
|
||||
res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
// UTF8 BOM + "áéíóú"
|
||||
res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
// "Hola, así cómo ños"
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73})
|
||||
assert.Equal(t, []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73}, res[:8])
|
||||
assert.Equal(t, []byte{0x73}, res[len(res)-1:])
|
||||
|
||||
// "Hola, así cómo "
|
||||
minmatch := []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xC3, 0xAD, 0x20, 0x63, 0xC3, 0xB3, 0x6D, 0x6F, 0x20}
|
||||
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73})
|
||||
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
|
||||
assert.Equal(t, minmatch, res[0:len(minmatch)])
|
||||
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73})
|
||||
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
|
||||
assert.Equal(t, minmatch, res[0:len(minmatch)])
|
||||
|
||||
// Japanese (Shift-JIS)
|
||||
// "日属秘ぞしちゅ。"
|
||||
res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42})
|
||||
assert.Equal(t, []byte{
|
||||
0xE6, 0x97, 0xA5, 0xE5, 0xB1, 0x9E, 0xE7, 0xA7, 0x98, 0xE3,
|
||||
0x81, 0x9E, 0xE3, 0x81, 0x97, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x85, 0xE3, 0x80, 0x82,
|
||||
}, res)
|
||||
|
||||
res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00})
|
||||
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
|
||||
}
|
||||
|
||||
func TestDetectEncoding(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
testSuccess := func(b []byte, expected string) {
|
||||
encoding, err := DetectEncoding(b)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expected, encoding)
|
||||
}
|
||||
|
||||
// invalid bytes
|
||||
encoding, err := DetectEncoding([]byte{0xfa})
|
||||
assert.Error(t, err)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
// utf-8
|
||||
b := []byte("just some ascii")
|
||||
testSuccess(b, "UTF-8")
|
||||
@ -214,169 +199,49 @@ func TestDetectEncoding(t *testing.T) {
|
||||
|
||||
// iso-8859-1: d<accented e>cor<newline>
|
||||
b = []byte{0x44, 0xe9, 0x63, 0x6f, 0x72, 0x0a}
|
||||
encoding, err := DetectEncoding(b)
|
||||
encoding, err = DetectEncoding(b)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, encoding, "ISO-8859-1")
|
||||
|
||||
old := setting.Repository.AnsiCharset
|
||||
setting.Repository.AnsiCharset = "placeholder"
|
||||
defer func() {
|
||||
setting.Repository.AnsiCharset = old
|
||||
}()
|
||||
testSuccess(b, "placeholder")
|
||||
|
||||
// invalid bytes
|
||||
b = []byte{0xfa}
|
||||
_, err = DetectEncoding(b)
|
||||
assert.Error(t, err)
|
||||
defer test.MockVariableValue(&setting.Repository.AnsiCharset, "MyEncoding")()
|
||||
testSuccess(b, "MyEncoding")
|
||||
}
|
||||
|
||||
func stringMustStartWith(t *testing.T, expected, value string) {
|
||||
assert.Equal(t, expected, value[:len(expected)])
|
||||
func stringMustStartWith(t *testing.T, expected string, value []byte) {
|
||||
assert.Equal(t, expected, string(value[:len(expected)]))
|
||||
}
|
||||
|
||||
func stringMustEndWith(t *testing.T, expected, value string) {
|
||||
assert.Equal(t, expected, value[len(value)-len(expected):])
|
||||
func stringMustEndWith(t *testing.T, expected string, value []byte) {
|
||||
assert.Equal(t, expected, string(value[len(value)-len(expected):]))
|
||||
}
|
||||
|
||||
func TestToUTF8WithFallbackReader(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
test.MockVariableValue(&ToUTF8WithFallbackReaderPrefetchSize)
|
||||
|
||||
for testLen := range 2048 {
|
||||
pattern := " test { () }\n"
|
||||
input := ""
|
||||
for len(input) < testLen {
|
||||
input += pattern
|
||||
}
|
||||
input = input[:testLen]
|
||||
input += "// Выключаем"
|
||||
rd := ToUTF8WithFallbackReader(bytes.NewReader([]byte(input)), ConvertOpts{})
|
||||
block := "aá啊🤔"
|
||||
runes := []rune(block)
|
||||
assert.Len(t, string(runes[0]), 1)
|
||||
assert.Len(t, string(runes[1]), 2)
|
||||
assert.Len(t, string(runes[2]), 3)
|
||||
assert.Len(t, string(runes[3]), 4)
|
||||
|
||||
content := strings.Repeat(block, 2)
|
||||
for i := 1; i < len(content); i++ {
|
||||
encoding, err := DetectEncoding([]byte(content[:i]))
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
ToUTF8WithFallbackReaderPrefetchSize = i
|
||||
rd := ToUTF8WithFallbackReader(strings.NewReader(content), ConvertOpts{})
|
||||
r, _ := io.ReadAll(rd)
|
||||
assert.Equalf(t, input, string(r), "testing string len=%d", testLen)
|
||||
assert.Equal(t, content, string(r))
|
||||
}
|
||||
for _, r := range runes {
|
||||
content = "abc abc " + string(r) + string(r) + string(r)
|
||||
for i := 0; i < len(content); i++ {
|
||||
encoding, err := DetectEncoding([]byte(content[:i]))
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
}
|
||||
}
|
||||
|
||||
truncatedOneByteExtension := failFastBytes
|
||||
encoding, _ := DetectEncoding(truncatedOneByteExtension)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
truncatedTwoByteExtension := failFastBytes
|
||||
truncatedTwoByteExtension[len(failFastBytes)-1] = 0x9b
|
||||
truncatedTwoByteExtension[len(failFastBytes)-2] = 0xe2
|
||||
|
||||
encoding, _ = DetectEncoding(truncatedTwoByteExtension)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
truncatedThreeByteExtension := failFastBytes
|
||||
truncatedThreeByteExtension[len(failFastBytes)-1] = 0x92
|
||||
truncatedThreeByteExtension[len(failFastBytes)-2] = 0x9f
|
||||
truncatedThreeByteExtension[len(failFastBytes)-3] = 0xf0
|
||||
|
||||
encoding, _ = DetectEncoding(truncatedThreeByteExtension)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
}
|
||||
|
||||
var failFastBytes = []byte{
|
||||
0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x74, 0x6f,
|
||||
0x6f, 0x6c, 0x73, 0x2e, 0x61, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x73, 0x6b, 0x64, 0x65, 0x66, 0x73, 0x2e, 0x63, 0x6f, 0x6e,
|
||||
0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4f, 0x73, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6f, 0x72, 0x67,
|
||||
0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f,
|
||||
0x74, 0x2e, 0x67, 0x72, 0x61, 0x64, 0x6c, 0x65, 0x2e, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x2e, 0x72, 0x75, 0x6e, 0x2e, 0x42,
|
||||
0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x0a, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x20, 0x7b, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x64, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d,
|
||||
0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x22, 0x29, 0x0a, 0x7d, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65,
|
||||
0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65,
|
||||
0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a,
|
||||
0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x61, 0x70, 0x69, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d,
|
||||
0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74,
|
||||
0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x61, 0x70, 0x69, 0x2d, 0x64, 0x6f, 0x63, 0x73, 0x22, 0x29,
|
||||
0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
|
||||
0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x64, 0x62,
|
||||
0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69,
|
||||
0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a,
|
||||
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65,
|
||||
0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a,
|
||||
0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x66,
|
||||
0x73, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
|
||||
0x3a, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x6d, 0x71, 0x22, 0x29, 0x29, 0x0a, 0x0a,
|
||||
0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22,
|
||||
0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e,
|
||||
0x2d, 0x61, 0x75, 0x74, 0x68, 0x2d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2d, 0x73, 0x74, 0x61, 0x72, 0x74,
|
||||
0x65, 0x72, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63,
|
||||
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d, 0x68, 0x61, 0x6c, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c,
|
||||
0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e,
|
||||
0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x29, 0x0a,
|
||||
0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28,
|
||||
0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b,
|
||||
0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d, 0x73, 0x74,
|
||||
0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x77, 0x65, 0x62, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c,
|
||||
0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69,
|
||||
0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72,
|
||||
0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x61, 0x6f, 0x70,
|
||||
0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f,
|
||||
0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f,
|
||||
0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d,
|
||||
0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x61, 0x63, 0x74, 0x75, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x29, 0x0a, 0x20,
|
||||
0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f,
|
||||
0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63,
|
||||
0x6c, 0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74,
|
||||
0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x22, 0x29, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72,
|
||||
0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63, 0x6c,
|
||||
0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74, 0x61,
|
||||
0x72, 0x74, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2d, 0x61, 0x6c, 0x6c, 0x22, 0x29, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72,
|
||||
0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63, 0x6c,
|
||||
0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74, 0x61,
|
||||
0x72, 0x74, 0x65, 0x72, 0x2d, 0x73, 0x6c, 0x65, 0x75, 0x74, 0x68, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d,
|
||||
0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70,
|
||||
0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x72, 0x65, 0x74, 0x72, 0x79, 0x3a,
|
||||
0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x72, 0x65, 0x74, 0x72, 0x79, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
||||
0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x63, 0x68, 0x2e, 0x71,
|
||||
0x6f, 0x73, 0x2e, 0x6c, 0x6f, 0x67, 0x62, 0x61, 0x63, 0x6b, 0x3a, 0x6c, 0x6f, 0x67, 0x62, 0x61, 0x63, 0x6b, 0x2d, 0x63,
|
||||
0x6c, 0x61, 0x73, 0x73, 0x69, 0x63, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d,
|
||||
0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x69, 0x6f, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6d, 0x65,
|
||||
0x74, 0x65, 0x72, 0x3a, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2d, 0x72, 0x65, 0x67, 0x69, 0x73,
|
||||
0x74, 0x72, 0x79, 0x2d, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6b, 0x6f, 0x74,
|
||||
0x6c, 0x69, 0x6e, 0x28, 0x22, 0x73, 0x74, 0x64, 0x6c, 0x69, 0x62, 0x22, 0x29, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
||||
0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f,
|
||||
0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x65, 0x73, 0x74, 0x20, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64,
|
||||
0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f,
|
||||
0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74,
|
||||
0x65, 0x73, 0x74, 0x49, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a,
|
||||
0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d,
|
||||
0x74, 0x65, 0x73, 0x74, 0x22, 0x29, 0x0a, 0x7d, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x20, 0x70, 0x61, 0x74, 0x63, 0x68, 0x4a,
|
||||
0x61, 0x72, 0x20, 0x62, 0x79, 0x20, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x2e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72,
|
||||
0x69, 0x6e, 0x67, 0x28, 0x4a, 0x61, 0x72, 0x3a, 0x3a, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x65, 0x72, 0x2e,
|
||||
0x73, 0x65, 0x74, 0x28, 0x22, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
||||
0x76, 0x61, 0x6c, 0x20, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x70, 0x61, 0x74, 0x68,
|
||||
0x20, 0x62, 0x79, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x67,
|
||||
0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74,
|
||||
0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
|
||||
0x73, 0x28, 0x22, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x2d, 0x50, 0x61, 0x74, 0x68, 0x22, 0x20, 0x74, 0x6f, 0x20, 0x6f, 0x62,
|
||||
0x6a, 0x65, 0x63, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70,
|
||||
0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x20, 0x76, 0x61, 0x6c, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x20, 0x3d,
|
||||
0x20, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x3a, 0x2f, 0x2b, 0x22, 0x2e, 0x74, 0x6f, 0x52, 0x65, 0x67, 0x65, 0x78, 0x28, 0x29,
|
||||
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64,
|
||||
0x65, 0x20, 0x66, 0x75, 0x6e, 0x20, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x3a, 0x20, 0x53, 0x74,
|
||||
0x72, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x70,
|
||||
0x61, 0x74, 0x68, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x6f, 0x53, 0x74, 0x72, 0x69,
|
||||
0x6e, 0x67, 0x28, 0x22, 0x20, 0x22, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x74, 0x2e, 0x74, 0x6f, 0x55, 0x52, 0x49, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x55,
|
||||
0x52, 0x4c, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c,
|
||||
0x61, 0x63, 0x65, 0x46, 0x69, 0x72, 0x73, 0x74, 0x28, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x2c, 0x20, 0x22, 0x2f,
|
||||
0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x74, 0x61, 0x73,
|
||||
0x6b, 0x73, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x3c, 0x42, 0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x3e, 0x28, 0x22, 0x62,
|
||||
0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x22, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x4f,
|
||||
0x73, 0x2e, 0x69, 0x73, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x28, 0x4f, 0x73, 0x2e, 0x46, 0x41, 0x4d, 0x49, 0x4c, 0x59,
|
||||
0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x53, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x70, 0x61, 0x74, 0x68, 0x20, 0x3d, 0x20, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x28, 0x73,
|
||||
0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x65, 0x74, 0x73, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x28, 0x22, 0x6d, 0x61, 0x69,
|
||||
0x6e, 0x22, 0x29, 0x2e, 0x6d, 0x61, 0x70, 0x20, 0x7b, 0x20, 0x69, 0x74, 0x2e, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x20,
|
||||
0x7d, 0x2c, 0x20, 0x70, 0x61, 0x74, 0x63, 0x68, 0x4a, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a,
|
||||
0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0xd0,
|
||||
}
|
||||
|
||||

@@ -20,14 +20,17 @@ import (

// RuneNBSP is the codepoint for NBSP
const RuneNBSP = 0xa0

// EscapeControlHTML escapes the unicode control sequences in a provided html document
// EscapeControlHTML escapes the Unicode control sequences in a provided html document
func EscapeControlHTML(html template.HTML, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, output template.HTML) {
    if !setting.UI.AmbiguousUnicodeDetection {
        return &EscapeStatus{}, html
    }
    sb := &strings.Builder{}
    escaped, _ = EscapeControlReader(strings.NewReader(string(html)), sb, locale, allowed...) // err has been handled in EscapeControlReader
    return escaped, template.HTML(sb.String())
}

// EscapeControlReader escapes the unicode control sequences in a provided reader of HTML content and writer in a locale and returns the findings as an EscapeStatus
// EscapeControlReader escapes the Unicode control sequences in a provided reader of HTML content and writer in a locale and returns the findings as an EscapeStatus
func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, err error) {
    if !setting.UI.AmbiguousUnicodeDetection {
        _, err = io.Copy(writer, reader)
@@ -323,14 +323,6 @@ func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, err
    return strings.TrimSpace(commitID), nil
}

// GetRepositoryDefaultPublicGPGKey returns the default public key for this commit
func (c *Commit) GetRepositoryDefaultPublicGPGKey(forceUpdate bool) (*GPGSettings, error) {
    if c.repo == nil {
        return nil, nil
    }
    return c.repo.GetDefaultPublicGPGKey(forceUpdate)
}

func IsStringLikelyCommitID(objFmt ObjectFormat, s string, minLength ...int) bool {
    maxLen := 64 // sha256
    if objFmt != nil {

modules/git/gpg.go (new file, 102 lines)

@@ -0,0 +1,102 @@
// Copyright 2015 The Gogs Authors. All rights reserved.
// Copyright 2017 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package git

import (
    "context"
    "fmt"
    "os"
    "strings"
    "sync"

    "code.gitea.io/gitea/modules/git/gitcmd"
    "code.gitea.io/gitea/modules/process"
)

// GPGSettings represents the default GPG settings for this repository
type GPGSettings struct {
    Sign bool
    KeyID string
    Email string
    Name string
    PublicKeyContent string
    Format string
}

// LoadPublicKeyContent will load the key from gpg
func (gpgSettings *GPGSettings) LoadPublicKeyContent() error {
    if gpgSettings.PublicKeyContent != "" {
        return nil
    }

    if gpgSettings.Format == SigningKeyFormatSSH {
        content, err := os.ReadFile(gpgSettings.KeyID)
        if err != nil {
            return fmt.Errorf("unable to read SSH public key file: %s, %w", gpgSettings.KeyID, err)
        }
        gpgSettings.PublicKeyContent = string(content)
        return nil
    }
    content, stderr, err := process.GetManager().Exec(
        "gpg -a --export",
        "gpg", "-a", "--export", gpgSettings.KeyID)
    if err != nil {
        return fmt.Errorf("unable to get default signing key: %s, %s, %w", gpgSettings.KeyID, stderr, err)
    }
    gpgSettings.PublicKeyContent = content
    return nil
}

var (
    loadPublicGPGKeyMutex sync.RWMutex
    globalGPGSettings *GPGSettings
)

// GetDefaultPublicGPGKey will return and cache the default public GPG settings
func GetDefaultPublicGPGKey(ctx context.Context, forceUpdate bool) (*GPGSettings, error) {
    if !forceUpdate {
        loadPublicGPGKeyMutex.RLock()
        if globalGPGSettings != nil {
            defer loadPublicGPGKeyMutex.RUnlock()
            return globalGPGSettings, nil
        }
        loadPublicGPGKeyMutex.RUnlock()
    }

    loadPublicGPGKeyMutex.Lock()
    defer loadPublicGPGKeyMutex.Unlock()

    if globalGPGSettings != nil && !forceUpdate {
        return globalGPGSettings, nil
    }

    globalGPGSettings = &GPGSettings{
        Sign: true,
    }

    value, _, _ := gitcmd.NewCommand("config", "--global", "--get", "commit.gpgsign").RunStdString(ctx)
    sign, valid := ParseBool(strings.TrimSpace(value))
    if !sign || !valid {
        globalGPGSettings.Sign = false
        return globalGPGSettings, nil
    }

    signingKey, _, _ := gitcmd.NewCommand("config", "--global", "--get", "user.signingkey").RunStdString(ctx)
    globalGPGSettings.KeyID = strings.TrimSpace(signingKey)

    format, _, _ := gitcmd.NewCommand("config", "--global", "--default", SigningKeyFormatOpenPGP, "--get", "gpg.format").RunStdString(ctx)
    globalGPGSettings.Format = strings.TrimSpace(format)

    defaultEmail, _, _ := gitcmd.NewCommand("config", "--global", "--get", "user.email").RunStdString(ctx)
    globalGPGSettings.Email = strings.TrimSpace(defaultEmail)

    defaultName, _, _ := gitcmd.NewCommand("config", "--global", "--get", "user.name").RunStdString(ctx)
    globalGPGSettings.Name = strings.TrimSpace(defaultName)

    if err := globalGPGSettings.LoadPublicKeyContent(); err != nil {
        return nil, err
    }
    return globalGPGSettings, nil
}
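
GetDefaultPublicGPGKey above caches the settings behind a sync.RWMutex: readers take the read lock for the fast path, and the expensive load happens under the write lock with a second nil-check so concurrent callers only load once. A minimal generic sketch of that pattern (illustrative only, not gitea code):

```go
package main

import (
    "fmt"
    "sync"
)

type settingsCache struct {
    mu    sync.RWMutex
    value *string
}

func (c *settingsCache) get(load func() string) string {
    c.mu.RLock()
    if c.value != nil {
        defer c.mu.RUnlock()
        return *c.value
    }
    c.mu.RUnlock()

    c.mu.Lock()
    defer c.mu.Unlock()
    if c.value == nil { // re-check: another goroutine may have filled it meanwhile
        v := load()
        c.value = &v
    }
    return *c.value
}

func main() {
    cache := &settingsCache{}
    fmt.Println(cache.get(func() string { return "loaded once" }))
    fmt.Println(cache.get(func() string { return "never called again" }))
}
```
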
@@ -32,23 +32,23 @@ func (s *SigningKey) String() string {
}

// GetSigningKey returns the KeyID and git Signature for the repo
func GetSigningKey(ctx context.Context, repoPath string) (*SigningKey, *Signature) {
func GetSigningKey(ctx context.Context) (*SigningKey, *Signature) {
    if setting.Repository.Signing.SigningKey == "none" {
        return nil, nil
    }

    if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" {
        // Can ignore the error here as it means that commit.gpgsign is not set
        value, _, _ := gitcmd.NewCommand("config", "--get", "commit.gpgsign").WithDir(repoPath).RunStdString(ctx)
        value, _, _ := gitcmd.NewCommand("config", "--global", "--get", "commit.gpgsign").RunStdString(ctx)
        sign, valid := ParseBool(strings.TrimSpace(value))
        if !sign || !valid {
            return nil, nil
        }

        format, _, _ := gitcmd.NewCommand("config", "--default", SigningKeyFormatOpenPGP, "--get", "gpg.format").WithDir(repoPath).RunStdString(ctx)
        signingKey, _, _ := gitcmd.NewCommand("config", "--get", "user.signingkey").WithDir(repoPath).RunStdString(ctx)
        signingName, _, _ := gitcmd.NewCommand("config", "--get", "user.name").WithDir(repoPath).RunStdString(ctx)
        signingEmail, _, _ := gitcmd.NewCommand("config", "--get", "user.email").WithDir(repoPath).RunStdString(ctx)
        format, _, _ := gitcmd.NewCommand("config", "--global", "--default", SigningKeyFormatOpenPGP, "--get", "gpg.format").RunStdString(ctx)
        signingKey, _, _ := gitcmd.NewCommand("config", "--global", "--get", "user.signingkey").RunStdString(ctx)
        signingName, _, _ := gitcmd.NewCommand("config", "--global", "--get", "user.name").RunStdString(ctx)
        signingEmail, _, _ := gitcmd.NewCommand("config", "--global", "--get", "user.email").RunStdString(ctx)

        if strings.TrimSpace(signingKey) == "" {
            return nil, nil
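
The gitcmd.NewCommand("config", "--global", ...) calls above read values from the user's global git configuration. A rough os/exec equivalent of one of those reads (a sketch; the real gitcmd wrapper adds context handling and other plumbing on top of this idea):

```go
package main

import (
    "fmt"
    "os/exec"
    "strings"
)

func main() {
    // `git config --get` exits non-zero when the key is unset, so err covers that case too.
    out, err := exec.Command("git", "config", "--global", "--get", "user.signingkey").Output()
    if err != nil {
        fmt.Println("no signing key configured (or git not available):", err)
        return
    }
    fmt.Println("signing key:", strings.TrimSpace(string(out)))
}
```
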
@@ -20,16 +20,6 @@ import (
    "code.gitea.io/gitea/modules/proxy"
)

// GPGSettings represents the default GPG settings for this repository
type GPGSettings struct {
    Sign bool
    KeyID string
    Email string
    Name string
    PublicKeyContent string
    Format string
}

const prettyLogFormat = `--pretty=format:%H`

func (repo *Repository) ShowPrettyFormatLogToList(ctx context.Context, revisionRange string) ([]*Commit, error) {

@@ -32,7 +32,6 @@ type Repository struct {

    gogitRepo *gogit.Repository
    gogitStorage *filesystem.Storage
    gpgSettings *GPGSettings

    Ctx context.Context
    LastCommitCache *LastCommitCache

@@ -23,8 +23,6 @@ type Repository struct {

    tagCache *ObjectCache[*Tag]

    gpgSettings *GPGSettings

    batchInUse bool
    batch *Batch
@ -1,71 +0,0 @@
|
||||
// Copyright 2015 The Gogs Authors. All rights reserved.
|
||||
// Copyright 2017 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package git
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/git/gitcmd"
|
||||
"code.gitea.io/gitea/modules/process"
|
||||
)
|
||||
|
||||
// LoadPublicKeyContent will load the key from gpg
|
||||
func (gpgSettings *GPGSettings) LoadPublicKeyContent() error {
|
||||
if gpgSettings.Format == SigningKeyFormatSSH {
|
||||
content, err := os.ReadFile(gpgSettings.KeyID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to read SSH public key file: %s, %w", gpgSettings.KeyID, err)
|
||||
}
|
||||
gpgSettings.PublicKeyContent = string(content)
|
||||
return nil
|
||||
}
|
||||
content, stderr, err := process.GetManager().Exec(
|
||||
"gpg -a --export",
|
||||
"gpg", "-a", "--export", gpgSettings.KeyID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to get default signing key: %s, %s, %w", gpgSettings.KeyID, stderr, err)
|
||||
}
|
||||
gpgSettings.PublicKeyContent = content
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetDefaultPublicGPGKey will return and cache the default public GPG settings for this repository
|
||||
func (repo *Repository) GetDefaultPublicGPGKey(forceUpdate bool) (*GPGSettings, error) {
|
||||
if repo.gpgSettings != nil && !forceUpdate {
|
||||
return repo.gpgSettings, nil
|
||||
}
|
||||
|
||||
gpgSettings := &GPGSettings{
|
||||
Sign: true,
|
||||
}
|
||||
|
||||
value, _, _ := gitcmd.NewCommand("config", "--get", "commit.gpgsign").WithDir(repo.Path).RunStdString(repo.Ctx)
|
||||
sign, valid := ParseBool(strings.TrimSpace(value))
|
||||
if !sign || !valid {
|
||||
gpgSettings.Sign = false
|
||||
repo.gpgSettings = gpgSettings
|
||||
return gpgSettings, nil
|
||||
}
|
||||
|
||||
signingKey, _, _ := gitcmd.NewCommand("config", "--get", "user.signingkey").WithDir(repo.Path).RunStdString(repo.Ctx)
|
||||
gpgSettings.KeyID = strings.TrimSpace(signingKey)
|
||||
|
||||
format, _, _ := gitcmd.NewCommand("config", "--default", SigningKeyFormatOpenPGP, "--get", "gpg.format").WithDir(repo.Path).RunStdString(repo.Ctx)
|
||||
gpgSettings.Format = strings.TrimSpace(format)
|
||||
|
||||
defaultEmail, _, _ := gitcmd.NewCommand("config", "--get", "user.email").WithDir(repo.Path).RunStdString(repo.Ctx)
|
||||
gpgSettings.Email = strings.TrimSpace(defaultEmail)
|
||||
|
||||
defaultName, _, _ := gitcmd.NewCommand("config", "--get", "user.name").WithDir(repo.Path).RunStdString(repo.Ctx)
|
||||
gpgSettings.Name = strings.TrimSpace(defaultName)
|
||||
|
||||
if err := gpgSettings.LoadPublicKeyContent(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
repo.gpgSettings = gpgSettings
|
||||
return repo.gpgSettings, nil
|
||||
}
|
||||
@@ -9,6 +9,6 @@ import (
    "code.gitea.io/gitea/modules/git"
)

func GetSigningKey(ctx context.Context, repo Repository) (*git.SigningKey, *git.Signature) {
    return git.GetSigningKey(ctx, repoPath(repo))
func GetSigningKey(ctx context.Context) (*git.SigningKey, *git.Signature) {
    return git.GetSigningKey(ctx)
}

@@ -56,7 +56,39 @@ func NewContext() {
    })
}

// Code returns a HTML version of code string with chroma syntax highlighting classes and the matched lexer name
// UnsafeSplitHighlightedLines splits highlighted code into lines preserving HTML tags
// It always includes '\n', '\n' can appear at the end of each line or in the middle of HTML tags
// The '\n' is necessary for copying code from web UI to preserve original code lines
// ATTENTION: It uses the unsafe conversion between string and []byte for performance reason
// DO NOT make any modification to the returned [][]byte slice items
func UnsafeSplitHighlightedLines(code template.HTML) (ret [][]byte) {
    buf := util.UnsafeStringToBytes(string(code))
    lineCount := bytes.Count(buf, []byte("\n")) + 1
    ret = make([][]byte, 0, lineCount)
    nlTagClose := []byte("\n</")
    for {
        pos := bytes.IndexByte(buf, '\n')
        if pos == -1 {
            if len(buf) > 0 {
                ret = append(ret, buf)
            }
            return ret
        }
        // Chroma highlighting output sometimes have "</span>" right after \n, sometimes before.
        // * "<span>text\n</span>"
        // * "<span>text</span>\n"
        if bytes.HasPrefix(buf[pos:], nlTagClose) {
            pos1 := bytes.IndexByte(buf[pos:], '>')
            if pos1 != -1 {
                pos += pos1
            }
        }
        ret = append(ret, buf[:pos+1])
        buf = buf[pos+1:]
    }
}

// Code returns an HTML version of code string with chroma syntax highlighting classes and the matched lexer name
func Code(fileName, language, code string) (output template.HTML, lexerName string) {
    NewContext()

@@ -181,3 +181,21 @@ c=2`),
        })
    }
}

func TestUnsafeSplitHighlightedLines(t *testing.T) {
    ret := UnsafeSplitHighlightedLines("")
    assert.Empty(t, ret)

    ret = UnsafeSplitHighlightedLines("a")
    assert.Len(t, ret, 1)
    assert.Equal(t, "a", string(ret[0]))

    ret = UnsafeSplitHighlightedLines("\n")
    assert.Len(t, ret, 1)
    assert.Equal(t, "\n", string(ret[0]))

    ret = UnsafeSplitHighlightedLines("<span>a</span>\n<span>b\n</span>")
    assert.Len(t, ret, 2)
    assert.Equal(t, "<span>a</span>\n", string(ret[0]))
    assert.Equal(t, "<span>b\n</span>", string(ret[1]))
}
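
The ATTENTION comment above concerns the zero-copy string-to-bytes conversion. A hedged sketch of what such a helper is typically built on (assumed behaviour of util.UnsafeStringToBytes, not its actual source), showing why the returned slices must never be modified:

```go
package main

import (
    "fmt"
    "unsafe"
)

// unsafeStringToBytes aliases the string's backing memory as a byte slice,
// so no allocation or copy happens. Writing to the result would mutate an
// immutable string, which is undefined behaviour in Go.
func unsafeStringToBytes(s string) []byte {
    return unsafe.Slice(unsafe.StringData(s), len(s))
}

func main() {
    s := "<span>a</span>\n"
    b := unsafeStringToBytes(s) // no copy: b shares memory with s
    fmt.Println(len(b), cap(b)) // 15 15
    // b[0] = 'X' // must not do this: the string's bytes have to stay immutable
}
```
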
@@ -19,7 +19,6 @@ import (
    charsetModule "code.gitea.io/gitea/modules/charset"
    "code.gitea.io/gitea/modules/container"
    "code.gitea.io/gitea/modules/httpcache"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/typesniffer"
    "code.gitea.io/gitea/modules/util"
@@ -109,11 +108,7 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, mineBuf []byt
    }

    if isPlain {
        charset, err := charsetModule.DetectEncoding(mineBuf)
        if err != nil {
            log.Error("Detect raw file %s charset failed: %v, using by default utf-8", opts.Filename, err)
            charset = "utf-8"
        }
        charset, _ := charsetModule.DetectEncoding(mineBuf)
        opts.ContentTypeCharset = strings.ToLower(charset)
    }
@@ -203,7 +203,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
        RepoID: repo.ID,
        CommitID: commitSha,
        Filename: update.Filename,
        Content: string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
        Content: string(charset.ToUTF8DropErrors(fileContents)),
        Language: analyze.GetCodeLanguage(update.Filename, fileContents),
        UpdatedAt: time.Now().UTC(),
    })

@@ -191,7 +191,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
        Doc(map[string]any{
            "repo_id": repo.ID,
            "filename": update.Filename,
            "content": string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
            "content": string(charset.ToUTF8DropErrors(fileContents)),
            "commit_id": sha,
            "language": analyze.GetCodeLanguage(update.Filename, fileContents),
            "updated_at": timeutil.TimeStampNow(),

@@ -240,4 +240,5 @@ func PanicInDevOrTesting(msg string, a ...any) {
    if !IsProd || IsInTesting {
        panic(fmt.Sprintf(msg, a...))
    }
    log.Error(msg, a...)
}
@ -152,7 +152,7 @@ func repoAssignment() func(ctx *context.APIContext) {
|
||||
if err != nil {
|
||||
if user_model.IsErrUserNotExist(err) {
|
||||
if redirectUserID, err := user_model.LookupUserRedirect(ctx, userName); err == nil {
|
||||
context.RedirectToUser(ctx.Base, userName, redirectUserID)
|
||||
context.RedirectToUser(ctx.Base, ctx.Doer, userName, redirectUserID)
|
||||
} else if user_model.IsErrUserRedirectNotExist(err) {
|
||||
ctx.APIErrorNotFound("GetUserByName", err)
|
||||
} else {
|
||||
@ -612,7 +612,7 @@ func orgAssignment(args ...bool) func(ctx *context.APIContext) {
if organization.IsErrOrgNotExist(err) {
redirectUserID, err := user_model.LookupUserRedirect(ctx, ctx.PathParam("org"))
if err == nil {
context.RedirectToUser(ctx.Base, ctx.PathParam("org"), redirectUserID)
context.RedirectToUser(ctx.Base, ctx.Doer, ctx.PathParam("org"), redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
ctx.APIErrorNotFound("GetOrgByName", err)
} else {

@ -10,13 +10,8 @@ import (
)

func getSigningKey(ctx *context.APIContext, expectedFormat string) {
// if the handler is in the repo's route group, get the repo's signing key
// otherwise, get the global signing key
path := ""
if ctx.Repo != nil && ctx.Repo.Repository != nil {
path = ctx.Repo.Repository.RepoPath()
}
content, format, err := asymkey_service.PublicSigningKey(ctx, path)
// get the global signing key
content, format, err := asymkey_service.PublicSigningKey(ctx)
if err != nil {
ctx.APIErrorInternal(err)
return

@ -16,7 +16,7 @@ func GetUserByPathParam(ctx *context.APIContext, name string) *user_model.User {
if err != nil {
if user_model.IsErrUserNotExist(err) {
if redirectUserID, err2 := user_model.LookupUserRedirect(ctx, username); err2 == nil {
context.RedirectToUser(ctx.Base, username, redirectUserID)
context.RedirectToUser(ctx.Base, ctx.Doer, username, redirectUserID)
} else {
ctx.APIErrorNotFound("GetUserByName", err)
}

@ -4,8 +4,9 @@
package repo

import (
"bytes"
"fmt"
gotemplate "html/template"
"html/template"
"net/http"
"net/url"
"path"
@ -25,18 +26,17 @@ import (
)

type blameRow struct {
RowNumber int
Avatar gotemplate.HTML
RepoLink string
PartSha string
RowNumber int

Avatar template.HTML
PreviousSha string
PreviousShaURL string
IsFirstCommit bool
CommitURL string
CommitMessage string
CommitSince gotemplate.HTML
Code gotemplate.HTML
EscapeStatus *charset.EscapeStatus
CommitSince template.HTML

Code template.HTML
EscapeStatus *charset.EscapeStatus
}

// RefBlame render blame page
@ -220,76 +220,64 @@ func processBlameParts(ctx *context.Context, blameParts []*git.BlamePart) map[st
return commitNames
}

func renderBlame(ctx *context.Context, blameParts []*git.BlamePart, commitNames map[string]*user_model.UserCommit) {
repoLink := ctx.Repo.RepoLink
func renderBlameFillFirstBlameRow(repoLink string, avatarUtils *templates.AvatarUtils, part *git.BlamePart, commit *user_model.UserCommit, br *blameRow) {
if commit.User != nil {
br.Avatar = avatarUtils.Avatar(commit.User, 18)
} else {
br.Avatar = avatarUtils.AvatarByEmail(commit.Author.Email, commit.Author.Name, 18)
}

br.PreviousSha = part.PreviousSha
br.PreviousShaURL = fmt.Sprintf("%s/blame/commit/%s/%s", repoLink, url.PathEscape(part.PreviousSha), util.PathEscapeSegments(part.PreviousPath))
br.CommitURL = fmt.Sprintf("%s/commit/%s", repoLink, url.PathEscape(part.Sha))
br.CommitMessage = commit.CommitMessage
br.CommitSince = templates.TimeSince(commit.Author.When)
}

func renderBlame(ctx *context.Context, blameParts []*git.BlamePart, commitNames map[string]*user_model.UserCommit) {
language, err := languagestats.GetFileLanguage(ctx, ctx.Repo.GitRepo, ctx.Repo.CommitID, ctx.Repo.TreePath)
if err != nil {
log.Error("Unable to get file language for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
}

lines := make([]string, 0)
buf := &bytes.Buffer{}
rows := make([]*blameRow, 0)
avatarUtils := templates.NewAvatarUtils(ctx)
rowNumber := 0 // will be 1-based
for _, part := range blameParts {
for partLineIdx, line := range part.Lines {
rowNumber++

br := &blameRow{RowNumber: rowNumber}
rows = append(rows, br)

if int64(buf.Len()) < setting.UI.MaxDisplayFileSize {
buf.WriteString(line)
buf.WriteByte('\n')
}

if partLineIdx == 0 {
renderBlameFillFirstBlameRow(ctx.Repo.RepoLink, avatarUtils, part, commitNames[part.Sha], br)
}
}
}

escapeStatus := &charset.EscapeStatus{}

var lexerName string

avatarUtils := templates.NewAvatarUtils(ctx)
i := 0
commitCnt := 0
for _, part := range blameParts {
for index, line := range part.Lines {
i++
lines = append(lines, line)

br := &blameRow{
RowNumber: i,
}

commit := commitNames[part.Sha]
if index == 0 {
// Count commit number
commitCnt++

// User avatar image
commitSince := templates.TimeSince(commit.Author.When)

var avatar string
if commit.User != nil {
avatar = string(avatarUtils.Avatar(commit.User, 18))
} else {
avatar = string(avatarUtils.AvatarByEmail(commit.Author.Email, commit.Author.Name, 18, "tw-mr-2"))
}

br.Avatar = gotemplate.HTML(avatar)
br.RepoLink = repoLink
br.PartSha = part.Sha
br.PreviousSha = part.PreviousSha
br.PreviousShaURL = fmt.Sprintf("%s/blame/commit/%s/%s", repoLink, url.PathEscape(part.PreviousSha), util.PathEscapeSegments(part.PreviousPath))
br.CommitURL = fmt.Sprintf("%s/commit/%s", repoLink, url.PathEscape(part.Sha))
br.CommitMessage = commit.CommitMessage
br.CommitSince = commitSince
}

if i != len(lines)-1 {
line += "\n"
}
line, lexerNameForLine := highlight.Code(path.Base(ctx.Repo.TreePath), language, line)

// set lexer name to the first detected lexer. this is certainly suboptimal and
// we should instead highlight the whole file at once
if lexerName == "" {
lexerName = lexerNameForLine
}

br.EscapeStatus, br.Code = charset.EscapeControlHTML(line, ctx.Locale)
rows = append(rows, br)
escapeStatus = escapeStatus.Or(br.EscapeStatus)
bufContent := buf.Bytes()
bufContent = charset.ToUTF8(bufContent, charset.ConvertOpts{})
highlighted, lexerName := highlight.Code(path.Base(ctx.Repo.TreePath), language, util.UnsafeBytesToString(bufContent))
unsafeLines := highlight.UnsafeSplitHighlightedLines(highlighted)
for i, br := range rows {
var line template.HTML
if i < len(rows) {
line = template.HTML(util.UnsafeBytesToString(unsafeLines[i]))
}
br.EscapeStatus, br.Code = charset.EscapeControlHTML(line, ctx.Locale)
escapeStatus = escapeStatus.Or(br.EscapeStatus)
}

ctx.Data["EscapeStatus"] = escapeStatus
ctx.Data["BlameRows"] = rows
ctx.Data["CommitCnt"] = commitCnt
ctx.Data["LexerName"] = lexerName
}
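The hunk above replaces the old per-line highlighting loop with a single whole-file pass: the blame content is buffered, converted to UTF-8 once, highlighted once, and only then split back into per-row lines. A minimal sketch of that flow, assuming the helper signatures visible in this diff (charset.ToUTF8 returning converted bytes, highlight.UnsafeSplitHighlightedLines yielding one byte slice per source line); this is an illustration, not part of the commits being compared:

func renderBlameRowsSketch(ctx *context.Context, rows []*blameRow, buf *bytes.Buffer, language string) {
	// convert the buffered file content to UTF-8 before highlighting
	content := charset.ToUTF8(buf.Bytes(), charset.ConvertOpts{})
	// highlight the whole file in one pass instead of line by line
	highlighted, _ := highlight.Code(path.Base(ctx.Repo.TreePath), language, util.UnsafeBytesToString(content))
	unsafeLines := highlight.UnsafeSplitHighlightedLines(highlighted)
	for i, br := range rows {
		var line template.HTML
		if i < len(unsafeLines) {
			line = template.HTML(util.UnsafeBytesToString(unsafeLines[i]))
		}
		// escape invisible/ambiguous characters per rendered row
		br.EscapeStatus, br.Code = charset.EscapeControlHTML(line, ctx.Locale)
	}
}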
@ -317,11 +317,7 @@ func EditFile(ctx *context.Context) {
ctx.ServerError("ReadAll", err)
return
}
if content, err := charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true}); err != nil {
ctx.Data["FileContent"] = string(buf)
} else {
ctx.Data["FileContent"] = content
}
ctx.Data["FileContent"] = string(charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true, ErrorReturnOrigin: true}))
}
}
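In the editor handler above, the old error-handling branch is folded into a single call: with the ConvertOpts fields introduced in this change set, a failed conversion hands back the original bytes instead of an error. A hedged sketch of that call, with option names taken from this diff rather than a stable public API:

func editorFileContentSketch(ctx *context.Context, buf []byte) {
	ctx.Data["FileContent"] = string(charset.ToUTF8(buf, charset.ConvertOpts{
		KeepBOM:           true, // keep a leading UTF-8 BOM so saving the file does not silently strip it
		ErrorReturnOrigin: true, // on conversion failure, fall back to the original bytes
	}))
}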
@ -61,7 +61,7 @@ func SettingsCtxData(ctx *context.Context) {
ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval
ctx.Data["CanConvertFork"] = ctx.Repo.Repository.IsFork && ctx.Doer.CanCreateRepoIn(ctx.Repo.Repository.Owner)

signing, _ := gitrepo.GetSigningKey(ctx, ctx.Repo.Repository)
signing, _ := gitrepo.GetSigningKey(ctx)
ctx.Data["SigningKeyAvailable"] = signing != nil
ctx.Data["SigningSettings"] = setting.Repository.Signing
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
@ -104,7 +104,7 @@ func SettingsPost(ctx *context.Context) {
ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval
ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval

signing, _ := gitrepo.GetSigningKey(ctx, ctx.Repo.Repository)
signing, _ := gitrepo.GetSigningKey(ctx)
ctx.Data["SigningKeyAvailable"] = signing != nil
ctx.Data["SigningSettings"] = setting.Repository.Signing
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled

@ -162,7 +162,7 @@ func parseCommitWithGPGSignature(ctx context.Context, c *git.Commit, committer *
}
}

defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false)
defaultGPGSettings, err := git.GetDefaultPublicGPGKey(ctx, false)
if err != nil {
log.Error("Error getting default public gpg key: %v", err)
} else if defaultGPGSettings == nil {

@ -108,34 +108,34 @@ func IsErrWontSign(err error) bool {
return ok
}

// PublicSigningKey gets the public signing key within a provided repository directory
func PublicSigningKey(ctx context.Context, repoPath string) (content, format string, err error) {
signingKey, _ := git.GetSigningKey(ctx, repoPath)
// PublicSigningKey gets the public signing key
func PublicSigningKey(ctx context.Context) (content, format string, err error) {
signingKey, _ := git.GetSigningKey(ctx)
if signingKey == nil {
return "", "", nil
}
if signingKey.Format == git.SigningKeyFormatSSH {
content, err := os.ReadFile(signingKey.KeyID)
if err != nil {
log.Error("Unable to read SSH public key file in %s: %s, %v", repoPath, signingKey, err)
log.Error("Unable to read SSH public key file: %s, %v", signingKey, err)
return "", signingKey.Format, err
}
return string(content), signingKey.Format, nil
}

content, stderr, err := process.GetManager().ExecDir(ctx, -1, repoPath,
content, stderr, err := process.GetManager().ExecDir(ctx, -1, setting.Git.HomePath,
"gpg --export -a", "gpg", "--export", "-a", signingKey.KeyID)
if err != nil {
log.Error("Unable to get default signing key in %s: %s, %s, %v", repoPath, signingKey, stderr, err)
log.Error("Unable to get default signing key: %s, %s, %v", signingKey, stderr, err)
return "", signingKey.Format, err
}
return content, signingKey.Format, nil
}

// SignInitialCommit determines if we should sign the initial commit to this repository
func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) {
func SignInitialCommit(ctx context.Context, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) {
rules := signingModeFromStrings(setting.Repository.Signing.InitialCommit)
signingKey, sig := git.GetSigningKey(ctx, repoPath)
signingKey, sig := git.GetSigningKey(ctx)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
@ -171,7 +171,7 @@ Loop:
// SignWikiCommit determines if we should sign the commits to this repository wiki
func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) {
rules := signingModeFromStrings(setting.Repository.Signing.Wiki)
signingKey, sig := gitrepo.GetSigningKey(ctx, repo.WikiStorageRepo())
signingKey, sig := gitrepo.GetSigningKey(ctx)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
@ -222,9 +222,9 @@ Loop:
}

// SignCRUDAction determines if we should sign a CRUD commit to this repository
func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, *git.SigningKey, *git.Signature, error) {
func SignCRUDAction(ctx context.Context, u *user_model.User, tmpBasePath, parentCommit string) (bool, *git.SigningKey, *git.Signature, error) {
rules := signingModeFromStrings(setting.Repository.Signing.CRUDActions)
signingKey, sig := git.GetSigningKey(ctx, repoPath)
signingKey, sig := git.GetSigningKey(ctx)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
@ -288,7 +288,7 @@ func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.
}
repo := pr.BaseRepo

signingKey, signer := gitrepo.GetSigningKey(ctx, repo)
signingKey, signer := gitrepo.GetSigningKey(ctx)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
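The signing hunks above all drop the repository-path parameter: the signing key is now resolved from the instance-wide configuration via the context, so call sites only pass the context, the doer, and the commit details. A sketch of a call site after this change, with signatures assumed from the hunks above:

func signCRUDSketch(ctx context.Context, doer *user_model.User, tmpBasePath, parentCommit string) (bool, *git.SigningKey, *git.Signature) {
	// the repoPath argument is gone; the key comes from the instance-wide configuration
	sign, key, signer, err := asymkey_service.SignCRUDAction(ctx, doer, tmpBasePath, parentCommit)
	if err != nil {
		if !asymkey_service.IsErrWontSign(err) {
			log.Error("SignCRUDAction: %v", err)
		}
		return false, nil, nil
	}
	return sign, key, signer
}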
@ -20,15 +20,27 @@ import (
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/web/middleware"
)

// RedirectToUser redirect to a differently-named user
func RedirectToUser(ctx *Base, userName string, redirectUserID int64) {
func RedirectToUser(ctx *Base, doer *user_model.User, userName string, redirectUserID int64) {
user, err := user_model.GetUserByID(ctx, redirectUserID)
if err != nil {
ctx.HTTPError(http.StatusInternalServerError, "unable to get user")
if user_model.IsErrUserNotExist(err) {
ctx.HTTPError(http.StatusNotFound, "user does not exist")
} else {
ctx.HTTPError(http.StatusInternalServerError, "unable to get user")
}
return
}

// Handle Visibility
if user.Visibility != structs.VisibleTypePublic && doer == nil {
// We must be signed in to see limited or private organizations
ctx.HTTPError(http.StatusNotFound, "user does not exist")
return
}

@ -49,7 +49,7 @@ func GetOrganizationByParams(ctx *Context) {
if organization.IsErrOrgNotExist(err) {
redirectUserID, err := user_model.LookupUserRedirect(ctx, orgName)
if err == nil {
RedirectToUser(ctx.Base, orgName, redirectUserID)
RedirectToUser(ctx.Base, ctx.Doer, orgName, redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
ctx.NotFound(err)
} else {

@ -140,7 +140,7 @@ func PrepareCommitFormOptions(ctx *Context, doer *user_model.User, targetRepo *r
protectionRequireSigned = protectedBranch.RequireSignedCommits
}

willSign, signKey, _, err := asymkey_service.SignCRUDAction(ctx, targetRepo.RepoPath(), doer, targetRepo.RepoPath(), refName.String())
willSign, signKey, _, err := asymkey_service.SignCRUDAction(ctx, doer, targetRepo.RepoPath(), refName.String())
wontSignReason := ""
if asymkey_service.IsErrWontSign(err) {
wontSignReason = string(err.(*asymkey_service.ErrWontSign).Reason)
@ -443,7 +443,7 @@ func RepoAssignment(ctx *Context) {
}

if redirectUserID, err := user_model.LookupUserRedirect(ctx, userName); err == nil {
RedirectToUser(ctx.Base, userName, redirectUserID)
RedirectToUser(ctx.Base, ctx.Doer, userName, redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
ctx.NotFound(nil)
} else {

@ -69,7 +69,7 @@ func userAssignment(ctx *Base, doer *user_model.User, errCb func(int, any)) (con
if err != nil {
if user_model.IsErrUserNotExist(err) {
if redirectUserID, err := user_model.LookupUserRedirect(ctx, username); err == nil {
RedirectToUser(ctx, username, redirectUserID)
RedirectToUser(ctx, doer, username, redirectUserID)
} else if user_model.IsErrUserRedirectNotExist(err) {
errCb(http.StatusNotFound, err)
} else {

@ -835,11 +835,11 @@ parsingLoop:
if buffer.Len() == 0 {
continue
}
charsetLabel, err := charset.DetectEncoding(buffer.Bytes())
if charsetLabel != "UTF-8" && err == nil {
encoding, _ := stdcharset.Lookup(charsetLabel)
if encoding != nil {
diffLineTypeDecoders[lineType] = encoding.NewDecoder()
charsetLabel, _ := charset.DetectEncoding(buffer.Bytes())
if charsetLabel != "UTF-8" {
charsetEncoding, _ := stdcharset.Lookup(charsetLabel)
if charsetEncoding != nil {
diffLineTypeDecoders[lineType] = charsetEncoding.NewDecoder()
}
}
}
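The hunk above stops branching on DetectEncoding's error: the label is simply checked against UTF-8 and a decoder is only installed for a diff line type when a matching encoding exists. A small sketch of that lookup path, assuming the same helpers used in the file above (charset from code.gitea.io/gitea/modules/charset, stdcharset aliasing golang.org/x/net/html/charset, encoding from golang.org/x/text/encoding):

func decoderForSketch(raw []byte) *encoding.Decoder {
	charsetLabel, _ := charset.DetectEncoding(raw) // the error is deliberately ignored here
	if charsetLabel == "" || charsetLabel == "UTF-8" {
		return nil // already UTF-8 (or undetectable): lines pass through unchanged
	}
	if enc, _ := stdcharset.Lookup(charsetLabel); enc != nil {
		return enc.NewDecoder()
	}
	return nil // unknown label: showing raw bytes beats failing the whole diff
}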
@ -1325,10 +1325,10 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
shouldFullFileHighlight := !setting.Git.DisableDiffHighlight && attrDiff.Value() == ""
if shouldFullFileHighlight {
if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize {
diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.String())
diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.Bytes())
}
if limitedContent.RightContent != nil && limitedContent.RightContent.buf.Len() < MaxDiffHighlightEntireFileSize {
diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.String())
diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.Bytes())
}
}
}
@ -1336,10 +1336,11 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
return diff, nil
}

func highlightCodeLines(diffFile *DiffFile, isLeft bool, content string) map[int]template.HTML {
func highlightCodeLines(diffFile *DiffFile, isLeft bool, rawContent []byte) map[int]template.HTML {
content := util.UnsafeBytesToString(charset.ToUTF8(rawContent, charset.ConvertOpts{}))
highlightedNewContent, _ := highlight.Code(diffFile.Name, diffFile.Language, content)
splitLines := strings.Split(string(highlightedNewContent), "\n")
lines := make(map[int]template.HTML, len(splitLines))
unsafeLines := highlight.UnsafeSplitHighlightedLines(highlightedNewContent)
lines := make(map[int]template.HTML, len(unsafeLines))
// only save the highlighted lines we need, but not the whole file, to save memory
for _, sec := range diffFile.Sections {
for _, ln := range sec.Lines {
@ -1349,8 +1350,8 @@ func highlightCodeLines(diffFile *DiffFile, isLeft bool, content string) map[int
}
if lineIdx >= 1 {
idx := lineIdx - 1
if idx < len(splitLines) {
lines[idx] = template.HTML(splitLines[idx])
if idx < len(unsafeLines) {
lines[idx] = template.HTML(util.UnsafeBytesToString(unsafeLines[idx]))
}
}
}

@ -5,6 +5,7 @@
package gitdiff

import (
"html/template"
"strconv"
"strings"
"testing"
@ -1106,3 +1107,41 @@ func TestDiffLine_GetExpandDirection(t *testing.T) {
assert.Equal(t, c.direction, c.diffLine.GetExpandDirection(), "case %s expected direction: %s", c.name, c.direction)
}
}

func TestHighlightCodeLines(t *testing.T) {
t.Run("CharsetDetecting", func(t *testing.T) {
diffFile := &DiffFile{
Name: "a.c",
Language: "c",
Sections: []*DiffSection{
{
Lines: []*DiffLine{{LeftIdx: 1}},
},
},
}
ret := highlightCodeLines(diffFile, true, []byte("// abc\xcc def\xcd")) // ISO-8859-1 bytes
assert.Equal(t, "<span class=\"c1\">// abcÌ defÍ\n</span>", string(ret[0]))
})

t.Run("LeftLines", func(t *testing.T) {
diffFile := &DiffFile{
Name: "a.c",
Language: "c",
Sections: []*DiffSection{
{
Lines: []*DiffLine{
{LeftIdx: 1},
{LeftIdx: 2},
{LeftIdx: 3},
},
},
},
}
const nl = "\n"
ret := highlightCodeLines(diffFile, true, []byte("a\nb\n"))
assert.Equal(t, map[int]template.HTML{
0: `<span class="n">a</span>` + nl,
1: `<span class="n">b</span>`,
}, ret)
})
}

@ -25,12 +25,12 @@ func TestDiffWithHighlight(t *testing.T) {

t.Run("CleanUp", func(t *testing.T) {
hcd := newHighlightCodeDiff()
codeA := template.HTML(`<span class="cm>this is a comment</span>`)
codeB := template.HTML(`<span class="cm>this is updated comment</span>`)
codeA := template.HTML(`<span class="cm">this is a comment</span>`)
codeB := template.HTML(`<span class="cm">this is updated comment</span>`)
outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
assert.Equal(t, `<span class="cm>this is <span class="removed-code">a</span> comment</span>`, string(outDel))
assert.Equal(t, `<span class="cm">this is <span class="removed-code">a</span> comment</span>`, string(outDel))
outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
assert.Equal(t, `<span class="cm>this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
assert.Equal(t, `<span class="cm">this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
})

t.Run("OpenCloseTags", func(t *testing.T) {

@ -95,7 +95,7 @@ func (opts *ApplyDiffPatchOptions) Validate(ctx context.Context, repo *repo_mode
}
}
if protectedBranch != nil && protectedBranch.RequireSignedCommits {
_, _, _, err := asymkey_service.SignCRUDAction(ctx, repo.RepoPath(), doer, repo.RepoPath(), opts.OldBranch)
_, _, _, err := asymkey_service.SignCRUDAction(ctx, doer, repo.RepoPath(), opts.OldBranch)
if err != nil {
if !asymkey_service.IsErrWontSign(err) {
return err

@ -303,9 +303,9 @@ func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *Commit
var key *git.SigningKey
var signer *git.Signature
if opts.ParentCommitID != "" {
sign, key, signer, _ = asymkey_service.SignCRUDAction(ctx, t.repo.RepoPath(), opts.DoerUser, t.basePath, opts.ParentCommitID)
sign, key, signer, _ = asymkey_service.SignCRUDAction(ctx, opts.DoerUser, t.basePath, opts.ParentCommitID)
} else {
sign, key, signer, _ = asymkey_service.SignInitialCommit(ctx, t.repo.RepoPath(), opts.DoerUser)
sign, key, signer, _ = asymkey_service.SignInitialCommit(ctx, opts.DoerUser)
}
if sign {
if key.Format != "" {

@ -686,7 +686,7 @@ func VerifyBranchProtection(ctx context.Context, repo *repo_model.Repository, do
}
}
if protectedBranch.RequireSignedCommits {
_, _, _, err := asymkey_service.SignCRUDAction(ctx, repo.RepoPath(), doer, repo.RepoPath(), branchName)
_, _, _, err := asymkey_service.SignCRUDAction(ctx, doer, repo.RepoPath(), branchName)
if err != nil {
if !asymkey_service.IsErrWontSign(err) {
return err

@ -41,7 +41,7 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi
cmd := gitcmd.NewCommand("commit", "--message=Initial commit").
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email)

sign, key, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u)
sign, key, signer, _ := asymkey_service.SignInitialCommit(ctx, u)
if sign {
if key.Format != "" {
cmd.AddConfig("gpg.format", key.Format)

@ -38,7 +38,7 @@
<table>
<tbody>
{{range $row := .BlameRows}}
<tr class="{{if and (gt $.CommitCnt 1) ($row.CommitMessage)}}top-line-blame{{end}}">
<tr class="{{if $row.CommitURL}}top-line-blame{{end}}">
<td class="lines-commit">
<div class="blame-info">
<div class="blame-data">

@ -4,6 +4,7 @@
package migrations

import (
"bytes"
"compress/gzip"
"context"
"database/sql"
@ -21,7 +22,6 @@ import (
"code.gitea.io/gitea/models/migrations"
migrate_base "code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@ -108,11 +108,11 @@ func readSQLFromFile(version string) (string, error) {
}
defer gr.Close()

bytes, err := io.ReadAll(gr)
buf, err := io.ReadAll(gr)
if err != nil {
return "", err
}
return string(charset.MaybeRemoveBOM(bytes, charset.ConvertOpts{})), nil
return string(bytes.TrimPrefix(buf, []byte{'\xef', '\xbb', '\xbf'})), nil
}

func restoreOldDB(t *testing.T, version string) {

@ -45,6 +45,78 @@ func TestRenameUsername(t *testing.T) {
unittest.AssertNotExistsBean(t, &user_model.User{Name: "user2"})
}

func TestViewLimitedAndPrivateUserAndRename(t *testing.T) {
defer tests.PrepareTestEnv(t)()

// user 22 is a limited visibility org
org22 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 22})
req := NewRequest(t, "GET", "/"+org22.Name)
MakeRequest(t, req, http.StatusNotFound)

session := loginUser(t, "user1")
oldName := org22.Name
newName := "org22_renamed"
req = NewRequestWithValues(t, "POST", "/org/"+oldName+"/settings/rename", map[string]string{
"_csrf": GetUserCSRFToken(t, session),
"org_name": oldName,
"new_org_name": newName,
})
session.MakeRequest(t, req, http.StatusOK)

unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: newName})
unittest.AssertNotExistsBean(t, &user_model.User{Name: oldName})

req = NewRequest(t, "GET", "/"+oldName)
MakeRequest(t, req, http.StatusNotFound) // anonymous user cannot visit limited visibility org via old name
req = NewRequest(t, "GET", "/"+oldName)
session.MakeRequest(t, req, http.StatusTemporaryRedirect) // login user can visit limited visibility org via old name

// org 23 is a private visibility org
org23 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 23})
req = NewRequest(t, "GET", "/"+org23.Name)
MakeRequest(t, req, http.StatusNotFound)

oldName = org23.Name
newName = "org23_renamed"
req = NewRequestWithValues(t, "POST", "/org/"+oldName+"/settings/rename", map[string]string{
"_csrf": GetUserCSRFToken(t, session),
"org_name": oldName,
"new_org_name": newName,
})
session.MakeRequest(t, req, http.StatusOK)

unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: newName})
unittest.AssertNotExistsBean(t, &user_model.User{Name: oldName})

req = NewRequest(t, "GET", "/"+oldName)
MakeRequest(t, req, http.StatusNotFound) // anonymous user cannot visit private visibility org via old name
req = NewRequest(t, "GET", "/"+oldName)
session.MakeRequest(t, req, http.StatusTemporaryRedirect) // login user can visit private visibility org via old name

// user 31 is a private visibility user
user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31})
req = NewRequest(t, "GET", "/"+user31.Name)
MakeRequest(t, req, http.StatusNotFound)

oldName = user31.Name
newName = "user31_renamed"
session2 := loginUser(t, oldName)
req = NewRequestWithValues(t, "POST", "/user/settings", map[string]string{
"_csrf": GetUserCSRFToken(t, session2),
"name": newName,
"visibility": "2", // private
})
session2.MakeRequest(t, req, http.StatusSeeOther)

unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: newName})
unittest.AssertNotExistsBean(t, &user_model.User{Name: oldName})

req = NewRequest(t, "GET", "/"+oldName)
MakeRequest(t, req, http.StatusNotFound) // anonymous user cannot visit private visibility user via old name
req = NewRequest(t, "GET", "/"+oldName)
session.MakeRequest(t, req, http.StatusTemporaryRedirect) // login user2 can visit private visibility user via old name
}

func TestRenameInvalidUsername(t *testing.T) {
defer tests.PrepareTestEnv(t)()

@ -919,7 +919,7 @@ overflow-menu .ui.label {
.blame-avatar {
display: flex;
align-items: center;
margin-right: 4px;
margin-right: 6px;
}

tr.top-line-blame {

@ -2,20 +2,10 @@
import {SvgIcon} from '../svg.ts';
import {isPlainClick} from '../utils/dom.ts';
import {shallowRef} from 'vue';
import {type createViewFileTreeStore} from './ViewFileTreeStore.ts';

export type Item = {
entryName: string;
entryMode: 'blob' | 'exec' | 'tree' | 'commit' | 'symlink' | 'unknown';
entryIcon: string;
entryIconOpen: string;
fullPath: string;
submoduleUrl?: string;
children?: Item[];
};
import type {createViewFileTreeStore, FileTreeItem} from './ViewFileTreeStore.ts';

const props = defineProps<{
item: Item,
item: FileTreeItem,
store: ReturnType<typeof createViewFileTreeStore>
}>();

@ -3,11 +3,20 @@ import {GET} from '../modules/fetch.ts';
import {pathEscapeSegments} from '../utils/url.ts';
import {createElementFromHTML} from '../utils/dom.ts';
import {html} from '../utils/html.ts';
import type {Item} from './ViewFileTreeItem.vue';

export type FileTreeItem = {
entryName: string;
entryMode: 'blob' | 'exec' | 'tree' | 'commit' | 'symlink' | 'unknown';
entryIcon: string;
entryIconOpen: string;
fullPath: string;
submoduleUrl?: string;
children?: Array<FileTreeItem>;
};

export function createViewFileTreeStore(props: {repoLink: string, treePath: string, currentRefNameSubURL: string}) {
const store = reactive({
rootFiles: [] as Array<Item>,
rootFiles: [] as Array<FileTreeItem>,
selectedItem: props.treePath,

async loadChildren(treePath: string, subPath: string = '') {

web_src/js/globals.d.ts (vendored)
@ -12,8 +12,8 @@ declare module '*.vue' {
import type {DefineComponent} from 'vue';
const component: DefineComponent<unknown, unknown, any>;
export default component;
// List of named exports from vue components, used to make `tsc` output clean.
// To actually lint .vue files, `vue-tsc` is used because `tsc` can not parse them.
// Here we declare all exports from vue files so `tsc` or `tsgo` can work for
// non-vue files. To lint .vue files, `vue-tsc` must be used.
export function initDashboardRepoList(): void;
export function initRepositoryActionView(): void;
}