#637 #738 fix markdown: render special links

- code format for #1020
Unknwon 2015-03-23 18:32:24 -04:00
parent 750d82b8e2
commit ab6b288141

@@ -15,10 +15,9 @@ import (
     "strings"
 
     "github.com/russross/blackfriday"
-
-    "golang.org/x/net/html"
 
     "github.com/gogits/gogs/modules/setting"
+    "golang.org/x/net/html"
 )
 
 func isletter(c byte) bool {
@@ -109,40 +108,22 @@ func (options *CustomRender) Image(out *bytes.Buffer, link []byte, title []byte,
 }
 
 var (
-    MentionPattern     = regexp.MustCompile(`(\s|^)@[0-9a-zA-Z_]+`)
+    MentionPattern     = regexp.MustCompile(`(\s|^)@[0-9a-zA-Z_\.]+`)
     commitPattern      = regexp.MustCompile(`(\s|^)https?.*commit/[0-9a-zA-Z]+(#+[0-9a-zA-Z-]*)?`)
     issueFullPattern   = regexp.MustCompile(`(\s|^)https?.*issues/[0-9]+(#+[0-9a-zA-Z-]*)?`)
-    issueIndexPattern  = regexp.MustCompile(`( |^)#[0-9]+`)
+    issueIndexPattern  = regexp.MustCompile(`( |^)#[0-9]+\b`)
     sha1CurrentPattern = regexp.MustCompile(`\b[0-9a-f]{40}\b`)
 )
 
 func RenderSpecialLink(rawBytes []byte, urlPrefix string) []byte {
-    buf := bytes.NewBufferString("")
-    inCodeBlock := false
-    codeBlockPrefix := []byte("```")
-    lineBreak := []byte("\n")
-    tab := []byte("\t")
-    lines := bytes.Split(rawBytes, lineBreak)
-    for _, line := range lines {
-        if bytes.HasPrefix(line, codeBlockPrefix) {
-            inCodeBlock = !inCodeBlock
-        }
-
-        if !inCodeBlock && !bytes.HasPrefix(line, tab) {
-            ms := MentionPattern.FindAll(line, -1)
-            for _, m := range ms {
-                m = bytes.TrimSpace(m)
-                line = bytes.Replace(line, m,
-                    []byte(fmt.Sprintf(`<a href="%s/%s">%s</a>`, setting.AppSubUrl, m[1:], m)), -1)
-            }
-        }
-
-        buf.Write(line)
-        buf.Write(lineBreak)
-    }
-    rawBytes = buf.Bytes()
-    ms := commitPattern.FindAll(rawBytes, -1)
+    ms := MentionPattern.FindAll(rawBytes, -1)
+    for _, m := range ms {
+        m = bytes.TrimSpace(m)
+        rawBytes = bytes.Replace(rawBytes, m,
+            []byte(fmt.Sprintf(`<a href="%s/%s">%s</a>`, setting.AppSubUrl, m[1:], m)), -1)
+    }
+
+    ms = commitPattern.FindAll(rawBytes, -1)
     for _, m := range ms {
         m = bytes.TrimSpace(m)
         i := strings.Index(string(m), "commit/")
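
The two pattern changes are easiest to see in isolation. Below is a minimal standalone sketch (not part of the diff) that reproduces the updated expressions: the added `\.` lets a mention like @john.doe match in full, and the trailing `\b` stops an issue reference from matching into trailing word characters. The sample strings are made up for illustration.

```go
package main

import (
	"fmt"
	"regexp"
)

var (
	// Updated patterns from the hunk above.
	mentionPattern    = regexp.MustCompile(`(\s|^)@[0-9a-zA-Z_\.]+`)
	issueIndexPattern = regexp.MustCompile(`( |^)#[0-9]+\b`)
)

func main() {
	// Dots are now part of a mention, so the whole name is captured.
	// Matches keep the leading whitespace consumed by (\s|^).
	fmt.Printf("%q\n", mentionPattern.FindAllString("ping @john.doe and @alice", -1))
	// [" @john.doe" " @alice"]

	// The \b rejects "#456abc": every digit is followed by another word
	// character, so no word boundary can terminate the match.
	fmt.Printf("%q\n", issueIndexPattern.FindAllString("see #123 and #456abc", -1))
	// [" #123"]
}
```
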
@@ -181,8 +162,14 @@ func RenderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte {
 func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string) []byte {
     ms := issueIndexPattern.FindAll(rawBytes, -1)
     for _, m := range ms {
-        rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`<a href="%s/issues/%s">%s</a>`,
-            urlPrefix, strings.TrimPrefix(string(m[1:]), "#"), m)), -1)
+        var space string
+        m2 := m
+        if m2[0] == ' ' {
+            space = " "
+            m2 = m2[1:]
+        }
+        rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s/issues/%s">%s</a>`,
+            space, urlPrefix, m2[1:], m2)), 1)
     }
     return rawBytes
 }
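
The effect of this hunk: the old code folded the leading space of each match into the anchor text and replaced every occurrence, while the new code re-emits the space in front of the link and replaces only the first occurrence. A minimal standalone sketch of the new logic; the helper name and the repository URL prefix are made-up placeholders:

```go
package main

import (
	"bytes"
	"fmt"
	"regexp"
)

var issueIndexPattern = regexp.MustCompile(`( |^)#[0-9]+\b`)

// renderIssueIndex is a simplified, standalone copy of the patched
// RenderIssueIndexPattern above.
func renderIssueIndex(rawBytes []byte, urlPrefix string) []byte {
	ms := issueIndexPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		var space string
		m2 := m
		if m2[0] == ' ' {
			space = " "
			m2 = m2[1:]
		}
		rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s/issues/%s">%s</a>`,
			space, urlPrefix, m2[1:], m2)), 1)
	}
	return rawBytes
}

func main() {
	out := renderIssueIndex([]byte("fixes #123"), "https://example.com/owner/repo")
	fmt.Println(string(out))
	// fixes <a href="https://example.com/owner/repo/issues/123">#123</a>
}
```
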
@@ -219,6 +206,47 @@ func RenderRawMarkdown(body []byte, urlPrefix string) []byte {
     return body
 }
 
+// PostProcessMarkdown treats different types of HTML differently,
+// and only renders special links for plain text blocks.
+func PostProcessMarkdown(rawHtml []byte, urlPrefix string) []byte {
+    var buf bytes.Buffer
+    tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))
+    for html.ErrorToken != tokenizer.Next() {
+        token := tokenizer.Token()
+        switch token.Type {
+        case html.TextToken:
+            buf.Write(RenderSpecialLink([]byte(token.String()), urlPrefix))
+
+        case html.StartTagToken:
+            buf.WriteString(token.String())
+            tagName := token.Data
+            // If this is an excluded tag, we skip processing all output until a close tag is encountered.
+            if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
+                for html.ErrorToken != tokenizer.Next() {
+                    token = tokenizer.Token()
+                    // Copy the token to the output verbatim
+                    buf.WriteString(token.String())
+                    // If this is the close tag, we are done
+                    if html.EndTagToken == token.Type && strings.EqualFold(tagName, token.Data) {
+                        break
+                    }
+                }
+            }
+
+        default:
+            buf.WriteString(token.String())
+        }
+    }
+
+    if io.EOF == tokenizer.Err() {
+        return buf.Bytes()
+    }
+
+    // If we are not at the end of the input, then some other parsing error has occurred,
+    // so return the input verbatim.
+    return rawHtml
+}
+
 func RenderMarkdown(rawBytes []byte, urlPrefix string) []byte {
     result := RenderRawMarkdown(rawBytes, urlPrefix)
     result = PostProcessMarkdown(result, urlPrefix)
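
The behaviour of the tokenizer loop can be shown with a trimmed-down standalone version. It keeps the same skip logic for <a>, <code> and <pre>, but substitutes a trivial upper-casing transform for RenderSpecialLink so the effect is visible without the rest of gogs; the function name and sample HTML are made up for illustration:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"

	"golang.org/x/net/html"
)

// postProcess mirrors the structure of PostProcessMarkdown above, with an
// upper-casing stand-in for RenderSpecialLink.
func postProcess(rawHtml []byte) []byte {
	var buf bytes.Buffer
	tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))
	for html.ErrorToken != tokenizer.Next() {
		token := tokenizer.Token()
		switch token.Type {
		case html.TextToken:
			buf.WriteString(strings.ToUpper(token.String())) // stand-in for RenderSpecialLink
		case html.StartTagToken:
			buf.WriteString(token.String())
			tagName := token.Data
			if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
				// Copy everything verbatim until the matching close tag.
				for html.ErrorToken != tokenizer.Next() {
					token = tokenizer.Token()
					buf.WriteString(token.String())
					if html.EndTagToken == token.Type && strings.EqualFold(tagName, token.Data) {
						break
					}
				}
			}
		default:
			buf.WriteString(token.String())
		}
	}
	if io.EOF == tokenizer.Err() {
		return buf.Bytes()
	}
	// Any other tokenizer error: return the input unchanged.
	return rawHtml
}

func main() {
	fmt.Println(string(postProcess([]byte(`<p>see #1 and <code>#2</code></p>`))))
	// <p>SEE #1 AND <code>#2</code></p>
}
```
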
@@ -229,44 +257,3 @@ func RenderMarkdown(rawBytes []byte, urlPrefix string) []byte {
 func RenderMarkdownString(raw, urlPrefix string) string {
     return string(RenderMarkdown([]byte(raw), urlPrefix))
 }
-
-func PostProcessMarkdown(rawHtml []byte, urlPrefix string) []byte {
-    var buf bytes.Buffer
-    tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))
-    for html.ErrorToken != tokenizer.Next() {
-        token := tokenizer.Token()
-        switch token.Type {
-        case html.TextToken:
-            text := []byte(token.String())
-            text = RenderSpecialLink(text, urlPrefix)
-            buf.Write(text)
-
-        case html.StartTagToken:
-            buf.WriteString(token.String())
-            tagName := token.Data
-            // If this is an excluded tag, we skip processing all output until a close tag is encountered
-            if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
-                for html.ErrorToken != tokenizer.Next() {
-                    token = tokenizer.Token()
-
-                    // Copy the token to the output verbatim
-                    buf.WriteString(token.String())
-
-                    // If this is the close tag, we are done
-                    if html.EndTagToken == token.Type && strings.EqualFold(tagName, token.Data) { break }
-                }
-            }
-
-        default:
-            buf.WriteString(token.String())
-        }
-    }
-
-    if io.EOF == tokenizer.Err() {
-        return buf.Bytes()
-    }
-
-    // If we are not at the end of the input, then some other parsing error has occurred, so return
-    // the input verbatim.
-    return rawHtml
-}
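
The context lines of the last two hunks show the resulting pipeline: RenderMarkdown first renders markdown to HTML (RenderRawMarkdown) and only then runs PostProcessMarkdown over that HTML. A standalone sketch of the first half, using blackfriday's stock MarkdownCommon in place of gogs' CustomRender (the input string is made up and the exact output may differ slightly), shows why the post-processing step sees <code> elements rather than backticks:

```go
package main

import (
	"fmt"

	"github.com/russross/blackfriday"
)

func main() {
	// Backticks become a <code> element during markdown rendering, so a
	// post-processing pass over the HTML (like PostProcessMarkdown) can
	// linkify the bare #123 while leaving #456 inside <code> untouched.
	raw := []byte("see #123 and `#456`")
	fmt.Printf("%s", blackfriday.MarkdownCommon(raw))
	// <p>see #123 and <code>#456</code></p>
}
```
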