
Merge pull request #1020 from jcracknell/tag-exclude-nethtml

Exclude HTML tags from Markdown post-processing
无闻 2015-03-23 18:03:47 -04:00
commit 750d82b8e2
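
For context, a minimal sketch of how the changed entry point below might be called. The modules/base import path, the sample input, and the urlPrefix value are assumptions made for illustration; the exact link markup emitted by RenderSpecialLink is not reproduced here.

package main

import (
	"fmt"

	"github.com/gogits/gogs/modules/base"
)

func main() {
	// After this change, references inside <a>, <code>, and <pre> tags are
	// copied through verbatim; only plain text outside those tags is
	// post-processed into special links.
	rendered := base.RenderMarkdown([]byte("see #1, but not `#1`"), "https://example.com/user/repo")
	fmt.Println(string(rendered))
}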

@@ -7,6 +7,7 @@ package base
 import (
 	"bytes"
 	"fmt"
+	"io"
 	"net/http"
 	"path"
 	"path/filepath"
@@ -16,6 +17,8 @@ import (
 	"github.com/russross/blackfriday"
 
 	"github.com/gogits/gogs/modules/setting"
+
+	"golang.org/x/net/html"
 )
 
 func isletter(c byte) bool {
@@ -217,12 +220,53 @@ func RenderRawMarkdown(body []byte, urlPrefix string) []byte {
 }
 
 func RenderMarkdown(rawBytes []byte, urlPrefix string) []byte {
-	body := RenderSpecialLink(rawBytes, urlPrefix)
-	body = RenderRawMarkdown(body, urlPrefix)
-	body = Sanitizer.SanitizeBytes(body)
-	return body
+	result := RenderRawMarkdown(rawBytes, urlPrefix)
+	result = PostProcessMarkdown(result, urlPrefix)
+	result = Sanitizer.SanitizeBytes(result)
+	return result
 }
 
 func RenderMarkdownString(raw, urlPrefix string) string {
 	return string(RenderMarkdown([]byte(raw), urlPrefix))
 }
+
+func PostProcessMarkdown(rawHtml []byte, urlPrefix string) []byte {
+	var buf bytes.Buffer
+	tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))
+	for html.ErrorToken != tokenizer.Next() {
+		token := tokenizer.Token()
+		switch token.Type {
+		case html.TextToken:
+			text := []byte(token.String())
+			text = RenderSpecialLink(text, urlPrefix)
+			buf.Write(text)
+
+		case html.StartTagToken:
+			buf.WriteString(token.String())
+
+			tagName := token.Data
+			// If this is an excluded tag, we skip processing all output until a close tag is encountered
+			if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
+				for html.ErrorToken != tokenizer.Next() {
+					token = tokenizer.Token()
+					// Copy the token to the output verbatim
+					buf.WriteString(token.String())
+					// If this is the close tag, we are done
+					if html.EndTagToken == token.Type && strings.EqualFold(tagName, token.Data) {
+						break
+					}
+				}
+			}
+
+		default:
+			buf.WriteString(token.String())
+		}
+	}
+
+	if io.EOF == tokenizer.Err() {
+		return buf.Bytes()
+	}
+
+	// If we are not at the end of the input, then some other parsing error has occurred, so return
+	// the input verbatim.
+	return rawHtml
+}
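
As a standalone illustration of the exclusion technique introduced above, the following sketch tokenizes an HTML fragment with golang.org/x/net/html and transforms text only outside <code> tags; the uppercasing stands in for RenderSpecialLink and the fragment is invented for the example.

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"

	"golang.org/x/net/html"
)

// process uppercases text outside <code> (standing in for RenderSpecialLink)
// and copies everything between <code> and </code> through verbatim.
func process(rawHTML string) string {
	var buf bytes.Buffer
	tokenizer := html.NewTokenizer(strings.NewReader(rawHTML))
	for tokenizer.Next() != html.ErrorToken {
		token := tokenizer.Token()
		switch token.Type {
		case html.TextToken:
			buf.WriteString(strings.ToUpper(token.String()))
		case html.StartTagToken:
			buf.WriteString(token.String())
			if strings.EqualFold("code", token.Data) {
				tagName := token.Data
				// Copy tokens verbatim until the matching close tag.
				for tokenizer.Next() != html.ErrorToken {
					token = tokenizer.Token()
					buf.WriteString(token.String())
					if token.Type == html.EndTagToken && strings.EqualFold(tagName, token.Data) {
						break
					}
				}
			}
		default:
			buf.WriteString(token.String())
		}
	}
	if tokenizer.Err() != io.EOF {
		// Any error other than EOF means the fragment could not be fully
		// tokenized; fall back to the untouched input, as PostProcessMarkdown does.
		return rawHTML
	}
	return buf.String()
}

func main() {
	fmt.Println(process("<p>issue #1 and <code>#1</code></p>"))
	// Prints: <p>ISSUE #1 AND <code>#1</code></p>
}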