123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345 |
- // Copyright 2014 The Gogs Authors. All rights reserved.
- // Use of this source code is governed by a MIT-style
- // license that can be found in the LICENSE file.
- package base
- import (
- "bytes"
- "fmt"
- "io"
- "net/http"
- "path"
- "path/filepath"
- "regexp"
- "strings"
- "github.com/Unknwon/com"
- "github.com/russross/blackfriday"
- "golang.org/x/net/html"
- "github.com/gogits/gogs/modules/setting"
- )
// isletter reports whether c is an ASCII letter (a-z or A-Z).
func isletter(c byte) bool {
	switch {
	case 'a' <= c && c <= 'z':
		return true
	case 'A' <= c && c <= 'Z':
		return true
	}
	return false
}
- func isalnum(c byte) bool {
- return (c >= '0' && c <= '9') || isletter(c)
- }
// validLinks holds the lowercase URL scheme prefixes that isLink accepts.
// NOTE(review): "mailto://" is not the standard mailto form (RFC 6068 uses
// "mailto:" with no slashes) — kept as-is to preserve existing matching.
var validLinks = [][]byte{[]byte("http://"), []byte("https://"), []byte("ftp://"), []byte("mailto://")}
- func isLink(link []byte) bool {
- for _, prefix := range validLinks {
- if len(link) > len(prefix) && bytes.Equal(bytes.ToLower(link[:len(prefix)]), prefix) && isalnum(link[len(prefix)]) {
- return true
- }
- }
- return false
- }
// IsMarkdownFile reports whether the file name carries a Markdown
// extension, compared case-insensitively.
func IsMarkdownFile(name string) bool {
	ext := filepath.Ext(strings.ToLower(name))
	return ext == ".md" || ext == ".markdown" || ext == ".mdown" || ext == ".mkd"
}
// IsTextFile sniffs the content type of data (via http.DetectContentType)
// and reports whether it is text-like. It returns the detected content type
// and true when the type contains "text/".
func IsTextFile(data []byte) (string, bool) {
	contentType := http.DetectContentType(data)
	// strings.Contains replaces the non-idiomatic `Index(...) != -1` form.
	return contentType, strings.Contains(contentType, "text/")
}
// IsImageFile sniffs the content type of data (via http.DetectContentType)
// and reports whether it is an image. It returns the detected content type
// and true when the type contains "image/".
func IsImageFile(data []byte) (string, bool) {
	contentType := http.DetectContentType(data)
	// strings.Contains replaces the non-idiomatic `Index(...) != -1` form.
	return contentType, strings.Contains(contentType, "image/")
}
// IsReadmeFile returns true if given file name suppose to be a README file:
// either exactly "readme" or any "readme.<ext>" variant, compared
// case-insensitively. Names such as "readme2" or "my-readme" do not match.
func IsReadmeFile(name string) bool {
	name = strings.ToLower(name)
	// Equivalent to the original manual length/slice checks: a name shorter
	// than "readme" can match neither branch.
	return name == "readme" || strings.HasPrefix(name, "readme.")
}
var (
	// MentionPattern matches "@username" mentions preceded by start-of-text
	// or whitespace.
	MentionPattern = regexp.MustCompile(`(\s|^)@[0-9a-zA-Z_\.]+`)

	// commitPattern matches a full commit URL (".../commit/<sha>") with an
	// optional "#fragment" anchor.
	commitPattern = regexp.MustCompile(`(\s|^)https?.*commit/[0-9a-zA-Z]+(#+[0-9a-zA-Z-]*)?`)
	// issueFullPattern matches a full issue URL (".../issues/<n>") with an
	// optional "#fragment" anchor.
	issueFullPattern = regexp.MustCompile(`(\s|^)https?.*issues/[0-9]+(#+[0-9a-zA-Z-]*)?`)
	// issueIndexPattern matches a short issue reference such as "#123",
	// optionally preceded by a space or "(" (which is preserved on replace).
	issueIndexPattern = regexp.MustCompile(`( |^|\()#[0-9]+\b`)
	// sha1CurrentPattern matches a bare 40-hex-digit SHA-1.
	sha1CurrentPattern = regexp.MustCompile(`\b[0-9a-f]{40}\b`)
)
// CustomRender wraps a blackfriday renderer to rewrite relative links and
// image sources against a repository URL prefix.
type CustomRender struct {
	blackfriday.Renderer // embedded base renderer; un-overridden methods fall through to it

	// urlPrefix is the base URL that relative links are resolved against
	// (Image rewrites "/src/" to "/raw/" within it before use).
	urlPrefix string
}
- func (r *CustomRender) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
- if len(link) > 0 && !isLink(link) {
- if link[0] == '#' {
- // link = append([]byte(options.urlPrefix), link...)
- } else {
- link = []byte(path.Join(r.urlPrefix, string(link)))
- }
- }
- r.Renderer.Link(out, link, title, content)
- }
// AutoLink renders an automatically detected link. Commit and issue URLs
// are rewritten into compact display forms; everything else is delegated to
// the embedded renderer.
func (r *CustomRender) AutoLink(out *bytes.Buffer, link []byte, kind int) {
	// NOTE(review): kind 1 is presumably blackfriday's normal-link kind (as
	// opposed to e-mail autolinks) — confirm against the vendored blackfriday.
	if kind != 1 {
		r.Renderer.AutoLink(out, link, kind)
		return
	}

	// This method could only possibly serve one link at a time, no need to find all.
	m := commitPattern.Find(link)
	if m != nil {
		m = bytes.TrimSpace(m)
		i := strings.Index(string(m), "commit/")
		j := strings.Index(string(m), "#")
		if j == -1 {
			// No anchor fragment: the SHA runs to the end of the match.
			j = len(m)
		}
		// i+7 skips the "commit/" marker; m[i+7:j] is the commit SHA with
		// any "#fragment" excluded.
		out.WriteString(fmt.Sprintf(` <code><a href="%s">%s</a></code>`, m, ShortSha(string(m[i+7:j]))))
		return
	}

	m = issueFullPattern.Find(link)
	if m != nil {
		m = bytes.TrimSpace(m)
		i := strings.Index(string(m), "issues/")
		j := strings.Index(string(m), "#")
		if j == -1 {
			j = len(m)
		}
		// i+7 skips "issues/"; the slice is the numeric issue index.
		// NOTE(review): passing an issue index through ShortSha looks odd but
		// appears harmless for short strings — verify ShortSha's behavior.
		out.WriteString(fmt.Sprintf(` <a href="%s">#%s</a>`, m, ShortSha(string(m[i+7:j]))))
		return
	}

	r.Renderer.AutoLink(out, link, kind)
}
var (
	// Suffixes used by Image to detect external .svg images (typically CI
	// status badges), with and without a trailing query string.
	svgSuffix         = []byte(".svg")
	svgSuffixWithMark = []byte(".svg?")
)
- func (r *CustomRender) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
- prefix := strings.Replace(r.urlPrefix, "/src/", "/raw/", 1)
- if len(link) > 0 {
- if isLink(link) {
- // External link with .svg suffix usually means CI status.
- if bytes.HasSuffix(link, svgSuffix) || bytes.Contains(link, svgSuffixWithMark) {
- r.Renderer.Image(out, link, title, alt)
- return
- }
- } else {
- if link[0] != '/' {
- prefix += "/"
- }
- link = []byte(prefix + string(link))
- }
- }
- out.WriteString(`<a href="`)
- out.Write(link)
- out.WriteString(`">`)
- r.Renderer.Image(out, link, title, alt)
- out.WriteString("</a>")
- }
// cutoutVerbosePrefix truncates prefix just before its third '/' character.
// If prefix contains fewer than three slashes it is returned unchanged.
func cutoutVerbosePrefix(prefix string) string {
	slashes := 0
	for i := 0; i < len(prefix); i++ {
		if prefix[i] != '/' {
			continue
		}
		slashes++
		if slashes == 3 {
			return prefix[:i]
		}
	}
	return prefix
}
// RenderIssueIndexPattern renders short issue references (e.g. "#123") in
// rawBytes as HTML links. With metas == nil, links point at this instance's
// "<urlPrefix>/issues/<n>"; otherwise metas describes an external issue
// tracker whose metas["format"] template is expanded per match.
func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
	urlPrefix = cutoutVerbosePrefix(urlPrefix)
	ms := issueIndexPattern.FindAll(rawBytes, -1)
	for _, m := range ms {
		var space string
		m2 := m
		// The pattern may capture one leading space or '(' — split it off
		// here so it can be re-emitted in front of the generated anchor.
		if m2[0] != '#' {
			space = string(m2[0])
			m2 = m2[1:]
		}
		if metas == nil {
			// Replace only the first occurrence: later iterations handle
			// further matches of the same text.
			rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s/issues/%s">%s</a>`,
				space, urlPrefix, m2[1:], m2)), 1)
		} else {
			// Support for external issue tracker
			// NOTE(review): mutates the caller's metas map ("index" key).
			metas["index"] = string(m2[1:])
			rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(`%s<a href="%s">%s</a>`,
				space, com.Expand(metas["format"], metas), m2)), 1)
		}
	}
	return rawBytes
}
- func RenderSpecialLink(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
- ms := MentionPattern.FindAll(rawBytes, -1)
- for _, m := range ms {
- m = bytes.TrimSpace(m)
- rawBytes = bytes.Replace(rawBytes, m,
- []byte(fmt.Sprintf(`<a href="%s/%s">%s</a>`, setting.AppSubUrl, m[1:], m)), -1)
- }
- rawBytes = RenderIssueIndexPattern(rawBytes, urlPrefix, metas)
- rawBytes = RenderSha1CurrentPattern(rawBytes, urlPrefix)
- return rawBytes
- }
- func RenderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte {
- ms := sha1CurrentPattern.FindAll(rawBytes, -1)
- for _, m := range ms {
- rawBytes = bytes.Replace(rawBytes, m, []byte(fmt.Sprintf(
- `<a href="%s/commit/%s"><code>%s</code></a>`, urlPrefix, m, ShortSha(string(m)))), -1)
- }
- return rawBytes
- }
- func RenderRawMarkdown(body []byte, urlPrefix string) []byte {
- htmlFlags := 0
- htmlFlags |= blackfriday.HTML_SKIP_STYLE
- htmlFlags |= blackfriday.HTML_OMIT_CONTENTS
- renderer := &CustomRender{
- Renderer: blackfriday.HtmlRenderer(htmlFlags, "", ""),
- urlPrefix: urlPrefix,
- }
- // set up the parser
- extensions := 0
- extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
- extensions |= blackfriday.EXTENSION_TABLES
- extensions |= blackfriday.EXTENSION_FENCED_CODE
- extensions |= blackfriday.EXTENSION_AUTOLINK
- extensions |= blackfriday.EXTENSION_STRIKETHROUGH
- extensions |= blackfriday.EXTENSION_SPACE_HEADERS
- extensions |= blackfriday.EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK
- if setting.Markdown.EnableHardLineBreak {
- extensions |= blackfriday.EXTENSION_HARD_LINE_BREAK
- }
- body = blackfriday.Markdown(body, renderer, extensions)
- return body
- }
var (
	// Byte fragments used to synthesize a close tag ("</" + name + ">")
	// when re-emitting end tags in PostProcessMarkdown.
	leftAngleBracket  = []byte("</")
	rightAngleBracket = []byte(">")
)

// noEndTags lists void HTML elements that never receive a matching close tag.
var noEndTags = []string{"img", "input", "br", "hr"}
// PostProcessMarkdown treats different types of HTML differently,
// and only renders special links for plain text blocks.
// Content inside <a>, <code> and <pre> is copied through verbatim. If the
// tokenizer stops on anything other than EOF, the input is returned
// unmodified.
func PostProcessMarkdown(rawHtml []byte, urlPrefix string, metas map[string]string) []byte {
	// Stack of currently open (non-void) tag names, used to re-emit close tags.
	startTags := make([]string, 0, 5)
	var buf bytes.Buffer
	tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))

OUTER_LOOP:
	for html.ErrorToken != tokenizer.Next() {
		token := tokenizer.Token()
		switch token.Type {
		case html.TextToken:
			// Plain text: this is the only place mentions, issue references
			// and SHAs are linkified.
			buf.Write(RenderSpecialLink([]byte(token.String()), urlPrefix, metas))
		case html.StartTagToken:
			buf.WriteString(token.String())
			tagName := token.Data
			// If this is an excluded tag, we skip processing all output until a close tag is encountered.
			if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
				// stackNum tracks nesting so that an inner tag of the same
				// name does not end the verbatim region early.
				stackNum := 1
				for html.ErrorToken != tokenizer.Next() {
					token = tokenizer.Token()
					// Copy the token to the output verbatim
					buf.WriteString(token.String())
					if token.Type == html.StartTagToken {
						stackNum++
					}
					// If this is the close tag to the outer-most, we are done
					if token.Type == html.EndTagToken && strings.EqualFold(tagName, token.Data) {
						stackNum--
						if stackNum == 0 {
							break
						}
					}
				}
				continue OUTER_LOOP
			}
			// Void elements never get a close tag, so they are not pushed.
			if !com.IsSliceContainsStr(noEndTags, token.Data) {
				startTags = append(startTags, token.Data)
			}
		case html.EndTagToken:
			if len(startTags) == 0 {
				buf.WriteString(token.String())
				break
			}
			// NOTE(review): the close tag is re-emitted from the top of the
			// stack rather than from the token itself, so mismatched close
			// tags are silently "corrected" to the most recently opened tag.
			buf.Write(leftAngleBracket)
			buf.WriteString(startTags[len(startTags)-1])
			buf.Write(rightAngleBracket)
			startTags = startTags[:len(startTags)-1]
		default:
			buf.WriteString(token.String())
		}
	}

	if io.EOF == tokenizer.Err() {
		return buf.Bytes()
	}

	// If we are not at the end of the input, then some other parsing error has occurred,
	// so return the input verbatim.
	return rawHtml
}
- func RenderMarkdown(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
- result := RenderRawMarkdown(rawBytes, urlPrefix)
- result = PostProcessMarkdown(result, urlPrefix, metas)
- result = Sanitizer.SanitizeBytes(result)
- return result
- }
- func RenderMarkdownString(raw, urlPrefix string, metas map[string]string) string {
- return string(RenderMarkdown([]byte(raw), urlPrefix, metas))
- }
|