diff --git a/cmd/web.go b/cmd/web.go index 4fa93ba4d8..b793cf0daf 100644 --- a/cmd/web.go +++ b/cmd/web.go @@ -563,6 +563,11 @@ func runWeb(ctx *cli.Context) error { }, reqSignIn, reqRepoWriter) }, repo.MustEnableWiki, context.RepoRef()) + m.Group("/wiki", func() { + m.Get("/raw/*", repo.WikiRaw) + m.Get("/*", repo.WikiRaw) + }, repo.MustEnableWiki) + m.Get("/archive/*", repo.Download) m.Group("/pulls/:index", func() { diff --git a/modules/markdown/markdown.go b/modules/markdown/markdown.go index c0b7c91f83..5218af8f27 100644 --- a/modules/markdown/markdown.go +++ b/modules/markdown/markdown.go @@ -8,6 +8,7 @@ import ( "bytes" "fmt" "io" + "net/url" "path" "path/filepath" "regexp" @@ -35,23 +36,17 @@ var Sanitizer = bluemonday.UGCPolicy() // This function should only be called once during entire application lifecycle. func BuildSanitizer() { // Normal markdown-stuff - Sanitizer.AllowAttrs("class").Matching(regexp.MustCompile(`[\p{L}\p{N}\s\-_',:\[\]!\./\\\(\)&]*`)).OnElements("code") + Sanitizer.AllowAttrs("class").Matching(regexp.MustCompile(`[\p{L}\p{N}\s\-_',:\[\]!\./\\\(\)&]*`)).OnElements("code", "div", "ul", "ol", "dl") // Checkboxes Sanitizer.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input") Sanitizer.AllowAttrs("checked", "disabled").OnElements("input") + Sanitizer.AllowNoAttrs().OnElements("label") // Custom URL-Schemes Sanitizer.AllowURLSchemes(setting.Markdown.CustomURLSchemes...) } -var validLinksPattern = regexp.MustCompile(`^[a-z][\w-]+://`) - -// isLink reports whether link fits valid format. -func isLink(link []byte) bool { - return validLinksPattern.Match(link) -} - // IsMarkdownFile reports whether name looks like a Markdown file // based on its extension. func IsMarkdownFile(name string) bool { @@ -65,7 +60,7 @@ func IsMarkdownFile(name string) bool { } // IsReadmeFile reports whether name looks like a README file -// based on its extension. +// based on its name. func IsReadmeFile(name string) bool { name = strings.ToLower(name) if len(name) < 6 { @@ -80,13 +75,6 @@ var ( // MentionPattern matches string that mentions someone, e.g. @Unknwon MentionPattern = regexp.MustCompile(`(\s|^|\W)@[0-9a-zA-Z-_\.]+`) - // CommitPattern matches link to certain commit with or without trailing hash, - // e.g. https://try.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2 - CommitPattern = regexp.MustCompile(`(\s|^)https?.*commit/[0-9a-zA-Z]+(#+[0-9a-zA-Z-]*)?`) - - // IssueFullPattern matches link to an issue with or without trailing hash, - // e.g. https://try.gogs.io/gogs/gogs/issues/4#issue-685 - IssueFullPattern = regexp.MustCompile(`(\s|^)https?.*issues/[0-9]+(#+[0-9a-zA-Z-]*)?`) // IssueNumericPattern matches string that references to a numeric issue, e.g. #1287 IssueNumericPattern = regexp.MustCompile(`( |^|\()#[0-9]+\b`) // IssueAlphanumericPattern matches string that references to an alphanumeric issue, e.g. ABC-1234 @@ -96,11 +84,27 @@ var ( CrossReferenceIssueNumericPattern = regexp.MustCompile(`( |^)[0-9a-zA-Z]+/[0-9a-zA-Z]+#[0-9]+\b`) // Sha1CurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae - // FIXME: this pattern matches pure numbers as well, right now we do a hack to check in RenderSha1CurrentPattern + // FIXME: this pattern matches pure numbers as well, right now we do a hack to check in renderSha1CurrentPattern // by converting string to a number. 
- Sha1CurrentPattern = regexp.MustCompile(`\b[0-9a-f]{40}\b`) + Sha1CurrentPattern = regexp.MustCompile(`(?:^|\s|\()[0-9a-f]{40}\b`) + + // ShortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax + ShortLinkPattern = regexp.MustCompile(`(\[\[.*\]\]\w*)`) + + // AnySHA1Pattern allows to split url containing SHA into parts + AnySHA1Pattern = regexp.MustCompile(`http\S+//(\S+)/(\S+)/(\S+)/(\S+)/([0-9a-f]{40})(?:/?([^#\s]+)?(?:#(\S+))?)?`) + + // IssueFullPattern allows to split issue (and pull) URLs into parts + IssueFullPattern = regexp.MustCompile(`(?:^|\s|\()http\S+//((?:[^\s/]+/)+)((?:\w{1,10}-)?[1-9][0-9]*)([\?|#]\S+.(\S+)?)?\b`) + + validLinksPattern = regexp.MustCompile(`^[a-z][\w-]+://`) ) +// isLink reports whether link fits valid format. +func isLink(link []byte) bool { + return validLinksPattern.Match(link) +} + // FindAllMentions matches mention patterns in given content // and returns a list of found user names without @ prefix. func FindAllMentions(content string) []string { @@ -114,79 +118,67 @@ func FindAllMentions(content string) []string { // Renderer is a extended version of underlying render object. type Renderer struct { blackfriday.Renderer - urlPrefix string + urlPrefix string + isWikiMarkdown bool } // Link defines how formal links should be processed to produce corresponding HTML elements. func (r *Renderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) { if len(link) > 0 && !isLink(link) { if link[0] != '#' { - link = []byte(path.Join(r.urlPrefix, string(link))) + mLink := URLJoin(r.urlPrefix, string(link)) + if r.isWikiMarkdown { + mLink = URLJoin(r.urlPrefix, "wiki", string(link)) + } + link = []byte(mLink) } } r.Renderer.Link(out, link, title, content) } -// AutoLink defines how auto-detected links should be processed to produce corresponding HTML elements. -// Reference for kind: https://github.com/russross/blackfriday/blob/master/markdown.go#L69-L76 -func (r *Renderer) AutoLink(out *bytes.Buffer, link []byte, kind int) { - if kind != blackfriday.LINK_TYPE_NORMAL { - r.Renderer.AutoLink(out, link, kind) +// List renders markdown bullet or digit lists to HTML +func (r *Renderer) List(out *bytes.Buffer, text func() bool, flags int) { + marker := out.Len() + if out.Len() > 0 { + out.WriteByte('\n') + } + + if flags&blackfriday.LIST_TYPE_DEFINITION != 0 { + out.WriteString("
%s
`, m, base.ShortSha(string(m[i+7:j]))))
- return
- }
-
- m = IssueFullPattern.Find(link)
- if m != nil {
- m = bytes.TrimSpace(m)
- i := strings.Index(string(m), "issues/")
- j := strings.Index(string(m), "#")
- if j == -1 {
- j = len(m)
- }
-
- issue := string(m[i+7 : j])
- fullRepoURL := setting.AppURL + strings.TrimPrefix(r.urlPrefix, "/")
- var link string
- if strings.HasPrefix(string(m), fullRepoURL) {
- // Use a short issue reference if the URL refers to this repository
- link = fmt.Sprintf(`#%s`, m, issue)
- } else {
- // Use a cross-repository issue reference if the URL refers to a different repository
- repo := string(m[len(setting.AppURL) : i-1])
- link = fmt.Sprintf(`%s#%s`, m, repo, issue)
- }
- out.WriteString(link)
- return
- }
+ if flags&blackfriday.LIST_TYPE_DEFINITION != 0 {
+ out.WriteString("")) { + prefix = "
" + } switch { - case bytes.HasPrefix(text, []byte("[ ] ")): - text = append([]byte(``), text[3:]...) - case bytes.HasPrefix(text, []byte("[x] ")): - text = append([]byte(``), text[3:]...) + case bytes.HasPrefix(text, []byte(prefix+"[ ] ")): + text = append([]byte(`
`), text[3+len(prefix):]...) + case bytes.HasPrefix(text, []byte(prefix+"[x] ")): + text = append([]byte(``), text[3+len(prefix):]...) + } + if prefix != "" { + text = bytes.Replace(text, []byte(""), []byte{}, 1) } r.Renderer.ListItem(out, text, flags) } @@ -196,15 +188,15 @@ func (r *Renderer) ListItem(out *bytes.Buffer, text []byte, flags int) { var ( svgSuffix = []byte(".svg") svgSuffixWithMark = []byte(".svg?") - spaceBytes = []byte(" ") - spaceEncodedBytes = []byte("%20") - space = " " - spaceEncoded = "%20" ) // Image defines how images should be processed to produce corresponding HTML elements. func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) { - prefix := strings.Replace(r.urlPrefix, "/src/", "/raw/", 1) + prefix := r.urlPrefix + if r.isWikiMarkdown { + prefix = URLJoin(prefix, "wiki", "src") + } + prefix = strings.Replace(prefix, "/src/", "/raw/", 1) if len(link) > 0 { if isLink(link) { // External link with .svg suffix usually means CI status. @@ -215,10 +207,11 @@ func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byt } } else { if link[0] != '/' { - prefix += "/" + if !strings.HasSuffix(prefix, "/") { + prefix += "/" + } } - link = bytes.Replace([]byte((prefix + string(link))), spaceBytes, spaceEncodedBytes, -1) - fmt.Println(333, string(link)) + link = []byte(url.QueryEscape(prefix + string(link))) } } @@ -247,6 +240,19 @@ func cutoutVerbosePrefix(prefix string) string { return prefix } +// URLJoin joins url components, like path.Join, but preserving contents +func URLJoin(elem ...string) string { + res := "" + last := len(elem) - 1 + for i, item := range elem { + res += item + if !strings.HasSuffix(res, "/") && i != last { + res += "/" + } + } + return res +} + // RenderIssueIndexPattern renders issue indexes to corresponding links. 
func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte { urlPrefix = cutoutVerbosePrefix(urlPrefix) @@ -263,7 +269,7 @@ func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string } var link string if metas == nil { - link = fmt.Sprintf(`%s`, urlPrefix, m[1:], m) + link = fmt.Sprintf(`%s`, URLJoin(urlPrefix, "issues", string(m[1:])), m) } else { // Support for external issue tracker if metas["style"] == IssueNameStyleAlphanumeric { @@ -278,6 +284,238 @@ func RenderIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string return rawBytes } +// IsSameDomain checks if given url string has the same hostname as current Gitea instance +func IsSameDomain(s string) bool { + if uapp, err := url.Parse(setting.AppURL); err == nil { + if u, err := url.Parse(s); err == nil { + return u.Host == uapp.Host + } + return false + } + return false +} + +// renderFullSha1Pattern renders SHA containing URLs +func renderFullSha1Pattern(rawBytes []byte, urlPrefix string) []byte { + ms := AnySHA1Pattern.FindAllSubmatch(rawBytes, -1) + for _, m := range ms { + all := m[0] + paths := string(m[1]) + var path = "//" + paths + author := string(m[2]) + repoName := string(m[3]) + path = URLJoin(path, author, repoName) + ltype := "src" + itemType := m[4] + if IsSameDomain(paths) { + ltype = string(itemType) + } else if string(itemType) == "commit" { + ltype = "commit" + } + sha := m[5] + var subtree string + if len(m) > 6 && len(m[6]) > 0 { + subtree = string(m[6]) + } + var line []byte + if len(m) > 7 && len(m[7]) > 0 { + line = m[7] + } + urlSuffix := "" + text := base.ShortSha(string(sha)) + if subtree != "" { + urlSuffix = "/" + subtree + text += urlSuffix + } + if line != nil { + value := string(line) + urlSuffix += "#" + urlSuffix += value + text += " (" + text += value + text += ")" + } + rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf( + `%s`, URLJoin(path, ltype, string(sha))+urlSuffix, text)), -1) + } + return rawBytes +} + +// renderFullIssuePattern renders issues-like URLs +func renderFullIssuePattern(rawBytes []byte, urlPrefix string) []byte { + ms := IssueFullPattern.FindAllSubmatch(rawBytes, -1) + for _, m := range ms { + all := m[0] + paths := bytes.Split(m[1], []byte("/")) + paths = paths[:len(paths)-1] + if bytes.HasPrefix(paths[0], []byte("gist.")) { + continue + } + var path string + if len(paths) > 3 { + // Internal one + path = URLJoin(urlPrefix, "issues") + } else { + path = "//" + string(m[1]) + } + id := string(m[2]) + path = URLJoin(path, id) + var comment []byte + if len(m) > 3 { + comment = m[3] + } + urlSuffix := "" + text := "#" + id + if comment != nil { + urlSuffix += string(comment) + text += " " + } + rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf( + `%s`, path, urlSuffix, text)), -1) + } + return rawBytes +} + +func firstIndexOfByte(sl []byte, target byte) int { + for i := 0; i < len(sl); i++ { + if sl[i] == target { + return i + } + } + return -1 +} + +func lastIndexOfByte(sl []byte, target byte) int { + for i := len(sl) - 1; i >= 0; i-- { + if sl[i] == target { + return i + } + } + return -1 +} + +// renderShortLinks processes [[syntax]] +func renderShortLinks(rawBytes []byte, urlPrefix string, noLink bool) []byte { + ms := ShortLinkPattern.FindAll(rawBytes, -1) + for _, m := range ms { + orig := bytes.TrimSpace(m) + m = orig[2:] + tailPos := lastIndexOfByte(m, ']') + 1 + tail := []byte{} + if tailPos < len(m) { + tail = m[tailPos:] + m = m[:tailPos-1] + } + m = m[:len(m)-2] + props := 
map[string]string{} + + // MediaWiki uses [[link|text]], while GitHub uses [[text|link]] + // It makes page handling terrible, but we prefer GitHub syntax + // And fall back to MediaWiki only when it is obvious from the look + // Of text and link contents + sl := bytes.Split(m, []byte("|")) + for _, v := range sl { + switch bytes.Count(v, []byte("=")) { + + // Piped args without = sign, these are mandatory arguments + case 0: + { + sv := string(v) + if props["name"] == "" { + if isLink(v) { + // If we clearly see it is a link, we save it so + + // But first we need to ensure, that if both mandatory args provided + // look like links, we stick to GitHub syntax + if props["link"] != "" { + props["name"] = props["link"] + } + + props["link"] = strings.TrimSpace(sv) + } else { + props["name"] = sv + } + } else { + props["link"] = strings.TrimSpace(sv) + } + } + + // Piped args with = sign, these are optional arguments + case 1: + { + sep := firstIndexOfByte(v, '=') + key, val := string(v[:sep]), html.UnescapeString(string(v[sep+1:])) + lastCharIndex := len(val) - 1 + if (val[0] == '"' || val[0] == '\'') && (val[lastCharIndex] == '"' || val[lastCharIndex] == '\'') { + val = val[1:lastCharIndex] + } + props[key] = val + } + } + } + + var name string + var link string + if props["link"] != "" { + link = props["link"] + } else if props["name"] != "" { + link = props["name"] + } + if props["title"] != "" { + name = props["title"] + } else if props["name"] != "" { + name = props["name"] + } else { + name = link + } + + name += string(tail) + image := false + ext := filepath.Ext(string(link)) + if ext != "" { + switch ext { + case ".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp", ".gif", ".bmp", ".ico", ".svg": + { + image = true + } + } + } + absoluteLink := isLink([]byte(link)) + if !absoluteLink { + link = url.QueryEscape(link) + } + if image { + if !absoluteLink { + link = URLJoin(urlPrefix, "wiki", "raw", link) + } + title := props["title"] + if title == "" { + title = props["alt"] + } + if title == "" { + title = path.Base(string(name)) + } + alt := props["alt"] + if alt == "" { + alt = name + } + if alt != "" { + alt = `alt="` + alt + `"` + } + name = fmt.Sprintf(``, link, alt, title) + } else if !absoluteLink { + link = URLJoin(urlPrefix, "wiki", link) + } + if noLink { + rawBytes = bytes.Replace(rawBytes, orig, []byte(name), -1) + } else { + rawBytes = bytes.Replace(rawBytes, orig, + []byte(fmt.Sprintf(`%s`, link, name)), -1) + } + } + return rawBytes +} + // RenderCrossReferenceIssueIndexPattern renders issue indexes from other repositories to corresponding links. func RenderCrossReferenceIssueIndexPattern(rawBytes []byte, urlPrefix string, metas map[string]string) []byte { ms := CrossReferenceIssueNumericPattern.FindAll(rawBytes, -1) @@ -289,20 +527,24 @@ func RenderCrossReferenceIssueIndexPattern(rawBytes []byte, urlPrefix string, me repo := string(bytes.Split(m, []byte("#"))[0]) issue := string(bytes.Split(m, []byte("#"))[1]) - link := fmt.Sprintf(`%s`, setting.AppURL, repo, issue, m) + link := fmt.Sprintf(`%s`, URLJoin(urlPrefix, repo, "issues", issue), m) rawBytes = bytes.Replace(rawBytes, m, []byte(link), 1) } return rawBytes } -// RenderSha1CurrentPattern renders SHA1 strings to corresponding links that assumes in the same repository. 
-func RenderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte { - return []byte(Sha1CurrentPattern.ReplaceAllStringFunc(string(rawBytes[:]), func(m string) string { - if com.StrTo(m).MustInt() > 0 { - return m +// renderSha1CurrentPattern renders SHA1 strings to corresponding links that assumes in the same repository. +func renderSha1CurrentPattern(rawBytes []byte, urlPrefix string) []byte { + ms := Sha1CurrentPattern.FindAllSubmatch(rawBytes, -1) + for _, m := range ms { + all := m[0] + if com.StrTo(all).MustInt() > 0 { + continue } - return fmt.Sprintf(`%s
`, urlPrefix, m, base.ShortSha(m))
- }))
+ rawBytes = bytes.Replace(rawBytes, all, []byte(fmt.Sprintf(
+ `%s`, URLJoin(urlPrefix, "commit", string(all)), base.ShortSha(string(all)))), -1)
+ }
+ return rawBytes
}
// RenderSpecialLink renders mentions, indexes and SHA1 strings to corresponding links.
@@ -311,23 +553,27 @@ func RenderSpecialLink(rawBytes []byte, urlPrefix string, metas map[string]strin
for _, m := range ms {
m = m[bytes.Index(m, []byte("@")):]
rawBytes = bytes.Replace(rawBytes, m,
- []byte(fmt.Sprintf(`%s`, setting.AppSubURL, m[1:], m)), -1)
+ []byte(fmt.Sprintf(`%s`, URLJoin(setting.AppURL, string(m[1:])), m)), -1)
}
+ rawBytes = renderShortLinks(rawBytes, urlPrefix, false)
rawBytes = RenderIssueIndexPattern(rawBytes, urlPrefix, metas)
rawBytes = RenderCrossReferenceIssueIndexPattern(rawBytes, urlPrefix, metas)
- rawBytes = RenderSha1CurrentPattern(rawBytes, urlPrefix)
+ rawBytes = renderFullSha1Pattern(rawBytes, urlPrefix)
+ rawBytes = renderSha1CurrentPattern(rawBytes, urlPrefix)
+ rawBytes = renderFullIssuePattern(rawBytes, urlPrefix)
return rawBytes
}
// RenderRaw renders Markdown to HTML without handling special links.
-func RenderRaw(body []byte, urlPrefix string) []byte {
+func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
htmlFlags := 0
htmlFlags |= blackfriday.HTML_SKIP_STYLE
htmlFlags |= blackfriday.HTML_OMIT_CONTENTS
renderer := &Renderer{
- Renderer: blackfriday.HtmlRenderer(htmlFlags, "", ""),
- urlPrefix: urlPrefix,
+ Renderer: blackfriday.HtmlRenderer(htmlFlags, "", ""),
+ urlPrefix: urlPrefix,
+ isWikiMarkdown: wikiMarkdown,
}
// set up the parser
@@ -335,9 +581,7 @@ func RenderRaw(body []byte, urlPrefix string) []byte {
extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
extensions |= blackfriday.EXTENSION_TABLES
extensions |= blackfriday.EXTENSION_FENCED_CODE
- extensions |= blackfriday.EXTENSION_AUTOLINK
extensions |= blackfriday.EXTENSION_STRIKETHROUGH
- extensions |= blackfriday.EXTENSION_SPACE_HEADERS
extensions |= blackfriday.EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK
if setting.Markdown.EnableHardLineBreak {
@@ -379,10 +623,12 @@ OUTER_LOOP:
token = tokenizer.Token()
// Copy the token to the output verbatim
- buf.WriteString(token.String())
+ buf.Write(renderShortLinks([]byte(token.String()), urlPrefix, true))
if token.Type == html.StartTagToken {
- stackNum++
+ if !com.IsSliceContainsStr(noEndTags, token.Data) {
+ stackNum++
+ }
}
// If this is the close tag to the outer-most, we are done
@@ -425,16 +671,26 @@ OUTER_LOOP:
return rawHTML
}
-// Render renders Markdown to HTML with special links.
-func Render(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
- urlPrefix = strings.Replace(urlPrefix, space, spaceEncoded, -1)
- result := RenderRaw(rawBytes, urlPrefix)
+// render renders Markdown to HTML with all the special link handling applied.
+func render(rawBytes []byte, urlPrefix string, metas map[string]string, isWikiMarkdown bool) []byte {
+ urlPrefix = strings.Replace(urlPrefix, " ", "%20", -1)
+ result := RenderRaw(rawBytes, urlPrefix, isWikiMarkdown)
result = PostProcess(result, urlPrefix, metas)
result = Sanitizer.SanitizeBytes(result)
return result
}
+// Render renders Markdown to HTML with all the special link handling applied.
+func Render(rawBytes []byte, urlPrefix string, metas map[string]string) []byte {
+ return render(rawBytes, urlPrefix, metas, false)
+}
+
// RenderString renders Markdown to HTML with special links and returns string type.
func RenderString(raw, urlPrefix string, metas map[string]string) string {
- return string(Render([]byte(raw), urlPrefix, metas))
+ return string(render([]byte(raw), urlPrefix, metas, false))
+}
+
+// RenderWiki renders a markdown wiki page to HTML and returns it as a string
+func RenderWiki(rawBytes []byte, urlPrefix string, metas map[string]string) string {
+ return string(render(rawBytes, urlPrefix, metas, true))
}
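
A minimal stand-alone sketch of the URLJoin helper introduced above; the repository path and issue number are made-up examples. It also shows why path.Join is not reused here: path.Clean would collapse the scheme's double slash.

package main

import (
	"fmt"
	"path"

	"code.gitea.io/gitea/modules/markdown"
)

func main() {
	// URLJoin only inserts a "/" between components that do not already end in one.
	fmt.Println(markdown.URLJoin("http://localhost:3000/", "gogits/gogs", "issues", "3333"))
	// http://localhost:3000/gogits/gogs/issues/3333

	// path.Join cleans the result and collapses the "//" after the scheme.
	fmt.Println(path.Join("http://localhost:3000/", "gogits/gogs", "issues", "3333"))
	// http:/localhost:3000/gogits/gogs/issues/3333
}
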
diff --git a/modules/markdown/markdown_test.go b/modules/markdown/markdown_test.go
index 88b48548eb..60dd0afde2 100644
--- a/modules/markdown/markdown_test.go
+++ b/modules/markdown/markdown_test.go
@@ -1,21 +1,20 @@
package markdown_test
import (
- "bytes"
"fmt"
- "net/url"
- "path"
"strconv"
"testing"
+ "strings"
+
. "code.gitea.io/gitea/modules/markdown"
"code.gitea.io/gitea/modules/setting"
-
- "github.com/russross/blackfriday"
"github.com/stretchr/testify/assert"
)
-const urlPrefix = "/prefix"
+const AppURL = "http://localhost:3000/"
+const Repo = "gogits/gogs"
+const AppSubURL = AppURL + Repo + "/"
var numericMetas = map[string]string{
"format": "https://someurl.com/{user}/{repo}/{index}",
@@ -33,16 +32,12 @@ var alphanumericMetas = map[string]string{
// numericLink an HTML link to a numeric-style issue
func numericIssueLink(baseURL string, index int) string {
- u, _ := url.Parse(baseURL)
- u.Path = path.Join(u.Path, strconv.Itoa(index))
- return link(u.String(), fmt.Sprintf("#%d", index))
+ return link(URLJoin(baseURL, strconv.Itoa(index)), fmt.Sprintf("#%d", index))
}
// alphanumLink an HTML link to an alphanumeric-style issue
func alphanumIssueLink(baseURL string, name string) string {
- u, _ := url.Parse(baseURL)
- u.Path = path.Join(u.Path, name)
- return link(u.String(), name)
+ return link(URLJoin(baseURL, name), name)
}
// urlContentsLink an HTML link whose contents is the target URL
@@ -57,7 +52,7 @@ func link(href, contents string) string {
func testRenderIssueIndexPattern(t *testing.T, input, expected string, metas map[string]string) {
assert.Equal(t, expected,
- string(RenderIssueIndexPattern([]byte(input), urlPrefix, metas)))
+ string(RenderIssueIndexPattern([]byte(input), AppSubURL, metas)))
}
func TestRenderIssueIndexPattern(t *testing.T) {
@@ -88,11 +83,14 @@ func TestRenderIssueIndexPattern(t *testing.T) {
}
func TestRenderIssueIndexPattern2(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
// numeric: render inputs with valid mentions
test := func(s, expectedFmt string, indices ...int) {
links := make([]interface{}, len(indices))
for i, index := range indices {
- links[i] = numericIssueLink(path.Join(urlPrefix, "issues"), index)
+ links[i] = numericIssueLink(URLJoin(setting.AppSubURL, "issues"), index)
}
expectedNil := fmt.Sprintf(expectedFmt, links...)
testRenderIssueIndexPattern(t, s, expectedNil, nil)
@@ -122,6 +120,9 @@ func TestRenderIssueIndexPattern2(t *testing.T) {
}
func TestRenderIssueIndexPattern3(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
// alphanumeric: render inputs without valid mentions
test := func(s string) {
testRenderIssueIndexPattern(t, s, s, alphanumericMetas)
@@ -146,6 +147,9 @@ func TestRenderIssueIndexPattern3(t *testing.T) {
}
func TestRenderIssueIndexPattern4(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
// alphanumeric: render inputs with valid mentions
test := func(s, expectedFmt string, names ...string) {
links := make([]interface{}, len(names))
@@ -161,36 +165,509 @@ func TestRenderIssueIndexPattern4(t *testing.T) {
}
func TestRenderer_AutoLink(t *testing.T) {
- setting.AppURL = "http://localhost:3000/"
- htmlFlags := blackfriday.HTML_SKIP_STYLE | blackfriday.HTML_OMIT_CONTENTS
- renderer := &Renderer{
- Renderer: blackfriday.HtmlRenderer(htmlFlags, "", ""),
- }
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
+ SubURLNoProtocol := setting.AppSubURL[5:]
+
test := func(input, expected string) {
- buffer := new(bytes.Buffer)
- renderer.AutoLink(buffer, []byte(input), blackfriday.LINK_TYPE_NORMAL)
- assert.Equal(t, expected, buffer.String())
+ buffer := RenderSpecialLink([]byte(input), setting.AppSubURL, map[string]string{})
+ assert.Equal(t, expected, string(buffer))
}
// render valid issue URLs
- test("http://localhost:3000/user/repo/issues/3333",
- numericIssueLink("http://localhost:3000/user/repo/issues/", 3333))
+ test(URLJoin(setting.AppSubURL, "issues", "3333"),
+ numericIssueLink(URLJoin(setting.AppSubURL, "issues"), 3333))
- // render, but not change, invalid issue URLs
- test("http://1111/2222/ssss-issues/3333?param=blah&blahh=333",
- urlContentsLink("http://1111/2222/ssss-issues/3333?param=blah&blahh=333"))
- test("http://test.com/issues/33333", urlContentsLink("http://test.com/issues/33333"))
- test("https://issues/333", urlContentsLink("https://issues/333"))
+ // render external issue URLs
+ tmp := "//1111/2222/ssss-issues/3333?param=blah&blahh=333"
+ test("http:"+tmp,
+ "#3333 ")
+ test("http://test.com/issues/33333", numericIssueLink("//test.com/issues", 33333))
+ test("https://issues/333", numericIssueLink("//issues", 333))
// render valid commit URLs
- test("http://localhost:3000/user/project/commit/d8a994ef243349f321568f9e36d5c3f444b99cae",
- " d8a994ef24
")
- test("http://localhost:3000/user/project/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2",
- " d8a994ef24
")
+ tmp = URLJoin(SubURLNoProtocol, "commit", "d8a994ef243349f321568f9e36d5c3f444b99cae")
+ test("http://"+tmp, "d8a994ef24")
+ tmp += "#diff-2"
+ test("http://"+tmp, "d8a994ef24 (diff-2)")
// render other commit URLs
- test("https://external-link.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2",
- urlContentsLink("https://external-link.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2"))
- test("https://commit/d8a994ef243349f321568f9e36d5c3f444b99cae",
- urlContentsLink("https://commit/d8a994ef243349f321568f9e36d5c3f444b99cae"))
+ tmp = "//external-link.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2"
+ test("https:"+tmp, "d8a994ef24 (diff-2)")
+}
+
+func TestRender_ShortLinks(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
+ test := func(input, expected string) {
+ buffer := RenderString(input, setting.AppSubURL, nil)
+ assert.Equal(t, expected, string(buffer))
+ }
+
+ var url = URLJoin(AppSubURL, "wiki", "Link")
+ var imgurl = URLJoin(AppSubURL, "wiki", "raw", "Link.jpg")
+ var favicon = "http://google.com/favicon.ico"
+
+ test("[[Link]]", `
+`)
+ test("[[Link.jpg]]", `
+`)
+ test("[["+favicon+"]]", `
+`)
+ test("[[Name|Link]]", `
+`)
+ test("[[Name|Link.jpg]]", `
+`)
+ test("[[Name|Link.jpg|alt=AltName]]", `
+`)
+ test("[[Name|Link.jpg|title=Title]]", `
+`)
+ test("[[Name|Link.jpg|alt=AltName|title=Title]]", `
+`)
+ test("[[Name|Link.jpg|alt=\"AltName\"|title='Title']]", `
+`)
+}
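
In short, renderShortLinks resolves relative [[...]] targets under the repository's wiki paths; the sketch below (a made-up repository URL, with the exact markup left to the renderer and sanitizer) only documents where the links point.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/markdown"
)

func main() {
	prefix := "http://localhost:3000/gogits/gogs" // example repository URL

	// Relative page links resolve under <prefix>/wiki/...
	fmt.Println(markdown.RenderString("[[Name|Link]]", prefix, nil))

	// Relative image links resolve under <prefix>/wiki/raw/...
	fmt.Println(markdown.RenderString("[[Link.jpg|alt=AltName]]", prefix, nil))

	// Absolute URLs are kept as-is (shown as an image when the extension looks like one).
	fmt.Println(markdown.RenderString("[[http://google.com/favicon.ico]]", prefix, nil))
}
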
+
+func TestRender_Commits(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
+ test := func(input, expected string) {
+ buffer := RenderString(input, setting.AppSubURL, nil)
+ assert.Equal(t, expected, string(buffer))
+ }
+
+ var sha = "b6dd6210eaebc915fd5be5579c58cce4da2e2579"
+ var commit = URLJoin(AppSubURL, "commit", sha)
+ var subtree = URLJoin(commit, "src")
+ var tree = strings.Replace(subtree, "/commit/", "/tree/", -1)
+ var src = strings.Replace(subtree, "/commit/", "/src/", -1)
+
+ test(sha, `
+`)
+ test(commit, `
+`)
+ test(tree, `
+`)
+}
+
+func TestRegExp_MentionPattern(t *testing.T) {
+ trueTestCases := []string{
+ "@Unknwon",
+ "@ANT_123",
+ "@xxx-DiN0-z-A..uru..s-xxx",
+ " @lol ",
+ " @Te/st",
+ }
+ falseTestCases := []string{
+ "@ 0",
+ "@ ",
+ "@",
+ "",
+ "ABC",
+ }
+
+ for _, testCase := range trueTestCases {
+ res := MentionPattern.MatchString(testCase)
+ if !res {
+ println()
+ println(testCase)
+ }
+ assert.True(t, res)
+ }
+ for _, testCase := range falseTestCases {
+ res := MentionPattern.MatchString(testCase)
+ if res {
+ println()
+ println(testCase)
+ }
+ assert.False(t, res)
+ }
+}
+
+func TestRegExp_IssueNumericPattern(t *testing.T) {
+ trueTestCases := []string{
+ "#1234",
+ "#0",
+ "#1234567890987654321",
+ }
+ falseTestCases := []string{
+ "# 1234",
+ "# 0",
+ "# ",
+ "#",
+ "#ABC",
+ "#1A2B",
+ "",
+ "ABC",
+ }
+
+ for _, testCase := range trueTestCases {
+ assert.True(t, IssueNumericPattern.MatchString(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, IssueNumericPattern.MatchString(testCase))
+ }
+}
+
+func TestRegExp_IssueAlphanumericPattern(t *testing.T) {
+ trueTestCases := []string{
+ "ABC-1234",
+ "A-1",
+ "RC-80",
+ "ABCDEFGHIJ-1234567890987654321234567890",
+ }
+ falseTestCases := []string{
+ "RC-08",
+ "PR-0",
+ "ABCDEFGHIJK-1",
+ "PR_1",
+ "",
+ "#ABC",
+ "",
+ "ABC",
+ "GG-",
+ "rm-1",
+ }
+
+ for _, testCase := range trueTestCases {
+ assert.True(t, IssueAlphanumericPattern.MatchString(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, IssueAlphanumericPattern.MatchString(testCase))
+ }
+}
+
+func TestRegExp_Sha1CurrentPattern(t *testing.T) {
+ trueTestCases := []string{
+ "d8a994ef243349f321568f9e36d5c3f444b99cae",
+ "abcdefabcdefabcdefabcdefabcdefabcdefabcd",
+ }
+ falseTestCases := []string{
+ "test",
+ "abcdefg",
+ "abcdefghijklmnopqrstuvwxyzabcdefghijklmn",
+ "abcdefghijklmnopqrstuvwxyzabcdefghijklmO",
+ }
+
+ for _, testCase := range trueTestCases {
+ assert.True(t, Sha1CurrentPattern.MatchString(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, Sha1CurrentPattern.MatchString(testCase))
+ }
+}
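
Note the tightened Sha1CurrentPattern above: a bare hash is linked only when it starts the text or follows whitespace or an opening parenthesis, so hashes that already sit inside a URL path are left to AnySHA1Pattern. A quick check (the hash is the one used in the tests):

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/markdown"
)

func main() {
	sha := "d8a994ef243349f321568f9e36d5c3f444b99cae"

	// Stand-alone hash preceded by whitespace: matched.
	fmt.Println(markdown.Sha1CurrentPattern.MatchString("see " + sha)) // true

	// Hash embedded in a URL path: not matched here; AnySHA1Pattern handles it instead.
	fmt.Println(markdown.Sha1CurrentPattern.MatchString("repo/commit/" + sha)) // false
}
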
+
+func TestRegExp_ShortLinkPattern(t *testing.T) {
+ trueTestCases := []string{
+ "[[stuff]]",
+ "[[]]",
+ "[[stuff|title=Difficult name with spaces*!]]",
+ }
+ falseTestCases := []string{
+ "test",
+ "abcdefg",
+ "[[]",
+ "[[",
+ "[]",
+ "]]",
+ "abcdefghijklmnopqrstuvwxyz",
+ }
+
+ for _, testCase := range trueTestCases {
+ assert.True(t, ShortLinkPattern.MatchString(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, ShortLinkPattern.MatchString(testCase))
+ }
+}
+
+func TestRegExp_AnySHA1Pattern(t *testing.T) {
+ testCases := map[string][]string{
+ "https://github.com/jquery/jquery/blob/a644101ed04d0beacea864ce805e0c4f86ba1cd1/test/unit/event.js#L2703": []string{
+ "github.com",
+ "jquery",
+ "jquery",
+ "blob",
+ "a644101ed04d0beacea864ce805e0c4f86ba1cd1",
+ "test/unit/event.js",
+ "L2703",
+ },
+ "https://github.com/jquery/jquery/blob/a644101ed04d0beacea864ce805e0c4f86ba1cd1/test/unit/event.js": []string{
+ "github.com",
+ "jquery",
+ "jquery",
+ "blob",
+ "a644101ed04d0beacea864ce805e0c4f86ba1cd1",
+ "test/unit/event.js",
+ "",
+ },
+ "https://github.com/jquery/jquery/commit/0705be475092aede1eddae01319ec931fb9c65fc": []string{
+ "github.com",
+ "jquery",
+ "jquery",
+ "commit",
+ "0705be475092aede1eddae01319ec931fb9c65fc",
+ "",
+ "",
+ },
+ "https://github.com/jquery/jquery/tree/0705be475092aede1eddae01319ec931fb9c65fc/src": []string{
+ "github.com",
+ "jquery",
+ "jquery",
+ "tree",
+ "0705be475092aede1eddae01319ec931fb9c65fc",
+ "src",
+ "",
+ },
+ "https://try.gogs.io/gogs/gogs/commit/d8a994ef243349f321568f9e36d5c3f444b99cae#diff-2": []string{
+ "try.gogs.io",
+ "gogs",
+ "gogs",
+ "commit",
+ "d8a994ef243349f321568f9e36d5c3f444b99cae",
+ "",
+ "diff-2",
+ },
+ }
+
+ for k, v := range testCases {
+ assert.Equal(t, AnySHA1Pattern.FindStringSubmatch(k)[1:], v)
+ }
+}
+
+func TestRegExp_IssueFullPattern(t *testing.T) {
+ testCases := map[string][]string{
+ "https://github.com/gogits/gogs/pull/3244": []string{
+ "github.com/gogits/gogs/pull/",
+ "3244",
+ "",
+ "",
+ },
+ "https://github.com/gogits/gogs/issues/3247#issuecomment-231517079": []string{
+ "github.com/gogits/gogs/issues/",
+ "3247",
+ "#issuecomment-231517079",
+ "",
+ },
+ "https://try.gogs.io/gogs/gogs/issues/4#issue-685": []string{
+ "try.gogs.io/gogs/gogs/issues/",
+ "4",
+ "#issue-685",
+ "",
+ },
+ "https://youtrack.jetbrains.com/issue/JT-36485": []string{
+ "youtrack.jetbrains.com/issue/",
+ "JT-36485",
+ "",
+ "",
+ },
+ "https://youtrack.jetbrains.com/issue/JT-36485#comment=27-1508676": []string{
+ "youtrack.jetbrains.com/issue/",
+ "JT-36485",
+ "#comment=27-1508676",
+ "",
+ },
+ }
+
+ for k, v := range testCases {
+ assert.Equal(t, IssueFullPattern.FindStringSubmatch(k)[1:], v)
+ }
+}
+
+func TestMisc_IsMarkdownFile(t *testing.T) {
+ setting.Markdown.FileExtensions = []string{".md", ".markdown", ".mdown", ".mkd"}
+ trueTestCases := []string{
+ "test.md",
+ "wow.MARKDOWN",
+ "LOL.mDoWn",
+ }
+ falseTestCases := []string{
+ "test",
+ "abcdefg",
+ "abcdefghijklmnopqrstuvwxyz",
+ "test.md.test",
+ }
+
+ for _, testCase := range trueTestCases {
+ assert.True(t, IsMarkdownFile(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, IsMarkdownFile(testCase))
+ }
+}
+
+func TestMisc_IsReadmeFile(t *testing.T) {
+ trueTestCases := []string{
+ "readme",
+ "README",
+ "readME.mdown",
+ "README.md",
+ }
+ falseTestCases := []string{
+ "test.md",
+ "wow.MARKDOWN",
+ "LOL.mDoWn",
+ "test",
+ "abcdefg",
+ "abcdefghijklmnopqrstuvwxyz",
+ "test.md.test",
+ }
+
+ for _, testCase := range trueTestCases {
+ assert.True(t, IsReadmeFile(testCase))
+ }
+ for _, testCase := range falseTestCases {
+ assert.False(t, IsReadmeFile(testCase))
+ }
+}
+
+func TestMisc_IsSameDomain(t *testing.T) {
+ setting.AppURL = AppURL
+ setting.AppSubURL = AppSubURL
+
+ var sha = "b6dd6210eaebc915fd5be5579c58cce4da2e2579"
+ var commit = URLJoin(AppSubURL, "commit", sha)
+
+ assert.True(t, IsSameDomain(commit))
+ assert.False(t, IsSameDomain("http://google.com/ncr"))
+ assert.False(t, IsSameDomain("favicon.ico"))
+}
+
+// Test cases without ambiguous links
+var sameCases = []string{
+ // dear imgui wiki markdown extract: special wiki syntax
+ `Wiki! Enjoy :)
+- [[Links, Language bindings, Engine bindings|Links]]
+- [[Tips]]
+
+Ideas and codes
+
+- Bezier widget (by @r-lyeh) https://github.com/ocornut/imgui/issues/786
+- Node graph editors https://github.com/ocornut/imgui/issues/306
+- [[Memory Editor|memory_editor_example]]
+- [[Plot var helper|plot_var_example]]`,
+ // rendered
+ `Wiki! Enjoy :)
+ + + +Ideas and codes
+ +Wine Staging on website wine-staging.com.
+ +Here are some links to the most important topics. You can find the full list of pages at the sidebar.
+ ++ | Installation | +
---|---|
+ | Usage | +
+ | Configuration | +
+ | Bugs | +
Excelsior JET allows you to create native executables for Windows, Linux and Mac OS X.
+ +Guardfile-DSL / Configuring-Guard
+`, + // special syntax + `[[Name|Link]]`, + // rendered + ` +`, + } + + for i := 0; i < len(testCases); i += 2 { + line := RenderWiki([]byte(testCases[i]), AppSubURL, map[string]string{}) + assert.Equal(t, testCases[i+1], line) + } } diff --git a/public/css/index.css b/public/css/index.css index 1bb9e9fd40..85101faba9 100644 --- a/public/css/index.css +++ b/public/css/index.css @@ -602,7 +602,7 @@ footer .ui.language .menu { list-style-type: lower-roman; } .markdown:not(code) li > p { - margin-top: 16px; + margin-top: 0; } .markdown:not(code) dl { padding: 0; @@ -841,6 +841,11 @@ footer .ui.language .menu { background: #f8f8f8; border-top: 0; } +.markdown:not(code) .ui.list .list, +.markdown:not(code) ol.ui.list ol, +.markdown:not(code) ul.ui.list ul { + padding-left: 2em; +} .home { padding-bottom: 80px; } diff --git a/public/js/index.js b/public/js/index.js index e700da29f8..2e0e26f55c 100644 --- a/public/js/index.js +++ b/public/js/index.js @@ -613,6 +613,7 @@ function initWikiForm() { function (data) { preview.innerHTML = 'Wiki! Enjoy :)
+ +Wine Staging on website wine-staging.com.
+ +Here are some links to the most important topics. You can find the full list of pages at the sidebar.
+ + +`, + // Guard wiki sidebar: special syntax + `[[Guardfile-DSL / Configuring-Guard|Guardfile-DSL---Configuring-Guard]]`, + // rendered + `Guardfile-DSL / Configuring-Guard
+`, + // special syntax + `[[Name|Link]]`, + // rendered + ` +`, + // empty + ``, + // rendered + ``, + } + + for i := 0; i < len(testCases); i += 2 { + options.Text = testCases[i] + Markdown(ctx, options) + assert.Equal(t, testCases[i+1], resp.Body.String()) + resp.Body.Reset() + } +} + +var simpleCases = []string{ + // Guard wiki sidebar: special syntax + `[[Guardfile-DSL / Configuring-Guard|Guardfile-DSL---Configuring-Guard]]`, + // rendered + `[[Guardfile-DSL / Configuring-Guard|Guardfile-DSL---Configuring-Guard]]
+`, + // special syntax + `[[Name|Link]]`, + // rendered + `[[Name|Link]]
+`, + // empty + ``, + // rendered + ``, +} + +func TestAPI_RenderSimple(t *testing.T) { + setting.AppURL = AppURL + + options := api.MarkdownOption{ + Mode: "markdown", + Text: "", + Context: Repo, + } + requrl, _ := url.Parse(markdown.URLJoin(AppURL, "api", "v1", "markdown")) + req := &http.Request{ + Method: "POST", + URL: requrl, + } + m, resp := createContext(req) + ctx := wrap(m) + + for i := 0; i < len(simpleCases); i += 2 { + options.Text = simpleCases[i] + Markdown(ctx, options) + assert.Equal(t, simpleCases[i+1], resp.Body.String()) + resp.Body.Reset() + } +} + +func TestAPI_RenderRaw(t *testing.T) { + setting.AppURL = AppURL + + requrl, _ := url.Parse(markdown.URLJoin(AppURL, "api", "v1", "markdown")) + req := &http.Request{ + Method: "POST", + URL: requrl, + } + m, resp := createContext(req) + ctx := wrap(m) + + for i := 0; i < len(simpleCases); i += 2 { + ctx.Req.Request.Body = ioutil.NopCloser(strings.NewReader(simpleCases[i])) + MarkdownRaw(ctx) + assert.Equal(t, simpleCases[i+1], resp.Body.String()) + resp.Body.Reset() + } +} diff --git a/routers/repo/wiki.go b/routers/repo/wiki.go index 6e491f73a4..7da49f61a6 100644 --- a/routers/repo/wiki.go +++ b/routers/repo/wiki.go @@ -5,7 +5,10 @@ package repo import ( + "fmt" "io/ioutil" + "net/url" + "path/filepath" "strings" "time" @@ -47,16 +50,145 @@ type PageMeta struct { Updated time.Time } -func renderWikiPage(ctx *context.Context, isViewPage bool) (*git.Repository, string) { +func urlEncoded(str string) string { + u, err := url.Parse(str) + if err != nil { + return str + } + return u.String() +} +func urlDecoded(str string) string { + res, err := url.QueryUnescape(str) + if err != nil { + return str + } + return res +} + +// commitTreeBlobEntry processes found file and checks if it matches search target +func commitTreeBlobEntry(entry *git.TreeEntry, path string, targets []string, textOnly bool) *git.TreeEntry { + name := entry.Name() + ext := filepath.Ext(name) + if !textOnly || markdown.IsMarkdownFile(name) || ext == ".textile" { + for _, target := range targets { + if matchName(path, target) || matchName(urlEncoded(path), target) || matchName(urlDecoded(path), target) { + return entry + } + pathNoExt := strings.TrimSuffix(path, ext) + if matchName(pathNoExt, target) || matchName(urlEncoded(pathNoExt), target) || matchName(urlDecoded(pathNoExt), target) { + return entry + } + } + } + return nil +} + +// commitTreeDirEntry is a recursive file tree traversal function +func commitTreeDirEntry(repo *git.Repository, commit *git.Commit, entries []*git.TreeEntry, prevPath string, targets []string, textOnly bool) (*git.TreeEntry, error) { + for i := range entries { + entry := entries[i] + var path string + if len(prevPath) == 0 { + path = entry.Name() + } else { + path = prevPath + "/" + entry.Name() + } + if entry.Type == git.ObjectBlob { + // File + if res := commitTreeBlobEntry(entry, path, targets, textOnly); res != nil { + return res, nil + } + } else if entry.IsDir() { + // Directory + // Get our tree entry, handling all possible errors + var err error + var tree *git.Tree + if tree, err = repo.GetTree(entry.ID.String()); tree == nil || err != nil { + if err == nil { + err = fmt.Errorf("repo.GetTree(%s) => nil", entry.ID.String()) + } + return nil, err + } + // Found us, get children entries + var ls git.Entries + if ls, err = tree.ListEntries(); err != nil { + return nil, err + } + // Call itself recursively to find needed entry + var te *git.TreeEntry + if te, err = commitTreeDirEntry(repo, commit, ls, path, targets, 
textOnly); err != nil { + return nil, err + } + if te != nil { + return te, nil + } + } + } + return nil, nil +} + +// commitTreeEntry is a first step of commitTreeDirEntry, which should be never called directly +func commitTreeEntry(repo *git.Repository, commit *git.Commit, targets []string, textOnly bool) (*git.TreeEntry, error) { + entries, err := commit.ListEntries() + if err != nil { + return nil, err + } + return commitTreeDirEntry(repo, commit, entries, "", targets, textOnly) +} + +// findFile finds the best match for given filename in repo file tree +func findFile(repo *git.Repository, commit *git.Commit, target string, textOnly bool) (*git.TreeEntry, error) { + targets := []string{target, urlEncoded(target), urlDecoded(target)} + var entry *git.TreeEntry + var err error + if entry, err = commitTreeEntry(repo, commit, targets, textOnly); err != nil { + return nil, err + } + return entry, nil +} + +// matchName matches generic name representation of the file with required one +func matchName(target, name string) bool { + if len(target) != len(name) { + return false + } + name = strings.ToLower(name) + target = strings.ToLower(target) + if name == target { + return true + } + target = strings.Replace(target, " ", "?", -1) + target = strings.Replace(target, "-", "?", -1) + for i := range name { + ch := name[i] + reqCh := target[i] + if ch != reqCh { + if string(reqCh) != "?" { + return false + } + } + } + return true +} + +func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, error) { wikiRepo, err := git.OpenRepository(ctx.Repo.Repository.WikiPath()) if err != nil { - ctx.Handle(500, "OpenRepository", err) - return nil, "" + // ctx.Handle(500, "OpenRepository", err) + return nil, nil, err } commit, err := wikiRepo.GetBranchCommit("master") if err != nil { ctx.Handle(500, "GetBranchCommit", err) - return nil, "" + return wikiRepo, nil, err + } + return wikiRepo, commit, nil +} + +func renderWikiPage(ctx *context.Context, isViewPage bool) (*git.Repository, *git.TreeEntry) { + wikiRepo, commit, err := findWikiRepoCommit(ctx) + if err != nil { + return nil, nil } // Get page list. 
@@ -64,16 +196,23 @@ func renderWikiPage(ctx *context.Context, isViewPage bool) (*git.Repository, str entries, err := commit.ListEntries() if err != nil { ctx.Handle(500, "ListEntries", err) - return nil, "" + return nil, nil } - pages := make([]PageMeta, 0, len(entries)) + pages := []PageMeta{} for i := range entries { - if entries[i].Type == git.ObjectBlob && strings.HasSuffix(entries[i].Name(), ".md") { - name := strings.TrimSuffix(entries[i].Name(), ".md") - pages = append(pages, PageMeta{ - Name: name, - URL: models.ToWikiPageURL(name), - }) + if entries[i].Type == git.ObjectBlob { + name := entries[i].Name() + ext := filepath.Ext(name) + if markdown.IsMarkdownFile(name) || ext == ".textile" { + name = strings.TrimSuffix(name, ext) + if name == "" || name == "_Sidebar" || name == "_Footer" || name == "_Header" { + continue + } + pages = append(pages, PageMeta{ + Name: strings.Replace(name, "-", " ", -1), + URL: models.ToWikiPageURL(name), + }) + } } } ctx.Data["Pages"] = pages @@ -91,35 +230,71 @@ func renderWikiPage(ctx *context.Context, isViewPage bool) (*git.Repository, str ctx.Data["title"] = pageName ctx.Data["RequireHighlightJS"] = true - blob, err := commit.GetBlobByPath(pageURL + ".md") - if err != nil { - if git.IsErrNotExist(err) { - ctx.Redirect(ctx.Repo.RepoLink + "/wiki/_pages") - } else { - ctx.Handle(500, "GetBlobByPath", err) - } - return nil, "" + var entry *git.TreeEntry + if entry, err = findFile(wikiRepo, commit, pageName, true); err != nil { + ctx.Handle(500, "findFile", err) + return nil, nil } + if entry == nil { + ctx.Redirect(ctx.Repo.RepoLink + "/wiki/_pages") + return nil, nil + } + blob := entry.Blob() r, err := blob.Data() if err != nil { ctx.Handle(500, "Data", err) - return nil, "" + return nil, nil } data, err := ioutil.ReadAll(r) if err != nil { ctx.Handle(500, "ReadAll", err) - return nil, "" + return nil, nil + } + sidebarPresent := false + sidebarContent := []byte{} + sentry, err := findFile(wikiRepo, commit, "_Sidebar", true) + if err == nil && sentry != nil { + r, err = sentry.Blob().Data() + if err == nil { + dataSB, err := ioutil.ReadAll(r) + if err == nil { + sidebarPresent = true + sidebarContent = dataSB + } + } + } + footerPresent := false + footerContent := []byte{} + sentry, err = findFile(wikiRepo, commit, "_Footer", true) + if err == nil && sentry != nil { + r, err = sentry.Blob().Data() + if err == nil { + dataSB, err := ioutil.ReadAll(r) + if err == nil { + footerPresent = true + footerContent = dataSB + } + } } if isViewPage { - ctx.Data["content"] = string(markdown.Render(data, ctx.Repo.RepoLink, ctx.Repo.Repository.ComposeMetas())) + metas := ctx.Repo.Repository.ComposeMetas() + ctx.Data["content"] = markdown.RenderWiki(data, ctx.Repo.RepoLink, metas) + ctx.Data["sidebarPresent"] = sidebarPresent + ctx.Data["sidebarContent"] = markdown.RenderWiki(sidebarContent, ctx.Repo.RepoLink, metas) + ctx.Data["footerPresent"] = footerPresent + ctx.Data["footerContent"] = markdown.RenderWiki(footerContent, ctx.Repo.RepoLink, metas) } else { ctx.Data["content"] = string(data) + ctx.Data["sidebarPresent"] = false + ctx.Data["sidebarContent"] = "" + ctx.Data["footerPresent"] = false + ctx.Data["footerContent"] = "" } - return wikiRepo, pageURL + return wikiRepo, entry } -// Wiki render wiki page +// Wiki renders single wiki page func Wiki(ctx *context.Context) { ctx.Data["PageIsWiki"] = true @@ -129,13 +304,18 @@ func Wiki(ctx *context.Context) { return } - wikiRepo, pagePath := renderWikiPage(ctx, true) + wikiRepo, entry := renderWikiPage(ctx, 
true) if ctx.Written() { return } + ename := entry.Name() + if !markdown.IsMarkdownFile(ename) { + ext := strings.ToUpper(filepath.Ext(ename)) + ctx.Data["FormatWarning"] = fmt.Sprintf("%s rendering is not supported at the moment. Rendered as Markdown.", ext) + } // Get last change information. - lastCommit, err := wikiRepo.GetCommitByPath(pagePath + ".md") + lastCommit, err := wikiRepo.GetCommitByPath(ename) if err != nil { ctx.Handle(500, "GetCommitByPath", err) return @@ -155,14 +335,8 @@ func WikiPages(ctx *context.Context) { return } - wikiRepo, err := git.OpenRepository(ctx.Repo.Repository.WikiPath()) + wikiRepo, commit, err := findWikiRepoCommit(ctx) if err != nil { - ctx.Handle(500, "OpenRepository", err) - return - } - commit, err := wikiRepo.GetBranchCommit("master") - if err != nil { - ctx.Handle(500, "GetBranchCommit", err) return } @@ -173,18 +347,25 @@ func WikiPages(ctx *context.Context) { } pages := make([]PageMeta, 0, len(entries)) for i := range entries { - if entries[i].Type == git.ObjectBlob && strings.HasSuffix(entries[i].Name(), ".md") { + if entries[i].Type == git.ObjectBlob { c, err := wikiRepo.GetCommitByPath(entries[i].Name()) if err != nil { ctx.Handle(500, "GetCommit", err) return } - name := strings.TrimSuffix(entries[i].Name(), ".md") - pages = append(pages, PageMeta{ - Name: name, - URL: models.ToWikiPageURL(name), - Updated: c.Author.When, - }) + name := entries[i].Name() + ext := filepath.Ext(name) + if markdown.IsMarkdownFile(name) || ext == ".textile" { + name = strings.TrimSuffix(name, ext) + if name == "" { + continue + } + pages = append(pages, PageMeta{ + Name: name, + URL: models.ToWikiPageURL(name), + Updated: c.Author.When, + }) + } } } ctx.Data["Pages"] = pages @@ -192,6 +373,44 @@ func WikiPages(ctx *context.Context) { ctx.HTML(200, tplWikiPages) } +// WikiRaw outputs raw blob requested by user (image for example) +func WikiRaw(ctx *context.Context) { + wikiRepo, commit, err := findWikiRepoCommit(ctx) + if err != nil { + if wikiRepo != nil { + return + } + } + uri := ctx.Params("*") + var entry *git.TreeEntry + if commit != nil { + entry, err = findFile(wikiRepo, commit, uri, false) + } + if err != nil || entry == nil { + if entry == nil || commit == nil { + defBranch := ctx.Repo.Repository.DefaultBranch + if commit, err = ctx.Repo.GitRepo.GetBranchCommit(defBranch); commit == nil || err != nil { + ctx.Handle(500, "GetBranchCommit", err) + return + } + if entry, err = findFile(ctx.Repo.GitRepo, commit, uri, false); err != nil { + ctx.Handle(500, "findFile", err) + return + } + if entry == nil { + ctx.Handle(404, "findFile", nil) + return + } + } else { + ctx.Handle(500, "findFile", err) + return + } + } + if err = ServeBlob(ctx, entry.Blob()); err != nil { + ctx.Handle(500, "ServeBlob", err) + } +} + // NewWiki render wiki create page func NewWiki(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page") diff --git a/templates/base/footer.tmpl b/templates/base/footer.tmpl index 08a079ebb6..1a9202d271 100644 --- a/templates/base/footer.tmpl +++ b/templates/base/footer.tmpl @@ -60,6 +60,7 @@ {{if .RequireDropzone}} {{end}} +
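
For orientation, the lookup helpers added in routers/repo/wiki.go (findFile and matchName, both unexported) resolve wiki page names loosely: the comparison is case-insensitive, it is attempted with and without the file extension and in URL-encoded and URL-decoded forms, and spaces and dashes in the stored file name act as single-character wildcards. The sketch below mirrors only that wildcard rule; it is not the real API, and the page names are illustrative.

package main

import (
	"fmt"
	"strings"
)

// looseMatch mirrors the wildcard comparison used by the unexported matchName helper:
// ' ' and '-' in the stored name may match any single character of the requested name.
func looseMatch(stored, requested string) bool {
	if len(stored) != len(requested) {
		return false
	}
	stored, requested = strings.ToLower(stored), strings.ToLower(requested)
	for i := 0; i < len(requested); i++ {
		if stored[i] != requested[i] && stored[i] != ' ' && stored[i] != '-' {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(looseMatch("Hello World", "Hello-World")) // true: the space acts as a wildcard
	fmt.Println(looseMatch("Hello-World", "Hello World")) // true: the dash acts as a wildcard
	fmt.Println(looseMatch("Hello World", "Howdy-World")) // false: letters differ outside the wildcard positions
}
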