// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repo
import (
"bytes"
"fmt"
gotemplate "html/template"
"io/ioutil"
"path"
"strings"
"github.com/Unknwon/paginater"
log "gopkg.in/clog.v1"
"github.com/G-Node/git-module"
"bufio"
"io"
"os"
"github.com/G-Node/gin-doi/src"
"github.com/G-Node/go-annex"
"github.com/G-Node/gogs/models"
"github.com/G-Node/gogs/pkg/context"
"github.com/G-Node/gogs/pkg/markup"
"github.com/G-Node/gogs/pkg/setting"
"github.com/G-Node/gogs/pkg/template"
"github.com/G-Node/gogs/pkg/template/highlight"
"github.com/G-Node/gogs/pkg/tool"
"github.com/go-macaron/captcha"
"gopkg.in/yaml.v2"
"github.com/G-Node/godML/odml"
"encoding/json"
"encoding/xml"
"golang.org/x/net/html/charset"
)
// Template paths for the repository views rendered by the handlers in
// this file; each value is passed to c.HTML/c.Success as the template name.
const (
// BARE is the view shown for an empty (bare) repository.
BARE = "repo/bare"
// HOME is the main repository file-listing / readme view.
HOME = "repo/home"
// WATCHERS lists the users watching the repository.
WATCHERS = "repo/watchers"
// FORKS lists the forks of the repository.
FORKS = "repo/forks"
)
// renderDirectory populates c.Data for the repository file-listing view:
// the sorted tree entries with commit info, README preview (including
// git-annex'ed READMEs), LICENSE detection, GIN DOI metadata parsed from
// datacite.yml, and the latest commit for the current tree path.
// Errors that prevent rendering are reported via c.ServerError /
// c.NotFoundOrServerError and abort the handler.
func renderDirectory(c *context.Context, treeLink string) {
	tree, err := c.Repo.Commit.SubTree(c.Repo.TreePath)
	if err != nil {
		c.NotFoundOrServerError("Repo.Commit.SubTree", git.IsErrNotExist, err)
		return
	}

	entries, err := tree.ListEntries()
	if err != nil {
		c.ServerError("ListEntries", err)
		return
	}
	entries.Sort()

	c.Data["Files"], err = entries.GetCommitsInfoWithCustomConcurrency(c.Repo.Commit, c.Repo.TreePath, setting.Repository.CommitsFetchConcurrency)
	if err != nil {
		c.ServerError("GetCommitsInfoWithCustomConcurrency", err)
		return
	}

	c.Data["DOI"] = false
	hasLicense := false
	var readmeFile *git.Blob
	for _, entry := range entries {
		// Only README files, datacite.yml and LICENSE are of interest here.
		if entry.IsDir() || (!markup.IsReadmeFile(entry.Name()) && entry.Name() != "datacite.yml" && entry.Name() != "LICENSE") {
			continue
		}
		// TODO: collect all possible README files and show with priority.
		if markup.IsReadmeFile(entry.Name()) && entry.Blob().Size() < setting.UI.MaxDisplayFileSize {
			readmeFile = entry.Blob()
		} else if entry.Name() == "datacite.yml" {
			c.Data["DOI"] = true
			doiData, err := entry.Blob().Data()
			if err != nil {
				// BUG FIX: previously only logged, then read from a possibly
				// nil reader. Skip DOI parsing when the blob is unreadable.
				log.Trace("Doi Blob could not be read:%v", err)
				continue
			}
			buf, err := ioutil.ReadAll(doiData)
			if err != nil {
				// BUG FIX: the ReadAll error was silently discarded before.
				log.Trace("Doi Blob could not be read:%v", err)
				continue
			}
			doiInfo := ginDoi.CBerry{}
			if err = yaml.Unmarshal(buf, &doiInfo); err != nil {
				log.Trace("Doi Blob could not be unmarshalled:%v", err)
			}
			c.Data["DoiInfo"] = doiInfo
			doi := GDoiRepo(c, setting.Doi.DoiBase)
			//ddata, err := ginDoi.GDoiMData(doi, "https://api.datacite.org/works/") //todo configure URL?
			// NOTE: IsRegsitredDoi is the (misspelled) name exported by the
			// gin-doi package; it cannot be renamed here.
			c.Data["DoiReg"] = ginDoi.IsRegsitredDoi(doi)
			c.Data["doi"] = doi
		} else if entry.Name() == "LICENSE" {
			hasLicense = true
		}
	}
	// BUG FIX: was hard-coded to true even though the loop above explicitly
	// admits LICENSE entries; report the actual presence of a LICENSE file.
	c.Data["LicenseExists"] = hasLicense

	if readmeFile != nil {
		c.Data["RawFileLink"] = ""
		c.Data["ReadmeInList"] = true
		c.Data["ReadmeExist"] = true

		// Peek at the first KB of the blob through a pipe to decide whether
		// the README is a git-annex pointer file.
		buf := make([]byte, 1024)
		r, w := io.Pipe()
		defer r.Close()
		defer w.Close()
		go readmeFile.DataPipeline(w, w)
		if readmeFile.Size() > 0 {
			n, _ := r.Read(buf)
			buf = buf[:n]
		}
		isannex := tool.IsAnnexedFile(buf)

		dataRc, err := readmeFile.Data()
		if err != nil {
			c.ServerError("readmeFile.Data", err)
			return
		}
		if isannex {
			// Resolve the annex pointer to the real content file.
			af, err := gannex.NewAFile(c.Repo.Repository.RepoPath(), "annex", readmeFile.Name(), buf)
			if err != nil {
				log.Trace("Could not get annex file: %v", err)
				c.ServerError("readmeFile.Data", err)
				return
			}
			afp, err := af.Open()
			if err != nil {
				// BUG FIX: Close was deferred before checking the Open error,
				// invoking Close on a potentially nil/invalid file handle.
				log.Trace("Could not open annex file: %v", err)
				c.ServerError("readmeFile.Data", err)
				return
			}
			defer afp.Close()
			dataRc = bufio.NewReader(afp)
		}

		// Sniff the (possibly annex-resolved) content for text-ness.
		buf = make([]byte, 1024)
		n, _ := dataRc.Read(buf)
		buf = buf[:n]
		isTextFile := tool.IsTextFile(buf)
		c.Data["IsTextFile"] = isTextFile
		c.Data["FileName"] = readmeFile.Name()
		if isTextFile {
			d, _ := ioutil.ReadAll(dataRc)
			buf = append(buf, d...)

			switch markup.Detect(readmeFile.Name()) {
			case markup.MARKDOWN:
				c.Data["IsMarkdown"] = true
				buf = markup.Markdown(buf, treeLink, c.Repo.Repository.ComposeMetas())
			case markup.ORG_MODE:
				c.Data["IsMarkdown"] = true
				buf = markup.OrgMode(buf, treeLink, c.Repo.Repository.ComposeMetas())
			case markup.IPYTHON_NOTEBOOK:
				c.Data["IsIPythonNotebook"] = true
				c.Data["RawFileLink"] = c.Repo.RepoLink + "/raw/" + path.Join(c.Repo.BranchName, c.Repo.TreePath, readmeFile.Name())
			default:
				// NOTE(review): this replaces "\n" with a raw-string literal
				// that itself contains a newline, i.e. it is a no-op as
				// written — possibly a mangled `<br>` replacement. Preserved
				// byte-for-byte; confirm against upstream before changing.
				buf = bytes.Replace(buf, []byte("\n"), []byte(`
`), -1)
			}
			c.Data["FileContent"] = string(buf)
		}
	}

	// Show latest commit info of repository in table header,
	// or of directory if not in root directory.
	latestCommit := c.Repo.Commit
	if len(c.Repo.TreePath) > 0 {
		latestCommit, err = c.Repo.Commit.GetCommitByPath(c.Repo.TreePath)
		if err != nil {
			c.ServerError("GetCommitByPath", err)
			return
		}
	}
	c.Data["LatestCommit"] = latestCommit
	c.Data["LatestCommitUser"] = models.ValidateCommitWithEmail(latestCommit)

	if c.Repo.CanEnableEditor() {
		c.Data["CanAddFile"] = true
		c.Data["CanUploadFile"] = setting.Repository.Upload.Enabled
	}
}
func renderFile(c *context.Context, entry *git.TreeEntry, treeLink, rawLink string, cpt *captcha.Captcha) {
c.Data["IsViewFile"] = true
blob := entry.Blob()
log.Trace("Blob size is %s", blob.Size())
if blob.Size() > gannex.MEGABYTE*10 && setting.Service.EnableCaptcha && !cpt.VerifyReq(c.Req) && !c.IsLogged {
c.Data["EnableCaptcha"] = true
c.HTML(200, "repo/download")
return
}
c.Data["FileSize"] = blob.Size()
c.Data["FileName"] = blob.Name()
c.Data["HighlightClass"] = highlight.FileNameToHighlightClass(blob.Name())
c.Data["RawFileLink"] = rawLink + "/" + c.Repo.TreePath
buf := make([]byte, 1024)
r, w := io.Pipe()
defer r.Close()
defer w.Close()
go blob.DataPipeline(w, w)
if blob.Size() > 0 {
n, _ := r.Read(buf)
buf = buf[:n]
}
isannex := tool.IsAnnexedFile(buf)
var afpR *bufio.Reader
var afp *os.File
var annexf *gannex.AFile
if isannex == true {
af, err := gannex.NewAFile(c.Repo.Repository.RepoPath(), "annex", entry.Name(), buf)
if err != nil {
c.Data["IsAnnexedFile"] = true
log.Trace("Could not get annex file: %v", err)
return
}
if af.Info.Size() > gannex.MEGABYTE*setting.Repository.CaptchaMinFileSize && setting.Service.EnableCaptcha &&
!cpt.VerifyReq(c.Req) && !c.IsLogged {
c.Data["EnableCaptcha"] = true
c.HTML(200, "repo/download")
return
}
afp, err = af.Open()
defer afp.Close()
if err != nil {
log.Trace("Could not open annex file: %v", err)
c.Data["IsAnnexedFile"] = true
return
}
afpR = bufio.NewReader(afp)
buf, _ = afpR.Peek(1024)
annexf = af
c.Data["FileSize"] = af.Info.Size()
}
isTextFile := tool.IsTextFile(buf)
c.Data["IsTextFile"] = isTextFile
// Assume file is not editable first.
if !isTextFile {
c.Data["EditFileTooltip"] = c.Tr("repo.editor.cannot_edit_non_text_files")
}
canEnableEditor := c.Repo.CanEnableEditor()
switch {
case isTextFile:
if !isannex {
if blob.Size() >= setting.UI.MaxDisplayFileSize {
c.Data["IsFileTooLarge"] = true
break
}
c.Data["ReadmeExist"] = markup.IsReadmeFile(blob.Name())
if blob.Size() > 1024 {
d := make([]byte, blob.Size()-
1024)
if _, err := io.ReadAtLeast(r, d, int(blob.Size()-1024)); err != nil {
log.Error(4., "Could nor read all of a git file:%+v", err)
}
buf = append(buf, d...)
}
} else {
if annexf.Info.Size() >= setting.UI.MaxDisplayFileSize {
c.Data["IsFileTooLarge"] = true
break
}
c.Data["ReadmeExist"] = markup.IsReadmeFile(blob.Name())
buf = make([]byte, annexf.Info.Size())
afp.Seek(0, 0)
afp.Read(buf)
}
switch markup.Detect(blob.Name()) {
case markup.MARKDOWN:
c.Data["IsMarkdown"] = true
c.Data["FileContent"] = string(markup.Markdown(buf, path.Dir(treeLink), c.Repo.Repository.ComposeMetas()))
case markup.ORG_MODE:
c.Data["IsMarkdown"] = true
c.Data["FileContent"] = string(markup.OrgMode(buf, path.Dir(treeLink), c.Repo.Repository.ComposeMetas()))
case markup.IPYTHON_NOTEBOOK:
c.Data["IsIPythonNotebook"] = true
case markup.JSON:
c.Data["IsJSON"] = true
c.Data["RawFileContent"] = string(buf)
fallthrough
case markup.YAML:
c.Data["IsYAML"] = true
c.Data["RawFileContent"] = string(buf)
fallthrough
case markup.UNRECOGNIZED:
if tool.IsOdmlFile(buf) {
c.Data["IsOdML"] = true
od := odml.Odml{}
xml.Unmarshal(buf, &od)
decoder := xml.NewDecoder(bytes.NewReader(buf))
decoder.CharsetReader = charset.NewReaderLabel
decoder.Decode(&od)
data, _ := json.Marshal(od)
c.Data["OdML"] = string(data)
goto End
} else {
goto End
}
End:
fallthrough
default:
// Building code view blocks with line number on server side.
var fileContent string
if err, content := template.ToUTF8WithErr(buf); err != nil {
if err != nil {
log.Error(4, "ToUTF8WithErr: %s", err)
}
fileContent = string(buf)
} else {
fileContent = content
}
var output bytes.Buffer
lines := strings.Split(fileContent, "\n")
if len(lines) > setting.UI.MaxLineHighlight {
c.Data["HighlightClass"] = "nohighlight"
}
for index, line := range lines {
output.WriteString(fmt.Sprintf(`