// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

//go:build !gogit

package git

import (
	"bufio"
	"bytes"
	"io"
	"math"
	"strings"

	"code.gitea.io/gitea/modules/analyze"
	"code.gitea.io/gitea/modules/log"

	"github.com/go-enry/go-enry/v2"
)

// GetLanguageStats calculates the language statistics for the git repository at the specified commit.
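//
// A minimal usage sketch (assuming an already opened *Repository and a full
// commit SHA; the surrounding identifiers are illustrative only):
//
//	stats, err := gitRepo.GetLanguageStats(commitSHA)
//	if err != nil {
//		return err
//	}
//	for language, size := range stats {
//		fmt.Printf("%s: %d bytes\n", language, size)
//	}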
func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, error) {
	// We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary,
	// so let's create a batch stdin and stdout.
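	// Each object that comes back is framed as a "<sha> <type> <size>" header line,
	// followed by the raw object content and a trailing newline; the Discard calls
	// below account for that trailing newline.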
	batchStdinWriter, batchReader, cancel := repo.CatFileBatch(repo.Ctx)
	defer cancel()

	writeID := func(id string) error {
		_, err := batchStdinWriter.Write([]byte(id + "\n"))
		return err
	}

	if err := writeID(commitID); err != nil {
		return nil, err
	}
	shaBytes, typ, size, err := ReadBatchLine(batchReader)
	if typ != "commit" {
		log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, ErrNotExist{commitID, ""}
	}

	sha, err := NewIDFromString(string(shaBytes))
	if err != nil {
		log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, ErrNotExist{commitID, ""}
	}

	commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size))
	if err != nil {
		log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, err
	}
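	// Skip the trailing newline that cat-file --batch prints after the commit body.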
	if _, err = batchReader.Discard(1); err != nil {
		return nil, err
	}

	tree := commit.Tree

	entries, err := tree.ListEntriesRecursive()
	if err != nil {
		return nil, err
	}

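	// Stream the repository's .gitattributes rules (linguist-* and gitlab-language)
	// so that per-path overrides can be honoured inside the loop below.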
	checker, deferable := repo.CheckAttributeReader(commitID)
	defer deferable()

	contentBuf := bytes.Buffer{}
	var content []byte
	sizes := make(map[string]int64)
	for _, f := range entries {
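		// Bail out early if the surrounding context has been cancelled.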
		select {
		case <-repo.Ctx.Done():
			return sizes, repo.Ctx.Err()
		default:
		}

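		// Reuse one buffer for all entries; content stays empty unless the blob
		// is actually read further down.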
		contentBuf.Reset()
		content = contentBuf.Bytes()

		if f.Size() == 0 {
			continue
		}

		notVendored := false
		notGenerated := false

		if checker != nil {
			attrs, err := checker.CheckPath(f.Name())
			if err == nil {
				if vendored, has := attrs["linguist-vendored"]; has {
					if vendored == "set" || vendored == "true" {
						continue
					}
					notVendored = vendored == "false"
				}
				if generated, has := attrs["linguist-generated"]; has {
					if generated == "set" || generated == "true" {
						continue
					}
					notGenerated = generated == "false"
				}
				if language, has := attrs["linguist-language"]; has && language != "unspecified" && language != "" {
					// group languages, such as Pug -> HTML; SCSS -> CSS
					group := enry.GetLanguageGroup(language)
					if len(group) != 0 {
						language = group
					}

					sizes[language] += f.Size()
					continue
				} else if language, has := attrs["gitlab-language"]; has && language != "unspecified" && language != "" {
					// strip off a ? if present
					if idx := strings.IndexByte(language, '?'); idx >= 0 {
						language = language[:idx]
					}
					if len(language) != 0 {
						// group languages, such as Pug -> HTML; SCSS -> CSS
						group := enry.GetLanguageGroup(language)
						if len(group) != 0 {
							language = group
						}

						sizes[language] += f.Size()
						continue
					}
				}
			}
		}

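		// Skip paths that look vendored, dotfiles, documentation, or configuration,
		// honouring an explicit linguist-vendored=false override from above.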
		if (!notVendored && analyze.IsVendor(f.Name())) || enry.IsDotFile(f.Name()) ||
			enry.IsDocumentation(f.Name()) || enry.IsConfiguration(f.Name()) {
			continue
		}

		// If content cannot be read or the file is too big, just do detection by filename.
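		// bigFileSize and fileSizeLimit are package-level size limits: blobs larger
		// than bigFileSize are classified by filename alone, and at most
		// fileSizeLimit bytes of a blob are ever read.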
		if f.Size() <= bigFileSize {
			if err := writeID(f.ID.String()); err != nil {
				return nil, err
			}
			_, _, size, err := ReadBatchLine(batchReader)
			if err != nil {
				log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err)
				return nil, err
			}

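			// Always discard at least the newline that cat-file --batch appends after
			// the blob content; for oversized blobs, skip the unread tail as well.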
			sizeToRead := size
			discard := int64(1)
			if size > fileSizeLimit {
				sizeToRead = fileSizeLimit
				discard = size - fileSizeLimit + 1
			}

			_, err = contentBuf.ReadFrom(io.LimitReader(batchReader, sizeToRead))
			if err != nil {
				return nil, err
			}
			content = contentBuf.Bytes()
			err = discardFull(batchReader, discard)
			if err != nil {
				return nil, err
			}
		}
		if !notGenerated && enry.IsGenerated(f.Name(), content) {
			continue
		}

		// FIXME: Why can't we split this and the IsGenerated tests to avoid reading the blob unless absolutely necessary?
		// - e.g. do all the detection tests using the filename first, before reading content.
		language := analyze.GetCodeLanguage(f.Name(), content)
		if language == enry.OtherLanguage || language == "" {
			continue
		}

		// group languages, such as Pug -> HTML; SCSS -> CSS
		group := enry.GetLanguageGroup(language)
		if group != "" {
			language = group
		}

		sizes[language] += f.Size()
		continue
	}

	// filter special languages unless they are the only language
	if len(sizes) > 1 {
		for language := range sizes {
			langtype := enry.GetLanguageType(language)
			if langtype != enry.Programming && langtype != enry.Markup {
				delete(sizes, language)
			}
		}
	}

	return sizes, nil
}

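// discardFull reads and drops exactly discard bytes from rd. bufio.Reader.Discard
// takes an int, so counts above math.MaxInt32 are handled in more than one call.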
func discardFull(rd *bufio.Reader, discard int64) error {
	if discard > math.MaxInt32 {
		n, err := rd.Discard(math.MaxInt32)
		discard -= int64(n)
		if err != nil {
			return err
		}
	}
	for discard > 0 {
		n, err := rd.Discard(int(discard))
		discard -= int64(n)
		if err != nil {
			return err
		}
	}
	return nil
}