[CHORE] Drop go-git support
See https://codeberg.org/forgejo/discussions/issues/164 for the rationale and discussion of this change. Everything related to the `go-git` dependency is dropped (only a single instance is left, in a test file that tests for an XSS; it requires crafting a commit that Git itself refuses to craft). `_gogit` files have been removed entirely, the `//go:build !gogit` constraint is removed, and `XXX_nogogit.go` files have either been renamed or had their code merged into the corresponding `XXX.go` file.
parent 4132b18e59
commit a21128a734
70 changed files with 1632 additions and 4069 deletions
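For context on the build-constraint removal mentioned in the commit message, here is a minimal sketch of the pattern being dropped (file names are illustrative; only the constraint syntax itself is taken from the change):

```go
//go:build !gogit

// Illustrative sketch only. Before this change a feature could ship as a pair
// of files selected at compile time, e.g. language_stats_gogit.go (built with
// -tags gogit, backed by the go-git library) and language_stats_nogogit.go
// (the default, shelling out to the git binary). Dropping go-git deletes the
// gogit variant, removes this constraint line, and leaves the remaining code
// in (or merged into) the plain language_stats.go file.
package git
```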
@@ -4,8 +4,17 @@
package git

import (
	"bytes"
	"cmp"
	"io"
	"strings"
	"unicode"

	"code.gitea.io/gitea/modules/analyze"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/optional"

	"github.com/go-enry/go-enry/v2"
)

const (
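The next hunk reimplements `GetLanguageStats` directly on top of a long-lived `git cat-file --batch` process (via `repo.CatFileBatch`, `ReadBatchLine` and `DiscardFull`). For reference, a stand-alone sketch of that batch protocol, assuming only that `git` is on the PATH and the working directory is inside a repository with at least one commit; this is not Forgejo code:

```go
package main

import (
	"bufio"
	"fmt"
	"io"
	"os/exec"
	"strconv"
	"strings"
)

func main() {
	// One long-lived batch process; object names go in on stdin, one per line.
	cmd := exec.Command("git", "cat-file", "--batch")
	stdin, _ := cmd.StdinPipe()
	stdout, _ := cmd.StdoutPipe()
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	reader := bufio.NewReader(stdout)

	// Each request is answered with "<sha> <type> <size>\n", followed by
	// <size> bytes of object content and a single '\n' terminator.
	fmt.Fprintln(stdin, "HEAD")
	header, err := reader.ReadString('\n')
	if err != nil {
		panic(err)
	}
	fields := strings.Fields(header)
	size, _ := strconv.ParseInt(fields[2], 10, 64)
	fmt.Printf("%s is a %s of %d bytes\n", fields[0], fields[1], size)

	// Read the body and drop the terminator; this is what ReadBatchLine and
	// DiscardFull take care of in the code below.
	body := make([]byte, size)
	if _, err := io.ReadFull(reader, body); err != nil {
		panic(err)
	}
	if _, err := reader.Discard(1); err != nil {
		panic(err)
	}

	stdin.Close()
	_ = cmd.Wait()
}
```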
@@ -46,3 +55,194 @@ func mergeLanguageStats(stats map[string]int64) map[string]int64 {
	}
	return res
}

// GetLanguageStats calculates language stats for git repository at specified commit
func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, error) {
	// We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary.
	// so let's create a batch stdin and stdout
	batchStdinWriter, batchReader, cancel := repo.CatFileBatch(repo.Ctx)
	defer cancel()

	writeID := func(id string) error {
		_, err := batchStdinWriter.Write([]byte(id + "\n"))
		return err
	}

	if err := writeID(commitID); err != nil {
		return nil, err
	}
	shaBytes, typ, size, err := ReadBatchLine(batchReader)
	if typ != "commit" {
		log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, ErrNotExist{commitID, ""}
	}

	sha, err := NewIDFromString(string(shaBytes))
	if err != nil {
		log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, ErrNotExist{commitID, ""}
	}

	commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size))
	if err != nil {
		log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, err
	}
	if _, err = batchReader.Discard(1); err != nil {
		return nil, err
	}

	tree := commit.Tree

	entries, err := tree.ListEntriesRecursiveWithSize()
	if err != nil {
		return nil, err
	}

	checker, err := repo.GitAttributeChecker(commitID, LinguistAttributes...)
	if err != nil {
		return nil, err
	}
	defer checker.Close()

	contentBuf := bytes.Buffer{}
	var content []byte

	// sizes contains the current calculated size of all files by language
	sizes := make(map[string]int64)
	// by default we will only count the sizes of programming languages or markup languages
	// unless they are explicitly set using linguist-language
	includedLanguage := map[string]bool{}
	// or if there's only one language in the repository
	firstExcludedLanguage := ""
	firstExcludedLanguageSize := int64(0)

	isTrue := func(v optional.Option[bool]) bool {
		return v.ValueOrDefault(false)
	}
	isFalse := func(v optional.Option[bool]) bool {
		return !v.ValueOrDefault(true)
	}

	for _, f := range entries {
		select {
		case <-repo.Ctx.Done():
			return sizes, repo.Ctx.Err()
		default:
		}

		contentBuf.Reset()
		content = contentBuf.Bytes()

		if f.Size() == 0 {
			continue
		}

		isVendored := optional.None[bool]()
		isGenerated := optional.None[bool]()
		isDocumentation := optional.None[bool]()
		isDetectable := optional.None[bool]()

		attrs, err := checker.CheckPath(f.Name())
		if err == nil {
			isVendored = attrs["linguist-vendored"].Bool()
			isGenerated = attrs["linguist-generated"].Bool()
			isDocumentation = attrs["linguist-documentation"].Bool()
			isDetectable = attrs["linguist-detectable"].Bool()
			if language := cmp.Or(
				attrs["linguist-language"].String(),
				attrs["gitlab-language"].Prefix(),
			); language != "" {
				// group languages, such as Pug -> HTML; SCSS -> CSS
				group := enry.GetLanguageGroup(language)
				if len(group) != 0 {
					language = group
				}

				// this language will always be added to the size
				sizes[language] += f.Size()
				continue
			}
		}

		if isFalse(isDetectable) || isTrue(isVendored) || isTrue(isDocumentation) ||
			(!isFalse(isVendored) && analyze.IsVendor(f.Name())) ||
			enry.IsDotFile(f.Name()) ||
			enry.IsConfiguration(f.Name()) ||
			(!isFalse(isDocumentation) && enry.IsDocumentation(f.Name())) {
			continue
		}

		// If content can not be read or file is too big just do detection by filename

		if f.Size() <= bigFileSize {
			if err := writeID(f.ID.String()); err != nil {
				return nil, err
			}
			_, _, size, err := ReadBatchLine(batchReader)
			if err != nil {
				log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err)
				return nil, err
			}

			sizeToRead := size
			discard := int64(1)
			if size > fileSizeLimit {
				sizeToRead = fileSizeLimit
				discard = size - fileSizeLimit + 1
			}

			_, err = contentBuf.ReadFrom(io.LimitReader(batchReader, sizeToRead))
			if err != nil {
				return nil, err
			}
			content = contentBuf.Bytes()
			if err := DiscardFull(batchReader, discard); err != nil {
				return nil, err
			}
		}
		if !isTrue(isGenerated) && enry.IsGenerated(f.Name(), content) {
			continue
		}

		// FIXME: Why can't we split this and the IsGenerated tests to avoid reading the blob unless absolutely necessary?
		// - eg. do the all the detection tests using filename first before reading content.
		language := analyze.GetCodeLanguage(f.Name(), content)
		if language == "" {
			continue
		}

		// group languages, such as Pug -> HTML; SCSS -> CSS
		group := enry.GetLanguageGroup(language)
		if group != "" {
			language = group
		}

		included, checked := includedLanguage[language]
		langType := enry.GetLanguageType(language)
		if !checked {
			included = langType == enry.Programming || langType == enry.Markup
			if !included && (isTrue(isDetectable) || (langType == enry.Prose && isFalse(isDocumentation))) {
				included = true
			}
			includedLanguage[language] = included
		}
		if included {
			sizes[language] += f.Size()
		} else if len(sizes) == 0 && (firstExcludedLanguage == "" || firstExcludedLanguage == language) {
			// Only consider Programming or Markup languages as fallback
			if !(langType == enry.Programming || langType == enry.Markup) {
				continue
			}
			firstExcludedLanguage = language
			firstExcludedLanguageSize += f.Size()
		}
	}

	// If there are no included languages add the first excluded language
	if len(sizes) == 0 && firstExcludedLanguage != "" {
		sizes[firstExcludedLanguage] = firstExcludedLanguageSize
	}

	return mergeLanguageStats(sizes), nil
}
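A minimal caller sketch for the function added above, assuming the `OpenRepository` helper from the same module; the repository path and commit SHA are placeholders, and the snippet is illustrative rather than part of this commit:

```go
package main

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	ctx := context.Background()

	// Placeholders: point at a real repository on disk and a full commit SHA.
	repo, err := git.OpenRepository(ctx, "/path/to/repo.git")
	if err != nil {
		panic(err)
	}
	defer repo.Close()

	stats, err := repo.GetLanguageStats("0123456789abcdef0123456789abcdef01234567")
	if err != nil {
		panic(err)
	}

	// GetLanguageStats reports cumulative byte counts per detected language.
	for lang, size := range stats {
		fmt.Printf("%-12s %d bytes\n", lang, size)
	}
}
```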