Initial commit, source code of Gitea 1.23.1
build/backport-locales.go (new file, 115 lines)
@@ -0,0 +1,115 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "fmt"
    "os"
    "os/exec"
    "path/filepath"
    "strings"

    "code.gitea.io/gitea/modules/container"
    "code.gitea.io/gitea/modules/setting"
)

func main() {
    if len(os.Args) != 2 {
        println("usage: backport-locales <to-ref>")
        println("eg: backport-locales release/v1.19")
        os.Exit(1)
    }

    mustNoErr := func(err error) {
        if err != nil {
            panic(err)
        }
    }
    collectInis := func(ref string) map[string]setting.ConfigProvider {
        inis := map[string]setting.ConfigProvider{}
        err := filepath.WalkDir("options/locale", func(path string, d os.DirEntry, err error) error {
            if err != nil {
                return err
            }
            if d.IsDir() || !strings.HasSuffix(d.Name(), ".ini") {
                return nil
            }
            cfg, err := setting.NewConfigProviderForLocale(path)
            mustNoErr(err)
            inis[path] = cfg
            fmt.Printf("collecting: %s @ %s\n", path, ref)
            return nil
        })
        mustNoErr(err)
        return inis
    }

    // collect new locales from current working directory
    inisNew := collectInis("HEAD")

    // switch to the target ref, and collect the old locales
    cmd := exec.Command("git", "checkout", os.Args[1])
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr
    mustNoErr(cmd.Run())
    inisOld := collectInis(os.Args[1])

    // use old en-US as the base, and copy the new translations to the old locales
    enUsOld := inisOld["options/locale/locale_en-US.ini"]
    brokenWarned := make(container.Set[string])
    for path, iniOld := range inisOld {
        if iniOld == enUsOld {
            continue
        }
        iniNew := inisNew[path]
        if iniNew == nil {
            continue
        }
        for _, secEnUS := range enUsOld.Sections() {
            secOld := iniOld.Section(secEnUS.Name())
            secNew := iniNew.Section(secEnUS.Name())
            for _, keyEnUs := range secEnUS.Keys() {
                if secNew.HasKey(keyEnUs.Name()) {
                    oldStr := secOld.Key(keyEnUs.Name()).String()
                    newStr := secNew.Key(keyEnUs.Name()).String()
                    broken := oldStr != "" && strings.Count(oldStr, "%") != strings.Count(newStr, "%")
                    broken = broken || strings.Contains(oldStr, "\n") || strings.Contains(newStr, "\n")
                    if broken {
                        brokenWarned.Add(secOld.Name() + "." + keyEnUs.Name())
                        fmt.Println("----")
                        fmt.Printf("WARNING: skip broken locale: %s , [%s] %s\n", path, secEnUS.Name(), keyEnUs.Name())
                        fmt.Printf("\told: %s\n", strings.ReplaceAll(oldStr, "\n", "\\n"))
                        fmt.Printf("\tnew: %s\n", strings.ReplaceAll(newStr, "\n", "\\n"))
                        continue
                    }
                    secOld.Key(keyEnUs.Name()).SetValue(newStr)
                }
            }
        }
        mustNoErr(iniOld.SaveTo(path))
    }

    fmt.Println("========")

    for path, iniNew := range inisNew {
        for _, sec := range iniNew.Sections() {
            for _, key := range sec.Keys() {
                str := sec.Key(key.Name()).String()
                broken := strings.Contains(str, "\n")
                broken = broken || strings.HasPrefix(str, "`") != strings.HasSuffix(str, "`")
                broken = broken || strings.HasPrefix(str, "\"`")
                broken = broken || strings.HasPrefix(str, "`\"")
                broken = broken || strings.Count(str, `"`)%2 == 1
                broken = broken || strings.Count(str, "`")%2 == 1
                if broken && !brokenWarned.Contains(sec.Name()+"."+key.Name()) {
                    fmt.Printf("WARNING: found broken locale: %s , [%s] %s\n", path, sec.Name(), key.Name())
                    fmt.Printf("\tstr: %s\n", strings.ReplaceAll(str, "\n", "\\n"))
                    fmt.Println("----")
                }
            }
        }
    }
}
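
For reference, a minimal invocation sketch based on the usage text above; the target ref is illustrative, and the tool must be run from the repository root because it walks options/locale and runs git checkout in place:

# assumes a clean working tree, since the tool checks out <to-ref> in the current checkout
go run build/backport-locales.go release/v1.19
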
build/code-batch-process.go (new file, 281 lines)
@@ -0,0 +1,281 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "fmt"
    "log"
    "os"
    "os/exec"
    "path/filepath"
    "regexp"
    "strconv"
    "strings"

    "code.gitea.io/gitea/build/codeformat"
)

// Windows has a limitation for command line arguments, the size can not exceed 32KB.
// So we have to feed the files to some tools (like gofmt) batch by batch

// We also introduce a `gitea-fmt` command, it does better import formatting than gofmt/goimports. `gitea-fmt` calls `gofmt` internally.

var optionLogVerbose bool

func logVerbose(msg string, args ...any) {
    if optionLogVerbose {
        log.Printf(msg, args...)
    }
}

func passThroughCmd(cmd string, args []string) error {
    foundCmd, err := exec.LookPath(cmd)
    if err != nil {
        log.Fatalf("can not find cmd: %s", cmd)
    }
    c := exec.Cmd{
        Path:   foundCmd,
        Args:   append([]string{cmd}, args...),
        Stdin:  os.Stdin,
        Stdout: os.Stdout,
        Stderr: os.Stderr,
    }
    return c.Run()
}

type fileCollector struct {
    dirs            []string
    includePatterns []*regexp.Regexp
    excludePatterns []*regexp.Regexp
    batchSize       int
}

func newFileCollector(fileFilter string, batchSize int) (*fileCollector, error) {
    co := &fileCollector{batchSize: batchSize}
    if fileFilter == "go-own" {
        co.dirs = []string{
            "build",
            "cmd",
            "contrib",
            "tests",
            "models",
            "modules",
            "routers",
            "services",
        }
        co.includePatterns = append(co.includePatterns, regexp.MustCompile(`.*\.go$`))

        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`.*\bbindata\.go$`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`\.pb\.go$`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`tests/gitea-repositories-meta`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`tests/integration/migration-test`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`modules/git/tests`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`models/fixtures`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`models/migrations/fixtures`))
        co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`services/gitdiff/testdata`))
    }

    if co.dirs == nil {
        return nil, fmt.Errorf("unknown file-filter: %s", fileFilter)
    }
    return co, nil
}

func (fc *fileCollector) matchPatterns(path string, regexps []*regexp.Regexp) bool {
    path = strings.ReplaceAll(path, "\\", "/")
    for _, re := range regexps {
        if re.MatchString(path) {
            return true
        }
    }
    return false
}

func (fc *fileCollector) collectFiles() (res [][]string, err error) {
    var batch []string
    for _, dir := range fc.dirs {
        err = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
            include := len(fc.includePatterns) == 0 || fc.matchPatterns(path, fc.includePatterns)
            exclude := fc.matchPatterns(path, fc.excludePatterns)
            process := include && !exclude
            if !process {
                if d.IsDir() {
                    if exclude {
                        logVerbose("exclude dir %s", path)
                        return filepath.SkipDir
                    }
                    // for a directory, if it is not excluded explicitly, we should walk into
                    return nil
                }
                // for a file, we skip it if it shouldn't be processed
                logVerbose("skip process %s", path)
                return nil
            }
            if d.IsDir() {
                // skip dir, we don't add dirs to the file list now
                return nil
            }
            if len(batch) >= fc.batchSize {
                res = append(res, batch)
                batch = nil
            }
            batch = append(batch, path)
            return nil
        })
        if err != nil {
            return nil, err
        }
    }
    res = append(res, batch)
    return res, nil
}

// substArgFiles expands the {file-list} to a real file list for commands
func substArgFiles(args, files []string) []string {
    for i, s := range args {
        if s == "{file-list}" {
            newArgs := append(args[:i], files...)
            newArgs = append(newArgs, args[i+1:]...)
            return newArgs
        }
    }
    return args
}

func exitWithCmdErrors(subCmd string, subArgs []string, cmdErrors []error) {
    for _, err := range cmdErrors {
        if err != nil {
            if exitError, ok := err.(*exec.ExitError); ok {
                exitCode := exitError.ExitCode()
                log.Printf("run command failed (code=%d): %s %v", exitCode, subCmd, subArgs)
                os.Exit(exitCode)
            } else {
                log.Fatalf("run command failed (err=%s) %s %v", err, subCmd, subArgs)
            }
        }
    }
}

func parseArgs() (mainOptions map[string]string, subCmd string, subArgs []string) {
    mainOptions = map[string]string{}
    for i := 1; i < len(os.Args); i++ {
        arg := os.Args[i]
        if arg == "" {
            break
        }
        if arg[0] == '-' {
            arg = strings.TrimPrefix(arg, "-")
            arg = strings.TrimPrefix(arg, "-")
            fields := strings.SplitN(arg, "=", 2)
            if len(fields) == 1 {
                mainOptions[fields[0]] = "1"
            } else {
                mainOptions[fields[0]] = fields[1]
            }
        } else {
            subCmd = arg
            subArgs = os.Args[i+1:]
            break
        }
    }
    return
}

func showUsage() {
    fmt.Printf(`Usage: %[1]s [options] {command} [arguments]

Options:
  --verbose
  --file-filter=go-own
  --batch-size=100

Commands:
  %[1]s gofmt ...

Arguments:
  {file-list}    the file list

Example:
  %[1]s gofmt -s -d {file-list}

`, "file-batch-exec")
}

func newFileCollectorFromMainOptions(mainOptions map[string]string) (fc *fileCollector, err error) {
    fileFilter := mainOptions["file-filter"]
    if fileFilter == "" {
        fileFilter = "go-own"
    }
    batchSize, _ := strconv.Atoi(mainOptions["batch-size"])
    if batchSize == 0 {
        batchSize = 100
    }

    return newFileCollector(fileFilter, batchSize)
}

func containsString(a []string, s string) bool {
    for _, v := range a {
        if v == s {
            return true
        }
    }
    return false
}

func giteaFormatGoImports(files []string, doWriteFile bool) error {
    for _, file := range files {
        if err := codeformat.FormatGoImports(file, doWriteFile); err != nil {
            log.Printf("failed to format go imports: %s, err=%v", file, err)
            return err
        }
    }
    return nil
}

func main() {
    mainOptions, subCmd, subArgs := parseArgs()
    if subCmd == "" {
        showUsage()
        os.Exit(1)
    }
    optionLogVerbose = mainOptions["verbose"] != ""

    fc, err := newFileCollectorFromMainOptions(mainOptions)
    if err != nil {
        log.Fatalf("can not create file collector: %s", err.Error())
    }

    fileBatches, err := fc.collectFiles()
    if err != nil {
        log.Fatalf("can not collect files: %s", err.Error())
    }

    processed := 0
    var cmdErrors []error
    for _, files := range fileBatches {
        if len(files) == 0 {
            break
        }
        substArgs := substArgFiles(subArgs, files)
        logVerbose("batch cmd: %s %v", subCmd, substArgs)
        switch subCmd {
        case "gitea-fmt":
            if containsString(subArgs, "-d") {
                log.Print("the -d option is not supported by gitea-fmt")
            }
            cmdErrors = append(cmdErrors, giteaFormatGoImports(files, containsString(subArgs, "-w")))
            cmdErrors = append(cmdErrors, passThroughCmd("gofmt", append([]string{"-w", "-r", "interface{} -> any"}, substArgs...)))
            cmdErrors = append(cmdErrors, passThroughCmd("go", append([]string{"run", os.Getenv("GOFUMPT_PACKAGE"), "-extra"}, substArgs...)))
        default:
            log.Fatalf("unknown cmd: %s %v", subCmd, subArgs)
        }
        processed += len(files)
    }

    logVerbose("processed %d files", processed)
    exitWithCmdErrors(subCmd, subArgs, cmdErrors)
}
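
A usage sketch for gitea-fmt, the only subcommand the switch in main handles; the literal {file-list} argument is expanded to each batch of collected files, and the GOFUMPT_PACKAGE value shown here is an assumption, not taken from this commit:

# format Gitea's own Go files in batches; -w writes changes back to disk
GOFUMPT_PACKAGE=mvdan.cc/gofumpt go run build/code-batch-process.go gitea-fmt -w '{file-list}'
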
build/codeformat/formatimports.go (new file, 195 lines)
@@ -0,0 +1,195 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package codeformat

import (
    "bytes"
    "errors"
    "io"
    "os"
    "sort"
    "strings"
)

var importPackageGroupOrders = map[string]int{
    "":                     1, // internal
    "code.gitea.io/gitea/": 2,
}

var errInvalidCommentBetweenImports = errors.New("comments between imported packages are invalid, please move comments to the end of the package line")

var (
    importBlockBegin = []byte("\nimport (\n")
    importBlockEnd   = []byte("\n)")
)

type importLineParsed struct {
    group   string
    pkg     string
    content string
}

func parseImportLine(line string) (*importLineParsed, error) {
    il := &importLineParsed{content: line}
    p1 := strings.IndexRune(line, '"')
    if p1 == -1 {
        return nil, errors.New("invalid import line: " + line)
    }
    p1++
    p := strings.IndexRune(line[p1:], '"')
    if p == -1 {
        return nil, errors.New("invalid import line: " + line)
    }
    p2 := p1 + p
    il.pkg = line[p1:p2]

    pDot := strings.IndexRune(il.pkg, '.')
    pSlash := strings.IndexRune(il.pkg, '/')
    if pDot != -1 && pDot < pSlash {
        il.group = "domain-package"
    }
    for groupName := range importPackageGroupOrders {
        if groupName == "" {
            continue // skip internal
        }
        if strings.HasPrefix(il.pkg, groupName) {
            il.group = groupName
        }
    }
    return il, nil
}

type (
    importLineGroup    []*importLineParsed
    importLineGroupMap map[string]importLineGroup
)

func formatGoImports(contentBytes []byte) ([]byte, error) {
    p1 := bytes.Index(contentBytes, importBlockBegin)
    if p1 == -1 {
        return nil, nil
    }
    p1 += len(importBlockBegin)
    p := bytes.Index(contentBytes[p1:], importBlockEnd)
    if p == -1 {
        return nil, nil
    }
    p2 := p1 + p

    importGroups := importLineGroupMap{}
    r := bytes.NewBuffer(contentBytes[p1:p2])
    eof := false
    for !eof {
        line, err := r.ReadString('\n')
        eof = err == io.EOF
        if err != nil && !eof {
            return nil, err
        }
        line = strings.TrimSpace(line)
        if line != "" {
            if strings.HasPrefix(line, "//") || strings.HasPrefix(line, "/*") {
                return nil, errInvalidCommentBetweenImports
            }
            importLine, err := parseImportLine(line)
            if err != nil {
                return nil, err
            }
            importGroups[importLine.group] = append(importGroups[importLine.group], importLine)
        }
    }

    var groupNames []string
    for groupName, importLines := range importGroups {
        groupNames = append(groupNames, groupName)
        sort.Slice(importLines, func(i, j int) bool {
            return strings.Compare(importLines[i].pkg, importLines[j].pkg) < 0
        })
    }

    sort.Slice(groupNames, func(i, j int) bool {
        n1 := groupNames[i]
        n2 := groupNames[j]
        o1 := importPackageGroupOrders[n1]
        o2 := importPackageGroupOrders[n2]
        if o1 != 0 && o2 != 0 {
            return o1 < o2
        }
        if o1 == 0 && o2 == 0 {
            return strings.Compare(n1, n2) < 0
        }
        return o1 != 0
    })

    formattedBlock := bytes.Buffer{}
    for _, groupName := range groupNames {
        hasNormalImports := false
        hasDummyImports := false
        // non-dummy import comes first
        for _, importLine := range importGroups[groupName] {
            if strings.HasPrefix(importLine.content, "_") {
                hasDummyImports = true
            } else {
                formattedBlock.WriteString("\t" + importLine.content + "\n")
                hasNormalImports = true
            }
        }
        // dummy (_ "pkg") comes later
        if hasDummyImports {
            if hasNormalImports {
                formattedBlock.WriteString("\n")
            }
            for _, importLine := range importGroups[groupName] {
                if strings.HasPrefix(importLine.content, "_") {
                    formattedBlock.WriteString("\t" + importLine.content + "\n")
                }
            }
        }
        formattedBlock.WriteString("\n")
    }
    formattedBlockBytes := bytes.TrimRight(formattedBlock.Bytes(), "\n")

    var formattedBytes []byte
    formattedBytes = append(formattedBytes, contentBytes[:p1]...)
    formattedBytes = append(formattedBytes, formattedBlockBytes...)
    formattedBytes = append(formattedBytes, contentBytes[p2:]...)
    return formattedBytes, nil
}

// FormatGoImports format the imports by our rules (see unit tests)
func FormatGoImports(file string, doWriteFile bool) error {
    f, err := os.Open(file)
    if err != nil {
        return err
    }
    var contentBytes []byte
    {
        defer f.Close()
        contentBytes, err = io.ReadAll(f)
        if err != nil {
            return err
        }
    }
    formattedBytes, err := formatGoImports(contentBytes)
    if err != nil {
        return err
    }
    if formattedBytes == nil {
        return nil
    }
    if bytes.Equal(contentBytes, formattedBytes) {
        return nil
    }

    if doWriteFile {
        f, err = os.OpenFile(file, os.O_TRUNC|os.O_WRONLY, 0o644)
        if err != nil {
            return err
        }
        defer f.Close()
        _, err = f.Write(formattedBytes)
        return err
    }

    return err
}
build/codeformat/formatimports_test.go (new file, 124 lines)
@@ -0,0 +1,124 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package codeformat

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestFormatImportsSimple(t *testing.T) {
    formatted, err := formatGoImports([]byte(`
package codeformat

import (
	"github.com/stretchr/testify/assert"
	"testing"
)
`))

    expected := `
package codeformat

import (
	"testing"

	"github.com/stretchr/testify/assert"
)
`

    assert.NoError(t, err)
    assert.Equal(t, expected, string(formatted))
}

func TestFormatImportsGroup(t *testing.T) {
    // gofmt/goimports won't group the packages, for example, they produce such code:
    //     "bytes"
    //     "image"
    //     (a blank line)
    //     "fmt"
    //     "image/color/palette"
    // our formatter does better, and these packages are grouped into one.

    formatted, err := formatGoImports([]byte(`
package test

import (
	"bytes"
	"fmt"
	"image"
	"image/color"

	_ "image/gif"  // for processing gif images
	_ "image/jpeg" // for processing jpeg images
	_ "image/png"  // for processing png images

	"code.gitea.io/other/package"

	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"

	"xorm.io/the/package"

	"github.com/issue9/identicon"
	"github.com/nfnt/resize"
	"github.com/oliamb/cutter"
)
`))

    expected := `
package test

import (
	"bytes"
	"fmt"
	"image"
	"image/color"

	_ "image/gif"  // for processing gif images
	_ "image/jpeg" // for processing jpeg images
	_ "image/png"  // for processing png images

	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"

	"code.gitea.io/other/package"
	"github.com/issue9/identicon"
	"github.com/nfnt/resize"
	"github.com/oliamb/cutter"
	"xorm.io/the/package"
)
`

    assert.NoError(t, err)
    assert.Equal(t, expected, string(formatted))
}

func TestFormatImportsInvalidComment(t *testing.T) {
    // why we shouldn't write comments between imports: it breaks the grouping of imports
    // for example:
    //    "pkg1"
    //    "pkg2"
    //    // a comment
    //    "pkgA"
    //    "pkgB"
    // the comment splits the packages into two groups, pkg1/2 are sorted separately, pkgA/B are sorted separately
    // we don't want such code, so the code should be:
    //    "pkg1"
    //    "pkg2"
    //    "pkgA" // a comment
    //    "pkgB"

    _, err := formatGoImports([]byte(`
package test

import (
	"image/jpeg"
	// for processing gif images
	"image/gif"
)
`))
    assert.ErrorIs(t, err, errInvalidCommentBetweenImports)
}
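
The tests above can be run on their own with the standard Go tooling:

# run only the import-formatting tests of the codeformat package
go test ./build/codeformat/ -run TestFormatImports -v
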
build/generate-bindata.go (new file, 92 lines)
@@ -0,0 +1,92 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "bytes"
    "crypto/sha1"
    "fmt"
    "log"
    "net/http"
    "os"
    "path/filepath"
    "strconv"

    "github.com/shurcooL/vfsgen"
)

func needsUpdate(dir, filename string) (bool, []byte) {
    needRegen := false
    _, err := os.Stat(filename)
    if err != nil {
        needRegen = true
    }

    oldHash, err := os.ReadFile(filename + ".hash")
    if err != nil {
        oldHash = []byte{}
    }

    hasher := sha1.New()

    err = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
        if err != nil {
            return err
        }
        info, err := d.Info()
        if err != nil {
            return err
        }
        _, _ = hasher.Write([]byte(d.Name()))
        _, _ = hasher.Write([]byte(info.ModTime().String()))
        _, _ = hasher.Write([]byte(strconv.FormatInt(info.Size(), 16)))
        return nil
    })
    if err != nil {
        return true, oldHash
    }

    newHash := hasher.Sum([]byte{})

    if bytes.Compare(oldHash, newHash) != 0 {
        return true, newHash
    }

    return needRegen, newHash
}

func main() {
    if len(os.Args) < 4 {
        log.Fatal("Insufficient number of arguments. Need: directory packageName filename")
    }

    dir, packageName, filename := os.Args[1], os.Args[2], os.Args[3]
    var useGlobalModTime bool
    if len(os.Args) == 5 {
        useGlobalModTime, _ = strconv.ParseBool(os.Args[4])
    }

    update, newHash := needsUpdate(dir, filename)

    if !update {
        fmt.Printf("bindata for %s already up-to-date\n", packageName)
        return
    }

    fmt.Printf("generating bindata for %s\n", packageName)
    var fsTemplates http.FileSystem = http.Dir(dir)
    err := vfsgen.Generate(fsTemplates, vfsgen.Options{
        PackageName:      packageName,
        BuildTags:        "bindata",
        VariableName:     "Assets",
        Filename:         filename,
        UseGlobalModTime: useGlobalModTime,
    })
    if err != nil {
        log.Fatalf("%v\n", err)
    }
    _ = os.WriteFile(filename+".hash", newHash, 0o666)
}
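
A sketch of how the generator might be invoked; the directory, package name and output path below are purely illustrative, only the argument order (directory packageName filename, plus an optional bool for UseGlobalModTime) comes from the code above:

# regenerate embedded assets when the directory content hash has changed
go run build/generate-bindata.go options options modules/options/bindata.go true
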
build/generate-emoji.go (new file, 219 lines)
@@ -0,0 +1,219 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright 2015 Kenneth Shaw
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "flag"
    "fmt"
    "go/format"
    "io"
    "log"
    "net/http"
    "os"
    "regexp"
    "sort"
    "strconv"
    "strings"
    "unicode/utf8"

    "code.gitea.io/gitea/modules/json"
)

const (
    gemojiURL         = "https://raw.githubusercontent.com/github/gemoji/master/db/emoji.json"
    maxUnicodeVersion = 15
)

var flagOut = flag.String("o", "modules/emoji/emoji_data.go", "out")

// Gemoji is a set of emoji data.
type Gemoji []Emoji

// Emoji represents a single emoji and associated data.
type Emoji struct {
    Emoji          string   `json:"emoji"`
    Description    string   `json:"description,omitempty"`
    Aliases        []string `json:"aliases"`
    UnicodeVersion string   `json:"unicode_version,omitempty"`
    SkinTones      bool     `json:"skin_tones,omitempty"`
}

// Don't include some fields in JSON
func (e Emoji) MarshalJSON() ([]byte, error) {
    type emoji Emoji
    x := emoji(e)
    x.UnicodeVersion = ""
    x.Description = ""
    x.SkinTones = false
    return json.Marshal(x)
}

func main() {
    flag.Parse()

    // generate data
    buf, err := generate()
    if err != nil {
        log.Fatalf("generate err: %v", err)
    }

    // write
    err = os.WriteFile(*flagOut, buf, 0o644)
    if err != nil {
        log.Fatalf("WriteFile err: %v", err)
    }
}

var replacer = strings.NewReplacer(
    "main.Gemoji", "Gemoji",
    "main.Emoji", "\n",
    "}}", "},\n}",
    ", Description:", ", ",
    ", Aliases:", ", ",
    ", UnicodeVersion:", ", ",
    ", SkinTones:", ", ",
)

var emojiRE = regexp.MustCompile(`\{Emoji:"([^"]*)"`)

func generate() ([]byte, error) {
    // load gemoji data
    res, err := http.Get(gemojiURL)
    if err != nil {
        return nil, err
    }
    defer res.Body.Close()

    // read all
    body, err := io.ReadAll(res.Body)
    if err != nil {
        return nil, err
    }

    // unmarshal
    var data Gemoji
    err = json.Unmarshal(body, &data)
    if err != nil {
        return nil, err
    }

    skinTones := make(map[string]string)

    skinTones["\U0001f3fb"] = "Light Skin Tone"
    skinTones["\U0001f3fc"] = "Medium-Light Skin Tone"
    skinTones["\U0001f3fd"] = "Medium Skin Tone"
    skinTones["\U0001f3fe"] = "Medium-Dark Skin Tone"
    skinTones["\U0001f3ff"] = "Dark Skin Tone"

    var tmp Gemoji

    // filter out emoji that require greater than max unicode version
    for i := range data {
        val, _ := strconv.ParseFloat(data[i].UnicodeVersion, 64)
        if int(val) <= maxUnicodeVersion {
            tmp = append(tmp, data[i])
        }
    }
    data = tmp

    sort.Slice(data, func(i, j int) bool {
        return data[i].Aliases[0] < data[j].Aliases[0]
    })

    aliasMap := make(map[string]int, len(data))

    for i, e := range data {
        if e.Emoji == "" || len(e.Aliases) == 0 {
            continue
        }
        for _, a := range e.Aliases {
            if a == "" {
                continue
            }
            aliasMap[a] = i
        }
    }

    // gitea customizations
    i, ok := aliasMap["tada"]
    if ok {
        data[i].Aliases = append(data[i].Aliases, "hooray")
    }
    i, ok = aliasMap["laughing"]
    if ok {
        data[i].Aliases = append(data[i].Aliases, "laugh")
    }

    // write a JSON file to use with tribute (write before adding skin tones since we can't support them there yet)
    file, _ := json.Marshal(data)
    _ = os.WriteFile("assets/emoji.json", file, 0o644)

    // Add skin tones to emoji that support it
    var (
        s              []string
        newEmoji       string
        newDescription string
        newData        Emoji
    )

    for i := range data {
        if data[i].SkinTones {
            for k, v := range skinTones {
                s = strings.Split(data[i].Emoji, "")

                if utf8.RuneCountInString(data[i].Emoji) == 1 {
                    s = append(s, k)
                } else {
                    // insert into slice after first element because all emoji that support skin tones
                    // have that modifier placed at this spot
                    s = append(s, "")
                    copy(s[2:], s[1:])
                    s[1] = k
                }

                newEmoji = strings.Join(s, "")
                newDescription = data[i].Description + ": " + v
                newAlias := data[i].Aliases[0] + "_" + strings.ReplaceAll(v, " ", "_")

                newData = Emoji{newEmoji, newDescription, []string{newAlias}, "12.0", false}
                data = append(data, newData)
            }
        }
    }

    sort.Slice(data, func(i, j int) bool {
        return data[i].Aliases[0] < data[j].Aliases[0]
    })

    // add header
    str := replacer.Replace(fmt.Sprintf(hdr, gemojiURL, data))

    // change the format of the unicode string
    str = emojiRE.ReplaceAllStringFunc(str, func(s string) string {
        var err error
        s, err = strconv.Unquote(s[len("{Emoji:"):])
        if err != nil {
            panic(err)
        }
        return "{" + strconv.QuoteToASCII(s)
    })

    // format
    return format.Source([]byte(str))
}

const hdr = `
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package emoji

// Code generated by build/generate-emoji.go. DO NOT EDIT.
// Sourced from %s
var GemojiData = %#v
`
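
The generator takes no required arguments; a plain run fetches the gemoji database and writes both outputs named in the code (the Go data file from the -o default, and assets/emoji.json):

# regenerate modules/emoji/emoji_data.go and assets/emoji.json from the repository root
go run build/generate-emoji.go
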
build/generate-gitignores.go (new file, 126 lines)
@@ -0,0 +1,126 @@
//go:build ignore

package main

import (
    "archive/tar"
    "compress/gzip"
    "flag"
    "fmt"
    "io"
    "log"
    "net/http"
    "os"
    "path"
    "path/filepath"
    "strings"

    "code.gitea.io/gitea/modules/util"
)

func main() {
    var (
        prefix         = "gitea-gitignore"
        url            = "https://api.github.com/repos/github/gitignore/tarball"
        githubApiToken = ""
        githubUsername = ""
        destination    = ""
    )

    flag.StringVar(&destination, "dest", "options/gitignore/", "destination for the gitignores")
    flag.StringVar(&githubUsername, "username", "", "github username")
    flag.StringVar(&githubApiToken, "token", "", "github api token")
    flag.Parse()

    file, err := os.CreateTemp(os.TempDir(), prefix)
    if err != nil {
        log.Fatalf("Failed to create temp file. %s", err)
    }

    defer util.Remove(file.Name())

    req, err := http.NewRequest("GET", url, nil)
    if err != nil {
        log.Fatalf("Failed to download archive. %s", err)
    }

    if len(githubApiToken) > 0 && len(githubUsername) > 0 {
        req.SetBasicAuth(githubUsername, githubApiToken)
    }

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        log.Fatalf("Failed to download archive. %s", err)
    }
    defer resp.Body.Close()

    if _, err := io.Copy(file, resp.Body); err != nil {
        log.Fatalf("Failed to copy archive to file. %s", err)
    }

    if _, err := file.Seek(0, 0); err != nil {
        log.Fatalf("Failed to reset seek on archive. %s", err)
    }

    gz, err := gzip.NewReader(file)
    if err != nil {
        log.Fatalf("Failed to gunzip the archive. %s", err)
    }

    tr := tar.NewReader(gz)

    filesToCopy := make(map[string]string, 0)

    for {
        hdr, err := tr.Next()

        if err == io.EOF {
            break
        }

        if err != nil {
            log.Fatalf("Failed to iterate archive. %s", err)
        }

        if filepath.Ext(hdr.Name) != ".gitignore" {
            continue
        }

        if hdr.Typeflag == tar.TypeSymlink {
            fmt.Printf("Found symlink %s -> %s\n", hdr.Name, hdr.Linkname)
            filesToCopy[strings.TrimSuffix(filepath.Base(hdr.Name), ".gitignore")] = strings.TrimSuffix(filepath.Base(hdr.Linkname), ".gitignore")
            continue
        }

        out, err := os.Create(path.Join(destination, strings.TrimSuffix(filepath.Base(hdr.Name), ".gitignore")))
        if err != nil {
            log.Fatalf("Failed to create new file. %s", err)
        }

        defer out.Close()

        if _, err := io.Copy(out, tr); err != nil {
            log.Fatalf("Failed to write new file. %s", err)
        } else {
            fmt.Printf("Written %s\n", out.Name())
        }
    }

    for dst, src := range filesToCopy {
        // Read all content of src to data
        src = path.Join(destination, src)
        data, err := os.ReadFile(src)
        if err != nil {
            log.Fatalf("Failed to read src file. %s", err)
        }
        // Write data to dst
        dst = path.Join(destination, dst)
        err = os.WriteFile(dst, data, 0o644)
        if err != nil {
            log.Fatalf("Failed to write new file. %s", err)
        }
        fmt.Printf("Written (copy of %s) %s\n", src, dst)
    }

    fmt.Println("Done")
}
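
A usage sketch with the flags defined above; the credentials are optional and only used for authenticated GitHub API requests, and the values shown are placeholders:

# download github/gitignore and unpack the .gitignore templates into options/gitignore/
go run build/generate-gitignores.go -dest options/gitignore/ -username someuser -token sometoken
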
build/generate-go-licenses.go (new file, 118 lines)
@@ -0,0 +1,118 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "encoding/json"
    "fmt"
    "io/fs"
    "os"
    "path"
    "path/filepath"
    "regexp"
    "sort"
    "strings"

    "code.gitea.io/gitea/modules/container"
)

// regexp is based on go-license, excluding README and NOTICE
// https://github.com/google/go-licenses/blob/master/licenses/find.go
var licenseRe = regexp.MustCompile(`^(?i)((UN)?LICEN(S|C)E|COPYING).*$`)

type LicenseEntry struct {
    Name        string `json:"name"`
    Path        string `json:"path"`
    LicenseText string `json:"licenseText"`
}

func main() {
    if len(os.Args) != 3 {
        fmt.Println("usage: go run generate-go-licenses.go <base-dir> <out-json-file>")
        os.Exit(1)
    }

    base, out := os.Args[1], os.Args[2]

    // Add ext for excluded files because license_test.go will be included for some reason.
    // And there are more files that should be excluded, check with:
    //
    //   go run github.com/google/go-licenses@v1.6.0 save . --force --save_path=.go-licenses 2>/dev/null
    //   find .go-licenses -type f | while read FILE; do echo "${$(basename $FILE)##*.}"; done | sort -u
    //   AUTHORS
    //   COPYING
    //   LICENSE
    //   Makefile
    //   NOTICE
    //   gitignore
    //   go
    //   md
    //   mod
    //   sum
    //   toml
    //   txt
    //   yml
    //
    // It could be removed once we have a better regex.
    excludedExt := container.SetOf(".gitignore", ".go", ".mod", ".sum", ".toml", ".yml")

    var paths []string
    err := filepath.WalkDir(base, func(path string, entry fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        if entry.IsDir() || !licenseRe.MatchString(entry.Name()) || excludedExt.Contains(filepath.Ext(entry.Name())) {
            return nil
        }
        paths = append(paths, path)
        return nil
    })
    if err != nil {
        panic(err)
    }

    sort.Strings(paths)

    var entries []LicenseEntry
    for _, filePath := range paths {
        licenseText, err := os.ReadFile(filePath)
        if err != nil {
            panic(err)
        }

        pkgPath := filepath.ToSlash(filePath)
        pkgPath = strings.TrimPrefix(pkgPath, base+"/")
        pkgName := path.Dir(pkgPath)

        // There might be a bug somewhere in go-licenses that sometimes interprets the
        // root package as "." and sometimes as "code.gitea.io/gitea". Workaround by
        // removing both of them for the sake of stable output.
        if pkgName == "." || pkgName == "code.gitea.io/gitea" {
            continue
        }

        entries = append(entries, LicenseEntry{
            Name:        pkgName,
            Path:        pkgPath,
            LicenseText: string(licenseText),
        })
    }

    jsonBytes, err := json.MarshalIndent(entries, "", " ")
    if err != nil {
        panic(err)
    }

    // Ensure file has a final newline
    if jsonBytes[len(jsonBytes)-1] != '\n' {
        jsonBytes = append(jsonBytes, '\n')
    }

    err = os.WriteFile(out, jsonBytes, 0o644)
    if err != nil {
        panic(err)
    }
}
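
A sketch of the two-step flow suggested by the comment in main; the output path below is illustrative:

# 1. vendor dependency license files with go-licenses
go run github.com/google/go-licenses@v1.6.0 save . --force --save_path=.go-licenses
# 2. collect them into a single JSON file
go run build/generate-go-licenses.go .go-licenses assets/go-licenses.json
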
build/generate-licenses.go (new file, 176 lines)
@@ -0,0 +1,176 @@
// Copyright 2017 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "archive/tar"
    "compress/gzip"
    "crypto/md5"
    "encoding/hex"
    "flag"
    "fmt"
    "io"
    "log"
    "net/http"
    "os"
    "path"
    "path/filepath"
    "strings"

    "code.gitea.io/gitea/build/license"
    "code.gitea.io/gitea/modules/json"
    "code.gitea.io/gitea/modules/util"
)

func main() {
    var (
        prefix         = "gitea-licenses"
        url            = "https://api.github.com/repos/spdx/license-list-data/tarball"
        githubApiToken = ""
        githubUsername = ""
        destination    = ""
    )

    flag.StringVar(&destination, "dest", "options/license/", "destination for the licenses")
    flag.StringVar(&githubUsername, "username", "", "github username")
    flag.StringVar(&githubApiToken, "token", "", "github api token")
    flag.Parse()

    file, err := os.CreateTemp(os.TempDir(), prefix)
    if err != nil {
        log.Fatalf("Failed to create temp file. %s", err)
    }

    defer util.Remove(file.Name())

    if err := os.RemoveAll(destination); err != nil {
        log.Fatalf("Cannot clean destination folder: %v", err)
    }

    if err := os.MkdirAll(destination, 0o755); err != nil {
        log.Fatalf("Cannot create destination: %v", err)
    }

    req, err := http.NewRequest("GET", url, nil)
    if err != nil {
        log.Fatalf("Failed to download archive. %s", err)
    }

    if len(githubApiToken) > 0 && len(githubUsername) > 0 {
        req.SetBasicAuth(githubUsername, githubApiToken)
    }

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        log.Fatalf("Failed to download archive. %s", err)
    }

    defer resp.Body.Close()

    if _, err := io.Copy(file, resp.Body); err != nil {
        log.Fatalf("Failed to copy archive to file. %s", err)
    }

    if _, err := file.Seek(0, 0); err != nil {
        log.Fatalf("Failed to reset seek on archive. %s", err)
    }

    gz, err := gzip.NewReader(file)
    if err != nil {
        log.Fatalf("Failed to gunzip the archive. %s", err)
    }

    tr := tar.NewReader(gz)
    aliasesFiles := make(map[string][]string)
    for {
        hdr, err := tr.Next()

        if err == io.EOF {
            break
        }

        if err != nil {
            log.Fatalf("Failed to iterate archive. %s", err)
        }

        if !strings.Contains(hdr.Name, "/text/") {
            continue
        }

        if filepath.Ext(hdr.Name) != ".txt" {
            continue
        }

        fileBaseName := filepath.Base(hdr.Name)
        licenseName := strings.TrimSuffix(fileBaseName, ".txt")

        if strings.HasPrefix(fileBaseName, "README") {
            continue
        }

        if strings.HasPrefix(fileBaseName, "deprecated_") {
            continue
        }
        out, err := os.Create(path.Join(destination, licenseName))
        if err != nil {
            log.Fatalf("Failed to create new file. %s", err)
        }

        defer out.Close()

        // some license files have same content, so we need to detect these files and create a convert map into a json file
        // Later we use this convert map to avoid adding same license content with different license name
        h := md5.New()
        // calculate md5 and write file in the same time
        r := io.TeeReader(tr, h)
        if _, err := io.Copy(out, r); err != nil {
            log.Fatalf("Failed to write new file. %s", err)
        } else {
            fmt.Printf("Written %s\n", out.Name())

            md5 := hex.EncodeToString(h.Sum(nil))
            aliasesFiles[md5] = append(aliasesFiles[md5], licenseName)
        }
    }

    // generate convert license name map
    licenseAliases := make(map[string]string)
    for _, fileNames := range aliasesFiles {
        if len(fileNames) > 1 {
            licenseName := license.GetLicenseNameFromAliases(fileNames)
            if licenseName == "" {
                // license name should not be empty as expected
                // if it is empty, we need to rewrite the logic of GetLicenseNameFromAliases
                log.Fatalf("GetLicenseNameFromAliases: license name is empty")
            }
            for _, fileName := range fileNames {
                licenseAliases[fileName] = licenseName
            }
        }
    }
    // save convert license name map to file
    b, err := json.Marshal(licenseAliases)
    if err != nil {
        log.Fatalf("Failed to create json bytes. %s", err)
    }

    licenseAliasesDestination := filepath.Join(destination, "etc", "license-aliases.json")
    if err := os.MkdirAll(filepath.Dir(licenseAliasesDestination), 0o755); err != nil {
        log.Fatalf("Failed to create directory for license aliases json file. %s", err)
    }

    f, err := os.Create(licenseAliasesDestination)
    if err != nil {
        log.Fatalf("Failed to create license aliases json file. %s", err)
    }
    defer f.Close()

    if _, err = f.Write(b); err != nil {
        log.Fatalf("Failed to write license aliases json file. %s", err)
    }

    fmt.Println("Done")
}
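
A usage sketch mirroring generate-gitignores.go; note that the destination directory is wiped and recreated before the download:

# refresh options/license/ (and its etc/license-aliases.json) from the SPDX license-list-data tarball
go run build/generate-licenses.go -dest options/license/
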
build/gocovmerge.go (new file, 118 lines)
@@ -0,0 +1,118 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright (c) 2015, Wade Simmons
// SPDX-License-Identifier: MIT

// gocovmerge takes the results from multiple `go test -coverprofile` runs and
// merges them into one profile

//go:build ignore

package main

import (
    "flag"
    "fmt"
    "io"
    "log"
    "os"
    "sort"

    "golang.org/x/tools/cover"
)

func mergeProfiles(p, merge *cover.Profile) {
    if p.Mode != merge.Mode {
        log.Fatalf("cannot merge profiles with different modes")
    }
    // Since the blocks are sorted, we can keep track of where the last block
    // was inserted and only look at the blocks after that as targets for merge
    startIndex := 0
    for _, b := range merge.Blocks {
        startIndex = mergeProfileBlock(p, b, startIndex)
    }
}

func mergeProfileBlock(p *cover.Profile, pb cover.ProfileBlock, startIndex int) int {
    sortFunc := func(i int) bool {
        pi := p.Blocks[i+startIndex]
        return pi.StartLine >= pb.StartLine && (pi.StartLine != pb.StartLine || pi.StartCol >= pb.StartCol)
    }

    i := 0
    if !sortFunc(i) {
        i = sort.Search(len(p.Blocks)-startIndex, sortFunc)
    }
    i += startIndex
    if i < len(p.Blocks) && p.Blocks[i].StartLine == pb.StartLine && p.Blocks[i].StartCol == pb.StartCol {
        if p.Blocks[i].EndLine != pb.EndLine || p.Blocks[i].EndCol != pb.EndCol {
            log.Fatalf("OVERLAP MERGE: %v %v %v", p.FileName, p.Blocks[i], pb)
        }
        switch p.Mode {
        case "set":
            p.Blocks[i].Count |= pb.Count
        case "count", "atomic":
            p.Blocks[i].Count += pb.Count
        default:
            log.Fatalf("unsupported covermode: '%s'", p.Mode)
        }
    } else {
        if i > 0 {
            pa := p.Blocks[i-1]
            if pa.EndLine >= pb.EndLine && (pa.EndLine != pb.EndLine || pa.EndCol > pb.EndCol) {
                log.Fatalf("OVERLAP BEFORE: %v %v %v", p.FileName, pa, pb)
            }
        }
        if i < len(p.Blocks)-1 {
            pa := p.Blocks[i+1]
            if pa.StartLine <= pb.StartLine && (pa.StartLine != pb.StartLine || pa.StartCol < pb.StartCol) {
                log.Fatalf("OVERLAP AFTER: %v %v %v", p.FileName, pa, pb)
            }
        }
        p.Blocks = append(p.Blocks, cover.ProfileBlock{})
        copy(p.Blocks[i+1:], p.Blocks[i:])
        p.Blocks[i] = pb
    }
    return i + 1
}

func addProfile(profiles []*cover.Profile, p *cover.Profile) []*cover.Profile {
    i := sort.Search(len(profiles), func(i int) bool { return profiles[i].FileName >= p.FileName })
    if i < len(profiles) && profiles[i].FileName == p.FileName {
        mergeProfiles(profiles[i], p)
    } else {
        profiles = append(profiles, nil)
        copy(profiles[i+1:], profiles[i:])
        profiles[i] = p
    }
    return profiles
}

func dumpProfiles(profiles []*cover.Profile, out io.Writer) {
    if len(profiles) == 0 {
        return
    }
    fmt.Fprintf(out, "mode: %s\n", profiles[0].Mode)
    for _, p := range profiles {
        for _, b := range p.Blocks {
            fmt.Fprintf(out, "%s:%d.%d,%d.%d %d %d\n", p.FileName, b.StartLine, b.StartCol, b.EndLine, b.EndCol, b.NumStmt, b.Count)
        }
    }
}

func main() {
    flag.Parse()

    var merged []*cover.Profile

    for _, file := range flag.Args() {
        profiles, err := cover.ParseProfiles(file)
        if err != nil {
            log.Fatalf("failed to parse profile '%s': %v", file, err)
        }
        for _, p := range profiles {
            merged = addProfile(merged, p)
        }
    }

    dumpProfiles(merged, os.Stdout)
}
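
A minimal invocation sketch; the input file names are illustrative, and the merged profile is written to stdout:

# merge two coverprofile outputs into one file
go run build/gocovmerge.go unit.coverage.out integration.coverage.out > coverage.all.out
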
build/license/aliasgenerator.go (new file, 41 lines)
@@ -0,0 +1,41 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package license

import "strings"

// GetLicenseNameFromAliases returns the longest dash-separated prefix of the
// shortest alias that is shared by all alias file names, or "" if there is none.
func GetLicenseNameFromAliases(fnl []string) string {
    if len(fnl) == 0 {
        return ""
    }

    shortestItem := func(list []string) string {
        s := list[0]
        for _, l := range list[1:] {
            if len(l) < len(s) {
                s = l
            }
        }
        return s
    }
    allHasPrefix := func(list []string, s string) bool {
        for _, l := range list {
            if !strings.HasPrefix(l, s) {
                return false
            }
        }
        return true
    }

    sl := shortestItem(fnl)
    slv := strings.Split(sl, "-")
    var result string
    for i := len(slv); i >= 0; i-- {
        result = strings.Join(slv[:i], "-")
        if allHasPrefix(fnl, result) {
            return result
        }
    }
    return ""
}
build/license/aliasgenerator_test.go (new file, 39 lines)
@@ -0,0 +1,39 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package license

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestGetLicenseNameFromAliases(t *testing.T) {
    tests := []struct {
        target string
        inputs []string
    }{
        {
            // real case which you can find in license-aliases.json
            target: "AGPL-1.0",
            inputs: []string{
                "AGPL-1.0-only",
                "AGPL-1.0-or-late",
            },
        },
        {
            target: "",
            inputs: []string{
                "APSL-1.0",
                "AGPL-1.0-only",
                "AGPL-1.0-or-late",
            },
        },
    }

    for _, tt := range tests {
        result := GetLicenseNameFromAliases(tt.inputs)
        assert.Equal(t, result, tt.target)
    }
}
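
These table-driven tests run with the usual tooling:

# run the license alias tests only
go test ./build/license/ -run TestGetLicenseNameFromAliases -v
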
build/test-echo.go (new file, 20 lines)
@@ -0,0 +1,20 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build ignore

package main

import (
    "fmt"
    "io"
    "os"
)

func main() {
    _, err := io.Copy(os.Stdout, os.Stdin)
    if err != nil {
        fmt.Fprintf(os.Stderr, "Error: %v", err)
        os.Exit(1)
    }
}
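
A quick sanity check of the helper (it simply copies stdin to stdout):

# should print "hello"
echo "hello" | go run build/test-echo.go
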
build/test-env-check.sh (new file, 24 lines)
@@ -0,0 +1,24 @@
#!/bin/sh

set -e

if [ ! -f ./build/test-env-check.sh ]; then
  echo "${0} can only be executed in gitea source root directory"
  exit 1
fi


echo "check uid ..."

# the uid of gitea defined in "https://gitea.com/gitea/test-env" is 1000
gitea_uid=$(id -u gitea)
if [ "$gitea_uid" != "1000" ]; then
  echo "The uid of linux user 'gitea' is expected to be 1000, but it is $gitea_uid"
  exit 1
fi

cur_uid=$(id -u)
if [ "$cur_uid" != "0" -a "$cur_uid" != "$gitea_uid" ]; then
  echo "The uid of current linux user is expected to be 0 or $gitea_uid, but it is $cur_uid"
  exit 1
fi
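
The check is meant to run from the source root, presumably inside the referenced test-env container image (an assumption based on the comment above):

# verify the gitea user's uid and the current uid before running the test suite
sh ./build/test-env-check.sh
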
build/test-env-prepare.sh (new file, 11 lines)
@@ -0,0 +1,11 @@
#!/bin/sh

set -e

if [ ! -f ./build/test-env-prepare.sh ]; then
  echo "${0} can only be executed in gitea source root directory"
  exit 1
fi

echo "change the owner of files to gitea ..."
chown -R gitea:gitea .
build/update-locales.sh (new file, 52 lines)
@@ -0,0 +1,52 @@
#!/bin/sh

# this script runs in alpine image which only has `sh` shell

set +e
if sed --version 2>/dev/null | grep -q GNU; then
  SED_INPLACE="sed -i"
else
  SED_INPLACE="sed -i ''"
fi
set -e

if [ ! -f ./options/locale/locale_en-US.ini ]; then
  echo "please run this script in the root directory of the project"
  exit 1
fi

mv ./options/locale/locale_en-US.ini ./options/

# the "ini" library for locale has many quirks, its behavior is different from Crowdin.
# see i18n_test.go for more details

# this script helps to unquote the Crowdin outputs for the quirky ini library
# * find all `key="...\"..."` lines
# * remove the leading quote
# * remove the trailing quote
# * unescape the quotes
# * eg: key="...\"..." => key=..."...
$SED_INPLACE -r -e '/^[-.A-Za-z0-9_]+[ ]*=[ ]*".*"$/ {
  s/^([-.A-Za-z0-9_]+)[ ]*=[ ]*"/\1=/
  s/"$//
  s/\\"/"/g
}' ./options/locale/*.ini

# * if the escaped line is incomplete like `key="...` or `key=..."`, quote it with backticks
# * eg: key="... => key=`"...`
# * eg: key=..." => key=`..."`
$SED_INPLACE -r -e 's/^([-.A-Za-z0-9_]+)[ ]*=[ ]*(".*[^"])$/\1=`\2`/' ./options/locale/*.ini
$SED_INPLACE -r -e 's/^([-.A-Za-z0-9_]+)[ ]*=[ ]*([^"].*")$/\1=`\2`/' ./options/locale/*.ini

# Remove translation under 25% of en_us
baselines=$(wc -l "./options/locale_en-US.ini" | cut -d" " -f1)
baselines=$((baselines / 4))
for filename in ./options/locale/*.ini; do
  lines=$(wc -l "$filename" | cut -d" " -f1)
  if [ $lines -lt $baselines ]; then
    echo "Removing $filename: $lines/$baselines"
    rm "$filename"
  fi
done

mv ./options/locale_en-US.ini ./options/locale/
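
A usage sketch: the script expects to run from the project root (it checks for options/locale/locale_en-US.ini) after the downloaded Crowdin translations have been placed in options/locale/:

# normalize the downloaded locale files and drop translations below 25% of en-US
sh ./build/update-locales.sh
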