Merge pull request '20230218 version' (#6) from develop into master
commit f608d1e5fc
@@ -10,31 +10,68 @@ trigger:
  ruleset:
    - param-ref: branch
      operator: EQ
      value: '"develop"'
      value: '"feature"'
  ruleset-operator: AND
workflow:
  - ref: start
    name: 开始
    task: start
  - ref: ssh_cmd_0
    name: ssh执行命令
    task: ssh_cmd@1.1.0
  - ref: git_clone_0
    name: git clone
    on-failure: ignore
    task: git_clone@1.2.6
    input:
      ssh_private_key: ((ssh.siyao))
      ssh_ip: '"123.59.135.93"'
      ssh_port: '"51123"'
      ssh_user: '"pdl"'
      ssh_cmd: '"ssh root@10.9.69.134 -p30122 \"
        cd /root
        &&
        sh ./update.sh
        &&
        echo done \""'
      remote_url: '"https://gitlink.org.cn/Gitlink/gitea_hat.git"'
      ref: '"refs/heads/feature"'
      commit_id: '""'
      depth: 1
    needs:
      - start
  - ref: golang_build_node_0
    name: golang_build_node
    on-failure: ignore
    task: yystopf/golang_build_node@0.0.2
    input:
      workspace: git_clone_0.git_path
      out_bin_name: '"gitea"'
      goos: '"linux"'
      goarch: '"amd64"'
    needs:
      - git_clone_0
  - ref: gitlink_scp_resource_0
    name: scp复制文件(支持跳板机)
    on-failure: ignore
    task: yystopf/gitlink_scp_resource@0.0.7
    input:
      ssh_private_key: ((ssh.siyao))
      remote_host: '"10.9.69.134"'
      remote_port: '"30122"'
      remote_user: '"root"'
      remote_file: '"/root/gitea"'
      local_file: golang_build_node_0.bin_dir
      gateway_host: '"123.59.135.93"'
      gateway_port: '"51123"'
      gateway_user: '"pdl"'
      temp_file: '"/home/pdl/gitea"'
    needs:
      - golang_build_node_0
  - ref: gitlink_ssh_cmd_0
    name: ssh执行命令(支持跳板机)
    task: yystopf/gitlink_ssh_cmd@0.0.6
    input:
      ssh_private_key: ((ssh.siyao))
      remote_host: '"10.9.69.134"'
      remote_port: '"30122"'
      remote_user: '"root"'
      gateway_host: '"123.59.135.93"'
      gateway_port: '"51123"'
      gateway_user: '"pdl"'
      ssh_cmd: '"sh update.sh"'
    needs:
      - gitlink_scp_resource_0
  - ref: end
    name: 结束
    task: end
    needs:
      - ssh_cmd_0
      - gitlink_ssh_cmd_0
@@ -0,0 +1,13 @@
FROM bitnami/git:latest
RUN sed -i 's/deb.debian.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apt/sources.list
RUN apt update
RUN apt install -y vim nano curl wget
RUN wget --no-check-certificate https://go.dev/dl/go1.20.4.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go1.20.4.linux-amd64.tar.gz
ENV PATH=$PATH:/usr/local/go/bin
RUN go version
RUN go env -w GOPROXY=https://goproxy.cn
ADD ./ /gitea_hat/
RUN cd /gitea_hat/ && ls -lh && sh build.sh

#/gitea_hat/gitea_hat admin user create --username root --password 123456 --email root@forge.com --admin
@@ -0,0 +1,14 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

//go:build vendor

package main

// Libraries that are included to vendor utilities used during build.
// These libraries will not be included in a normal compilation.

import (
	// for embed
	_ "github.com/shurcooL/vfsgen"
)
@@ -0,0 +1,15 @@
#!/bin/bash
echo "go mod tidy..."
go mod tidy

echo "go mod vendor..."
go mod vendor

echo "go generate bindata.go for migration,options,public,templates..."
go run build/generate-bindata.go modules/schemas migration vendor/code.gitea.io/gitea/modules/migration/bindata.go
go run build/generate-bindata.go options options vendor/code.gitea.io/gitea/modules/options/bindata.go
go run build/generate-bindata.go public public vendor/code.gitea.io/gitea/modules/public/bindata.go
go run build/generate-bindata.go templates templates vendor/code.gitea.io/gitea/modules/templates/bindata.go

echo "go build -tags 'bindata' -o gitea_hat main.go..."
go build -tags 'bindata' -o gitea_hat main.go
@@ -0,0 +1,87 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

//go:build ignore
// +build ignore

package main

import (
	"bytes"
	"crypto/sha1"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strconv"

	"github.com/shurcooL/vfsgen"
)

func needsUpdate(dir string, filename string) (bool, []byte) {
	needRegen := false
	_, err := os.Stat(filename)
	if err != nil {
		needRegen = true
	}

	oldHash, err := ioutil.ReadFile(filename + ".hash")
	if err != nil {
		oldHash = []byte{}
	}

	hasher := sha1.New()

	err = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		_, _ = hasher.Write([]byte(info.Name()))
		_, _ = hasher.Write([]byte(info.ModTime().String()))
		_, _ = hasher.Write([]byte(strconv.FormatInt(info.Size(), 16)))
		return nil
	})
	if err != nil {
		return true, oldHash
	}

	newHash := hasher.Sum([]byte{})

	if bytes.Compare(oldHash, newHash) != 0 {

		return true, newHash
	}

	return needRegen, newHash
}

func main() {
	if len(os.Args) != 4 {
		log.Fatal("Insufficient number of arguments. Need: directory packageName filename")
	}

	dir, packageName, filename := os.Args[1], os.Args[2], os.Args[3]

	update, newHash := needsUpdate(dir, filename)

	if !update {
		fmt.Printf("bindata for %s already up-to-date\n", packageName)
		return
	}

	fmt.Printf("generating bindata for %s\n", packageName)
	var fsTemplates http.FileSystem = http.Dir(dir)
	err := vfsgen.Generate(fsTemplates, vfsgen.Options{
		PackageName:  packageName,
		BuildTags:    "bindata",
		VariableName: "Assets",
		Filename:     filename,
	})
	if err != nil {
		log.Fatalf("%v\n", err)
	}
	_ = ioutil.WriteFile(filename+".hash", newHash, 0666)
}
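Note: vfsgen.Generate, as configured above, writes a bindata.go that exposes the packed directory as an exported http.FileSystem named Assets behind the "bindata" build tag. A minimal sketch of reading such an embedded file back; the package name and file path below are illustrative assumptions, not code from this PR:

//go:build bindata

package options // assumption: the package that received the generated bindata.go

import (
	"io/ioutil"
	"net/http"
)

// readAsset returns the contents of an embedded file from the vfsgen-generated
// Assets variable (declared in the generated bindata.go, type http.FileSystem).
func readAsset(name string) ([]byte, error) {
	f, err := Assets.Open(name)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return ioutil.ReadAll(f)
}

var _ http.FileSystem = Assets // compile-time check that Assets has the expected type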
@@ -0,0 +1,225 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright 2015 Kenneth Shaw
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

//go:build ignore
// +build ignore

package main

import (
	"flag"
	"fmt"
	"go/format"
	"io/ioutil"
	"log"
	"net/http"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"unicode/utf8"

	jsoniter "github.com/json-iterator/go"
)

const (
	gemojiURL         = "https://raw.githubusercontent.com/github/gemoji/master/db/emoji.json"
	maxUnicodeVersion = 12
)

var (
	flagOut = flag.String("o", "modules/emoji/emoji_data.go", "out")
)

// Gemoji is a set of emoji data.
type Gemoji []Emoji

// Emoji represents a single emoji and associated data.
type Emoji struct {
	Emoji          string   `json:"emoji"`
	Description    string   `json:"description,omitempty"`
	Aliases        []string `json:"aliases"`
	UnicodeVersion string   `json:"unicode_version,omitempty"`
	SkinTones      bool     `json:"skin_tones,omitempty"`
}

// Don't include some fields in JSON
func (e Emoji) MarshalJSON() ([]byte, error) {
	type emoji Emoji
	x := emoji(e)
	x.UnicodeVersion = ""
	x.Description = ""
	x.SkinTones = false
	json := jsoniter.ConfigCompatibleWithStandardLibrary
	return json.Marshal(x)
}

func main() {
	var err error

	flag.Parse()

	// generate data
	buf, err := generate()
	if err != nil {
		log.Fatal(err)
	}

	// write
	err = ioutil.WriteFile(*flagOut, buf, 0644)
	if err != nil {
		log.Fatal(err)
	}
}

var replacer = strings.NewReplacer(
	"main.Gemoji", "Gemoji",
	"main.Emoji", "\n",
	"}}", "},\n}",
	", Description:", ", ",
	", Aliases:", ", ",
	", UnicodeVersion:", ", ",
	", SkinTones:", ", ",
)

var emojiRE = regexp.MustCompile(`\{Emoji:"([^"]*)"`)

func generate() ([]byte, error) {
	var err error

	// load gemoji data
	res, err := http.Get(gemojiURL)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	// read all
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}

	// unmarshal
	var data Gemoji
	json := jsoniter.ConfigCompatibleWithStandardLibrary
	err = json.Unmarshal(body, &data)
	if err != nil {
		return nil, err
	}

	var skinTones = make(map[string]string)

	skinTones["\U0001f3fb"] = "Light Skin Tone"
	skinTones["\U0001f3fc"] = "Medium-Light Skin Tone"
	skinTones["\U0001f3fd"] = "Medium Skin Tone"
	skinTones["\U0001f3fe"] = "Medium-Dark Skin Tone"
	skinTones["\U0001f3ff"] = "Dark Skin Tone"

	var tmp Gemoji

	//filter out emoji that require greater than max unicode version
	for i := range data {
		val, _ := strconv.ParseFloat(data[i].UnicodeVersion, 64)
		if int(val) <= maxUnicodeVersion {
			tmp = append(tmp, data[i])
		}
	}
	data = tmp

	sort.Slice(data, func(i, j int) bool {
		return data[i].Aliases[0] < data[j].Aliases[0]
	})

	aliasMap := make(map[string]int, len(data))

	for i, e := range data {
		if e.Emoji == "" || len(e.Aliases) == 0 {
			continue
		}
		for _, a := range e.Aliases {
			if a == "" {
				continue
			}
			aliasMap[a] = i
		}
	}

	// gitea customizations
	i, ok := aliasMap["tada"]
	if ok {
		data[i].Aliases = append(data[i].Aliases, "hooray")
	}
	i, ok = aliasMap["laughing"]
	if ok {
		data[i].Aliases = append(data[i].Aliases, "laugh")
	}

	// write a JSON file to use with tribute (write before adding skin tones since we can't support them there yet)
	file, _ := json.Marshal(data)
	_ = ioutil.WriteFile("assets/emoji.json", file, 0644)

	// Add skin tones to emoji that support it
	var (
		s              []string
		newEmoji       string
		newDescription string
		newData        Emoji
	)

	for i := range data {
		if data[i].SkinTones {
			for k, v := range skinTones {
				s = strings.Split(data[i].Emoji, "")

				if utf8.RuneCountInString(data[i].Emoji) == 1 {
					s = append(s, k)
				} else {
					// insert into slice after first element because all emoji that support skin tones
					// have that modifier placed at this spot
					s = append(s, "")
					copy(s[2:], s[1:])
					s[1] = k
				}

				newEmoji = strings.Join(s, "")
				newDescription = data[i].Description + ": " + v
				newAlias := data[i].Aliases[0] + "_" + strings.ReplaceAll(v, " ", "_")

				newData = Emoji{newEmoji, newDescription, []string{newAlias}, "12.0", false}
				data = append(data, newData)
			}
		}
	}

	// add header
	str := replacer.Replace(fmt.Sprintf(hdr, gemojiURL, data))

	// change the format of the unicode string
	str = emojiRE.ReplaceAllStringFunc(str, func(s string) string {
		var err error
		s, err = strconv.Unquote(s[len("{Emoji:"):])
		if err != nil {
			panic(err)
		}
		return "{" + strconv.QuoteToASCII(s)
	})

	// format
	return format.Source([]byte(str))
}

const hdr = `
// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package emoji

// Code generated by gen.go. DO NOT EDIT.
// Sourced from %s
//
var GemojiData = %#v
`
@@ -0,0 +1,131 @@
//go:build ignore
// +build ignore

package main

import (
	"archive/tar"
	"compress/gzip"
	"flag"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"path"
	"path/filepath"
	"strings"

	"code.gitea.io/gitea/modules/util"
)

func main() {
	var (
		prefix         = "gitea-gitignore"
		url            = "https://api.github.com/repos/github/gitignore/tarball"
		githubApiToken = ""
		githubUsername = ""
		destination    = ""
	)

	flag.StringVar(&destination, "dest", "options/gitignore/", "destination for the gitignores")
	flag.StringVar(&githubUsername, "username", "", "github username")
	flag.StringVar(&githubApiToken, "token", "", "github api token")
	flag.Parse()

	file, err := ioutil.TempFile(os.TempDir(), prefix)

	if err != nil {
		log.Fatalf("Failed to create temp file. %s", err)
	}

	defer util.Remove(file.Name())

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		log.Fatalf("Failed to download archive. %s", err)
	}

	if len(githubApiToken) > 0 && len(githubUsername) > 0 {
		req.SetBasicAuth(githubUsername, githubApiToken)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatalf("Failed to download archive. %s", err)
	}
	defer resp.Body.Close()

	if _, err := io.Copy(file, resp.Body); err != nil {
		log.Fatalf("Failed to copy archive to file. %s", err)
	}

	if _, err := file.Seek(0, 0); err != nil {
		log.Fatalf("Failed to reset seek on archive. %s", err)
	}

	gz, err := gzip.NewReader(file)

	if err != nil {
		log.Fatalf("Failed to gunzip the archive. %s", err)
	}

	tr := tar.NewReader(gz)

	filesToCopy := make(map[string]string, 0)

	for {
		hdr, err := tr.Next()

		if err == io.EOF {
			break
		}

		if err != nil {
			log.Fatalf("Failed to iterate archive. %s", err)
		}

		if filepath.Ext(hdr.Name) != ".gitignore" {
			continue
		}

		if hdr.Typeflag == tar.TypeSymlink {
			fmt.Printf("Found symlink %s -> %s\n", hdr.Name, hdr.Linkname)
			filesToCopy[strings.TrimSuffix(filepath.Base(hdr.Name), ".gitignore")] = strings.TrimSuffix(filepath.Base(hdr.Linkname), ".gitignore")
			continue
		}

		out, err := os.Create(path.Join(destination, strings.TrimSuffix(filepath.Base(hdr.Name), ".gitignore")))

		if err != nil {
			log.Fatalf("Failed to create new file. %s", err)
		}

		defer out.Close()

		if _, err := io.Copy(out, tr); err != nil {
			log.Fatalf("Failed to write new file. %s", err)
		} else {
			fmt.Printf("Written %s\n", out.Name())
		}
	}

	for dst, src := range filesToCopy {
		// Read all content of src to data
		src = path.Join(destination, src)
		data, err := ioutil.ReadFile(src)
		if err != nil {
			log.Fatalf("Failed to read src file. %s", err)
		}
		// Write data to dst
		dst = path.Join(destination, dst)
		err = ioutil.WriteFile(dst, data, 0644)
		if err != nil {
			log.Fatalf("Failed to write new file. %s", err)
		}
		fmt.Printf("Written (copy of %s) %s\n", src, dst)
	}

	fmt.Println("Done")
}
@@ -0,0 +1,83 @@
import imageminZopfli from 'imagemin-zopfli';
import {optimize, extendDefaultPlugins} from 'svgo';
import {fabric} from 'fabric';
import fs from 'fs';
import {resolve, dirname} from 'path';
import {fileURLToPath} from 'url';

const {readFile, writeFile} = fs.promises;
const __dirname = dirname(fileURLToPath(import.meta.url));
const logoFile = resolve(__dirname, '../assets/logo.svg');

function exit(err) {
  if (err) console.error(err);
  process.exit(err ? 1 : 0);
}

function loadSvg(svg) {
  return new Promise((resolve) => {
    fabric.loadSVGFromString(svg, (objects, options) => {
      resolve({objects, options});
    });
  });
}

async function generate(svg, outputFile, {size, bg}) {
  if (outputFile.endsWith('.svg')) {
    const {data} = optimize(svg, {
      plugins: extendDefaultPlugins([
        'removeDimensions',
        {
          name: 'addAttributesToSVGElement',
          params: {attributes: [{width: size}, {height: size}]}
        },
      ]),
    });
    await writeFile(outputFile, data);
    return;
  }

  const {objects, options} = await loadSvg(svg);
  const canvas = new fabric.Canvas();
  canvas.setDimensions({width: size, height: size});
  const ctx = canvas.getContext('2d');
  ctx.scale(options.width ? (size / options.width) : 1, options.height ? (size / options.height) : 1);

  if (bg) {
    canvas.add(new fabric.Rect({
      left: 0,
      top: 0,
      height: size * (1 / (size / options.height)),
      width: size * (1 / (size / options.width)),
      fill: 'white',
    }));
  }

  canvas.add(fabric.util.groupSVGElements(objects, options));
  canvas.renderAll();

  let png = Buffer.from([]);
  for await (const chunk of canvas.createPNGStream()) {
    png = Buffer.concat([png, chunk]);
  }

  png = await imageminZopfli({more: true})(png);
  await writeFile(outputFile, png);
}

async function main() {
  const gitea = process.argv.slice(2).includes('gitea');
  const svg = await readFile(logoFile, 'utf8');

  await Promise.all([
    generate(svg, resolve(__dirname, '../public/img/logo.svg'), {size: 32}),
    generate(svg, resolve(__dirname, '../public/img/logo.png'), {size: 512}),
    generate(svg, resolve(__dirname, '../public/img/favicon.png'), {size: 180}),
    generate(svg, resolve(__dirname, '../public/img/avatar_default.png'), {size: 200}),
    generate(svg, resolve(__dirname, '../public/img/apple-touch-icon.png'), {size: 180, bg: true}),
    gitea && generate(svg, resolve(__dirname, '../public/img/gitea.svg'), {size: 32}),
  ]);
}

main().then(exit).catch(exit);
@@ -0,0 +1,119 @@
//go:build ignore
// +build ignore

package main

import (
	"archive/tar"
	"compress/gzip"
	"flag"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"path"
	"path/filepath"
	"strings"

	"code.gitea.io/gitea/modules/util"
)

func main() {
	var (
		prefix         = "gitea-licenses"
		url            = "https://api.github.com/repos/spdx/license-list-data/tarball"
		githubApiToken = ""
		githubUsername = ""
		destination    = ""
	)

	flag.StringVar(&destination, "dest", "options/license/", "destination for the licenses")
	flag.StringVar(&githubUsername, "username", "", "github username")
	flag.StringVar(&githubApiToken, "token", "", "github api token")
	flag.Parse()

	file, err := ioutil.TempFile(os.TempDir(), prefix)

	if err != nil {
		log.Fatalf("Failed to create temp file. %s", err)
	}

	defer util.Remove(file.Name())

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		log.Fatalf("Failed to download archive. %s", err)
	}

	if len(githubApiToken) > 0 && len(githubUsername) > 0 {
		req.SetBasicAuth(githubUsername, githubApiToken)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatalf("Failed to download archive. %s", err)
	}

	defer resp.Body.Close()

	if _, err := io.Copy(file, resp.Body); err != nil {
		log.Fatalf("Failed to copy archive to file. %s", err)
	}

	if _, err := file.Seek(0, 0); err != nil {
		log.Fatalf("Failed to reset seek on archive. %s", err)
	}

	gz, err := gzip.NewReader(file)

	if err != nil {
		log.Fatalf("Failed to gunzip the archive. %s", err)
	}

	tr := tar.NewReader(gz)

	for {
		hdr, err := tr.Next()

		if err == io.EOF {
			break
		}

		if err != nil {
			log.Fatalf("Failed to iterate archive. %s", err)
		}

		if !strings.Contains(hdr.Name, "/text/") {
			continue
		}

		if filepath.Ext(hdr.Name) != ".txt" {
			continue
		}

		if strings.HasPrefix(filepath.Base(hdr.Name), "README") {
			continue
		}

		if strings.HasPrefix(filepath.Base(hdr.Name), "deprecated_") {
			continue
		}
		out, err := os.Create(path.Join(destination, strings.TrimSuffix(filepath.Base(hdr.Name), ".txt")))

		if err != nil {
			log.Fatalf("Failed to create new file. %s", err)
		}

		defer out.Close()

		if _, err := io.Copy(out, tr); err != nil {
			log.Fatalf("Failed to write new file. %s", err)
		} else {
			fmt.Printf("Written %s\n", out.Name())
		}
	}

	fmt.Println("Done")
}
@@ -0,0 +1,63 @@
import fastGlob from 'fast-glob';
import {optimize, extendDefaultPlugins} from 'svgo';
import {resolve, parse, dirname} from 'path';
import fs from 'fs';
import {fileURLToPath} from 'url';

const {readFile, writeFile, mkdir} = fs.promises;
const __dirname = dirname(fileURLToPath(import.meta.url));
const glob = (pattern) => fastGlob.sync(pattern, {cwd: resolve(__dirname), absolute: true});
const outputDir = resolve(__dirname, '../public/img/svg');

function exit(err) {
  if (err) console.error(err);
  process.exit(err ? 1 : 0);
}

async function processFile(file, {prefix, fullName} = {}) {
  let name;

  if (fullName) {
    name = fullName;
  } else {
    name = parse(file).name;
    if (prefix) name = `${prefix}-${name}`;
    if (prefix === 'octicon') name = name.replace(/-[0-9]+$/, ''); // chop of '-16' on octicons
  }

  const {data} = optimize(await readFile(file, 'utf8'), {
    plugins: extendDefaultPlugins([
      'removeXMLNS',
      'removeDimensions',
      {name: 'prefixIds', params: {prefix: () => name}},
      {
        name: 'addClassesToSVGElement',
        params: {classNames: ['svg', name]},
      },
      {
        name: 'addAttributesToSVGElement',
        params: {attributes: [{'width': '16'}, {'height': '16'}, {'aria-hidden': 'true'}]},
      },
    ]),
  });
  await writeFile(resolve(outputDir, `${name}.svg`), data);
}

function processFiles(pattern, opts) {
  return glob(pattern).map((file) => processFile(file, opts));
}

async function main() {
  try {
    await mkdir(outputDir);
  } catch {}

  await Promise.all([
    ...processFiles('../node_modules/@primer/octicons/build/svg/*-16.svg', {prefix: 'octicon'}),
    ...processFiles('../web_src/svg/*.svg'),
    ...processFiles('../public/img/gitea.svg', {fullName: 'gitea-gitea'}),
  ]);
}

main().then(exit).catch(exit);
@@ -0,0 +1,120 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright (c) 2015, Wade Simmons
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

// gocovmerge takes the results from multiple `go test -coverprofile` runs and
// merges them into one profile

//go:build ignore
// +build ignore

package main

import (
	"flag"
	"fmt"
	"io"
	"log"
	"os"
	"sort"

	"golang.org/x/tools/cover"
)

func mergeProfiles(p *cover.Profile, merge *cover.Profile) {
	if p.Mode != merge.Mode {
		log.Fatalf("cannot merge profiles with different modes")
	}
	// Since the blocks are sorted, we can keep track of where the last block
	// was inserted and only look at the blocks after that as targets for merge
	startIndex := 0
	for _, b := range merge.Blocks {
		startIndex = mergeProfileBlock(p, b, startIndex)
	}
}

func mergeProfileBlock(p *cover.Profile, pb cover.ProfileBlock, startIndex int) int {
	sortFunc := func(i int) bool {
		pi := p.Blocks[i+startIndex]
		return pi.StartLine >= pb.StartLine && (pi.StartLine != pb.StartLine || pi.StartCol >= pb.StartCol)
	}

	i := 0
	if sortFunc(i) != true {
		i = sort.Search(len(p.Blocks)-startIndex, sortFunc)
	}
	i += startIndex
	if i < len(p.Blocks) && p.Blocks[i].StartLine == pb.StartLine && p.Blocks[i].StartCol == pb.StartCol {
		if p.Blocks[i].EndLine != pb.EndLine || p.Blocks[i].EndCol != pb.EndCol {
			log.Fatalf("OVERLAP MERGE: %v %v %v", p.FileName, p.Blocks[i], pb)
		}
		switch p.Mode {
		case "set":
			p.Blocks[i].Count |= pb.Count
		case "count", "atomic":
			p.Blocks[i].Count += pb.Count
		default:
			log.Fatalf("unsupported covermode: '%s'", p.Mode)
		}
	} else {
		if i > 0 {
			pa := p.Blocks[i-1]
			if pa.EndLine >= pb.EndLine && (pa.EndLine != pb.EndLine || pa.EndCol > pb.EndCol) {
				log.Fatalf("OVERLAP BEFORE: %v %v %v", p.FileName, pa, pb)
			}
		}
		if i < len(p.Blocks)-1 {
			pa := p.Blocks[i+1]
			if pa.StartLine <= pb.StartLine && (pa.StartLine != pb.StartLine || pa.StartCol < pb.StartCol) {
				log.Fatalf("OVERLAP AFTER: %v %v %v", p.FileName, pa, pb)
			}
		}
		p.Blocks = append(p.Blocks, cover.ProfileBlock{})
		copy(p.Blocks[i+1:], p.Blocks[i:])
		p.Blocks[i] = pb
	}
	return i + 1
}

func addProfile(profiles []*cover.Profile, p *cover.Profile) []*cover.Profile {
	i := sort.Search(len(profiles), func(i int) bool { return profiles[i].FileName >= p.FileName })
	if i < len(profiles) && profiles[i].FileName == p.FileName {
		mergeProfiles(profiles[i], p)
	} else {
		profiles = append(profiles, nil)
		copy(profiles[i+1:], profiles[i:])
		profiles[i] = p
	}
	return profiles
}

func dumpProfiles(profiles []*cover.Profile, out io.Writer) {
	if len(profiles) == 0 {
		return
	}
	fmt.Fprintf(out, "mode: %s\n", profiles[0].Mode)
	for _, p := range profiles {
		for _, b := range p.Blocks {
			fmt.Fprintf(out, "%s:%d.%d,%d.%d %d %d\n", p.FileName, b.StartLine, b.StartCol, b.EndLine, b.EndCol, b.NumStmt, b.Count)
		}
	}
}

func main() {
	flag.Parse()

	var merged []*cover.Profile

	for _, file := range flag.Args() {
		profiles, err := cover.ParseProfiles(file)
		if err != nil {
			log.Fatalf("failed to parse profiles: %v", err)
		}
		for _, p := range profiles {
			merged = addProfile(merged, p)
		}
	}

	dumpProfiles(merged, os.Stdout)
}
@@ -0,0 +1,23 @@
#!/bin/sh

mv ./options/locale/locale_en-US.ini ./options/

# Make sure to only change lines that have the translation enclosed between quotes
sed -i -r -e '/^[a-zA-Z0-9_.-]+[ ]*=[ ]*".*"$/ {
  s/^([a-zA-Z0-9_.-]+)[ ]*="/\1=/
  s/\\"/"/g
  s/"$//
}' ./options/locale/*.ini

# Remove translation under 25% of en_us
baselines=$(wc -l "./options/locale_en-US.ini" | cut -d" " -f1)
baselines=$((baselines / 4))
for filename in ./options/locale/*.ini; do
  lines=$(wc -l "$filename" | cut -d" " -f1)
  if [ $lines -lt $baselines ]; then
    echo "Removing $filename: $lines/$baselines"
    rm "$filename"
  fi
done

mv ./options/locale_en-US.ini ./options/locale/
go.mod
@@ -12,6 +12,7 @@ require (
	github.com/json-iterator/go v1.1.12
	github.com/klauspost/cpuid/v2 v2.2.3
	github.com/russross/blackfriday/v2 v2.1.0
	github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546
	github.com/urfave/cli v1.22.12
	golang.org/x/net v0.8.0
	golang.org/x/text v0.8.0
@@ -180,6 +181,7 @@ require (
	github.com/rs/xid v1.4.0 // indirect
	github.com/santhosh-tekuri/jsonschema/v5 v5.2.0 // indirect
	github.com/sergi/go-diff v1.3.1 // indirect
	github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 // indirect
	github.com/sirupsen/logrus v1.9.0 // indirect
	github.com/skeema/knownhosts v1.1.0 // indirect
	github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
go.sum
@@ -1119,7 +1119,11 @@ github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NF
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 h1:bUGsEnyNbVPw06Bs80sCeARAlK8lhwqGyi6UT8ymuGk=
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 h1:pXY9qYc/MP5zdvqWEUH6SjNiu7VhSjuVFTFiTcphaLU=
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726/go.mod h1:3yhqj7WBBfRhbBlzyOC3gUxftwsU0u8gqevxwIHQpMw=
github.com/siddontang/go-snappy v0.0.0-20140704025258-d8f7bb82a96d/go.mod h1:vq0tzqLRu6TS7Id0wMo2N5QzJoKedVeovOpHjnykSzY=
github.com/siddontang/ledisdb v0.0.0-20190202134119-8ceb77e66a92/go.mod h1:mF1DpOSOUiJRMR+FDqaqu3EBqrybQtrDDszLUZ6oxPg=
@@ -3,13 +3,16 @@ package convert
import (
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	gitea_convert "code.gitea.io/gitea/modules/convert"
	"code.gitea.io/gitea/modules/convert"
	"code.gitea.io/gitea/modules/git"
	api "code.gitea.io/gitea/modules/structs"
	hat_api "code.gitlink.org.cn/Gitlink/gitea_hat.git/modules/structs"
	"net/url"
	"time"
)

func ToCommit(repo *repo_model.Repository, gitRepo *git.Repository, commit *git.Commit, userCache map[string]*user_model.User, stat bool) (*hat_api.Commit, error) {
	giteaApiCommit, err := gitea_convert.ToCommit(repo, gitRepo, commit, userCache, stat)
	giteaApiCommit, err := ToCommitNotDiff(repo, gitRepo, commit, userCache, stat)
	if err != nil {
		return nil, err
	}
@@ -24,3 +27,132 @@ func ToCommit(repo *repo_model.Repository, gitRepo *git.Repository, commit *git.
		Branch: commit.Branch,
	}, nil
}

func ToCommitNotDiff(repo *repo_model.Repository, gitRepo *git.Repository, commit *git.Commit, userCache map[string]*user_model.User, stat bool) (*api.Commit, error) {
	var apiAuthor, apiCommitter *api.User

	// Retrieve author and committer information

	var cacheAuthor *user_model.User
	var ok bool
	if userCache == nil {
		cacheAuthor = (*user_model.User)(nil)
		ok = false
	} else {
		cacheAuthor, ok = userCache[commit.Author.Email]
	}

	if ok {
		apiAuthor = convert.ToUser(cacheAuthor, nil)
	} else {
		author, err := user_model.GetUserByEmail(commit.Author.Email)
		if err != nil && !user_model.IsErrUserNotExist(err) {
			return nil, err
		} else if err == nil {
			apiAuthor = convert.ToUser(author, nil)
			if userCache != nil {
				userCache[commit.Author.Email] = author
			}
		}
	}

	var cacheCommitter *user_model.User
	if userCache == nil {
		cacheCommitter = (*user_model.User)(nil)
		ok = false
	} else {
		cacheCommitter, ok = userCache[commit.Committer.Email]
	}

	if ok {
		apiCommitter = convert.ToUser(cacheCommitter, nil)
	} else {
		committer, err := user_model.GetUserByEmail(commit.Committer.Email)
		if err != nil && !user_model.IsErrUserNotExist(err) {
			return nil, err
		} else if err == nil {
			apiCommitter = convert.ToUser(committer, nil)
			if userCache != nil {
				userCache[commit.Committer.Email] = committer
			}
		}
	}

	// Retrieve parent(s) of the commit
	apiParents := make([]*api.CommitMeta, commit.ParentCount())
	for i := 0; i < commit.ParentCount(); i++ {
		sha, _ := commit.ParentID(i)
		apiParents[i] = &api.CommitMeta{
			URL: repo.APIURL() + "/git/commits/" + url.PathEscape(sha.String()),
			SHA: sha.String(),
		}
	}

	res := &api.Commit{
		CommitMeta: &api.CommitMeta{
			URL:     repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()),
			SHA:     commit.ID.String(),
			Created: commit.Committer.When,
		},
		HTMLURL: repo.HTMLURL() + "/commit/" + url.PathEscape(commit.ID.String()),
		RepoCommit: &api.RepoCommit{
			URL: repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()),
			Author: &api.CommitUser{
				Identity: api.Identity{
					Name:  commit.Author.Name,
					Email: commit.Author.Email,
				},
				Date: commit.Author.When.Format(time.RFC3339),
			},
			Committer: &api.CommitUser{
				Identity: api.Identity{
					Name:  commit.Committer.Name,
					Email: commit.Committer.Email,
				},
				Date: commit.Committer.When.Format(time.RFC3339),
			},
			Message: commit.Message(),
			Tree: &api.CommitMeta{
				URL:     repo.APIURL() + "/git/trees/" + url.PathEscape(commit.ID.String()),
				SHA:     commit.ID.String(),
				Created: commit.Committer.When,
			},
			Verification: convert.ToVerification(commit),
		},
		Author:    apiAuthor,
		Committer: apiCommitter,
		Parents:   apiParents,
	}

	// Retrieve files affected by the commit
	if stat {
		fileStatus, err := git.GetCommitFileStatus(gitRepo.Ctx, repo.RepoPath(), commit.ID.String())
		if err != nil {
			return nil, err
		}
		affectedFileList := make([]*api.CommitAffectedFiles, 0, len(fileStatus.Added)+len(fileStatus.Removed)+len(fileStatus.Modified))
		for _, files := range [][]string{fileStatus.Added, fileStatus.Removed, fileStatus.Modified} {
			for _, filename := range files {
				affectedFileList = append(affectedFileList, &api.CommitAffectedFiles{
					Filename: filename,
				})
			}
		}

		//diff, err := gitdiff.GetDiff(gitRepo, &gitdiff.DiffOptions{
		//	AfterCommitID: commit.ID.String(),
		//})
		//if err != nil {
		//	return nil, err
		//}

		res.Files = affectedFileList
		//res.Stats = &api.CommitStats{
		//	Total:     diff.TotalAddition + diff.TotalDeletion,
		//	Additions: diff.TotalAddition,
		//	Deletions: diff.TotalDeletion,
		//}
	}

	return res, nil
}
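For orientation: ToCommitNotDiff above builds the API commit object without computing a per-commit diff, and only resolves affected files when stat is true. A minimal caller sketch under the signature shown in this hunk; the batch helper itself is hypothetical and not part of the PR:

package convert

import (
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/git"
	api "code.gitea.io/gitea/modules/structs"
)

// commitsToAPI (hypothetical helper) converts a batch of commits, reusing one
// user cache so author/committer lookups by email happen at most once, and
// passing stat=false to skip the affected-files resolution entirely.
func commitsToAPI(repo *repo_model.Repository, gitRepo *git.Repository, commits []*git.Commit) ([]*api.Commit, error) {
	userCache := make(map[string]*user_model.User)
	out := make([]*api.Commit, 0, len(commits))
	for _, c := range commits {
		apiCommit, err := ToCommitNotDiff(repo, gitRepo, c, userCache, false)
		if err != nil {
			return nil, err
		}
		out = append(out, apiCommit)
	}
	return out, nil
}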
@@ -82,3 +82,82 @@ func GetRepoContributors(repo *gitea_git.Repository, page, pageSize int) (int, [

	return total, contributors, nil
}

func GetRepoContributorsNew(repo *gitea_git.Repository, page, pageSize int) (int, []*RepoContributor, error) {
	var total, totalContributions, skip int
	var contributors []*RepoContributor
	var contributorInfos []*RepoContributor
	contributorInfoHash := make(map[string]*RepoContributor)

	skip = (page - 1) * pageSize

	stdoutReader, stdoutWriter, err := os.Pipe()
	if err != nil {
		return total, nil, err
	}
	defer func() {
		_ = stdoutReader.Close()
		_ = stdoutWriter.Close()
	}()
	cmd := gitea_git.NewCommand(repo.Ctx, "shortlog", "-sne", "--all")

	stderr := new(strings.Builder)
	err = cmd.Run(&gitea_git.RunOpts{
		Env:    []string{},
		Dir:    repo.Path,
		Stdout: stdoutWriter,
		Stderr: stderr,
		PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
			_ = stdoutWriter.Close()
			scanner := bufio.NewScanner(stdoutReader)
			scanner.Split(bufio.ScanLines)
			for scanner.Scan() {
				l := strings.TrimSpace(scanner.Text())
				commits := l[0:strings.Index(l, "\t")]
				commitsInt, err := strconv.Atoi(commits)
				if err != nil {
					return err
				}
				name := l[strings.Index(l, "\t")+1 : strings.Index(l, " <")]
				email := l[strings.Index(l, "<")+1 : strings.Index(l, ">")]
				totalContributions += commitsInt
				// committer is not system user
				if existedContributorInfo, ok := contributorInfoHash[email]; ok {
					// existed: same primary email, different committer name
					existedContributorInfo.Commits += commitsInt
				} else {
					var newContributor = &RepoContributor{
						Commits: commitsInt,
						Name:    name,
						Email:   email,
					}
					total++
					contributorInfos = append(contributorInfos, newContributor)
					contributorInfoHash[email] = newContributor
				}
			}

			_ = stdoutReader.Close()
			return nil
		},
	})

	var ca int
	for _, cont := range contributorInfos {
		if skip > 0 {
			skip--
		} else {
			if ca < pageSize {
				fperc := fmt.Sprintf("%.2f", float64(cont.Commits)*100/float64(totalContributions))
				contributors = append(contributors, &RepoContributor{
					Commits:          cont.Commits,
					Name:             cont.Name,
					Email:            cont.Email,
					ContributionPerc: fperc + "%",
				})
				ca++
			}
		}
	}
	return total, contributors, nil
}
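For orientation: GetRepoContributorsNew above shells out to `git shortlog -sne --all` and aggregates commit counts per author email before paginating. A small standalone sketch of the same line parsing, using the same string operations as the hunk; the sample line is illustrative, not taken from the PR:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseShortlogLine splits one `git shortlog -sne` line of the form
// "  42\tJane Doe <jane@example.com>" into commit count, name and email,
// mirroring the index-based parsing in GetRepoContributorsNew.
func parseShortlogLine(l string) (commits int, name, email string, err error) {
	l = strings.TrimSpace(l)
	commits, err = strconv.Atoi(l[:strings.Index(l, "\t")])
	if err != nil {
		return 0, "", "", err
	}
	name = l[strings.Index(l, "\t")+1 : strings.Index(l, " <")]
	email = l[strings.Index(l, "<")+1 : strings.Index(l, ">")]
	return commits, name, email, nil
}

func main() {
	c, n, e, _ := parseShortlogLine("  42\tJane Doe <jane@example.com>")
	fmt.Println(c, n, e) // 42 Jane Doe jane@example.com
}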
@@ -8,6 +8,7 @@ import (
	"sort"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/gitea/modules/container"
	gitea_git "code.gitea.io/gitea/modules/git"
@@ -145,11 +146,12 @@ func GetCodeActivityStatsWithoutSince(repo *gitea_git.Repository, branch string)
}

// GetCodeActivityStats returns code statistics for activity page
func GetPaginateCodeAuthorsWithoutSince(repo *gitea_git.Repository, branch string, page, pageSize int) (int64, []*CodeActivityAuthor, error) {
func GetPaginateCodeAuthors(repo *gitea_git.Repository, fromTime time.Time, branch string, page, pageSize int) (int64, []*CodeActivityAuthor, error) {
	var total int64
	var authors []*CodeActivityAuthor
	since := fromTime.Format(time.RFC3339)

	authorCmd := gitea_git.NewCommand(repo.Ctx, "log", "--no-merges", "--format=%aN <%aE>", "--date=iso")
	authorCmd := gitea_git.NewCommand(repo.Ctx, "log", "--no-merges", "--format=%aN <%aE>", "--date=iso", gitea_git.CmdArg(fmt.Sprintf("--since='%s'", since)))
	if len(branch) == 0 {
		authorCmd.AddArguments("--branches=*")
	} else {
@@ -199,7 +201,7 @@ func GetPaginateCodeAuthorsWithoutSince(repo *gitea_git.Repository, branch strin
		_ = stdoutReader.Close()
		_ = stdoutWriter.Close()
	}()
	gitCmd := gitea_git.NewCommand(repo.Ctx, "log", "--numstat", "--no-merges", "--pretty=format:---%n%h%n%aN%n%aE%n", "--date=iso", gitea_git.CmdArg(fmt.Sprintf("--author=%s", filterAuthor)))
	gitCmd := gitea_git.NewCommand(repo.Ctx, "log", "--numstat", "--no-merges", "--pretty=format:---%n%h%n%aN%n%aE%n", "--date=iso", gitea_git.CmdArg(fmt.Sprintf("--author=%s", filterAuthor)), gitea_git.CmdArg(fmt.Sprintf("--since='%s'", since)))
	if len(branch) == 0 {
		gitCmd.AddArguments("--branches=*")
	} else {
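For orientation: the renamed GetPaginateCodeAuthors threads a time window into both git invocations via an RFC 3339 `--since` argument. A tiny standalone sketch of how that argument string comes out; the date is whatever "a year ago" happens to be when run, purely illustrative:

package main

import (
	"fmt"
	"time"
)

func main() {
	fromTime := time.Now().AddDate(-1, 0, 0) // "the past year", as GetContributorStat defaults to
	since := fromTime.Format(time.RFC3339)
	// Mirrors the CmdArg built in the hunk above.
	fmt.Printf("--since='%s'\n", since) // e.g. --since='2022-02-18T10:00:00+08:00'
}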
@@ -0,0 +1,114 @@
{
  "title": "Issue",
  "description": "Issues associated to a repository within a forge (Gitea, GitLab, etc.).",

  "type": "array",
  "items": {
    "type": "object",
    "additionalProperties": false,
    "properties": {
      "number": {
        "description": "Unique identifier, relative to the repository.",
        "type": "number"
      },
      "poster_id": {
        "description": "Unique identifier of the user who authored the issue.",
        "type": "number"
      },
      "poster_name": {
        "description": "Name of the user who authored the issue.",
        "type": "string"
      },
      "poster_email": {
        "description": "Email of the user who authored the issue.",
        "type": "string"
      },
      "title": {
        "description": "Short description displayed as the title.",
        "type": "string"
      },
      "content": {
        "description": "Long, multiline, description.",
        "type": "string"
      },
      "ref": {
        "description": "Target branch in the repository.",
        "type": "string"
      },
      "milestone": {
        "description": "Name of the milestone.",
        "type": "string"
      },
      "state": {
        "description": "A 'closed' issue will not see any activity in the future, otherwise it is 'open'.",
        "enum": [
          "closed",
          "open"
        ]
      },
      "is_locked": {
        "description": "A locked issue can only be modified by privileged users.",
        "type": "boolean"
      },
      "created": {
        "description": "Creation time.",
        "type": "string",
        "format": "date-time"
      },
      "updated": {
        "description": "Last update time.",
        "type": "string",
        "format": "date-time"
      },
      "closed": {
        "description": "The last time 'state' changed to 'closed'.",
        "anyOf": [
          {
            "type": "string",
            "format": "date-time"
          },
          {
            "type": "null"
          }
        ]
      },
      "labels": {
        "description": "List of labels.",
        "type": "array",
        "items": {
          "$ref": "label.json"
        }
      },
      "reactions": {
        "description": "List of reactions.",
        "type": "array",
        "items": {
          "$ref": "reaction.json"
        }
      },
      "assignees": {
        "description": "List of assignees.",
        "type": "array",
        "items": {
          "description": "Name of a user assigned to the issue.",
          "type": "string"
        }
      }
    },
    "required": [
      "number",
      "poster_id",
      "poster_name",
      "title",
      "content",
      "state",
      "is_locked",
      "created",
      "updated"
    ]
  },

  "$schema": "http://json-schema.org/draft-04/schema#",
  "$id": "http://example.com/issue.json",
  "$$target": "issue.json"
}
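For orientation: the issue.json schema above describes each element of the issue dump consumed by the migration bindata. A hedged Go mirror of its required properties, for reading convenience only; the package, struct and field names are assumptions, not code from this repository:

package schemas // illustrative package name

import "time"

// Issue mirrors the required properties of issue.json above. Optional schema
// properties (poster_email, ref, milestone, closed, labels, reactions,
// assignees) are omitted here for brevity.
type Issue struct {
	Number     int64     `json:"number"`
	PosterID   int64     `json:"poster_id"`
	PosterName string    `json:"poster_name"`
	Title      string    `json:"title"`
	Content    string    `json:"content"`
	State      string    `json:"state"` // "open" or "closed"
	IsLocked   bool      `json:"is_locked"`
	Created    time.Time `json:"created"`
	Updated    time.Time `json:"updated"`
}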
@@ -0,0 +1,28 @@
{
  "title": "Label",
  "description": "Label associated to an issue.",

  "type": "object",
  "additionalProperties": false,
  "properties": {
    "name": {
      "description": "Name of the label, unique within the repository.",
      "type": "string"
    },
    "color": {
      "description": "Color code of the label.",
      "type": "string"
    },
    "description": {
      "description": "Long, multiline, description.",
      "type": "string"
    }
  },
  "required": [
    "name"
  ],

  "$schema": "http://json-schema.org/draft-04/schema#",
  "$id": "label.json",
  "$$target": "label.json"
}
@@ -0,0 +1,67 @@
{
  "title": "Milestone",
  "description": "Milestone associated to a repository within a forge.",

  "type": "array",
  "items": {
    "type": "object",
    "additionalProperties": false,
    "properties": {
      "title": {
        "description": "Short description.",
        "type": "string"
      },
      "description": {
        "description": "Long, multiline, description.",
        "type": "string"
      },
      "deadline": {
        "description": "Deadline after which the milestone is overdue.",
        "type": "string",
        "format": "date-time"
      },
      "created": {
        "description": "Creation time.",
        "type": "string",
        "format": "date-time"
      },
      "updated": {
        "description": "Last update time.",
        "type": "string",
        "format": "date-time"
      },
      "closed": {
        "description": "The last time 'state' changed to 'closed'.",
        "anyOf": [
          {
            "type": "string",
            "format": "date-time"
          },
          {
            "type": "null"
          }
        ]
      },
      "state": {
        "description": "A 'closed' issue will not see any activity in the future, otherwise it is 'open'.",
        "enum": [
          "closed",
          "open"
        ]
      }
    },
    "required": [
      "title",
      "description",
      "deadline",
      "created",
      "updated",
      "closed",
      "state"
    ]
  },

  "$schema": "http://json-schema.org/draft-04/schema#",
  "$id": "http://example.com/milestone.json",
  "$$target": "milestone.json"
}
@@ -0,0 +1,29 @@
{
  "title": "Reaction",
  "description": "Reaction associated to an issue or a comment.",

  "type": "object",
  "additionalProperties": false,
  "properties": {
    "user_id": {
      "description": "Unique identifier of the user who authored the reaction.",
      "type": "number"
    },
    "user_name": {
      "description": "Name of the user who authored the reaction.",
      "type": "string"
    },
    "content": {
      "description": "Representation of the reaction",
      "type": "string"
    }
  },
  "required": [
    "user_id",
    "content"
  ],

  "$schema": "http://json-schema.org/draft-04/schema#",
  "$id": "http://example.com/reaction.json",
  "$$target": "reaction.json"
}
@@ -119,6 +119,7 @@ func Routers(ctx gocontext.Context) *web.Route {
		}, reqRepoReader(unit.TypeReleases))
		m.Group("/contributors", func() {
			m.Get("", context.ReferencesGitRepo(), repo.GetContributors)
			m.Get("/stat", context.ReferencesGitRepo(), repo.GetContributorStat)
		})
		m.Group("/count", func() {
			m.Get("", context.ReferencesGitRepo(), repo.GetCommitCount)
@@ -87,8 +87,10 @@ func GetAllCommitsSliceByTime(ctx *context.APIContext) {

	apiCommits := make([]*responseCommit, len(commits))
	apiCommitsList := []responseCommit{}
	stat := ctx.FormString("stat") == "" || ctx.FormBool("stat")

	for i, commitPoniter := range commits {
		apiCommits[i], err = toResponseCommit(ctx.Repo.Repository, gitRepo, commitPoniter, userCache)
		apiCommits[i], err = toResponseCommit(ctx.Repo.Repository, gitRepo, commitPoniter, userCache, stat)
		if err != nil {
			ctx.Error(http.StatusInternalServerError, "ToCommit", err)
			return
@@ -114,8 +116,8 @@ type responseCommit struct {
	CommitDate string `json:"commit_date"`
}

func toResponseCommit(repo *repo.Repository, gitRepo *git.Repository, commit *git.Commit, userCache map[string]*user.User) (*responseCommit, error) {
	apiCommit, err := convert.ToCommit(repo, gitRepo, commit, userCache, true)
func toResponseCommit(repo *repo.Repository, gitRepo *git.Repository, commit *git.Commit, userCache map[string]*user.User, stat bool) (*responseCommit, error) {
	apiCommit, err := convert.ToCommit(repo, gitRepo, commit, userCache, stat)
	if err != nil {
		return nil, err
	}
@@ -190,8 +192,10 @@ func GetFileAllCommits(ctx *context.APIContext) {

	userCache := make(map[string]*user_model.User)
	apiCommits := make([]*api.Commit, len(commits))

	stat := ctx.FormString("stat") == "" || ctx.FormBool("stat")
	for i, commit := range commits {
		apiCommits[i], err = convert.ToCommit(ctx.Repo.Repository, ctx.Repo.GitRepo, commit, userCache, true)
		apiCommits[i], err = hat_convert.ToCommitNotDiff(ctx.Repo.Repository, ctx.Repo.GitRepo, commit, userCache, stat)
		if err != nil {
			ctx.Error(http.StatusInternalServerError, "ToCommit", err)
			return
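For orientation: both handlers now read an optional "stat" form value with the expression `ctx.FormString("stat") == "" || ctx.FormBool("stat")`, so file statistics default to on and are only skipped when the client passes an explicit false. A standalone sketch of that truth table; treating FormBool as a lenient boolean parse is an approximation of Gitea's behaviour, flagged as such:

package main

import (
	"fmt"
	"strconv"
)

// statEnabled mirrors the handler logic: an absent parameter means "default on",
// otherwise the parsed boolean decides (assumption: FormBool behaves roughly
// like a lenient strconv.ParseBool).
func statEnabled(formValue string) bool {
	if formValue == "" {
		return true
	}
	v, _ := strconv.ParseBool(formValue)
	return v
}

func main() {
	for _, in := range []string{"", "true", "false", "0"} {
		fmt.Printf("stat=%q -> %v\n", in, statEnabled(in))
	}
	// Output: ""->true, "true"->true, "false"->false, "0"->false
}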
@@ -4,6 +4,7 @@ import (
	"fmt"
	"net/http"
	"strings"
	"time"

	access_model "code.gitea.io/gitea/models/perm/access"
	repo_model "code.gitea.io/gitea/models/repo"
@@ -484,7 +485,7 @@ func SetDiffViewStyle(ctx *context.Context) {
func GetContributors(ctx *context.APIContext) {
	listOptions := utils.GetListOptions(ctx)

	total, list, err := hat_git.GetRepoContributors(ctx.Repo.GitRepo, listOptions.Page, listOptions.PageSize)
	total, list, err := hat_git.GetRepoContributorsNew(ctx.Repo.GitRepo, listOptions.Page, listOptions.PageSize)
	if err != nil {
		ctx.Error(http.StatusInternalServerError, "GetRepoContributors", err)
		return
@@ -498,6 +499,31 @@ func GetContributors(ctx *context.APIContext) {
	ctx.JSON(http.StatusOK, list)
}

func GetContributorStat(ctx *context.APIContext) {
	listOptions := utils.GetListOptions(ctx)

	ref := ctx.FormString("ref")
	year := ctx.FormInt("pass_year")
	var timeFrom time.Time = time.Now().AddDate(-1, 0, 0) // default: the past year

	if year > 0 {
		timeFrom = time.Now().AddDate(-year, 0, 0) // the past `year` years
	}

	total, list, err := hat_git.GetPaginateCodeAuthors(ctx.Repo.GitRepo, timeFrom, ref, listOptions.Page, listOptions.PageSize)
	if err != nil {
		ctx.Error(http.StatusInternalServerError, "GetPaginateCodeAuthors", err)
		return
	}

	ctx.SetLinkHeader(int(total), listOptions.PageSize)
	ctx.RespHeader().Set("X-Total", fmt.Sprintf("%d", total))
	ctx.RespHeader().Set("X-Total-Count", fmt.Sprintf("%d", total))
	ctx.RespHeader().Set("Access-Control-Expose-Headers", "X-Total-Count, Link, X-Total")

	ctx.JSON(http.StatusOK, list)
}

type CountDTO struct {
	Branch       CountDTOBranch `json:"branch"`
	ReleaseCount int64          `json:"release_count"`
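For orientation: together with the router change above, GetContributorStat appears to be served under the repository API's /contributors/stat path and reads "ref" and "pass_year" form values plus the usual list paging, returning totals in the X-Total / X-Total-Count / Link headers. A hedged client sketch; the host, owner/repo and exact mount prefix are assumptions, not taken from the PR:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Assumed URL shape; the real prefix depends on where Routers() is mounted.
	url := "https://forge.example.com/api/v1/repos/owner/repo/contributors/stat?ref=develop&pass_year=2"
	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// Paging totals are exposed via response headers, per the handler above.
	fmt.Println("X-Total-Count:", resp.Header.Get("X-Total-Count"))
	fmt.Println(string(body))
}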