Mirror of https://github.com/go-gitea/gitea.git (synced 2025-12-02 13:59:48 +09:00)

Compare commits: v1.18.5 ... release/v1 (21 commits)
| SHA1 |
|---|
| efe7561787 |
| e01e78a947 |
| 04d489dbdd |
| 491ee43082 |
| 9107a87ff6 |
| 3dc2724d36 |
| 93fe0202cb |
| 13f304d89e |
| 805c5926ff |
| b6eea680ce |
| 9c33aff689 |
| a3e185bc5c |
| 2f1d968b27 |
| 0c212b3f08 |
| ceedb4973e |
| f5f4a8d02a |
| 543322f81f |
| 35a3b452d9 |
| 5a60e023af |
| 1170e067b2 |
| 8adc6a188e |
cmd/admin.go (11 lines changed)

@@ -161,6 +161,11 @@ var (
 			Value: "false",
 			Usage: "Use custom URLs for GitLab/GitHub OAuth endpoints",
 		},
+		cli.StringFlag{
+			Name:  "custom-tenant-id",
+			Value: "",
+			Usage: "Use custom Tenant ID for OAuth endpoints",
+		},
 		cli.StringFlag{
 			Name:  "custom-auth-url",
 			Value: "",
@@ -422,6 +427,7 @@ func parseOAuth2Config(c *cli.Context) *oauth2.Source {
 			AuthURL:    c.String("custom-auth-url"),
 			ProfileURL: c.String("custom-profile-url"),
 			EmailURL:   c.String("custom-email-url"),
+			Tenant:     c.String("custom-tenant-id"),
 		}
 	} else {
 		customURLMapping = nil
@@ -531,6 +537,7 @@ func runUpdateOauth(c *cli.Context) error {
 		customURLMapping.AuthURL = oAuth2Config.CustomURLMapping.AuthURL
 		customURLMapping.ProfileURL = oAuth2Config.CustomURLMapping.ProfileURL
 		customURLMapping.EmailURL = oAuth2Config.CustomURLMapping.EmailURL
+		customURLMapping.Tenant = oAuth2Config.CustomURLMapping.Tenant
 	}
 	if c.IsSet("use-custom-urls") && c.IsSet("custom-token-url") {
 		customURLMapping.TokenURL = c.String("custom-token-url")
@@ -548,6 +555,10 @@ func runUpdateOauth(c *cli.Context) error {
 		customURLMapping.EmailURL = c.String("custom-email-url")
 	}

+	if c.IsSet("use-custom-urls") && c.IsSet("custom-tenant-id") {
+		customURLMapping.Tenant = c.String("custom-tenant-id")
+	}
+
 	oAuth2Config.CustomURLMapping = customURLMapping
 	source.Cfg = oAuth2Config

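For orientation, a minimal sketch of where the new flag ends up, using only the field and helper names visible in the hunks above; the surrounding wiring is illustrative, not the actual function body:

```go
// Illustrative only: mirrors how parseOAuth2Config assembles the custom URL mapping from CLI flags.
customURLMapping := &oauth2.CustomURLMapping{
	TokenURL:   c.String("custom-token-url"),
	AuthURL:    c.String("custom-auth-url"),
	ProfileURL: c.String("custom-profile-url"),
	EmailURL:   c.String("custom-email-url"),
	Tenant:     c.String("custom-tenant-id"), // new: custom Tenant ID for OAuth endpoints
}
oAuth2Config.CustomURLMapping = customURLMapping
```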
cmd/serv.go (25 lines changed)

@@ -12,6 +12,7 @@ import (
 	"net/url"
 	"os"
 	"os/exec"
+	"path/filepath"
 	"regexp"
 	"strconv"
 	"strings"
@@ -290,17 +291,21 @@ func runServ(c *cli.Context) error {
 		return nil
 	}

-	// Special handle for Windows.
-	if setting.IsWindows {
-		verb = strings.Replace(verb, "-", " ", 1)
-	}
-
 	var gitcmd *exec.Cmd
-	verbs := strings.Split(verb, " ")
-	if len(verbs) == 2 {
-		gitcmd = exec.CommandContext(ctx, verbs[0], verbs[1], repoPath)
-	} else {
-		gitcmd = exec.CommandContext(ctx, verb, repoPath)
+	gitBinPath := filepath.Dir(git.GitExecutable) // e.g. /usr/bin
+	gitBinVerb := filepath.Join(gitBinPath, verb) // e.g. /usr/bin/git-upload-pack
+	if _, err := os.Stat(gitBinVerb); err != nil {
+		// if the command "git-upload-pack" doesn't exist, try to split "git-upload-pack" to use the sub-command with git
+		// ps: Windows only has "git.exe" in the bin path, so Windows always uses this way
+		verbFields := strings.SplitN(verb, "-", 2)
+		if len(verbFields) == 2 {
+			// use git binary with the sub-command part: "C:\...\bin\git.exe", "upload-pack", ...
+			gitcmd = exec.CommandContext(ctx, git.GitExecutable, verbFields[1], repoPath)
+		}
+	}
+	if gitcmd == nil {
+		// by default, use the verb (it has been checked above by allowedCommands)
+		gitcmd = exec.CommandContext(ctx, gitBinVerb, repoPath)
 	}

 	process.SetSysProcAttribute(gitcmd)
@@ -7,7 +7,7 @@ toc: false
 draft: false
 menu:
   sidebar:
-    parent: "advanced"
+    parent: "developers"
     name: "加入 Gitea 开源"
     weight: 10
     identifier: "hacking-on-gitea"
@@ -15,7 +15,7 @@ menu:

 # Hacking on Gitea

-首先你需要一些运行环境,这和 [从源代码安装]({{< relref "from-source.zh-cn.md" >}}) 相同,如果你还没有设置好,可以先阅读那个章节。
+首先你需要一些运行环境,这和 [从源代码安装]({{< relref "doc/installation/from-source.zh-cn.md" >}}) 相同,如果你还没有设置好,可以先阅读那个章节。

 如果你想为 Gitea 贡献代码,你需要 Fork 这个项目并且以 `master` 为开发分支。Gitea 使用 Govendor
 来管理依赖,因此所有依赖项都被工具自动 copy 在 vendor 子目录下。用下面的命令来下载源码:
@@ -32,4 +32,4 @@ chmod +x gitea

 ## 需要帮助?

-如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "seek-help.zh-cn.md" >}})
+如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "doc/help/seek-help.zh-cn.md" >}})
@@ -64,11 +64,11 @@ OpenSUSE 构建服务为 [openSUSE 和 SLE](https://software.opensuse.org/downlo
 choco install gitea
 ```

-你也可以 [从二进制安装]({{< relref "from-binary.zh-cn.md" >}}) 。
+你也可以 [从二进制安装]({{< relref "doc/installation/from-binary.zh-cn.md" >}}) 。

 ## macOS

-macOS 平台下当前我们仅支持通过 `brew` 来安装。如果你没有安装 [Homebrew](http://brew.sh/),你也可以查看 [从二进制安装]({{< relref "from-binary.zh-cn.md" >}})。在你安装了 `brew` 之后, 你可以执行以下命令:
+macOS 平台下当前我们仅支持通过 `brew` 来安装。如果你没有安装 [Homebrew](http://brew.sh/),你也可以查看 [从二进制安装]({{< relref "doc/installation/from-binary.zh-cn.md" >}})。在你安装了 `brew` 之后, 你可以执行以下命令:

 ```
 brew tap gitea/tap https://gitea.com/gitea/homebrew-gitea
@@ -105,4 +105,4 @@ make install clean

 ## 需要帮助?

-如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "seek-help.zh-cn.md" >}})
+如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "doc/help/seek-help.zh-cn.md" >}})
@@ -54,7 +54,7 @@ git checkout v{{< version >}}

 - `go` {{< min-go-version >}} 或以上版本, 详见[这里](https://golang.google.cn/doc/install)
 - `node` {{< min-node-version >}} 或以上版本,并且安装 `npm`, 详见[这里](https://nodejs.org/zh-cn/download/)
-- `make`, 详见[这里]({{< relref "make.zh-cn.md" >}})</a>
+- `make`, 详见[这里]({{< relref "doc/advanced/make.zh-cn.md" >}})

 各种可用的 [make 任务](https://github.com/go-gitea/gitea/blob/main/Makefile)
 可以用来使编译过程更方便。
@@ -104,4 +104,4 @@ CC=aarch64-unknown-linux-gnu-gcc GOOS=linux GOARCH=arm64 TAGS="bindata sqlite sq

 ## 需要帮助?

-如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "seek-help.zh-cn.md" >}})
+如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "doc/help/seek-help.zh-cn.md" >}})
@@ -131,6 +131,7 @@ Admin operations:
 - `--secret`: Client Secret.
 - `--auto-discover-url`: OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider).
 - `--use-custom-urls`: Use custom URLs for GitLab/GitHub OAuth endpoints.
+- `--custom-tenant-id`: Use custom Tenant ID for OAuth endpoints.
 - `--custom-auth-url`: Use a custom Authorization URL (option for GitLab/GitHub).
 - `--custom-token-url`: Use a custom Token URL (option for GitLab/GitHub).
 - `--custom-profile-url`: Use a custom Profile URL (option for GitLab/GitHub).
@@ -154,6 +155,7 @@ Admin operations:
 - `--secret`: Client Secret.
 - `--auto-discover-url`: OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider).
 - `--use-custom-urls`: Use custom URLs for GitLab/GitHub OAuth endpoints.
+- `--custom-tenant-id`: Use custom Tenant ID for OAuth endpoints.
 - `--custom-auth-url`: Use a custom Authorization URL (option for GitLab/GitHub).
 - `--custom-token-url`: Use a custom Token URL (option for GitLab/GitHub).
 - `--custom-profile-url`: Use a custom Profile URL (option for GitLab/GitHub).
@@ -70,4 +70,4 @@ Gitea的首要目标是创建一个极易安装,运行非常快速,安装和

 ## 需要帮助?

-如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "seek-help.zh-cn.md" >}})
+如果从本页中没有找到你需要的内容,请访问 [帮助页面]({{< relref "doc/help/seek-help.zh-cn.md" >}})
@@ -755,7 +755,7 @@ func CountOrphanedLabels() (int64, error) {
 	norepo, err := db.GetEngine(db.DefaultContext).Table("label").
 		Where(builder.And(
 			builder.Gt{"repo_id": 0},
-			builder.NotIn("repo_id", builder.Select("id").From("repository")),
+			builder.NotIn("repo_id", builder.Select("id").From("`repository`")),
 		)).
 		Count()
 	if err != nil {
@@ -765,7 +765,7 @@ func CountOrphanedLabels() (int64, error) {
 	noorg, err := db.GetEngine(db.DefaultContext).Table("label").
 		Where(builder.And(
 			builder.Gt{"org_id": 0},
-			builder.NotIn("org_id", builder.Select("id").From("user")),
+			builder.NotIn("org_id", builder.Select("id").From("`user`")),
 		)).
 		Count()
 	if err != nil {
@@ -786,7 +786,7 @@ func DeleteOrphanedLabels() error {
 	if _, err := db.GetEngine(db.DefaultContext).
 		Where(builder.And(
 			builder.Gt{"repo_id": 0},
-			builder.NotIn("repo_id", builder.Select("id").From("repository")),
+			builder.NotIn("repo_id", builder.Select("id").From("`repository`")),
 		)).
 		Delete(Label{}); err != nil {
 		return err
@@ -796,7 +796,7 @@ func DeleteOrphanedLabels() error {
 	if _, err := db.GetEngine(db.DefaultContext).
 		Where(builder.And(
 			builder.Gt{"org_id": 0},
-			builder.NotIn("org_id", builder.Select("id").From("user")),
+			builder.NotIn("org_id", builder.Select("id").From("`user`")),
 		)).
 		Delete(Label{}); err != nil {
 		return err
@@ -52,13 +52,16 @@ func listPullRequestStatement(baseRepoID int64, opts *PullRequestsOptions) (*xor

 // GetUnmergedPullRequestsByHeadInfo returns all pull requests that are open and has not been merged
 // by given head information (repo and branch).
-func GetUnmergedPullRequestsByHeadInfo(repoID int64, branch string) ([]*PullRequest, error) {
+// arg `includeClosed` controls whether the SQL returns closed PRs
+func GetUnmergedPullRequestsByHeadInfo(repoID int64, branch string, includeClosed bool) ([]*PullRequest, error) {
 	prs := make([]*PullRequest, 0, 2)
-	return prs, db.GetEngine(db.DefaultContext).
-		Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND issue.is_closed = ? AND flow = ?",
-			repoID, branch, false, false, PullRequestFlowGithub).
+	sess := db.GetEngine(db.DefaultContext).
 		Join("INNER", "issue", "issue.id = pull_request.issue_id").
-		Find(&prs)
+		Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND flow = ?", repoID, branch, false, PullRequestFlowGithub)
+	if !includeClosed {
+		sess.Where("issue.is_closed = ?", false)
+	}
+	return prs, sess.Find(&prs)
 }

 // CanMaintainerWriteToBranch check whether user is a maintainer and could write to the branch
@@ -71,7 +74,7 @@ func CanMaintainerWriteToBranch(p access_model.Permission, branch string, user *
 		return false
 	}

-	prs, err := GetUnmergedPullRequestsByHeadInfo(p.Units[0].RepoID, branch)
+	prs, err := GetUnmergedPullRequestsByHeadInfo(p.Units[0].RepoID, branch, false)
 	if err != nil {
 		return false
 	}
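A brief call-site sketch of the new `includeClosed` argument (the repo ID and branch name are placeholders): `false` preserves the old behaviour of returning only open pull requests, while `true` also returns closed-but-unmerged ones, which is what the re-test path in services/pull below switches to.

```go
// Placeholder repo ID / branch name; only the third argument is the point here.
openOnly, err := issues_model.GetUnmergedPullRequestsByHeadInfo(1, "feature", false)
if err != nil {
	log.Error("find open PRs: %v", err)
}
alsoClosed, err := issues_model.GetUnmergedPullRequestsByHeadInfo(1, "feature", true)
if err != nil {
	log.Error("find open and closed PRs: %v", err)
}
log.Debug("open=%d open+closed=%d", len(openOnly), len(alsoClosed))
```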
@@ -119,7 +119,7 @@ func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) {

 func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) {
 	assert.NoError(t, unittest.PrepareTestDatabase())
-	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(1, "branch2")
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(1, "branch2", false)
 	assert.NoError(t, err)
 	assert.Len(t, prs, 1)
 	for _, pr := range prs {
@@ -19,10 +19,11 @@ type Pagination struct {
 	urlParams []string
 }

-// NewPagination creates a new instance of the Pagination struct
-func NewPagination(total, page, issueNum, numPages int) *Pagination {
+// NewPagination creates a new instance of the Pagination struct.
+// "pagingNum" is "page size" or "limit", "current" is "page"
+func NewPagination(total, pagingNum, current, numPages int) *Pagination {
 	p := &Pagination{}
-	p.Paginater = paginator.New(total, page, issueNum, numPages)
+	p.Paginater = paginator.New(total, pagingNum, current, numPages)
 	return p
 }

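A small usage sketch of the clarified signature (all numbers are illustrative): 250 items, 20 per page, currently on page 3, rendering up to 5 page links — the same call shape the branches page below uses via `context.NewPagination(branchesCount, pageSize, page, 5)`.

```go
// total=250 items, pagingNum=20 per page, current=page 3, numPages=5 page links to render
pager := context.NewPagination(250, 20, 3, 5)
pager.SetDefaultParams(ctx) // carry existing query parameters into the page links
ctx.Data["Page"] = pager
```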
@@ -313,7 +313,7 @@ func CheckGitVersionAtLeast(atLeast string) error {
 }

 func configSet(key, value string) error {
-	stdout, _, err := NewCommand(DefaultContext, "config", "--get").AddDynamicArguments(key).RunStdString(nil)
+	stdout, _, err := NewCommand(DefaultContext, "config", "--global", "--get").AddDynamicArguments(key).RunStdString(nil)
 	if err != nil && !err.IsExitCode(1) {
 		return fmt.Errorf("failed to get git config %s, err: %w", key, err)
 	}
@@ -332,7 +332,7 @@ func configSet(key, value string) error {
 }

 func configSetNonExist(key, value string) error {
-	_, _, err := NewCommand(DefaultContext, "config", "--get").AddDynamicArguments(key).RunStdString(nil)
+	_, _, err := NewCommand(DefaultContext, "config", "--global", "--get").AddDynamicArguments(key).RunStdString(nil)
 	if err == nil {
 		// already exist
 		return nil
@@ -350,7 +350,7 @@ func configSetNonExist(key, value string) error {
 }

 func configAddNonExist(key, value string) error {
-	_, _, err := NewCommand(DefaultContext, "config", "--get").AddDynamicArguments(key, regexp.QuoteMeta(value)).RunStdString(nil)
+	_, _, err := NewCommand(DefaultContext, "config", "--global", "--get").AddDynamicArguments(key, regexp.QuoteMeta(value)).RunStdString(nil)
 	if err == nil {
 		// already exist
 		return nil
@@ -367,7 +367,7 @@ func configAddNonExist(key, value string) error {
 }

 func configUnsetAll(key, value string) error {
-	_, _, err := NewCommand(DefaultContext, "config", "--get").AddDynamicArguments(key).RunStdString(nil)
+	_, _, err := NewCommand(DefaultContext, "config", "--global", "--get").AddDynamicArguments(key).RunStdString(nil)
 	if err == nil {
 		// exist, need to remove
 		_, _, err = NewCommand(DefaultContext, "config", "--global", "--unset-all").AddDynamicArguments(key, regexp.QuoteMeta(value)).RunStdString(nil)
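For context on the `--global` / `IsExitCode(1)` pattern above: `git config --get` exits with status 1 when the key is simply not set, which these helpers treat as "go ahead and write it" rather than as a failure. A rough sketch of that flow; the helper name `ensureGlobalConfig` and the final set command are invented for illustration, only the read-side command shape comes from the diff:

```go
// Hypothetical helper, not part of the change; mirrors the configSet pattern shown above.
func ensureGlobalConfig(key, value string) error {
	stdout, _, err := NewCommand(DefaultContext, "config", "--global", "--get").AddDynamicArguments(key).RunStdString(nil)
	if err != nil && !err.IsExitCode(1) {
		return fmt.Errorf("failed to get git config %s, err: %w", key, err) // a real failure, not merely "unset"
	}
	if strings.TrimSpace(stdout) == value {
		return nil // already set to the desired value
	}
	// Writing the value is sketched here; the exact set invocation is not shown in this hunk.
	_, _, err = NewCommand(DefaultContext, "config", "--global").AddDynamicArguments(key, value).RunStdString(nil)
	return err
}
```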
@@ -282,11 +282,18 @@ func (repo *Repository) GetPatch(base, head string, w io.Writer) error {

 // GetFilesChangedBetween returns a list of all files that have been changed between the given commits
 func (repo *Repository) GetFilesChangedBetween(base, head string) ([]string, error) {
-	stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only").AddDynamicArguments(base + ".." + head).RunStdString(&RunOpts{Dir: repo.Path})
+	stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only", "-z").AddDynamicArguments(base + ".." + head).RunStdString(&RunOpts{Dir: repo.Path})
 	if err != nil {
 		return nil, err
 	}
-	return strings.Split(stdout, "\n"), err
+	split := strings.Split(stdout, "\000")
+
+	// Because Git will always emit filenames with a terminal NUL ignore the last entry in the split - which will always be empty.
+	if len(split) > 0 {
+		split = split[:len(split)-1]
+	}
+
+	return split, err
 }

 // GetDiffFromMergeBase generates and return patch data from merge base to head
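The move to `-z` protects against file names that contain newlines: entries are NUL-terminated, so the final empty element after the trailing NUL must be dropped, exactly as the hunk does. A self-contained illustration with simulated output:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Simulated "git diff --name-only -z" output: two files, one with a newline in its name.
	raw := "docs/README.md\x00weird\nname.txt\x00" // every entry, including the last, ends with NUL
	split := strings.Split(raw, "\000")
	if len(split) > 0 {
		split = split[:len(split)-1] // drop the empty element created by the trailing NUL
	}
	fmt.Printf("%q\n", split) // ["docs/README.md" "weird\nname.txt"]
}
```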
@@ -125,7 +125,10 @@ func (q *ChannelQueue) Shutdown() {
 		log.Trace("ChannelQueue: %s Flushing", q.name)
 		// We can't use Cleanup here because that will close the channel
 		if err := q.FlushWithContext(q.terminateCtx); err != nil {
-			log.Warn("ChannelQueue: %s Terminated before completed flushing", q.name)
+			count := atomic.LoadInt64(&q.numInQueue)
+			if count > 0 {
+				log.Warn("ChannelQueue: %s Terminated before completed flushing", q.name)
+			}
 			return
 		}
 		log.Debug("ChannelQueue: %s Flushed", q.name)
@@ -95,7 +95,8 @@ func NewPersistableChannelQueue(handle HandlerFunc, cfg, exemplar interface{}) (
 			},
 			Workers: 0,
 		},
 		DataDir: config.DataDir,
+		QueueName: config.Name + "-level",
 	}

 	levelQueue, err := NewLevelQueue(wrappedHandle, levelCfg, exemplar)
@@ -173,16 +174,18 @@ func (q *PersistableChannelQueue) Run(atShutdown, atTerminate func(func())) {
 	atShutdown(q.Shutdown)
 	atTerminate(q.Terminate)

-	if lq, ok := q.internal.(*LevelQueue); ok && lq.byteFIFO.Len(lq.shutdownCtx) != 0 {
+	if lq, ok := q.internal.(*LevelQueue); ok && lq.byteFIFO.Len(lq.terminateCtx) != 0 {
 		// Just run the level queue - we shut it down once it's flushed
 		go q.internal.Run(func(_ func()) {}, func(_ func()) {})
 		go func() {
-			for !q.IsEmpty() {
-				_ = q.internal.Flush(0)
+			for !lq.IsEmpty() {
+				_ = lq.Flush(0)
 				select {
 				case <-time.After(100 * time.Millisecond):
-				case <-q.internal.(*LevelQueue).shutdownCtx.Done():
-					log.Warn("LevelQueue: %s shut down before completely flushed", q.internal.(*LevelQueue).Name())
+				case <-lq.shutdownCtx.Done():
+					if lq.byteFIFO.Len(lq.terminateCtx) > 0 {
+						log.Warn("LevelQueue: %s shut down before completely flushed", q.internal.(*LevelQueue).Name())
+					}
 					return
 				}
 			}
@@ -317,10 +320,22 @@ func (q *PersistableChannelQueue) Shutdown() {
 	// Redirect all remaining data in the chan to the internal channel
 	log.Trace("PersistableChannelQueue: %s Redirecting remaining data", q.delayedStarter.name)
 	close(q.channelQueue.dataChan)
+	countOK, countLost := 0, 0
 	for data := range q.channelQueue.dataChan {
-		_ = q.internal.Push(data)
+		err := q.internal.Push(data)
+		if err != nil {
+			log.Error("PersistableChannelQueue: %s Unable redirect %v due to: %v", q.delayedStarter.name, data, err)
+			countLost++
+		} else {
+			countOK++
+		}
 		atomic.AddInt64(&q.channelQueue.numInQueue, -1)
 	}
+	if countLost > 0 {
+		log.Warn("PersistableChannelQueue: %s %d will be restored on restart, %d lost", q.delayedStarter.name, countOK, countLost)
+	} else if countOK > 0 {
+		log.Warn("PersistableChannelQueue: %s %d will be restored on restart", q.delayedStarter.name, countOK)
+	}
 	log.Trace("PersistableChannelQueue: %s Done Redirecting remaining data", q.delayedStarter.name)

 	log.Debug("PersistableChannelQueue: %s Shutdown", q.delayedStarter.name)
@@ -40,7 +40,7 @@ func TestPersistableChannelQueue(t *testing.T) {
 		Workers: 1,
 		BoostWorkers: 0,
 		MaxWorkers: 10,
-		Name: "first",
+		Name: "test-queue",
 	}, &testData{})
 	assert.NoError(t, err)

@@ -136,7 +136,7 @@ func TestPersistableChannelQueue(t *testing.T) {
 		Workers: 1,
 		BoostWorkers: 0,
 		MaxWorkers: 10,
-		Name: "second",
+		Name: "test-queue",
 	}, &testData{})
 	assert.NoError(t, err)

@@ -228,7 +228,7 @@ func TestPersistableChannelQueue_Pause(t *testing.T) {
 		Workers: 1,
 		BoostWorkers: 0,
 		MaxWorkers: 10,
-		Name: "first",
+		Name: "test-queue",
 	}, &testData{})
 	assert.NoError(t, err)

@@ -434,7 +434,7 @@ func TestPersistableChannelQueue_Pause(t *testing.T) {
 		Workers: 1,
 		BoostWorkers: 0,
 		MaxWorkers: 10,
-		Name: "second",
+		Name: "test-queue",
 	}, &testData{})
 	assert.NoError(t, err)
 	pausable, ok = queue.(Pausable)
@@ -178,7 +178,9 @@ func (q *ChannelUniqueQueue) Shutdown() {
 	go func() {
 		log.Trace("ChannelUniqueQueue: %s Flushing", q.name)
 		if err := q.FlushWithContext(q.terminateCtx); err != nil {
-			log.Warn("ChannelUniqueQueue: %s Terminated before completed flushing", q.name)
+			if !q.IsEmpty() {
+				log.Warn("ChannelUniqueQueue: %s Terminated before completed flushing", q.name)
+			}
 			return
 		}
 		log.Debug("ChannelUniqueQueue: %s Flushed", q.name)
@@ -9,10 +9,13 @@ import (
 	"testing"
 	"time"

+	"code.gitea.io/gitea/modules/log"
+
 	"github.com/stretchr/testify/assert"
 )

 func TestChannelUniqueQueue(t *testing.T) {
+	_ = log.NewLogger(1000, "console", "console", `{"level":"warn","stacktracelevel":"NONE","stderr":true}`)
 	handleChan := make(chan *testData)
 	handle := func(data ...Data) []Data {
 		for _, datum := range data {
@@ -53,6 +56,8 @@ func TestChannelUniqueQueue(t *testing.T) {
 }

 func TestChannelUniqueQueue_Batch(t *testing.T) {
+	_ = log.NewLogger(1000, "console", "console", `{"level":"warn","stacktracelevel":"NONE","stderr":true}`)
+
 	handleChan := make(chan *testData)
 	handle := func(data ...Data) []Data {
 		for _, datum := range data {
@@ -99,6 +104,8 @@ func TestChannelUniqueQueue_Batch(t *testing.T) {
 }

 func TestChannelUniqueQueue_Pause(t *testing.T) {
+	_ = log.NewLogger(1000, "console", "console", `{"level":"warn","stacktracelevel":"NONE","stderr":true}`)
+
 	lock := sync.Mutex{}
 	var queue Queue
 	var err error
@@ -95,7 +95,8 @@ func NewPersistableChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interfac
 			},
 			Workers: 0,
 		},
 		DataDir: config.DataDir,
+		QueueName: config.Name + "-level",
 	}

 	queue.channelQueue = channelUniqueQueue.(*ChannelUniqueQueue)
@@ -210,17 +211,29 @@ func (q *PersistableChannelUniqueQueue) Run(atShutdown, atTerminate func(func())
 	atTerminate(q.Terminate)
 	_ = q.channelQueue.AddWorkers(q.channelQueue.workers, 0)

-	if luq, ok := q.internal.(*LevelUniqueQueue); ok && luq.ByteFIFOUniqueQueue.byteFIFO.Len(luq.shutdownCtx) != 0 {
+	if luq, ok := q.internal.(*LevelUniqueQueue); ok && !luq.IsEmpty() {
 		// Just run the level queue - we shut it down once it's flushed
-		go q.internal.Run(func(_ func()) {}, func(_ func()) {})
+		go luq.Run(func(_ func()) {}, func(_ func()) {})
 		go func() {
-			_ = q.internal.Flush(0)
-			log.Debug("LevelUniqueQueue: %s flushed so shutting down", q.internal.(*LevelUniqueQueue).Name())
-			q.internal.(*LevelUniqueQueue).Shutdown()
-			GetManager().Remove(q.internal.(*LevelUniqueQueue).qid)
+			_ = luq.Flush(0)
+			for !luq.IsEmpty() {
+				_ = luq.Flush(0)
+				select {
+				case <-time.After(100 * time.Millisecond):
+				case <-luq.shutdownCtx.Done():
+					if luq.byteFIFO.Len(luq.terminateCtx) > 0 {
+						log.Warn("LevelUniqueQueue: %s shut down before completely flushed", luq.Name())
+					}
+					return
+				}
+			}
+			log.Debug("LevelUniqueQueue: %s flushed so shutting down", luq.Name())
+			luq.Shutdown()
+			GetManager().Remove(luq.qid)
 		}()
 	} else {
 		log.Debug("PersistableChannelUniqueQueue: %s Skipping running the empty level queue", q.delayedStarter.name)
+		_ = q.internal.Flush(0)
 		q.internal.(*LevelUniqueQueue).Shutdown()
 		GetManager().Remove(q.internal.(*LevelUniqueQueue).qid)
 	}
@@ -286,8 +299,20 @@ func (q *PersistableChannelUniqueQueue) Shutdown() {
 	// Redirect all remaining data in the chan to the internal channel
 	close(q.channelQueue.dataChan)
 	log.Trace("PersistableChannelUniqueQueue: %s Redirecting remaining data", q.delayedStarter.name)
+	countOK, countLost := 0, 0
 	for data := range q.channelQueue.dataChan {
-		_ = q.internal.Push(data)
+		err := q.internal.(*LevelUniqueQueue).Push(data)
+		if err != nil {
+			log.Error("PersistableChannelUniqueQueue: %s Unable redirect %v due to: %v", q.delayedStarter.name, data, err)
+			countLost++
+		} else {
+			countOK++
+		}
+	}
+	if countLost > 0 {
+		log.Warn("PersistableChannelUniqueQueue: %s %d will be restored on restart, %d lost", q.delayedStarter.name, countOK, countLost)
+	} else if countOK > 0 {
+		log.Warn("PersistableChannelUniqueQueue: %s %d will be restored on restart", q.delayedStarter.name, countOK)
 	}
 	log.Trace("PersistableChannelUniqueQueue: %s Done Redirecting remaining data", q.delayedStarter.name)

modules/queue/unique_queue_disk_channel_test.go (new file, 259 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package queue

import (
	"fmt"
	"strconv"
	"sync"
	"testing"
	"time"

	"code.gitea.io/gitea/modules/log"

	"github.com/stretchr/testify/assert"
)

func TestPersistableChannelUniqueQueue(t *testing.T) {
	tmpDir := t.TempDir()
	fmt.Printf("TempDir %s\n", tmpDir)
	_ = log.NewLogger(1000, "console", "console", `{"level":"warn","stacktracelevel":"NONE","stderr":true}`)

	// Common function to create the Queue
	newQueue := func(name string, handle func(data ...Data) []Data) Queue {
		q, err := NewPersistableChannelUniqueQueue(handle,
			PersistableChannelUniqueQueueConfiguration{
				Name:         name,
				DataDir:      tmpDir,
				QueueLength:  200,
				MaxWorkers:   1,
				BlockTimeout: 1 * time.Second,
				BoostTimeout: 5 * time.Minute,
				BoostWorkers: 1,
				Workers:      0,
			}, "task-0")
		assert.NoError(t, err)
		return q
	}

	// runs the provided queue and provides some timer function
	type channels struct {
		readyForShutdown  chan struct{} // closed when shutdown functions have been assigned
		readyForTerminate chan struct{} // closed when terminate functions have been assigned
		signalShutdown    chan struct{} // Should close to signal shutdown
		doneShutdown      chan struct{} // closed when shutdown function is done
		queueTerminate    []func()      // list of atTerminate functions to call atTerminate - need to be accessed with lock
	}
	runQueue := func(q Queue, lock *sync.Mutex) *channels {
		chans := &channels{
			readyForShutdown:  make(chan struct{}),
			readyForTerminate: make(chan struct{}),
			signalShutdown:    make(chan struct{}),
			doneShutdown:      make(chan struct{}),
		}
		go q.Run(func(atShutdown func()) {
			go func() {
				lock.Lock()
				select {
				case <-chans.readyForShutdown:
				default:
					close(chans.readyForShutdown)
				}
				lock.Unlock()
				<-chans.signalShutdown
				atShutdown()
				close(chans.doneShutdown)
			}()
		}, func(atTerminate func()) {
			lock.Lock()
			defer lock.Unlock()
			select {
			case <-chans.readyForTerminate:
			default:
				close(chans.readyForTerminate)
			}
			chans.queueTerminate = append(chans.queueTerminate, atTerminate)
		})

		return chans
	}

	// call to shutdown and terminate the queue associated with the channels
	doTerminate := func(chans *channels, lock *sync.Mutex) {
		<-chans.readyForTerminate

		lock.Lock()
		callbacks := []func(){}
		callbacks = append(callbacks, chans.queueTerminate...)
		lock.Unlock()

		for _, callback := range callbacks {
			callback()
		}
	}

	mapLock := sync.Mutex{}
	executedInitial := map[string][]string{}
	hasInitial := map[string][]string{}

	fillQueue := func(name string, done chan struct{}) {
		t.Run("Initial Filling: "+name, func(t *testing.T) {
			lock := sync.Mutex{}

			startAt100Queued := make(chan struct{})
			stopAt20Shutdown := make(chan struct{}) // stop and shutdown at the 20th item

			handle := func(data ...Data) []Data {
				<-startAt100Queued
				for _, datum := range data {
					s := datum.(string)
					mapLock.Lock()
					executedInitial[name] = append(executedInitial[name], s)
					mapLock.Unlock()
					if s == "task-20" {
						close(stopAt20Shutdown)
					}
				}
				return nil
			}

			q := newQueue(name, handle)

			// add 100 tasks to the queue
			for i := 0; i < 100; i++ {
				_ = q.Push("task-" + strconv.Itoa(i))
			}
			close(startAt100Queued)

			chans := runQueue(q, &lock)

			<-chans.readyForShutdown
			<-stopAt20Shutdown
			close(chans.signalShutdown)
			<-chans.doneShutdown
			_ = q.Push("final")

			// check which tasks are still in the queue
			for i := 0; i < 100; i++ {
				if has, _ := q.(UniqueQueue).Has("task-" + strconv.Itoa(i)); has {
					mapLock.Lock()
					hasInitial[name] = append(hasInitial[name], "task-"+strconv.Itoa(i))
					mapLock.Unlock()
				}
			}
			if has, _ := q.(UniqueQueue).Has("final"); has {
				mapLock.Lock()
				hasInitial[name] = append(hasInitial[name], "final")
				mapLock.Unlock()
			} else {
				assert.Fail(t, "UnqueQueue %s should have \"final\"", name)
			}
			doTerminate(chans, &lock)
			mapLock.Lock()
			assert.Equal(t, 101, len(executedInitial[name])+len(hasInitial[name]))
			mapLock.Unlock()
		})
		close(done)
	}

	doneA := make(chan struct{})
	doneB := make(chan struct{})

	go fillQueue("QueueA", doneA)
	go fillQueue("QueueB", doneB)

	<-doneA
	<-doneB

	executedEmpty := map[string][]string{}
	hasEmpty := map[string][]string{}
	emptyQueue := func(name string, done chan struct{}) {
		t.Run("Empty Queue: "+name, func(t *testing.T) {
			lock := sync.Mutex{}
			stop := make(chan struct{})

			// collect the tasks that have been executed
			handle := func(data ...Data) []Data {
				lock.Lock()
				for _, datum := range data {
					mapLock.Lock()
					executedEmpty[name] = append(executedEmpty[name], datum.(string))
					mapLock.Unlock()
					if datum.(string) == "final" {
						close(stop)
					}
				}
				lock.Unlock()
				return nil
			}

			q := newQueue(name, handle)
			chans := runQueue(q, &lock)

			<-chans.readyForShutdown
			<-stop
			close(chans.signalShutdown)
			<-chans.doneShutdown

			// check which tasks are still in the queue
			for i := 0; i < 100; i++ {
				if has, _ := q.(UniqueQueue).Has("task-" + strconv.Itoa(i)); has {
					mapLock.Lock()
					hasEmpty[name] = append(hasEmpty[name], "task-"+strconv.Itoa(i))
					mapLock.Unlock()
				}
			}
			doTerminate(chans, &lock)

			mapLock.Lock()
			assert.Equal(t, 101, len(executedInitial[name])+len(executedEmpty[name]))
			assert.Equal(t, 0, len(hasEmpty[name]))
			mapLock.Unlock()
		})
		close(done)
	}

	doneA = make(chan struct{})
	doneB = make(chan struct{})

	go emptyQueue("QueueA", doneA)
	go emptyQueue("QueueB", doneB)

	<-doneA
	<-doneB

	mapLock.Lock()
	t.Logf("TestPersistableChannelUniqueQueue executedInitiallyA=%v, executedInitiallyB=%v, executedToEmptyA=%v, executedToEmptyB=%v",
		len(executedInitial["QueueA"]), len(executedInitial["QueueB"]), len(executedEmpty["QueueA"]), len(executedEmpty["QueueB"]))

	// reset and rerun
	executedInitial = map[string][]string{}
	hasInitial = map[string][]string{}
	executedEmpty = map[string][]string{}
	hasEmpty = map[string][]string{}
	mapLock.Unlock()

	doneA = make(chan struct{})
	doneB = make(chan struct{})

	go fillQueue("QueueA", doneA)
	go fillQueue("QueueB", doneB)

	<-doneA
	<-doneB

	doneA = make(chan struct{})
	doneB = make(chan struct{})

	go emptyQueue("QueueA", doneA)
	go emptyQueue("QueueB", doneB)

	<-doneA
	<-doneB

	mapLock.Lock()
	t.Logf("TestPersistableChannelUniqueQueue executedInitiallyA=%v, executedInitiallyB=%v, executedToEmptyA=%v, executedToEmptyB=%v",
		len(executedInitial["QueueA"]), len(executedInitial["QueueB"]), len(executedEmpty["QueueA"]), len(executedEmpty["QueueB"]))
	mapLock.Unlock()
}
@@ -33,6 +33,8 @@ func CreateRepo(ctx *context.APIContext) {
	// responses:
	//   "201":
	//     "$ref": "#/responses/Repository"
+	//   "400":
+	//     "$ref": "#/responses/error"
	//   "403":
	//     "$ref": "#/responses/forbidden"
	//   "404":
@@ -155,7 +155,7 @@ func Migrate(ctx *context.APIContext) {
 		Issues:         form.Issues,
 		Milestones:     form.Milestones,
 		Labels:         form.Labels,
-		Comments:       true,
+		Comments:       form.Issues || form.PullRequests,
 		PullRequests:   form.PullRequests,
 		Releases:       form.Releases,
 		GitServiceType: gitServiceType,
@@ -231,6 +231,22 @@ func CreateUserRepo(ctx *context.APIContext, owner *user_model.User, opt api.Cre
 	if opt.AutoInit && opt.Readme == "" {
 		opt.Readme = "Default"
 	}
+
+	contains := func(slice []string, s string) bool {
+		for _, v := range slice {
+			if v == s {
+				return true
+			}
+		}
+		return false
+	}
+
+	// If the readme template does not exist, a 400 will be returned.
+	if opt.AutoInit && len(opt.Readme) > 0 && !contains(repo_module.Readmes, opt.Readme) {
+		ctx.Error(http.StatusBadRequest, "", fmt.Errorf("readme template does not exist, available templates: %v", repo_module.Readmes))
+		return
+	}
+
 	repo, err := repo_service.CreateRepository(ctx.Doer, owner, repo_module.CreateRepoOptions{
 		Name:        opt.Name,
 		Description: opt.Description,
@@ -283,6 +299,8 @@ func Create(ctx *context.APIContext) {
	// responses:
	//   "201":
	//     "$ref": "#/responses/Repository"
+	//   "400":
+	//     "$ref": "#/responses/error"
	//   "409":
	//     description: The repository with the same name already exists.
	//   "422":
@@ -464,6 +482,8 @@ func CreateOrgRepo(ctx *context.APIContext) {
	// responses:
	//   "201":
	//     "$ref": "#/responses/Repository"
+	//   "400":
+	//     "$ref": "#/responses/error"
	//   "404":
	//     "$ref": "#/responses/notFound"
	//   "403":
@@ -9,6 +9,7 @@ import (
 	"errors"
 	"fmt"
 	"net/http"
+	"net/url"
 	"strings"

 	"code.gitea.io/gitea/models"
@@ -66,21 +67,17 @@ func Branches(ctx *context.Context) {
 	if page <= 1 {
 		page = 1
 	}
-	limit := ctx.FormInt("limit")
-	if limit <= 0 || limit > setting.Git.BranchesRangeSize {
-		limit = setting.Git.BranchesRangeSize
-	}
+	pageSize := setting.Git.BranchesRangeSize

-	skip := (page - 1) * limit
-	log.Debug("Branches: skip: %d limit: %d", skip, limit)
-	defaultBranchBranch, branches, branchesCount := loadBranches(ctx, skip, limit)
+	skip := (page - 1) * pageSize
+	log.Debug("Branches: skip: %d limit: %d", skip, pageSize)
+	defaultBranchBranch, branches, branchesCount := loadBranches(ctx, skip, pageSize)
 	if ctx.Written() {
 		return
 	}
 	ctx.Data["Branches"] = branches
 	ctx.Data["DefaultBranchBranch"] = defaultBranchBranch
-	pager := context.NewPagination(branchesCount, setting.Git.BranchesRangeSize, page, 5)
+	pager := context.NewPagination(branchesCount, pageSize, page, 5)
 	pager.SetDefaultParams(ctx)
 	ctx.Data["Page"] = pager

@@ -166,7 +163,7 @@ func RestoreBranchPost(ctx *context.Context) {

 func redirect(ctx *context.Context) {
 	ctx.JSON(http.StatusOK, map[string]interface{}{
-		"redirect": ctx.Repo.RepoLink + "/branches",
+		"redirect": ctx.Repo.RepoLink + "/branches?page=" + url.QueryEscape(ctx.FormString("page")),
 	})
 }

@@ -1375,11 +1375,12 @@ func ViewIssue(ctx *context.Context) {
 	}

 	var (
 		role         issues_model.RoleDescriptor
 		ok           bool
 		marked       = make(map[int64]issues_model.RoleDescriptor)
 		comment      *issues_model.Comment
 		participants = make([]*user_model.User, 1, 10)
+		latestCloseCommentID int64
 	)
 	if ctx.Repo.Repository.IsTimetrackerEnabled() {
 		if ctx.IsSigned {
@@ -1586,9 +1587,15 @@ func ViewIssue(ctx *context.Context) {
 			comment.Type == issues_model.CommentTypeStopTracking {
 			// drop error since times could be pruned from DB..
 			_ = comment.LoadTime()
+		} else if comment.Type == issues_model.CommentTypeClose {
+			// record ID of latest closed comment.
+			// if PR is closed, the comments whose type is CommentTypePullRequestPush(29) after latestCloseCommentID won't be rendered.
+			latestCloseCommentID = comment.ID
 		}
 	}

+	ctx.Data["LatestCloseCommentID"] = latestCloseCommentID
+
 	// Combine multiple label assignments into a single comment
 	combineLabelComments(issue)

@@ -26,6 +26,8 @@ const (
 func NewDiffPatch(ctx *context.Context) {
 	canCommit := renderCommitRights(ctx)

+	ctx.Data["PageIsPatch"] = true
+
 	ctx.Data["TreePath"] = ""

 	ctx.Data["commit_summary"] = ""
@@ -52,6 +54,7 @@ func NewDiffPatchPost(ctx *context.Context) {
 	if form.CommitChoice == frmCommitChoiceNewBranch {
 		branchName = form.NewBranchName
 	}
+	ctx.Data["PageIsPatch"] = true
 	ctx.Data["TreePath"] = ""
 	ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
 	ctx.Data["FileContent"] = form.Content
@@ -87,13 +90,14 @@ func NewDiffPatchPost(ctx *context.Context) {
 		message += "\n\n" + form.CommitMessage
 	}

-	if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, &files.ApplyDiffPatchOptions{
+	fileResponse, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, &files.ApplyDiffPatchOptions{
 		LastCommitID: form.LastCommit,
 		OldBranch:    ctx.Repo.BranchName,
 		NewBranch:    branchName,
 		Message:      message,
 		Content:      strings.ReplaceAll(form.Content, "\r", ""),
-	}); err != nil {
+	})
+	if err != nil {
 		if models.IsErrBranchAlreadyExists(err) {
 			// User has specified a branch that already exists
 			branchErr := err.(models.ErrBranchAlreadyExists)
@@ -112,6 +116,6 @@ func NewDiffPatchPost(ctx *context.Context) {
 	if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(unit.TypePullRequests) {
 		ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName))
 	} else {
-		ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName) + "/" + util.PathEscapeSegments(form.TreePath))
+		ctx.Redirect(ctx.Repo.RepoLink + "/commit/" + fileResponse.Commit.SHA)
 	}
 }
@@ -574,7 +574,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
 	ctx.Data["HeadBranchCommitID"] = headBranchSha
 	ctx.Data["PullHeadCommitID"] = sha

-	if pull.HeadRepo == nil || !headBranchExist || headBranchSha != sha {
+	if pull.HeadRepo == nil || !headBranchExist || (!pull.Issue.IsClosed && (headBranchSha != sha)) {
 		ctx.Data["IsPullRequestBroken"] = true
 		if pull.IsSameRepo() {
 			ctx.Data["HeadTarget"] = pull.HeadBranch
@@ -500,6 +500,13 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool {
 			theCommits.Commits = theCommits.Commits[:setting.UI.FeedMaxCommitNum]
 		}

+		if newCommit, err := gitRepo.GetCommit(newCommitID); err != nil {
+			log.Error("SyncMirrors [repo: %-v]: unable to get commit %s: %v", m.Repo, newCommitID, err)
+			continue
+		} else {
+			theCommits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
+		}
+
 		theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID)

 		notification.NotifySyncPushCommits(m.Repo.MustOwner(), m.Repo, &repo_module.PushUpdateOptions{
@@ -258,7 +258,7 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string,
 	// If you don't let it run all the way then you will lose data
 	// TODO: graceful: AddTestPullRequestTask needs to become a queue!

-	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch)
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch, true)
 	if err != nil {
 		log.Error("Find pull requests [head_repo_id: %d, head_branch: %s]: %v", repoID, branch, err)
 		return
@@ -502,7 +502,7 @@ func (errs errlist) Error() string {

 // CloseBranchPulls close all the pull requests who's head branch is the branch
 func CloseBranchPulls(doer *user_model.User, repoID int64, branch string) error {
-	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch)
+	prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repoID, branch, false)
 	if err != nil {
 		return err
 	}
@@ -538,7 +538,7 @@ func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *re

 	var errs errlist
 	for _, branch := range branches {
-		prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repo.ID, branch.Name)
+		prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(repo.ID, branch.Name, false)
 		if err != nil {
 			return err
 		}
@@ -68,6 +68,12 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str
 	remoteRepoName := "head_repo"
 	baseBranch := "base"

+	fetchArgs := []git.CmdArg{"--no-tags"}
+	if git.CheckGitVersionAtLeast("2.25.0") == nil {
+		// Writing the commit graph can be slow and is not needed here
+		fetchArgs = append(fetchArgs, "--no-write-commit-graph")
+	}
+
 	// Add head repo remote.
 	addCacheRepo := func(staging, cache string) error {
 		p := filepath.Join(staging, ".git", "objects", "info", "alternates")
@@ -109,7 +115,7 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str
 	outbuf.Reset()
 	errbuf.Reset()

-	if err := git.NewCommand(ctx, "fetch", "origin", "--no-tags").AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
+	if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
 		Run(&git.RunOpts{
 			Dir:    tmpBasePath,
 			Stdout: &outbuf,
@@ -172,7 +178,7 @@ func createTemporaryRepo(ctx context.Context, pr *issues_model.PullRequest) (str
 	} else {
 		headBranch = pr.GetGitRefName()
 	}
-	if err := git.NewCommand(ctx, "fetch", "--no-tags").AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
+	if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
 		Run(&git.RunOpts{
 			Dir:    tmpBasePath,
 			Stdout: &outbuf,
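Note: the createTemporaryRepo hunks above build the fetch flags once (--no-tags, plus --no-write-commit-graph when git >= 2.25) and reuse them for both the base and head fetches. A minimal standalone sketch of the same idea, using os/exec instead of Gitea's git module and placeholder refspecs (main:base, main:original_base):

// Standalone sketch - not Gitea's git module; refspecs are placeholders.
package main

import (
	"fmt"
	"os/exec"
)

// buildFetchArgs mirrors the fetchArgs construction above: tags are never
// fetched, and the commit graph is skipped when the local git supports it.
func buildFetchArgs(gitAtLeast225 bool) []string {
	args := []string{"--no-tags"}
	if gitAtLeast225 {
		// Writing the commit graph can be slow and is not needed for a
		// short-lived temporary repository.
		args = append(args, "--no-write-commit-graph")
	}
	return args
}

func main() {
	args := append([]string{"fetch", "origin"}, buildFetchArgs(true)...)
	// "--" separates options from refspecs, as AddDashesAndList does above.
	args = append(args, "--", "main:base", "main:original_base")
	cmd := exec.Command("git", args...)
	// Prints the full invocation, e.g.:
	// /usr/bin/git fetch origin --no-tags --no-write-commit-graph -- main:base main:original_base
	fmt.Println(cmd.String())
}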
@@ -81,9 +81,9 @@
 			<td class="three wide right aligned">
 				{{if not .LatestPullRequest}}
 					{{if .IsIncluded}}
-						<a class="ui tooltip orange large label" data-content="{{$.locale.Tr "repo.branch.included_desc"}}" data-position="top right">
+						<span class="ui tooltip orange large label" data-content="{{$.locale.Tr "repo.branch.included_desc"}}" data-position="top right">
 							{{svg "octicon-git-pull-request"}} {{$.locale.Tr "repo.branch.included"}}
-						</a>
+						</span>
 					{{else if and (not .IsDeleted) $.AllowsPulls (gt .CommitsAhead 0)}}
 						<a href="{{$.RepoLink}}/compare/{{PathEscapeSegments $.DefaultBranch}}...{{if ne $.Repository.Owner.Name $.Owner.Name}}{{PathEscape $.Owner.Name}}:{{end}}{{PathEscapeSegments .Name}}">
 							<button id="new-pull-request" class="ui compact basic button mr-0">{{if $.CanPull}}{{$.locale.Tr "repo.pulls.compare_changes"}}{{else}}{{$.locale.Tr "action.compare_branch"}}{{end}}</button>
@@ -123,13 +123,13 @@
 					{{end}}
 					{{if and $.IsWriter (not $.IsMirror) (not $.Repository.IsArchived) (not .IsProtected)}}
 						{{if .IsDeleted}}
-							<button class="ui basic jump button icon tooltip undo-button" data-url="{{$.Link}}/restore?branch_id={{.DeletedBranch.ID}}&name={{.DeletedBranch.Name}}" data-content="{{$.locale.Tr "repo.branch.restore" (.Name)}}" data-position="top right">
+							<button class="ui basic jump button icon tooltip undo-button" data-url="{{$.Link}}/restore?branch_id={{.DeletedBranch.ID}}&name={{.DeletedBranch.Name}}&page={{$.Page.Paginater.Current}}" data-content="{{$.locale.Tr "repo.branch.restore" (.Name)}}" data-position="top right">
 								<span class="text blue">
 									{{svg "octicon-reply"}}
 								</span>
 							</button>
 						{{else}}
-							<button class="ui basic jump button icon tooltip delete-button delete-branch-button" data-url="{{$.Link}}/delete?name={{.Name}}" data-content="{{$.locale.Tr "repo.branch.delete" (.Name)}}" data-position="top right" data-name="{{.Name}}">
+							<button class="ui basic jump button icon tooltip delete-button delete-branch-button" data-url="{{$.Link}}/delete?name={{.Name}}&page={{$.Page.Paginater.Current}}" data-content="{{$.locale.Tr "repo.branch.delete" (.Name)}}" data-position="top right" data-name="{{.Name}}">
 								{{svg "octicon-trash"}}
 							</button>
 						{{end}}
@@ -9,7 +9,7 @@
 			{{.locale.Tr "repo.editor.commit_changes"}}
 		{{- end}}</h3>
 		<div class="field">
-			<input name="commit_summary" placeholder="{{if .PageIsDelete}}{{.locale.Tr "repo.editor.delete" .TreePath}}{{else if .PageIsUpload}}{{.locale.Tr "repo.editor.upload_files_to_dir" .TreePath}}{{else if .IsNewFile}}{{.locale.Tr "repo.editor.add_tmpl"}}{{else}}{{.locale.Tr "repo.editor.update" .TreePath}}{{end}}" value="{{.commit_summary}}" autofocus>
+			<input name="commit_summary" placeholder="{{if .PageIsDelete}}{{.locale.Tr "repo.editor.delete" .TreePath}}{{else if .PageIsUpload}}{{.locale.Tr "repo.editor.upload_files_to_dir" .TreePath}}{{else if .IsNewFile}}{{.locale.Tr "repo.editor.add_tmpl"}}{{else if .PageIsPatch}}{{.locale.Tr "repo.editor.patch"}}{{else}}{{.locale.Tr "repo.editor.update" .TreePath}}{{end}}" value="{{.commit_summary}}" autofocus>
 		</div>
 		<div class="field">
 			<textarea name="commit_message" placeholder="{{.locale.Tr "repo.editor.commit_message_desc"}}" rows="5">{{.commit_message}}</textarea>
@@ -697,6 +697,10 @@
 					</span>
 				</div>
 			{{else if and (eq .Type 29) (or (gt .CommitsNum 0) .IsForcePush)}}
+				<!-- If PR is closed, the comments whose type is CommentTypePullRequestPush(29) after latestCloseCommentID won't be rendered. //-->
+				{{if and .Issue.IsClosed (gt .ID $.LatestCloseCommentID)}}
+					{{continue}}
+				{{end}}
 				<div class="timeline-item event" id="{{.HashTag}}">
 					<span class="badge">{{svg "octicon-repo-push"}}</span>
 					<span class="text grey muted-links">
@@ -8,14 +8,14 @@
 		{{if .LatestCommitUser}}
 			{{avatar .LatestCommitUser 24}}
 			{{if .LatestCommitUser.FullName}}
-				<a class="muted" href="{{.LatestCommitUser.HomeLink}}"><strong>{{.LatestCommitUser.FullName}}</strong></a>
+				<a class="muted author-wrapper" title="{{.LatestCommitUser.FullName}}" href="{{.LatestCommitUser.HomeLink}}"><strong>{{.LatestCommitUser.FullName}}</strong></a>
 			{{else}}
-				<a class="muted" href="{{.LatestCommitUser.HomeLink}}"><strong>{{if .LatestCommit.Author}}{{.LatestCommit.Author.Name}}{{else}}{{.LatestCommitUser.Name}}{{end}}</strong></a>
+				<a class="muted author-wrapper" title="{{if .LatestCommit.Author}}{{.LatestCommit.Author.Name}}{{else}}{{.LatestCommitUser.Name}}{{end}}" href="{{.LatestCommitUser.HomeLink}}"><strong>{{if .LatestCommit.Author}}{{.LatestCommit.Author.Name}}{{else}}{{.LatestCommitUser.Name}}{{end}}</strong></a>
 			{{end}}
 		{{else}}
 			{{if .LatestCommit.Author}}
 				{{avatarByEmail .LatestCommit.Author.Email .LatestCommit.Author.Name 24}}
-				<strong>{{.LatestCommit.Author.Name}}</strong>
+				<span class="author-wrapper" title="{{.LatestCommit.Author.Name}}"><strong>{{.LatestCommit.Author.Name}}</strong></span>
 			{{end}}
 		{{end}}
 		<a rel="nofollow" class="ui sha label {{if .LatestCommit.Signature}} isSigned {{if .LatestCommitVerification.Verified}} isVerified{{if eq .LatestCommitVerification.TrustStatus "trusted"}}{{else if eq .LatestCommitVerification.TrustStatus "untrusted"}}Untrusted{{else}}Unmatched{{end}}{{else if .LatestCommitVerification.Warning}} isWarning{{end}}{{end}}" href="{{.RepoLink}}/commit/{{PathEscape .LatestCommit.ID.String}}">
@@ -109,7 +109,7 @@
 			<span class="due-date tooltip" data-content="{{$.locale.Tr "repo.issues.due_date"}}" data-position="right center">
 				<span{{if .IsOverdue}} class="overdue"{{end}}>
 					{{svg "octicon-calendar" 14 "mr-2"}}
-					<time data-format="short-date" datetime="{{.DeadlineUnix.FormatLong}}">{{.DeadlineUnix.FormatShort}}</time>
+					<time data-format="short-date" datetime="{{.DeadlineUnix.FormatDate}}">{{.DeadlineUnix.FormatShort}}</time>
 				</span>
 			</span>
 		{{end}}
@@ -586,6 +586,9 @@
           "201": {
             "$ref": "#/responses/Repository"
           },
+          "400": {
+            "$ref": "#/responses/error"
+          },
           "403": {
             "$ref": "#/responses/forbidden"
           },
@@ -1772,6 +1775,9 @@
           "201": {
             "$ref": "#/responses/Repository"
           },
+          "400": {
+            "$ref": "#/responses/error"
+          },
           "403": {
             "$ref": "#/responses/forbidden"
           },
@@ -12502,6 +12508,9 @@
           "201": {
             "$ref": "#/responses/Repository"
           },
+          "400": {
+            "$ref": "#/responses/error"
+          },
           "409": {
             "description": "The repository with the same name already exists."
           },
@@ -77,6 +77,9 @@ export async function createCommentEasyMDE(textarea, easyMDEOptions = {}) {

  const inputField = easyMDE.codemirror.getInputField();

+  easyMDE.codemirror.on('change', (...args) => {
+    easyMDEOptions?.onChange?.(...args);
+  });
   easyMDE.codemirror.setOption('extraKeys', {
     'Cmd-Enter': codeMirrorQuickSubmit,
     'Ctrl-Enter': codeMirrorQuickSubmit,
@@ -31,6 +31,7 @@ export default function initContextPopups() {
     createTippy(this, {
       content: el,
       interactive: true,
+      interactiveBorder: 5,
       onShow: () => {
         el.firstChild.dispatchEvent(new CustomEvent('us-load-context-popup', {detail: {owner, repo, index}}));
       }
@@ -68,12 +68,18 @@ export function initRepoCommentForm() {
   }

   (async () => {
+    const $statusButton = $('#status-button');
     for (const textarea of $commentForm.find('textarea:not(.review-textarea, .no-easymde)')) {
       // Don't initialize EasyMDE for the dormant #edit-content-form
       if (textarea.closest('#edit-content-form')) {
         continue;
       }
-      const easyMDE = await createCommentEasyMDE(textarea);
+      const easyMDE = await createCommentEasyMDE(textarea, {
+        'onChange': () => {
+          const value = easyMDE?.value().trim();
+          $statusButton.text($statusButton.attr(value.length === 0 ? 'data-status' : 'data-status-and-comment'));
+        },
+      });
       initEasyMDEImagePaste(easyMDE, $commentForm.find('.dropzone'));
     }
   })();
@@ -180,7 +186,7 @@ export function initRepoCommentForm() {

     $(this).parent().find('.item').each(function () {
       $(this).removeClass('checked');
-      $(this).find('.octicon').addClass('invisible');
+      $(this).find('.octicon-check').addClass('invisible');
     });

     if (selector === 'select-reviewers-modify' || selector === 'select-assignees-modify') {
@@ -271,6 +271,8 @@
 }

 #repo-files-table {
+  table-layout: fixed;
+
   thead {
     th {
       padding-top: 8px;
@@ -2846,7 +2848,8 @@ tbody.commit-list {
   vertical-align: baseline;
 }

-.message-wrapper {
+.message-wrapper,
+.author-wrapper {
   overflow: hidden;
   text-overflow: ellipsis;
   max-width: calc(100% - 50px);
@@ -2854,6 +2857,10 @@ tbody.commit-list {
   vertical-align: middle;
 }

+.author-wrapper {
+  max-width: 180px;
+}
+
 // in the commit list, messages can wrap so we can use inline
 .commit-list .message-wrapper {
   display: inline;
@@ -2873,6 +2880,10 @@ tbody.commit-list {
     display: block;
     max-width: calc(100vw - 70px);
   }
+
+  .author-wrapper {
+    max-width: 80px;
+  }
 }

 @media @mediaMd {
@@ -2881,7 +2892,7 @@ tbody.commit-list {
   }

   th .message-wrapper {
-    max-width: 280px;
+    max-width: 120px;
   }
 }

@@ -2891,7 +2902,7 @@ tbody.commit-list {
   }

   th .message-wrapper {
-    max-width: 490px;
+    max-width: 350px;
   }
 }

@@ -2901,7 +2912,7 @@ tbody.commit-list {
   }

   th .message-wrapper {
-    max-width: 680px;
+    max-width: 525px;
   }
 }

@@ -3199,8 +3210,8 @@ td.blob-excerpt {
 }

 .sidebar-item-link {
-  display: inline-flex;
   align-items: center;
+  word-break: break-all;
 }

 .diff-file-box[data-folded="true"] .diff-file-body {
@@ -3218,17 +3229,9 @@ td.blob-excerpt {
 .ui.attached.header.diff-file-header {
   &.sticky-2nd-row {
     position: sticky;
-    top: 46px;
+    top: 77px;
     z-index: 7;

-    @media @mediaMd {
-      top: 77px;
-    }
-
-    @media @mediaSm {
-      top: 77px;
-    }
-
     @media (max-width: 480px) {
       position: static;
     }