mirror of
https://github.com/go-gitea/gitea.git
synced 2025-11-08 05:02:38 +09:00
Compare commits
25 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
08c6ea6728 | ||
|
|
67977f0b1c | ||
|
|
78fbcf35ad | ||
|
|
8f5b1d27d4 | ||
|
|
89c99a4dcb | ||
|
|
3c7e7a19dd | ||
|
|
8313b5d998 | ||
|
|
6ca73bf662 | ||
|
|
5e10def7f7 | ||
|
|
1b8efb6fc7 | ||
|
|
8f89e1e174 | ||
|
|
cbc595b9d9 | ||
|
|
cc5ccf44dc | ||
|
|
f91e35b8b7 | ||
|
|
f52ed422dc | ||
|
|
0266ee5de7 | ||
|
|
ac03e65cf4 | ||
|
|
f3e6672c09 | ||
|
|
136ec9ef81 | ||
|
|
79018ae726 | ||
|
|
e11176192a | ||
|
|
4e0269e890 | ||
|
|
04114c637a | ||
|
|
e5540bfa81 | ||
|
|
d22d6ca0d8 |
6
.github/workflows/pull-db-tests.yml
vendored
6
.github/workflows/pull-db-tests.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
minio:
|
||||
# as github actions doesn't support "entrypoint", we need to use a non-official image
|
||||
# that has a custom entrypoint set to "minio server /data"
|
||||
image: bitnami/minio:2023.8.31
|
||||
image: bitnamilegacy/minio:2023.8.31
|
||||
env:
|
||||
MINIO_ROOT_USER: 123456
|
||||
MINIO_ROOT_PASSWORD: 12345678
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
ports:
|
||||
- 6379:6379
|
||||
minio:
|
||||
image: bitnami/minio:2021.3.17
|
||||
image: bitnamilegacy/minio:2021.3.17
|
||||
env:
|
||||
MINIO_ACCESS_KEY: 123456
|
||||
MINIO_SECRET_KEY: 12345678
|
||||
@@ -155,7 +155,7 @@ jobs:
|
||||
services:
|
||||
mysql:
|
||||
# the bitnami mysql image has more options than the official one, it's easier to customize
|
||||
image: bitnami/mysql:8.0
|
||||
image: bitnamilegacy/mysql:8.0
|
||||
env:
|
||||
ALLOW_EMPTY_PASSWORD: true
|
||||
MYSQL_DATABASE: testgitea
|
||||
|
||||
52
CHANGELOG.md
52
CHANGELOG.md
@@ -4,7 +4,35 @@ This changelog goes through the changes that have been made in each release
|
||||
without substantial changes to our git log; to see the highlights of what has
|
||||
been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
|
||||
## [1.24.4](https://github.com/go-gitea/gitea/releases/tag/1.24.4) - 2025-08-03
|
||||
## [1.24.6](https://github.com/go-gitea/gitea/releases/tag/1.24.6) - 2025-09-10
|
||||
|
||||
* SECURITY
|
||||
* Upgrade xz to v0.5.15 (#35385)
|
||||
* BUGFIXES
|
||||
* Fix a compare page 404 bug when the pull request disabled (#35441) (#35453)
|
||||
* Fix bug when issue disabled, pull request number in the commit message cannot be redirected (#35420) (#35442)
|
||||
* Add author.name field to Swift Package Registry API response (#35410) (#35431)
|
||||
* Remove usernames when empty in discord webhook (#35412) (#35417)
|
||||
* Allow foreachref parser to grow its buffer (#35365) (#35376)
|
||||
* Allow deleting comment with content via API like web did (#35346) (#35354)
|
||||
* Fix atom/rss mixed error (#35345) (#35347)
|
||||
* Fix review request webhook bug (#35339)
|
||||
* Remove duplicate html IDs (#35210) (#35325)
|
||||
* Fix LFS range size header response (#35277) (#35293)
|
||||
* Fix GitHub release assets URL validation (#35287) (#35290)
|
||||
* Fix token lifetime, closes #35230 (#35271) (#35281)
|
||||
* Fix push commits comments when changing the pull request target branch (#35386) (#35443)
|
||||
|
||||
## [1.24.5](https://github.com/go-gitea/gitea/releases/tag/v1.24.5) - 2025-08-12
|
||||
|
||||
* BUGFIXES
|
||||
* Fix a bug where lfs gc never worked. (#35198) (#35255)
|
||||
* Reload issue when sending webhook to make num comments is right. (#35243) (#35248)
|
||||
* Fix bug when review pull request commits (#35192) (#35246)
|
||||
* MISC
|
||||
* Vertically center "Show Resolved" (#35211) (#35218)
|
||||
|
||||
## [1.24.4](https://github.com/go-gitea/gitea/releases/tag/v1.24.4) - 2025-08-03
|
||||
|
||||
* BUGFIXES
|
||||
* Fix various bugs (1.24) (#35186)
|
||||
@@ -16,7 +44,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* Don't use full-file highlight when there is a git diff textconv (#35114) (#35119)
|
||||
* Increase gap on latest commit (#35104) (#35113)
|
||||
|
||||
## [1.24.3](https://github.com/go-gitea/gitea/releases/tag/1.24.3) - 2025-07-15
|
||||
## [1.24.3](https://github.com/go-gitea/gitea/releases/tag/v1.24.3) - 2025-07-15
|
||||
|
||||
* BUGFIXES
|
||||
* Fix form property assignment edge case (#35073) (#35078)
|
||||
@@ -48,7 +76,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* Skip updating timestamp when sync branch (#34875)
|
||||
* Fix required contexts and commit status matching bug (#34815) (#34829)
|
||||
|
||||
## [1.24.2](https://github.com/go-gitea/gitea/releases/tag/1.24.2) - 2025-06-20
|
||||
## [1.24.2](https://github.com/go-gitea/gitea/releases/tag/v1.24.2) - 2025-06-20
|
||||
|
||||
* BUGFIXES
|
||||
* Fix container range bug (#34795) (#34796)
|
||||
@@ -56,7 +84,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* BUILD
|
||||
* Bump poetry feature to new url for dev container (#34787) (#34790)
|
||||
|
||||
## [1.24.1](https://github.com/go-gitea/gitea/releases/tag/1.24.1) - 2025-06-18
|
||||
## [1.24.1](https://github.com/go-gitea/gitea/releases/tag/v1.24.1) - 2025-06-18
|
||||
|
||||
* ENHANCEMENTS
|
||||
* Improve alignment of commit status icon on commit page (#34750) (#34757)
|
||||
@@ -76,7 +104,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* Hide href attribute of a tag if there is no target_url (#34556) (#34684)
|
||||
* Fix tag target (#34781) #34783
|
||||
|
||||
## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/1.24.0) - 2025-05-26
|
||||
## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/v1.24.0) - 2025-05-26
|
||||
|
||||
* BREAKING
|
||||
* Make Gitea always use its internal config, ignore `/etc/gitconfig` (#33076)
|
||||
@@ -446,7 +474,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* Bump x/net (#32896) (#32900)
|
||||
* Only activity tab needs heatmap data loading (#34652)
|
||||
|
||||
## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/1.23.8) - 2025-05-11
|
||||
## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/v1.23.8) - 2025-05-11
|
||||
|
||||
* SECURITY
|
||||
* Fix a bug when uploading file via lfs ssh command (#34408) (#34411)
|
||||
@@ -473,7 +501,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* Bump go version in go.mod (#34160)
|
||||
* remove hardcoded 'code' string in clone_panel.tmpl (#34153) (#34158)
|
||||
|
||||
## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/1.23.7) - 2025-04-07
|
||||
## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/v1.23.7) - 2025-04-07
|
||||
|
||||
* Enhancements
|
||||
* Add a config option to block "expensive" pages for anonymous users (#34024) (#34071)
|
||||
@@ -571,7 +599,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
|
||||
* BUGFIXES
|
||||
* Fix a bug caused by status webhook template #33512
|
||||
|
||||
## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/1.23.2) - 2025-02-04
|
||||
## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/v1.23.2) - 2025-02-04
|
||||
|
||||
* BREAKING
|
||||
* Add tests for webhook and fix some webhook bugs (#33396) (#33442)
|
||||
@@ -3101,7 +3129,7 @@ Key highlights of this release encompass significant changes categorized under `
|
||||
* Improve decryption failure message (#24573) (#24575)
|
||||
* Makefile: Use portable !, not GNUish -not, with find(1). (#24565) (#24572)
|
||||
|
||||
## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/1.19.3) - 2023-05-03
|
||||
## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/v1.19.3) - 2023-05-03
|
||||
|
||||
* SECURITY
|
||||
* Use golang 1.20.4 to fix CVE-2023-24539, CVE-2023-24540, and CVE-2023-29400
|
||||
@@ -3114,7 +3142,7 @@ Key highlights of this release encompass significant changes categorized under `
|
||||
* Fix incorrect CurrentUser check for docker rootless (#24435)
|
||||
* Getting the tag list does not require being signed in (#24413) (#24416)
|
||||
|
||||
## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/1.19.2) - 2023-04-26
|
||||
## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/v1.19.2) - 2023-04-26
|
||||
|
||||
* SECURITY
|
||||
* Require repo scope for PATs for private repos and basic authentication (#24362) (#24364)
|
||||
@@ -3613,7 +3641,7 @@ Key highlights of this release encompass significant changes categorized under `
|
||||
* Display attachments of review comment when comment content is blank (#23035) (#23046)
|
||||
* Return empty url for submodule tree entries (#23043) (#23048)
|
||||
|
||||
## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/1.18.4) - 2023-02-20
|
||||
## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/v1.18.4) - 2023-02-20
|
||||
|
||||
* SECURITY
|
||||
* Provide the ability to set password hash algorithm parameters (#22942) (#22943)
|
||||
@@ -4040,7 +4068,7 @@ Key highlights of this release encompass significant changes categorized under `
|
||||
* Fix the mode of custom dir to 0700 in docker-rootless (#20861) (#20867)
|
||||
* Fix UI mis-align for PR commit history (#20845) (#20859)
|
||||
|
||||
## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/1.17.1) - 2022-08-17
|
||||
## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/v1.17.1) - 2022-08-17
|
||||
|
||||
* SECURITY
|
||||
* Correctly escape within tribute.js (#20831) (#20832)
|
||||
|
||||
16
Makefile
16
Makefile
@@ -47,6 +47,17 @@ ifeq ($(HAS_GO), yes)
|
||||
CGO_CFLAGS ?= $(shell $(GO) env CGO_CFLAGS) $(CGO_EXTRA_CFLAGS)
|
||||
endif
|
||||
|
||||
CGO_ENABLED ?= 0
|
||||
ifneq (,$(findstring sqlite,$(TAGS))$(findstring pam,$(TAGS)))
|
||||
CGO_ENABLED = 1
|
||||
endif
|
||||
|
||||
STATIC ?=
|
||||
EXTLDFLAGS ?=
|
||||
ifneq ($(STATIC),)
|
||||
EXTLDFLAGS = -extldflags "-static"
|
||||
endif
|
||||
|
||||
ifeq ($(GOOS),windows)
|
||||
IS_WINDOWS := yes
|
||||
else ifeq ($(patsubst Windows%,Windows,$(OS)),Windows)
|
||||
@@ -740,7 +751,10 @@ security-check:
|
||||
go run $(GOVULNCHECK_PACKAGE) -show color ./...
|
||||
|
||||
$(EXECUTABLE): $(GO_SOURCES) $(TAGS_PREREQ)
|
||||
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@
|
||||
ifneq ($(and $(STATIC),$(findstring pam,$(TAGS))),)
|
||||
$(error pam support set via TAGS doesn't support static builds)
|
||||
endif
|
||||
CGO_ENABLED="$(CGO_ENABLED)" CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@
|
||||
|
||||
.PHONY: release
|
||||
release: frontend generate release-windows release-linux release-darwin release-freebsd release-copy release-compress vendor release-sources release-check
|
||||
|
||||
60
flake.nix
60
flake.nix
@@ -11,33 +11,45 @@
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
in
|
||||
{
|
||||
devShells.default = pkgs.mkShell {
|
||||
buildInputs = with pkgs; [
|
||||
# generic
|
||||
git
|
||||
git-lfs
|
||||
gnumake
|
||||
gnused
|
||||
gnutar
|
||||
gzip
|
||||
devShells.default =
|
||||
with pkgs;
|
||||
let
|
||||
# only bump toolchain versions here
|
||||
go = go_1_24;
|
||||
nodejs = nodejs_24;
|
||||
python3 = python312;
|
||||
in
|
||||
pkgs.mkShell {
|
||||
buildInputs = [
|
||||
# generic
|
||||
git
|
||||
git-lfs
|
||||
gnumake
|
||||
gnused
|
||||
gnutar
|
||||
gzip
|
||||
|
||||
# frontend
|
||||
nodejs_22
|
||||
# frontend
|
||||
nodejs
|
||||
|
||||
# linting
|
||||
python312
|
||||
poetry
|
||||
# linting
|
||||
python3
|
||||
poetry
|
||||
|
||||
# backend
|
||||
go_1_24
|
||||
gofumpt
|
||||
sqlite
|
||||
];
|
||||
shellHook = ''
|
||||
export GO="${pkgs.go_1_24}/bin/go"
|
||||
export GOROOT="${pkgs.go_1_24}/share/go"
|
||||
'';
|
||||
};
|
||||
# backend
|
||||
go
|
||||
glibc.static
|
||||
gofumpt
|
||||
sqlite
|
||||
];
|
||||
CFLAGS = "-I${glibc.static.dev}/include";
|
||||
LDFLAGS = "-L ${glibc.static}/lib";
|
||||
GO = "${go}/bin/go";
|
||||
GOROOT = "${go}/share/go";
|
||||
|
||||
TAGS = "sqlite sqlite_unlock_notify";
|
||||
STATIC = "true";
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
2
go.mod
2
go.mod
@@ -109,7 +109,7 @@ require (
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/syndtr/goleveldb v1.0.0
|
||||
github.com/tstranex/u2f v1.0.0
|
||||
github.com/ulikunitz/xz v0.5.12
|
||||
github.com/ulikunitz/xz v0.5.15
|
||||
github.com/urfave/cli/v2 v2.27.6
|
||||
github.com/wneessen/go-mail v0.6.2
|
||||
github.com/xeipuuv/gojsonschema v1.2.0
|
||||
|
||||
4
go.sum
4
go.sum
@@ -757,8 +757,8 @@ github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGB
|
||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
|
||||
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
|
||||
github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
|
||||
github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
||||
github.com/urfave/cli/v2 v2.27.6 h1:VdRdS98FNhKZ8/Az8B7MTyGQmpIr36O1EHybx/LaZ4g=
|
||||
|
||||
@@ -85,8 +85,8 @@ func TestRepository_ChangeCollaborationAccessMode(t *testing.T) {
|
||||
|
||||
assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, unittest.NonexistentID, perm.AccessModeAdmin))
|
||||
|
||||
// Disvard invalid input.
|
||||
assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessMode(unittest.NonexistentID)))
|
||||
// Discard invalid input.
|
||||
assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessMode(-1)))
|
||||
|
||||
unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
|
||||
}
|
||||
|
||||
@@ -30,6 +30,10 @@ type Parser struct {
|
||||
func NewParser(r io.Reader, format Format) *Parser {
|
||||
scanner := bufio.NewScanner(r)
|
||||
|
||||
// default MaxScanTokenSize = 64 kiB may be too small for some references,
|
||||
// so allow the buffer to grow up to 4x if needed
|
||||
scanner.Buffer(nil, 4*bufio.MaxScanTokenSize)
|
||||
|
||||
// in addition to the reference delimiter we specified in the --format,
|
||||
// `git for-each-ref` will always add a newline after every reference.
|
||||
refDelim := make([]byte, 0, len(format.refDelim)+1)
|
||||
@@ -70,6 +74,9 @@ func NewParser(r io.Reader, format Format) *Parser {
|
||||
// { "objecttype": "tag", "refname:short": "v1.16.4", "object": "f460b7543ed500e49c133c2cd85c8c55ee9dbe27" }
|
||||
func (p *Parser) Next() map[string]string {
|
||||
if !p.scanner.Scan() {
|
||||
if err := p.scanner.Err(); err != nil {
|
||||
p.err = err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
fields, err := p.parseRef(p.scanner.Text())
|
||||
|
||||
@@ -82,6 +82,7 @@ type ProgrammingLanguage struct {
|
||||
// https://schema.org/Person
|
||||
type Person struct {
|
||||
Type string `json:"@type,omitempty"`
|
||||
Name string `json:"name,omitempty"` // inherited from https://schema.org/Thing
|
||||
GivenName string `json:"givenName,omitempty"`
|
||||
MiddleName string `json:"middleName,omitempty"`
|
||||
FamilyName string `json:"familyName,omitempty"`
|
||||
@@ -184,11 +185,17 @@ func ParsePackage(sr io.ReaderAt, size int64, mr io.Reader) (*Package, error) {
|
||||
p.Metadata.Description = ssc.Description
|
||||
p.Metadata.Keywords = ssc.Keywords
|
||||
p.Metadata.License = ssc.License
|
||||
p.Metadata.Author = Person{
|
||||
author := Person{
|
||||
Name: ssc.Author.Name,
|
||||
GivenName: ssc.Author.GivenName,
|
||||
MiddleName: ssc.Author.MiddleName,
|
||||
FamilyName: ssc.Author.FamilyName,
|
||||
}
|
||||
// If Name is not provided, generate it from individual name components
|
||||
if author.Name == "" {
|
||||
author.Name = author.String()
|
||||
}
|
||||
p.Metadata.Author = author
|
||||
|
||||
p.Metadata.RepositoryURL = ssc.CodeRepository
|
||||
if !validation.IsValidURL(p.Metadata.RepositoryURL) {
|
||||
|
||||
@@ -97,10 +97,49 @@ func TestParsePackage(t *testing.T) {
|
||||
assert.Equal(t, packageDescription, p.Metadata.Description)
|
||||
assert.ElementsMatch(t, []string{"swift", "package"}, p.Metadata.Keywords)
|
||||
assert.Equal(t, packageLicense, p.Metadata.License)
|
||||
assert.Equal(t, packageAuthor, p.Metadata.Author.Name)
|
||||
assert.Equal(t, packageAuthor, p.Metadata.Author.GivenName)
|
||||
assert.Equal(t, packageRepositoryURL, p.Metadata.RepositoryURL)
|
||||
assert.ElementsMatch(t, []string{packageRepositoryURL}, p.RepositoryURLs)
|
||||
})
|
||||
|
||||
t.Run("WithExplicitNameField", func(t *testing.T) {
|
||||
data := createArchive(map[string][]byte{
|
||||
"Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"),
|
||||
})
|
||||
|
||||
authorName := "John Doe"
|
||||
p, err := ParsePackage(
|
||||
data,
|
||||
data.Size(),
|
||||
strings.NewReader(`{"name":"`+packageName+`","version":"`+packageVersion+`","description":"`+packageDescription+`","author":{"name":"`+authorName+`","givenName":"John","familyName":"Doe"}}`),
|
||||
)
|
||||
assert.NotNil(t, p)
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, authorName, p.Metadata.Author.Name)
|
||||
assert.Equal(t, "John", p.Metadata.Author.GivenName)
|
||||
assert.Equal(t, "Doe", p.Metadata.Author.FamilyName)
|
||||
})
|
||||
|
||||
t.Run("NameFieldGeneration", func(t *testing.T) {
|
||||
data := createArchive(map[string][]byte{
|
||||
"Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"),
|
||||
})
|
||||
|
||||
// Test with only individual name components - Name should be auto-generated
|
||||
p, err := ParsePackage(
|
||||
data,
|
||||
data.Size(),
|
||||
strings.NewReader(`{"author":{"givenName":"John","middleName":"Q","familyName":"Doe"}}`),
|
||||
)
|
||||
assert.NotNil(t, p)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "John Q Doe", p.Metadata.Author.Name)
|
||||
assert.Equal(t, "John", p.Metadata.Author.GivenName)
|
||||
assert.Equal(t, "Q", p.Metadata.Author.MiddleName)
|
||||
assert.Equal(t, "Doe", p.Metadata.Author.FamilyName)
|
||||
})
|
||||
}
|
||||
|
||||
func TestTrimmedVersionString(t *testing.T) {
|
||||
@@ -142,3 +181,43 @@ func TestTrimmedVersionString(t *testing.T) {
|
||||
assert.Equal(t, c.Expected, TrimmedVersionString(c.Version))
|
||||
}
|
||||
}
|
||||
|
||||
func TestPersonNameString(t *testing.T) {
|
||||
cases := []struct {
|
||||
Name string
|
||||
Person Person
|
||||
Expected string
|
||||
}{
|
||||
{
|
||||
Name: "GivenNameOnly",
|
||||
Person: Person{GivenName: "John"},
|
||||
Expected: "John",
|
||||
},
|
||||
{
|
||||
Name: "GivenAndFamily",
|
||||
Person: Person{GivenName: "John", FamilyName: "Doe"},
|
||||
Expected: "John Doe",
|
||||
},
|
||||
{
|
||||
Name: "FullName",
|
||||
Person: Person{GivenName: "John", MiddleName: "Q", FamilyName: "Doe"},
|
||||
Expected: "John Q Doe",
|
||||
},
|
||||
{
|
||||
Name: "MiddleAndFamily",
|
||||
Person: Person{MiddleName: "Q", FamilyName: "Doe"},
|
||||
Expected: "Q Doe",
|
||||
},
|
||||
{
|
||||
Name: "Empty",
|
||||
Person: Person{},
|
||||
Expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.Name, func(t *testing.T) {
|
||||
assert.Equal(t, c.Expected, c.Person.String())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ var (
|
||||
ZombieTaskTimeout time.Duration `ini:"ZOMBIE_TASK_TIMEOUT"`
|
||||
EndlessTaskTimeout time.Duration `ini:"ENDLESS_TASK_TIMEOUT"`
|
||||
AbandonedJobTimeout time.Duration `ini:"ABANDONED_JOB_TIMEOUT"`
|
||||
SkipWorkflowStrings []string `ìni:"SKIP_WORKFLOW_STRINGS"`
|
||||
SkipWorkflowStrings []string `ini:"SKIP_WORKFLOW_STRINGS"`
|
||||
}{
|
||||
Enabled: true,
|
||||
DefaultActionsURL: defaultActionsURLGitHub,
|
||||
|
||||
@@ -41,3 +41,56 @@ EXTEND = true
|
||||
assert.Equal(t, "white rabbit", extended.Second)
|
||||
assert.True(t, extended.Extend)
|
||||
}
|
||||
|
||||
// Test_getCronSettings2 tests that getCronSettings can not handle two levels of embedding
|
||||
func Test_getCronSettings2(t *testing.T) {
|
||||
type BaseStruct struct {
|
||||
Enabled bool
|
||||
RunAtStart bool
|
||||
Schedule string
|
||||
}
|
||||
|
||||
type Extended struct {
|
||||
BaseStruct
|
||||
Extend bool
|
||||
}
|
||||
type Extended2 struct {
|
||||
Extended
|
||||
Third string
|
||||
}
|
||||
|
||||
iniStr := `
|
||||
[cron.test]
|
||||
ENABLED = TRUE
|
||||
RUN_AT_START = TRUE
|
||||
SCHEDULE = @every 1h
|
||||
EXTEND = true
|
||||
THIRD = white rabbit
|
||||
`
|
||||
cfg, err := NewConfigProviderFromData(iniStr)
|
||||
assert.NoError(t, err)
|
||||
|
||||
extended := &Extended2{
|
||||
Extended: Extended{
|
||||
BaseStruct: BaseStruct{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 72h",
|
||||
},
|
||||
Extend: false,
|
||||
},
|
||||
Third: "black rabbit",
|
||||
}
|
||||
|
||||
_, err = getCronSettings(cfg, "test", extended)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// This confirms the first level of embedding works
|
||||
assert.Equal(t, "white rabbit", extended.Third)
|
||||
assert.True(t, extended.Extend)
|
||||
|
||||
// This confirms 2 levels of embedding doesn't work
|
||||
assert.False(t, extended.Enabled)
|
||||
assert.False(t, extended.RunAtStart)
|
||||
assert.Equal(t, "@every 72h", extended.Schedule)
|
||||
}
|
||||
|
||||
@@ -13,6 +13,6 @@ func TestCountFmt(t *testing.T) {
|
||||
assert.Equal(t, "125", countFmt(125))
|
||||
assert.Equal(t, "1.3k", countFmt(int64(1317)))
|
||||
assert.Equal(t, "21.3M", countFmt(21317675))
|
||||
assert.Equal(t, "45.7G", countFmt(45721317675))
|
||||
assert.Equal(t, "45.7G", countFmt(int64(45721317675)))
|
||||
assert.Empty(t, countFmt("test"))
|
||||
}
|
||||
|
||||
36
package-lock.json
generated
36
package-lock.json
generated
@@ -35,7 +35,7 @@
|
||||
"jquery": "3.7.1",
|
||||
"katex": "0.16.22",
|
||||
"license-checker-webpack-plugin": "0.2.1",
|
||||
"mermaid": "11.6.0",
|
||||
"mermaid": "11.10.0",
|
||||
"mini-css-extract-plugin": "2.9.2",
|
||||
"minimatch": "10.0.1",
|
||||
"monaco-editor": "0.52.2",
|
||||
@@ -1540,9 +1540,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@mermaid-js/parser": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.4.0.tgz",
|
||||
"integrity": "sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==",
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.2.tgz",
|
||||
"integrity": "sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"langium": "3.3.1"
|
||||
@@ -6154,9 +6154,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/dompurify": {
|
||||
"version": "3.2.4",
|
||||
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.4.tgz",
|
||||
"integrity": "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==",
|
||||
"version": "3.2.6",
|
||||
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.6.tgz",
|
||||
"integrity": "sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==",
|
||||
"license": "(MPL-2.0 OR Apache-2.0)",
|
||||
"optionalDependencies": {
|
||||
"@types/trusted-types": "^2.0.7"
|
||||
@@ -9249,14 +9249,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/mermaid": {
|
||||
"version": "11.6.0",
|
||||
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.6.0.tgz",
|
||||
"integrity": "sha512-PE8hGUy1LDlWIHWBP05SFdqUHGmRcCcK4IzpOKPE35eOw+G9zZgcnMpyunJVUEOgb//KBORPjysKndw8bFLuRg==",
|
||||
"version": "11.10.0",
|
||||
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.10.0.tgz",
|
||||
"integrity": "sha512-oQsFzPBy9xlpnGxUqLbVY8pvknLlsNIJ0NWwi8SUJjhbP1IT0E0o1lfhU4iYV3ubpy+xkzkaOyDUQMn06vQElQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@braintree/sanitize-url": "^7.0.4",
|
||||
"@iconify/utils": "^2.1.33",
|
||||
"@mermaid-js/parser": "^0.4.0",
|
||||
"@mermaid-js/parser": "^0.6.2",
|
||||
"@types/d3": "^7.4.3",
|
||||
"cytoscape": "^3.29.3",
|
||||
"cytoscape-cose-bilkent": "^4.1.0",
|
||||
@@ -9265,11 +9265,11 @@
|
||||
"d3-sankey": "^0.12.3",
|
||||
"dagre-d3-es": "7.0.11",
|
||||
"dayjs": "^1.11.13",
|
||||
"dompurify": "^3.2.4",
|
||||
"katex": "^0.16.9",
|
||||
"dompurify": "^3.2.5",
|
||||
"katex": "^0.16.22",
|
||||
"khroma": "^2.1.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"marked": "^15.0.7",
|
||||
"marked": "^16.0.0",
|
||||
"roughjs": "^4.6.6",
|
||||
"stylis": "^4.3.6",
|
||||
"ts-dedent": "^2.2.0",
|
||||
@@ -9277,15 +9277,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/mermaid/node_modules/marked": {
|
||||
"version": "15.0.7",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-15.0.7.tgz",
|
||||
"integrity": "sha512-dgLIeKGLx5FwziAnsk4ONoGwHwGPJzselimvlVskE9XLN4Orv9u2VA3GWw/lYUqjfA0rUT/6fqKwfZJapP9BEg==",
|
||||
"version": "16.2.0",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-16.2.0.tgz",
|
||||
"integrity": "sha512-LbbTuye+0dWRz2TS9KJ7wsnD4KAtpj0MVkWc90XvBa6AslXsT0hTBVH5k32pcSyHH1fst9XEFJunXHktVy0zlg==",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"marked": "bin/marked.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18"
|
||||
"node": ">= 20"
|
||||
}
|
||||
},
|
||||
"node_modules/micromark": {
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
"jquery": "3.7.1",
|
||||
"katex": "0.16.22",
|
||||
"license-checker-webpack-plugin": "0.2.1",
|
||||
"mermaid": "11.6.0",
|
||||
"mermaid": "11.10.0",
|
||||
"mini-css-extract-plugin": "2.9.2",
|
||||
"minimatch": "10.0.1",
|
||||
"monaco-editor": "0.52.2",
|
||||
|
||||
@@ -230,6 +230,7 @@ func PackageVersionMetadata(ctx *context.Context) {
|
||||
},
|
||||
Author: swift_module.Person{
|
||||
Type: "Person",
|
||||
Name: metadata.Author.String(),
|
||||
GivenName: metadata.Author.GivenName,
|
||||
MiddleName: metadata.Author.MiddleName,
|
||||
FamilyName: metadata.Author.FamilyName,
|
||||
|
||||
@@ -721,8 +721,8 @@ func deleteIssueComment(ctx *context.APIContext) {
|
||||
if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
|
||||
ctx.Status(http.StatusForbidden)
|
||||
return
|
||||
} else if comment.Type != issues_model.CommentTypeComment {
|
||||
ctx.Status(http.StatusNoContent)
|
||||
} else if !comment.Type.HasContentSupport() {
|
||||
ctx.Status(http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -8,11 +8,18 @@ import (
|
||||
)
|
||||
|
||||
// RenderBranchFeed render format for branch or file
|
||||
func RenderBranchFeed(ctx *context.Context) {
|
||||
_, showFeedType := GetFeedType(ctx.PathParam("reponame"), ctx.Req)
|
||||
func RenderBranchFeed(ctx *context.Context, feedType string) {
|
||||
if ctx.Repo.TreePath == "" {
|
||||
ShowBranchFeed(ctx, ctx.Repo.Repository, showFeedType)
|
||||
ShowBranchFeed(ctx, ctx.Repo.Repository, feedType)
|
||||
} else {
|
||||
ShowFileFeed(ctx, ctx.Repo.Repository, showFeedType)
|
||||
ShowFileFeed(ctx, ctx.Repo.Repository, feedType)
|
||||
}
|
||||
}
|
||||
|
||||
func RenderBranchFeedRSS(ctx *context.Context) {
|
||||
RenderBranchFeed(ctx, "rss")
|
||||
}
|
||||
|
||||
func RenderBranchFeedAtom(ctx *context.Context) {
|
||||
RenderBranchFeed(ctx, "atom")
|
||||
}
|
||||
|
||||
@@ -523,7 +523,7 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
|
||||
|
||||
// Treat as pull request if both references are branches
|
||||
if ctx.Data["PageIsComparePull"] == nil {
|
||||
ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch
|
||||
ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch && permBase.CanReadIssuesOrPulls(true)
|
||||
}
|
||||
|
||||
if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) {
|
||||
@@ -735,6 +735,7 @@ func CompareDiff(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Data["PageIsViewCode"] = true
|
||||
ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
|
||||
ctx.Data["DirectComparison"] = ci.DirectComparison
|
||||
ctx.Data["OtherCompareSeparator"] = ".."
|
||||
|
||||
@@ -643,8 +643,17 @@ func ViewPullCommits(ctx *context.Context) {
|
||||
ctx.HTML(http.StatusOK, tplPullCommits)
|
||||
}
|
||||
|
||||
func indexCommit(commits []*git.Commit, commitID string) *git.Commit {
|
||||
for i := range commits {
|
||||
if commits[i].ID.String() == commitID {
|
||||
return commits[i]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ViewPullFiles render pull request changed files list page
|
||||
func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommit string, willShowSpecifiedCommitRange, willShowSpecifiedCommit bool) {
|
||||
func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) {
|
||||
ctx.Data["PageIsPullList"] = true
|
||||
ctx.Data["PageIsPullFiles"] = true
|
||||
|
||||
@@ -654,11 +663,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
pull := issue.PullRequest
|
||||
|
||||
var (
|
||||
startCommitID string
|
||||
endCommitID string
|
||||
gitRepo = ctx.Repo.GitRepo
|
||||
)
|
||||
gitRepo := ctx.Repo.GitRepo
|
||||
|
||||
prInfo := preparePullViewPullInfo(ctx, issue)
|
||||
if ctx.Written() {
|
||||
@@ -668,77 +673,68 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
return
|
||||
}
|
||||
|
||||
// Validate the given commit sha to show (if any passed)
|
||||
if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
||||
foundStartCommit := len(specifiedStartCommit) == 0
|
||||
foundEndCommit := len(specifiedEndCommit) == 0
|
||||
|
||||
if !(foundStartCommit && foundEndCommit) {
|
||||
for _, commit := range prInfo.Commits {
|
||||
if commit.ID.String() == specifiedStartCommit {
|
||||
foundStartCommit = true
|
||||
}
|
||||
if commit.ID.String() == specifiedEndCommit {
|
||||
foundEndCommit = true
|
||||
}
|
||||
|
||||
if foundStartCommit && foundEndCommit {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !(foundStartCommit && foundEndCommit) {
|
||||
ctx.NotFound(nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
|
||||
headCommitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
|
||||
if err != nil {
|
||||
ctx.ServerError("GetRefCommitID", err)
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Data["IsShowingOnlySingleCommit"] = willShowSpecifiedCommit
|
||||
isSingleCommit := beforeCommitID == "" && afterCommitID != ""
|
||||
ctx.Data["IsShowingOnlySingleCommit"] = isSingleCommit
|
||||
isShowAllCommits := (beforeCommitID == "" || beforeCommitID == prInfo.MergeBase) && (afterCommitID == "" || afterCommitID == headCommitID)
|
||||
ctx.Data["IsShowingAllCommits"] = isShowAllCommits
|
||||
|
||||
if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
||||
if len(specifiedEndCommit) > 0 {
|
||||
endCommitID = specifiedEndCommit
|
||||
if afterCommitID == "" || afterCommitID == headCommitID {
|
||||
afterCommitID = headCommitID
|
||||
}
|
||||
afterCommit := indexCommit(prInfo.Commits, afterCommitID)
|
||||
if afterCommit == nil {
|
||||
ctx.HTTPError(http.StatusBadRequest, "after commit not found in PR commits")
|
||||
return
|
||||
}
|
||||
|
||||
var beforeCommit *git.Commit
|
||||
if !isSingleCommit {
|
||||
if beforeCommitID == "" || beforeCommitID == prInfo.MergeBase {
|
||||
beforeCommitID = prInfo.MergeBase
|
||||
// mergebase commit is not in the list of the pull request commits
|
||||
beforeCommit, err = gitRepo.GetCommit(beforeCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
endCommitID = headCommitID
|
||||
beforeCommit = indexCommit(prInfo.Commits, beforeCommitID)
|
||||
if beforeCommit == nil {
|
||||
ctx.HTTPError(http.StatusBadRequest, "before commit not found in PR commits")
|
||||
return
|
||||
}
|
||||
}
|
||||
if len(specifiedStartCommit) > 0 {
|
||||
startCommitID = specifiedStartCommit
|
||||
} else {
|
||||
startCommitID = prInfo.MergeBase
|
||||
}
|
||||
ctx.Data["IsShowingAllCommits"] = false
|
||||
} else {
|
||||
endCommitID = headCommitID
|
||||
startCommitID = prInfo.MergeBase
|
||||
ctx.Data["IsShowingAllCommits"] = true
|
||||
beforeCommit, err = afterCommit.Parent(0)
|
||||
if err != nil {
|
||||
ctx.ServerError("Parent", err)
|
||||
return
|
||||
}
|
||||
beforeCommitID = beforeCommit.ID.String()
|
||||
}
|
||||
|
||||
ctx.Data["Username"] = ctx.Repo.Owner.Name
|
||||
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
|
||||
ctx.Data["AfterCommitID"] = endCommitID
|
||||
ctx.Data["BeforeCommitID"] = startCommitID
|
||||
|
||||
fileOnly := ctx.FormBool("file-only")
|
||||
ctx.Data["MergeBase"] = prInfo.MergeBase
|
||||
ctx.Data["AfterCommitID"] = afterCommitID
|
||||
ctx.Data["BeforeCommitID"] = beforeCommitID
|
||||
|
||||
maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
|
||||
files := ctx.FormStrings("files")
|
||||
fileOnly := ctx.FormBool("file-only")
|
||||
if fileOnly && (len(files) == 2 || len(files) == 1) {
|
||||
maxLines, maxFiles = -1, -1
|
||||
}
|
||||
|
||||
diffOptions := &gitdiff.DiffOptions{
|
||||
AfterCommitID: endCommitID,
|
||||
BeforeCommitID: beforeCommitID,
|
||||
AfterCommitID: afterCommitID,
|
||||
SkipTo: ctx.FormString("skip-to"),
|
||||
MaxLines: maxLines,
|
||||
MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
|
||||
@@ -746,10 +742,6 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
|
||||
}
|
||||
|
||||
if !willShowSpecifiedCommit {
|
||||
diffOptions.BeforeCommitID = startCommitID
|
||||
}
|
||||
|
||||
diff, err := gitdiff.GetDiffForRender(ctx, ctx.Repo.RepoLink, gitRepo, diffOptions, files...)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetDiff", err)
|
||||
@@ -761,7 +753,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
// as the viewed information is designed to be loaded only on latest PR
|
||||
// diff and if you're signed in.
|
||||
var reviewState *pull_model.ReviewState
|
||||
if ctx.IsSigned && !willShowSpecifiedCommit && !willShowSpecifiedCommitRange {
|
||||
if ctx.IsSigned && isShowAllCommits {
|
||||
reviewState, err = gitdiff.SyncUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diff, diffOptions)
|
||||
if err != nil {
|
||||
ctx.ServerError("SyncUserSpecificDiff", err)
|
||||
@@ -769,7 +761,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
}
|
||||
|
||||
diffShortStat, err := gitdiff.GetDiffShortStat(ctx.Repo.GitRepo, startCommitID, endCommitID)
|
||||
diffShortStat, err := gitdiff.GetDiffShortStat(ctx.Repo.GitRepo, beforeCommitID, afterCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetDiffShortStat", err)
|
||||
return
|
||||
@@ -816,7 +808,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
|
||||
if !fileOnly {
|
||||
// note: use mergeBase is set to false because we already have the merge base from the pull request info
|
||||
diffTree, err := gitdiff.GetDiffTree(ctx, gitRepo, false, startCommitID, endCommitID)
|
||||
diffTree, err := gitdiff.GetDiffTree(ctx, gitRepo, false, beforeCommitID, afterCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetDiffTree", err)
|
||||
return
|
||||
@@ -836,17 +828,6 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
ctx.Data["Diff"] = diff
|
||||
ctx.Data["DiffNotAvailable"] = diffShortStat.NumFiles == 0
|
||||
|
||||
baseCommit, err := ctx.Repo.GitRepo.GetCommit(startCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
commit, err := gitRepo.GetCommit(endCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
|
||||
if ctx.IsSigned && ctx.Doer != nil {
|
||||
if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
|
||||
ctx.ServerError("CanMarkConversation", err)
|
||||
@@ -854,7 +835,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
}
|
||||
|
||||
setCompareContext(ctx, baseCommit, commit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
|
||||
setCompareContext(ctx, beforeCommit, afterCommit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
|
||||
|
||||
assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
|
||||
if err != nil {
|
||||
@@ -901,7 +882,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
ctx.Data["CanBlockUser"] = func(blocker, blockee *user_model.User) bool {
|
||||
return user_service.CanBlockUser(ctx, ctx.Doer, blocker, blockee)
|
||||
}
|
||||
if !willShowSpecifiedCommit && !willShowSpecifiedCommitRange && pull.Flow == issues_model.PullRequestFlowGithub {
|
||||
if isShowAllCommits && pull.Flow == issues_model.PullRequestFlowGithub {
|
||||
if err := pull.LoadHeadRepo(ctx); err != nil {
|
||||
ctx.ServerError("LoadHeadRepo", err)
|
||||
return
|
||||
@@ -930,19 +911,17 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
|
||||
func ViewPullFilesForSingleCommit(ctx *context.Context) {
|
||||
viewPullFiles(ctx, "", ctx.PathParam("sha"), true, true)
|
||||
// it doesn't support showing files from mergebase to the special commit
|
||||
// otherwise it will be ambiguous
|
||||
viewPullFiles(ctx, "", ctx.PathParam("sha"))
|
||||
}
|
||||
|
||||
func ViewPullFilesForRange(ctx *context.Context) {
|
||||
viewPullFiles(ctx, ctx.PathParam("shaFrom"), ctx.PathParam("shaTo"), true, false)
|
||||
}
|
||||
|
||||
func ViewPullFilesStartingFromCommit(ctx *context.Context) {
|
||||
viewPullFiles(ctx, "", ctx.PathParam("sha"), true, false)
|
||||
viewPullFiles(ctx, ctx.PathParam("shaFrom"), ctx.PathParam("shaTo"))
|
||||
}
|
||||
|
||||
func ViewPullFilesForAllCommitsOfPr(ctx *context.Context) {
|
||||
viewPullFiles(ctx, "", "", false, false)
|
||||
viewPullFiles(ctx, "", "")
|
||||
}
|
||||
|
||||
// UpdatePullRequest merge PR's baseBranch into headBranch
|
||||
|
||||
@@ -1217,10 +1217,11 @@ func registerWebRoutes(m *web.Router) {
|
||||
// end "/{username}/{reponame}": view milestone, label, issue, pull, etc
|
||||
|
||||
m.Group("/{username}/{reponame}/{type:issues}", func() {
|
||||
// these handlers also check unit permissions internally
|
||||
m.Get("", repo.Issues)
|
||||
m.Get("/{index}", repo.ViewIssue)
|
||||
}, optSignIn, context.RepoAssignment, context.RequireUnitReader(unit.TypeIssues, unit.TypeExternalTracker))
|
||||
// end "/{username}/{reponame}": issue/pull list, issue/pull view, external tracker
|
||||
m.Get("/{index}", repo.ViewIssue) // also do pull-request redirection (".../issues/{PR-number}" -> ".../pulls/{PR-number}")
|
||||
}, optSignIn, context.RepoAssignment, context.RequireUnitReader(unit.TypeIssues, unit.TypePullRequests, unit.TypeExternalTracker))
|
||||
// end "/{username}/{reponame}": issue list, issue view (pull-request redirection), external tracker
|
||||
|
||||
m.Group("/{username}/{reponame}", func() { // edit issues, pulls, labels, milestones, etc
|
||||
m.Group("/issues", func() {
|
||||
@@ -1509,7 +1510,7 @@ func registerWebRoutes(m *web.Router) {
|
||||
m.Group("/commits", func() {
|
||||
m.Get("", repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewPullCommits)
|
||||
m.Get("/list", repo.GetPullCommits)
|
||||
m.Get("/{sha:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
|
||||
m.Get("/{sha:[a-f0-9]{7,64}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
|
||||
})
|
||||
m.Post("/merge", context.RepoMustNotBeArchived(), web.Bind(forms.MergePullRequestForm{}), repo.MergePullRequest)
|
||||
m.Post("/cancel_auto_merge", context.RepoMustNotBeArchived(), repo.CancelAutoMergePullRequest)
|
||||
@@ -1518,8 +1519,7 @@ func registerWebRoutes(m *web.Router) {
|
||||
m.Post("/cleanup", context.RepoMustNotBeArchived(), repo.CleanUpPullRequest)
|
||||
m.Group("/files", func() {
|
||||
m.Get("", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForAllCommitsOfPr)
|
||||
m.Get("/{sha:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesStartingFromCommit)
|
||||
m.Get("/{shaFrom:[a-f0-9]{7,40}}..{shaTo:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
|
||||
m.Get("/{shaFrom:[a-f0-9]{7,64}}..{shaTo:[a-f0-9]{7,64}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
|
||||
m.Group("/reviews", func() {
|
||||
m.Get("/new_comment", repo.RenderNewCodeCommentForm)
|
||||
m.Post("/comments", web.Bind(forms.CodeCommentForm{}), repo.SetShowOutdatedComments, repo.CreateCodeComment)
|
||||
@@ -1593,8 +1593,8 @@ func registerWebRoutes(m *web.Router) {
|
||||
m.Get("/cherry-pick/{sha:([a-f0-9]{7,64})$}", repo.SetEditorconfigIfExists, context.RepoRefByDefaultBranch(), repo.CherryPick)
|
||||
}, repo.MustBeNotEmpty)
|
||||
|
||||
m.Get("/rss/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeed)
|
||||
m.Get("/atom/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeed)
|
||||
m.Get("/rss/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeedRSS)
|
||||
m.Get("/atom/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeedAtom)
|
||||
|
||||
m.Group("/src", func() {
|
||||
m.Get("", func(ctx *context.Context) { ctx.Redirect(ctx.Repo.RepoLink) }) // there is no "{owner}/{repo}/src" page, so redirect to "{owner}/{repo}" to avoid 404
|
||||
|
||||
@@ -53,7 +53,7 @@ func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {
|
||||
|
||||
claims := actionsClaims{
|
||||
RegisteredClaims: jwt.RegisteredClaims{
|
||||
ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
|
||||
ExpiresAt: jwt.NewNumericDate(now.Add(1*time.Hour + setting.Actions.EndlessTaskTimeout)),
|
||||
NotBefore: jwt.NewNumericDate(now),
|
||||
},
|
||||
Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
|
||||
|
||||
@@ -260,11 +260,6 @@ func (n *actionsNotifier) CreateIssueComment(ctx context.Context, doer *user_mod
|
||||
func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
|
||||
ctx = withMethod(ctx, "UpdateComment")
|
||||
|
||||
if err := c.LoadIssue(ctx); err != nil {
|
||||
log.Error("LoadIssue: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if c.Issue.IsPull {
|
||||
notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventPullRequestComment, api.HookIssueCommentEdited)
|
||||
return
|
||||
@@ -275,11 +270,6 @@ func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.Us
|
||||
func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
|
||||
ctx = withMethod(ctx, "DeleteComment")
|
||||
|
||||
if err := comment.LoadIssue(ctx); err != nil {
|
||||
log.Error("LoadIssue: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if comment.Issue.IsPull {
|
||||
notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentDeleted)
|
||||
return
|
||||
@@ -288,6 +278,7 @@ func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.Us
|
||||
}
|
||||
|
||||
func notifyIssueCommentChange(ctx context.Context, doer *user_model.User, comment *issues_model.Comment, oldContent string, event webhook_module.HookEventType, action api.HookIssueCommentAction) {
|
||||
comment.Issue = nil // force issue to be loaded
|
||||
if err := comment.LoadIssue(ctx); err != nil {
|
||||
log.Error("LoadIssue: %v", err)
|
||||
return
|
||||
|
||||
@@ -250,7 +250,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load pull issue. Error: %w", err)
|
||||
}
|
||||
comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i])
|
||||
comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i], forcePush.Value())
|
||||
if err == nil && comment != nil {
|
||||
notify_service.PullRequestPushCommits(ctx, pusher, pr, comment)
|
||||
}
|
||||
|
||||
@@ -171,34 +171,35 @@ func registerDeleteOldSystemNotices() {
|
||||
})
|
||||
}
|
||||
|
||||
type GCLFSConfig struct {
|
||||
BaseConfig
|
||||
OlderThan time.Duration
|
||||
LastUpdatedMoreThanAgo time.Duration
|
||||
NumberToCheckPerRepo int64
|
||||
ProportionToCheckPerRepo float64
|
||||
}
|
||||
|
||||
func registerGCLFS() {
|
||||
if !setting.LFS.StartServer {
|
||||
return
|
||||
}
|
||||
type GCLFSConfig struct {
|
||||
OlderThanConfig
|
||||
LastUpdatedMoreThanAgo time.Duration
|
||||
NumberToCheckPerRepo int64
|
||||
ProportionToCheckPerRepo float64
|
||||
}
|
||||
|
||||
RegisterTaskFatal("gc_lfs", &GCLFSConfig{
|
||||
OlderThanConfig: OlderThanConfig{
|
||||
BaseConfig: BaseConfig{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 24h",
|
||||
},
|
||||
// Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
|
||||
// and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
|
||||
// an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
|
||||
// changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
|
||||
// objects.
|
||||
//
|
||||
// It is likely that a week is potentially excessive but it should definitely be enough that any
|
||||
// unassociated LFS object is genuinely unassociated.
|
||||
OlderThan: 24 * time.Hour * 7,
|
||||
BaseConfig: BaseConfig{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 24h",
|
||||
},
|
||||
// Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
|
||||
// and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
|
||||
// an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
|
||||
// changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
|
||||
// objects.
|
||||
//
|
||||
// It is likely that a week is potentially excessive but it should definitely be enough that any
|
||||
// unassociated LFS object is genuinely unassociated.
|
||||
OlderThan: 24 * time.Hour * 7,
|
||||
|
||||
// Only GC things that haven't been looked at in the past 3 days
|
||||
LastUpdatedMoreThanAgo: 24 * time.Hour * 3,
|
||||
NumberToCheckPerRepo: 100,
|
||||
|
||||
51
services/cron/tasks_extended_test.go
Normal file
51
services/cron/tasks_extended_test.go
Normal file
@@ -0,0 +1,51 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package cron
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/test"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_GCLFSConfig(t *testing.T) {
|
||||
cfg, err := setting.NewConfigProviderFromData(`
|
||||
[cron.gc_lfs]
|
||||
ENABLED = true
|
||||
RUN_AT_START = true
|
||||
SCHEDULE = "@every 2h"
|
||||
OLDER_THAN = "1h"
|
||||
LAST_UPDATED_MORE_THAN_AGO = "7h"
|
||||
NUMBER_TO_CHECK_PER_REPO = 10
|
||||
PROPORTION_TO_CHECK_PER_REPO = 0.1
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
defer test.MockVariableValue(&setting.CfgProvider, cfg)()
|
||||
|
||||
config := &GCLFSConfig{
|
||||
BaseConfig: BaseConfig{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 24h",
|
||||
},
|
||||
OlderThan: 24 * time.Hour * 7,
|
||||
LastUpdatedMoreThanAgo: 24 * time.Hour * 3,
|
||||
NumberToCheckPerRepo: 100,
|
||||
ProportionToCheckPerRepo: 0.6,
|
||||
}
|
||||
|
||||
_, err = setting.GetCronSettings("gc_lfs", config)
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, config.Enabled)
|
||||
assert.True(t, config.RunAtStart)
|
||||
assert.Equal(t, "@every 2h", config.Schedule)
|
||||
assert.Equal(t, 1*time.Hour, config.OlderThan)
|
||||
assert.Equal(t, 7*time.Hour, config.LastUpdatedMoreThanAgo)
|
||||
assert.Equal(t, int64(10), config.NumberToCheckPerRepo)
|
||||
assert.InDelta(t, 0.1, config.ProportionToCheckPerRepo, 0.001)
|
||||
}
|
||||
@@ -80,6 +80,12 @@ func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_m
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// reload issue to ensure it has the latest data, especially the number of comments
|
||||
issue, err = issues_model.GetIssueByID(ctx, issue.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
notify_service.CreateIssueComment(ctx, doer, repo, issue, comment, mentions)
|
||||
|
||||
return comment, nil
|
||||
|
||||
@@ -111,7 +111,7 @@ func DownloadHandler(ctx *context.Context) {
|
||||
}
|
||||
}
|
||||
|
||||
ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size-fromByte))
|
||||
ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size))
|
||||
ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Range")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -354,7 +354,8 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith
|
||||
|
||||
// Prevent open redirect
|
||||
if !hasBaseURL(redirectURL, g.baseURL) &&
|
||||
!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
|
||||
!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") &&
|
||||
!hasBaseURL(redirectURL, "https://release-assets.githubusercontent.com/") {
|
||||
WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)
|
||||
|
||||
return io.NopCloser(strings.NewReader(redirectURL)), nil
|
||||
|
||||
@@ -14,42 +14,28 @@ import (
|
||||
)
|
||||
|
||||
// getCommitIDsFromRepo get commit IDs from repo in between oldCommitID and newCommitID
|
||||
// isForcePush will be true if oldCommit isn't on the branch
|
||||
// Commit on baseBranch will skip
|
||||
func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, isForcePush bool, err error) {
|
||||
func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, err error) {
|
||||
gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
defer closer.Close()
|
||||
|
||||
oldCommit, err := gitRepo.GetCommit(oldCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newCommit, err := gitRepo.GetCommit(newCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
isForcePush, err = newCommit.IsForcePush(oldCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
if isForcePush {
|
||||
commitIDs = make([]string, 2)
|
||||
commitIDs[0] = oldCommitID
|
||||
commitIDs[1] = newCommitID
|
||||
|
||||
return commitIDs, isForcePush, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find commits between new and old commit excluding base branch commits
|
||||
commits, err := gitRepo.CommitsBetweenNotBase(newCommit, oldCommit, baseBranch)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
commitIDs = make([]string, 0, len(commits))
|
||||
@@ -57,38 +43,40 @@ func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldC
|
||||
commitIDs = append(commitIDs, commits[i].ID.String())
|
||||
}
|
||||
|
||||
return commitIDs, isForcePush, err
|
||||
return commitIDs, err
|
||||
}
|
||||
|
||||
// CreatePushPullComment create push code to pull base comment
|
||||
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (comment *issues_model.Comment, err error) {
|
||||
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string, isForcePush bool) (comment *issues_model.Comment, err error) {
|
||||
if pr.HasMerged || oldCommitID == "" || newCommitID == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
ops := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: pusher,
|
||||
Repo: pr.BaseRepo,
|
||||
opts := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: pusher,
|
||||
Repo: pr.BaseRepo,
|
||||
IsForcePush: isForcePush,
|
||||
Issue: pr.Issue,
|
||||
}
|
||||
|
||||
var data issues_model.PushActionContent
|
||||
|
||||
data.CommitIDs, data.IsForcePush, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if opts.IsForcePush {
|
||||
data.CommitIDs = []string{oldCommitID, newCommitID}
|
||||
} else {
|
||||
data.CommitIDs, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
ops.Issue = pr.Issue
|
||||
|
||||
dataJSON, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ops.Content = string(dataJSON)
|
||||
|
||||
comment, err = issues_model.CreateComment(ctx, ops)
|
||||
opts.Content = string(dataJSON)
|
||||
comment, err = issues_model.CreateComment(ctx, opts)
|
||||
|
||||
return comment, err
|
||||
}
|
||||
|
||||
@@ -28,7 +28,6 @@ import (
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/globallock"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
@@ -142,36 +141,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
return err
}

compareInfo, err := baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(),
git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false)
if err != nil {
return err
}
if len(compareInfo.Commits) == 0 {
return nil
}

data := issues_model.PushActionContent{IsForcePush: false}
data.CommitIDs = make([]string, 0, len(compareInfo.Commits))
for i := len(compareInfo.Commits) - 1; i >= 0; i-- {
data.CommitIDs = append(data.CommitIDs, compareInfo.Commits[i].ID.String())
}

dataJSON, err := json.Marshal(data)
if err != nil {
return err
}

ops := &issues_model.CreateCommentOptions{
Type: issues_model.CommentTypePullRequestPush,
Doer: issue.Poster,
Repo: repo,
Issue: pr.Issue,
IsForcePush: false,
Content: string(dataJSON),
}

if _, err = issues_model.CreateComment(ctx, ops); err != nil {
if _, err := CreatePushPullComment(ctx, issue.Poster, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false); err != nil {
return err
}

@@ -193,6 +163,20 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {

issue_service.ReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifiers)

// Request reviews, these should be requested before other notifications because they will add request reviews record
// on database
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
for _, reviewer := range opts.Reviewers {
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
return err
}
}
for _, teamReviewer := range opts.TeamReviewers {
if _, err = issue_service.TeamReviewRequest(ctx, pr.Issue, issue.Poster, teamReviewer, true); err != nil {
return err
}
}

mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, issue.Poster, issue.Content)
if err != nil {
return err
@@ -211,17 +195,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
}
notify_service.IssueChangeAssignee(ctx, issue.Poster, issue, assignee, false, assigneeCommentMap[assigneeID])
}
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
for _, reviewer := range opts.Reviewers {
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
return err
}
}
for _, teamReviewer := range opts.TeamReviewers {
if _, err = issue_service.TeamReviewRequest(ctx, pr.Issue, issue.Poster, teamReviewer, true); err != nil {
return err
}
}

return nil
}

@@ -332,24 +306,42 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer
pr.CommitsAhead = divergence.Ahead
pr.CommitsBehind = divergence.Behind

if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
// add first push codes comment
baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
return err
}
defer baseGitRepo.Close()

// Create comment
options := &issues_model.CreateCommentOptions{
Type: issues_model.CommentTypeChangeTargetBranch,
Doer: doer,
Repo: pr.Issue.Repo,
Issue: pr.Issue,
OldRef: oldBranch,
NewRef: targetBranch,
}
if _, err = issues_model.CreateComment(ctx, options); err != nil {
return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
}
return db.WithTx(ctx, func(ctx context.Context) error {
if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
return err
}

return nil
// Create comment
options := &issues_model.CreateCommentOptions{
Type: issues_model.CommentTypeChangeTargetBranch,
Doer: doer,
Repo: pr.Issue.Repo,
Issue: pr.Issue,
OldRef: oldBranch,
NewRef: targetBranch,
}
if _, err = issues_model.CreateComment(ctx, options); err != nil {
return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
}

// Delete all old push comments and insert new push comments
if _, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID).
And("type = ?", issues_model.CommentTypePullRequestPush).
NoAutoCondition().
Delete(new(issues_model.Comment)); err != nil {
return err
}

_, err = CreatePushPullComment(ctx, doer, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false)
return err
})
}

func checkForInvalidation(ctx context.Context, requests issues_model.PullRequestList, repoID int64, doer *user_model.User, branch string) error {
@@ -410,7 +402,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) {
}

StartPullRequestCheckImmediately(ctx, pr)
comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID)
comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID, opts.IsForcePush)
if err == nil && comment != nil {
notify_service.PullRequestPushCommits(ctx, opts.Doer, pr, comment)
}

@@ -57,7 +57,7 @@ type (
DiscordPayload struct {
Wait bool `json:"wait"`
Content string `json:"content"`
Username string `json:"username"`
Username string `json:"username,omitempty"`
AvatarURL string `json:"avatar_url,omitempty"`
TTS bool `json:"tts"`
Embeds []DiscordEmbed `json:"embeds"`

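Editor's note: a small, self-contained sketch (not part of the commits) of why the added `omitempty` matters. With it, an empty username is dropped from the serialized Discord payload instead of being sent as an empty string. The struct below is a trimmed, hypothetical stand-in for the real DiscordPayload type.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// discordPayload is a trimmed, hypothetical stand-in for the webhook's DiscordPayload.
type discordPayload struct {
	Content  string `json:"content"`
	Username string `json:"username,omitempty"`
}

func main() {
	b, _ := json.Marshal(discordPayload{Content: "build finished"})
	fmt.Println(string(b)) // {"content":"build finished"} (the empty "username" key is omitted)
}
```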
@@ -445,6 +445,7 @@ func (m *webhookNotifier) DeleteComment(ctx context.Context, doer *user_model.Us
log.Error("LoadPoster: %v", err)
return
}
comment.Issue = nil // reload issue to ensure it has the latest data, especially the number of comments
if err = comment.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return

@@ -35,7 +35,7 @@
{{template "repo/diff/whitespace_dropdown" .}}
{{template "repo/diff/options_dropdown" .}}
{{if .PageIsPullFiles}}
<div id="diff-commit-select" data-issuelink="{{$.Issue.Link}}" data-queryparams="?style={{if $.IsSplitStyle}}split{{else}}unified{{end}}&whitespace={{$.WhitespaceBehavior}}&show-outdated={{$.ShowOutdatedComments}}" data-filter_changes_by_commit="{{ctx.Locale.Tr "repo.pulls.filter_changes_by_commit"}}">
<div id="diff-commit-select" data-merge-base="{{.MergeBase}}" data-issuelink="{{$.Issue.Link}}" data-queryparams="?style={{if $.IsSplitStyle}}split{{else}}unified{{end}}&whitespace={{$.WhitespaceBehavior}}&show-outdated={{$.ShowOutdatedComments}}" data-filter_changes_by_commit="{{ctx.Locale.Tr "repo.pulls.filter_changes_by_commit"}}">
{{/* the following will be replaced by vue component, but this avoids any loading artifacts till the vue component is initialized */}}
<div class="ui jump dropdown tiny basic button custom">
{{svg "octicon-git-commit"}}

@@ -15,7 +15,7 @@
</span>
{{end}}
</div>
<div>
<div class="tw-flex tw-items-center">
{{if or $invalid $resolved}}
<button id="show-outdated-{{$comment.ID}}" data-comment="{{$comment.ID}}" class="{{if not $resolved}}tw-hidden{{end}} btn tiny show-outdated">
{{svg "octicon-unfold" 16 "tw-mr-2"}}

@@ -119,7 +119,7 @@
{{range $idx, $code := .FileContent}}
{{$line := Eval $idx "+" 1}}
<tr>
<td id="L{{$line}}" class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
<td class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
{{if $.EscapeStatus.Escaped}}
<td class="lines-escape">{{if (index $.LineEscapeStatus $idx).Escaped}}<button class="toggle-escape-button btn interact-bg" title="{{if (index $.LineEscapeStatus $idx).HasInvisible}}{{ctx.Locale.Tr "repo.invisible_runes_line"}} {{end}}{{if (index $.LineEscapeStatus $idx).HasAmbiguous}}{{ctx.Locale.Tr "repo.ambiguous_runes_line"}}{{end}}"></button>{{end}}</td>
{{end}}

@@ -355,6 +355,7 @@ func TestPackageSwift(t *testing.T) {
assert.Equal(t, packageVersion, result.Metadata.Version)
assert.Equal(t, packageDescription, result.Metadata.Description)
assert.Equal(t, "Swift", result.Metadata.ProgrammingLanguage.Name)
assert.Equal(t, packageAuthor, result.Metadata.Author.Name)
assert.Equal(t, packageAuthor, result.Metadata.Author.GivenName)

req = NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s/%s.json", url, packageScope, packageName, packageVersion)).

@@ -17,6 +17,7 @@ import (
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/indexer/issues"
@@ -471,19 +472,38 @@ func TestIssueRedirect(t *testing.T) {
session := loginUser(t, "user2")

// Test external tracker where style not set (shall default numeric)
req := NewRequest(t, "GET", path.Join("org26", "repo_external_tracker", "issues", "1"))
req := NewRequest(t, "GET", "/org26/repo_external_tracker/issues/1")
resp := session.MakeRequest(t, req, http.StatusSeeOther)
assert.Equal(t, "https://tracker.com/org26/repo_external_tracker/issues/1", test.RedirectURL(resp))

// Test external tracker with numeric style
req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_numeric", "issues", "1"))
req = NewRequest(t, "GET", "/org26/repo_external_tracker_numeric/issues/1")
resp = session.MakeRequest(t, req, http.StatusSeeOther)
assert.Equal(t, "https://tracker.com/org26/repo_external_tracker_numeric/issues/1", test.RedirectURL(resp))

// Test external tracker with alphanumeric style (for a pull request)
req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_alpha", "issues", "1"))
req = NewRequest(t, "GET", "/org26/repo_external_tracker_alpha/issues/1")
resp = session.MakeRequest(t, req, http.StatusSeeOther)
assert.Equal(t, "/"+path.Join("org26", "repo_external_tracker_alpha", "pulls", "1"), test.RedirectURL(resp))
assert.Equal(t, "/org26/repo_external_tracker_alpha/pulls/1", test.RedirectURL(resp))

// test to check that the PR redirection works if the issue unit is disabled
// repo1 is a normal repository with issue unit enabled, visit issue 2(which is a pull request)
// will redirect to pulls
req = NewRequest(t, "GET", "/user2/repo1/issues/2")
resp = session.MakeRequest(t, req, http.StatusSeeOther)
assert.Equal(t, "/user2/repo1/pulls/2", test.RedirectURL(resp))

repoUnit := unittest.AssertExistsAndLoadBean(t, &repo_model.RepoUnit{RepoID: 1, Type: unit.TypeIssues})

// disable issue unit, it will be reset
_, err := db.DeleteByID[repo_model.RepoUnit](t.Context(), repoUnit.ID)
assert.NoError(t, err)

// even if the issue unit is disabled, visiting an issue which is a pull request
// will still redirect to pull request
req = NewRequest(t, "GET", "/user2/repo1/issues/2")
resp = session.MakeRequest(t, req, http.StatusSeeOther)
assert.Equal(t, "/user2/repo1/pulls/2", test.RedirectURL(resp))
}

func TestSearchIssues(t *testing.T) {

@@ -105,7 +105,15 @@ func TestPullCompare_EnableAllowEditsFromMaintainer(t *testing.T) {

// user4 creates a new branch and a PR
testEditFileToNewBranch(t, user4Session, "user4", forkedRepoName, "master", "user4/update-readme", "README.md", "Hello, World\n(Edited by user4)\n")
resp := testPullCreateDirectly(t, user4Session, repo3.OwnerName, repo3.Name, "master", "user4", forkedRepoName, "user4/update-readme", "PR for user4 forked repo3")
resp := testPullCreateDirectly(t, user4Session, createPullRequestOptions{
BaseRepoOwner: repo3.OwnerName,
BaseRepoName: repo3.Name,
BaseBranch: "master",
HeadRepoOwner: "user4",
HeadRepoName: forkedRepoName,
HeadBranch: "user4/update-readme",
Title: "PR for user4 forked repo3",
})
prURL := test.RedirectURL(resp)

// user2 (admin of repo3) goes to the PR files page

@@ -60,26 +60,50 @@ func testPullCreate(t *testing.T, session *TestSession, user, repo string, toSel
return resp
}

func testPullCreateDirectly(t *testing.T, session *TestSession, baseRepoOwner, baseRepoName, baseBranch, headRepoOwner, headRepoName, headBranch, title string) *httptest.ResponseRecorder {
headCompare := headBranch
if headRepoOwner != "" {
if headRepoName != "" {
headCompare = fmt.Sprintf("%s/%s:%s", headRepoOwner, headRepoName, headBranch)
type createPullRequestOptions struct {
BaseRepoOwner string
BaseRepoName string
BaseBranch string
HeadRepoOwner string
HeadRepoName string
HeadBranch string
Title string
ReviewerIDs string // comma-separated list of user IDs
}

func (opts createPullRequestOptions) IsValid() bool {
return opts.BaseRepoOwner != "" && opts.BaseRepoName != "" && opts.BaseBranch != "" &&
opts.HeadBranch != "" && opts.Title != ""
}

func testPullCreateDirectly(t *testing.T, session *TestSession, opts createPullRequestOptions) *httptest.ResponseRecorder {
if !opts.IsValid() {
t.Fatal("Invalid pull request options")
}

headCompare := opts.HeadBranch
if opts.HeadRepoOwner != "" {
if opts.HeadRepoName != "" {
headCompare = fmt.Sprintf("%s/%s:%s", opts.HeadRepoOwner, opts.HeadRepoName, opts.HeadBranch)
} else {
headCompare = fmt.Sprintf("%s:%s", headRepoOwner, headBranch)
headCompare = fmt.Sprintf("%s:%s", opts.HeadRepoOwner, opts.HeadBranch)
}
}
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/%s...%s", baseRepoOwner, baseRepoName, baseBranch, headCompare))
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/%s...%s", opts.BaseRepoOwner, opts.BaseRepoName, opts.BaseBranch, headCompare))
resp := session.MakeRequest(t, req, http.StatusOK)

// Submit the form for creating the pull
htmlDoc := NewHTMLParser(t, resp.Body)
link, exists := htmlDoc.doc.Find("form.ui.form").Attr("action")
assert.True(t, exists, "The template has changed")
req = NewRequestWithValues(t, "POST", link, map[string]string{
params := map[string]string{
"_csrf": htmlDoc.GetCSRF(),
"title": title,
})
"title": opts.Title,
}
if opts.ReviewerIDs != "" {
params["reviewer_ids"] = opts.ReviewerIDs
}
req = NewRequestWithValues(t, "POST", link, params)
resp = session.MakeRequest(t, req, http.StatusOK)
return resp
}
@@ -246,7 +270,15 @@ func TestPullCreatePrFromBaseToFork(t *testing.T) {
testEditFile(t, sessionBase, "user2", "repo1", "master", "README.md", "Hello, World (Edited)\n")

// Create a PR
resp := testPullCreateDirectly(t, sessionFork, "user1", "repo1", "master", "user2", "repo1", "master", "This is a pull title")
resp := testPullCreateDirectly(t, sessionFork, createPullRequestOptions{
BaseRepoOwner: "user1",
BaseRepoName: "repo1",
BaseBranch: "master",
HeadRepoOwner: "user2",
HeadRepoName: "repo1",
HeadBranch: "master",
Title: "This is a pull title",
})
// check the redirected URL
url := test.RedirectURL(resp)
assert.Regexp(t, "^/user1/repo1/pulls/[0-9]*$", url)

@@ -25,10 +25,6 @@ func TestPullDiff_CommitRangePRDiff(t *testing.T) {
doTestPRDiff(t, "/user2/commitsonpr/pulls/1/files/4ca8bcaf27e28504df7bf996819665986b01c847..23576dd018294e476c06e569b6b0f170d0558705", true, []string{"test2.txt", "test3.txt", "test4.txt"})
}

func TestPullDiff_StartingFromBaseToCommitPRDiff(t *testing.T) {
doTestPRDiff(t, "/user2/commitsonpr/pulls/1/files/c5626fc9eff57eb1bb7b796b01d4d0f2f3f792a2", true, []string{"test1.txt", "test2.txt", "test3.txt"})
}

func doTestPRDiff(t *testing.T, prDiffURL string, reviewBtnDisabled bool, expectedFilenames []string) {
defer tests.PrepareTestEnv(t)()

@@ -184,13 +184,29 @@ func TestPullView_CodeOwner(t *testing.T) {
session := loginUser(t, "user5")

// create a pull request on the forked repository, code reviewers should not be mentioned
testPullCreateDirectly(t, session, "user5", "test_codeowner", forkedRepo.DefaultBranch, "", "", "codeowner-basebranch-forked", "Test Pull Request on Forked Repository")
testPullCreateDirectly(t, session, createPullRequestOptions{
BaseRepoOwner: "user5",
BaseRepoName: "test_codeowner",
BaseBranch: forkedRepo.DefaultBranch,
HeadRepoOwner: "",
HeadRepoName: "",
HeadBranch: "codeowner-basebranch-forked",
Title: "Test Pull Request on Forked Repository",
})

pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: forkedRepo.ID, HeadBranch: "codeowner-basebranch-forked"})
unittest.AssertNotExistsBean(t, &issues_model.Review{IssueID: pr.IssueID, Type: issues_model.ReviewTypeRequest, ReviewerID: 8})

// create a pull request to base repository, code reviewers should be mentioned
testPullCreateDirectly(t, session, repo.OwnerName, repo.Name, repo.DefaultBranch, forkedRepo.OwnerName, forkedRepo.Name, "codeowner-basebranch-forked", "Test Pull Request3")
testPullCreateDirectly(t, session, createPullRequestOptions{
BaseRepoOwner: repo.OwnerName,
BaseRepoName: repo.Name,
BaseBranch: repo.DefaultBranch,
HeadRepoOwner: forkedRepo.OwnerName,
HeadRepoName: forkedRepo.Name,
HeadBranch: "codeowner-basebranch-forked",
Title: "Test Pull Request3",
})

pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: repo.ID, HeadRepoID: forkedRepo.ID, HeadBranch: "codeowner-basebranch-forked"})
unittest.AssertExistsAndLoadBean(t, &issues_model.Review{IssueID: pr.IssueID, Type: issues_model.ReviewTypeRequest, ReviewerID: 8})

@@ -14,6 +14,7 @@ import (
"time"

auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/perm"
"code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
@@ -529,15 +530,30 @@ func Test_WebhookPullRequest(t *testing.T) {
}, http.StatusOK)
defer provider.Close()

testCtx := NewAPITestContext(t, "user2", "repo1", auth_model.AccessTokenScopeAll)
// add user4 as collabrator so that it can be a reviewer
doAPIAddCollaborator(testCtx, "user4", perm.AccessModeWrite)(t)

// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
sessionUser2 := loginUser(t, "user2")
sessionUser4 := loginUser(t, "user4")

testAPICreateWebhookForRepo(t, session, "user2", "repo1", provider.URL(), "pull_request")
// ignore the possible review_requested event to keep the test deterministic
testAPICreateWebhookForRepo(t, sessionUser2, "user2", "repo1", provider.URL(), "pull_request_only")

testAPICreateBranch(t, session, "user2", "repo1", "master", "master2", http.StatusCreated)
testAPICreateBranch(t, sessionUser2, "user2", "repo1", "master", "master2", http.StatusCreated)
// 2. trigger the webhook
repo1 := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 1})
testCreatePullToDefaultBranch(t, session, repo1, repo1, "master2", "first pull request")
testPullCreateDirectly(t, sessionUser4, createPullRequestOptions{
BaseRepoOwner: repo1.OwnerName,
BaseRepoName: repo1.Name,
BaseBranch: repo1.DefaultBranch,
HeadRepoOwner: "",
HeadRepoName: "",
HeadBranch: "master2",
Title: "first pull request",
ReviewerIDs: "2", // add user2 as reviewer
})

// 3. validate the webhook is triggered
assert.Equal(t, "pull_request", triggeredEvent)
@@ -549,6 +565,8 @@ func Test_WebhookPullRequest(t *testing.T) {
assert.Equal(t, 0, *payloads[0].PullRequest.Additions)
assert.Equal(t, 0, *payloads[0].PullRequest.ChangedFiles)
assert.Equal(t, 0, *payloads[0].PullRequest.Deletions)
assert.Len(t, payloads[0].PullRequest.RequestedReviewers, 1)
assert.Equal(t, int64(2), payloads[0].PullRequest.RequestedReviewers[0].ID)
})
}

@@ -32,6 +32,7 @@ export default defineComponent({
locale: {
filter_changes_by_commit: el.getAttribute('data-filter_changes_by_commit'),
} as Record<string, string>,
mergeBase: el.getAttribute('data-merge-base'),
commits: [] as Array<Commit>,
hoverActivated: false,
lastReviewCommitSha: '',
@@ -176,32 +177,38 @@ export default defineComponent({
}
},
/**
* When a commit is clicked with shift this enables the range
* selection. Second click (with shift) defines the end of the
* range. This opens the diff of this range
* Exception: first commit is the first commit of this PR. Then
* the diff from beginning of PR up to the second clicked commit is
* opened
* When a commit is clicked while holding Shift, it enables range selection.
* - The range selection is a half-open, half-closed range, meaning it excludes the start commit but includes the end commit.
* - The start of the commit range is always the previous commit of the first clicked commit.
* - If the first commit in the list is clicked, the mergeBase will be used as the start of the range instead.
* - The second Shift-click defines the end of the range.
* - Once both are selected, the diff view for the selected commit range will open.
*/
commitClickedShift(commit: Commit) {
this.hoverActivated = !this.hoverActivated;
commit.selected = true;
// Second click -> determine our range and open links accordingly
if (!this.hoverActivated) {
// since at least one commit is selected, we can determine the range
// find all selected commits and generate a link
if (this.commits[0].selected) {
// first commit is selected - generate a short url with only target sha
const lastCommitIdx = this.commits.findLastIndex((x) => x.selected);
if (lastCommitIdx === this.commits.length - 1) {
// user selected all commits - just show the normal diff page
window.location.assign(`${this.issueLink}/files${this.queryParams}`);
} else {
window.location.assign(`${this.issueLink}/files/${this.commits[lastCommitIdx].id}${this.queryParams}`);
}
const firstSelected = this.commits.findIndex((x) => x.selected);
const lastSelected = this.commits.findLastIndex((x) => x.selected);
let beforeCommitID: string;
if (firstSelected === 0) {
beforeCommitID = this.mergeBase;
} else {
const start = this.commits[this.commits.findIndex((x) => x.selected) - 1].id;
const end = this.commits.findLast((x) => x.selected).id;
window.location.assign(`${this.issueLink}/files/${start}..${end}${this.queryParams}`);
beforeCommitID = this.commits[firstSelected - 1].id;
}
const afterCommitID = this.commits[lastSelected].id;

if (firstSelected === lastSelected) {
// if the start and end are the same, we show this single commit
window.location.assign(`${this.issueLink}/commits/${afterCommitID}${this.queryParams}`);
} else if (beforeCommitID === this.mergeBase && afterCommitID === this.commits.at(-1).id) {
// if the first commit is selected and the last commit is selected, we show all commits
window.location.assign(`${this.issueLink}/files${this.queryParams}`);
} else {
window.location.assign(`${this.issueLink}/files/${beforeCommitID}..${afterCommitID}${this.queryParams}`);
}
}
},
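Editor's note: the component comment above describes the new range-selection rule. As a plain illustration of the same rule (assumptions: commit IDs are ordered oldest to newest, and indices are the positions of the first and last selected commits), here is a small Go sketch; the real logic lives in the Vue component and this helper is hypothetical.

```go
package main

import "fmt"

// rangeEndpoints mirrors the selection rule described above: the range excludes
// the start commit but includes the end commit, and the merge base stands in
// when the first listed commit is selected.
func rangeEndpoints(commitIDs []string, mergeBase string, first, last int) (before, after string) {
	if first == 0 {
		before = mergeBase
	} else {
		before = commitIDs[first-1]
	}
	return before, commitIDs[last]
}

func main() {
	commits := []string{"c1", "c2", "c3", "c4"}
	before, after := rangeEndpoints(commits, "mergeBase", 1, 3)
	fmt.Printf("/files/%s..%s\n", before, after) // /files/c1..c4
}
```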