Mirror of https://github.com/go-gitea/gitea.git

Compare commits (43 commits)
| SHA1 |
|---|
| 99053ce4fa |
| e818de179e |
| 0a87bf9016 |
| 86d99e2f38 |
| 7bfb7567b2 |
| 7619808137 |
| b854930a96 |
| 935f5e0ad5 |
| 08c6ea6728 |
| 67977f0b1c |
| 78fbcf35ad |
| 8f5b1d27d4 |
| 89c99a4dcb |
| 3c7e7a19dd |
| 8313b5d998 |
| 6ca73bf662 |
| 5e10def7f7 |
| 1b8efb6fc7 |
| 8f89e1e174 |
| cbc595b9d9 |
| cc5ccf44dc |
| f91e35b8b7 |
| f52ed422dc |
| 0266ee5de7 |
| ac03e65cf4 |
| f3e6672c09 |
| 136ec9ef81 |
| 79018ae726 |
| e11176192a |
| 4e0269e890 |
| 04114c637a |
| e5540bfa81 |
| d22d6ca0d8 |
| d49feab428 |
| 9162f4403a |
| d05cf08fad |
| f4b4b0bf98 |
| 99596044d7 |
| 693d26914f |
| 315f197790 |
| 76b8f0c3a7 |
| f99bbd7f3f |
| f7ef657b5a |
.github/workflows/pull-db-tests.yml (vendored): 6 changes

@@ -31,7 +31,7 @@ jobs:
     minio:
       # as github actions doesn't support "entrypoint", we need to use a non-official image
       # that has a custom entrypoint set to "minio server /data"
-      image: bitnami/minio:2023.8.31
+      image: bitnamilegacy/minio:2023.8.31
       env:
         MINIO_ROOT_USER: 123456
         MINIO_ROOT_PASSWORD: 12345678
@@ -113,7 +113,7 @@ jobs:
       ports:
         - 6379:6379
     minio:
-      image: bitnami/minio:2021.3.17
+      image: bitnamilegacy/minio:2021.3.17
       env:
         MINIO_ACCESS_KEY: 123456
         MINIO_SECRET_KEY: 12345678
@@ -155,7 +155,7 @@ jobs:
   services:
     mysql:
       # the bitnami mysql image has more options than the official one, it's easier to customize
-      image: bitnami/mysql:8.0
+      image: bitnamilegacy/mysql:8.0
       env:
         ALLOW_EMPTY_PASSWORD: true
         MYSQL_DATABASE: testgitea
CHANGELOG.md: 74 changes

@@ -4,7 +4,59 @@ This changelog goes through the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.com).

-## [1.24.3](https://github.com/go-gitea/gitea/releases/tag/1.24.3) - 2025-07-15
+## [1.24.7](https://github.com/go-gitea/gitea/releases/tag/1.24.7) - 2025-10-24
+
+* SECURITY
+  * Refactor legacy code (#35708) (#35713)
+  * Fixing issue #35530: Password Leak in Log Messages (#35584) (#35665)
+  * Fix a bug missed return (#35655) (#35671)
+* BUGFIXES
+  * Fix inputing review comment will remove reviewer (#35591) (#35664)
+* TESTING
+  * Mock external service in hcaptcha TestCaptcha (#35604) (#35663)
+  * Fix build (#35669)
+
+## [1.24.6](https://github.com/go-gitea/gitea/releases/tag/1.24.6) - 2025-09-10
+
+* SECURITY
+  * Upgrade xz to v0.5.15 (#35385)
+* BUGFIXES
+  * Fix a compare page 404 bug when the pull request disabled (#35441) (#35453)
+  * Fix bug when issue disabled, pull request number in the commit message cannot be redirected (#35420) (#35442)
+  * Add author.name field to Swift Package Registry API response (#35410) (#35431)
+  * Remove usernames when empty in discord webhook (#35412) (#35417)
+  * Allow foreachref parser to grow its buffer (#35365) (#35376)
+  * Allow deleting comment with content via API like web did (#35346) (#35354)
+  * Fix atom/rss mixed error (#35345) (#35347)
+  * Fix review request webhook bug (#35339)
+  * Remove duplicate html IDs (#35210) (#35325)
+  * Fix LFS range size header response (#35277) (#35293)
+  * Fix GitHub release assets URL validation (#35287) (#35290)
+  * Fix token lifetime, closes #35230 (#35271) (#35281)
+  * Fix push commits comments when changing the pull request target branch (#35386) (#35443)
+
+## [1.24.5](https://github.com/go-gitea/gitea/releases/tag/v1.24.5) - 2025-08-12
+
+* BUGFIXES
+  * Fix a bug where lfs gc never worked. (#35198) (#35255)
+  * Reload issue when sending webhook to make num comments is right. (#35243) (#35248)
+  * Fix bug when review pull request commits (#35192) (#35246)
+* MISC
+  * Vertically center "Show Resolved" (#35211) (#35218)
+
+## [1.24.4](https://github.com/go-gitea/gitea/releases/tag/v1.24.4) - 2025-08-03
+
+* BUGFIXES
+  * Fix various bugs (1.24) (#35186)
+  * Fix migrate input box bug (#35166) (#35171)
+  * Only hide dropzone when no files have been uploaded (#35156) (#35167)
+  * Fix review comment/dimiss comment x reference can be refereced back (#35094) (#35099)
+  * Fix submodule nil check (#35096) (#35098)
+* MISC
+  * Don't use full-file highlight when there is a git diff textconv (#35114) (#35119)
+  * Increase gap on latest commit (#35104) (#35113)
+
+## [1.24.3](https://github.com/go-gitea/gitea/releases/tag/v1.24.3) - 2025-07-15

 * BUGFIXES
   * Fix form property assignment edge case (#35073) (#35078)
@@ -36,7 +88,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
   * Skip updating timestamp when sync branch (#34875)
   * Fix required contexts and commit status matching bug (#34815) (#34829)

-## [1.24.2](https://github.com/go-gitea/gitea/releases/tag/1.24.2) - 2025-06-20
+## [1.24.2](https://github.com/go-gitea/gitea/releases/tag/v1.24.2) - 2025-06-20

 * BUGFIXES
   * Fix container range bug (#34795) (#34796)
@@ -44,7 +96,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
 * BUILD
   * Bump poetry feature to new url for dev container (#34787) (#34790)

-## [1.24.1](https://github.com/go-gitea/gitea/releases/tag/1.24.1) - 2025-06-18
+## [1.24.1](https://github.com/go-gitea/gitea/releases/tag/v1.24.1) - 2025-06-18

 * ENHANCEMENTS
   * Improve alignment of commit status icon on commit page (#34750) (#34757)
@@ -64,7 +116,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
   * Hide href attribute of a tag if there is no target_url (#34556) (#34684)
   * Fix tag target (#34781) #34783

-## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/1.24.0) - 2025-05-26
+## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/v1.24.0) - 2025-05-26

 * BREAKING
   * Make Gitea always use its internal config, ignore `/etc/gitconfig` (#33076)
@@ -434,7 +486,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
   * Bump x/net (#32896) (#32900)
   * Only activity tab needs heatmap data loading (#34652)

-## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/1.23.8) - 2025-05-11
+## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/v1.23.8) - 2025-05-11

 * SECURITY
   * Fix a bug when uploading file via lfs ssh command (#34408) (#34411)
@@ -461,7 +513,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
   * Bump go version in go.mod (#34160)
   * remove hardcoded 'code' string in clone_panel.tmpl (#34153) (#34158)

-## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/1.23.7) - 2025-04-07
+## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/v1.23.7) - 2025-04-07

 * Enhancements
   * Add a config option to block "expensive" pages for anonymous users (#34024) (#34071)
@@ -559,7 +611,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
 * BUGFIXES
   * Fix a bug caused by status webhook template #33512

-## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/1.23.2) - 2025-02-04
+## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/v1.23.2) - 2025-02-04

 * BREAKING
   * Add tests for webhook and fix some webhook bugs (#33396) (#33442)
@@ -3089,7 +3141,7 @@ Key highlights of this release encompass significant changes categorized under `
   * Improve decryption failure message (#24573) (#24575)
   * Makefile: Use portable !, not GNUish -not, with find(1). (#24565) (#24572)

-## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/1.19.3) - 2023-05-03
+## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/v1.19.3) - 2023-05-03

 * SECURITY
   * Use golang 1.20.4 to fix CVE-2023-24539, CVE-2023-24540, and CVE-2023-29400
@@ -3102,7 +3154,7 @@ Key highlights of this release encompass significant changes categorized under `
   * Fix incorrect CurrentUser check for docker rootless (#24435)
   * Getting the tag list does not require being signed in (#24413) (#24416)

-## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/1.19.2) - 2023-04-26
+## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/v1.19.2) - 2023-04-26

 * SECURITY
   * Require repo scope for PATs for private repos and basic authentication (#24362) (#24364)
@@ -3601,7 +3653,7 @@ Key highlights of this release encompass significant changes categorized under `
   * Display attachments of review comment when comment content is blank (#23035) (#23046)
   * Return empty url for submodule tree entries (#23043) (#23048)

-## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/1.18.4) - 2023-02-20
+## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/v1.18.4) - 2023-02-20

 * SECURITY
   * Provide the ability to set password hash algorithm parameters (#22942) (#22943)
@@ -4028,7 +4080,7 @@ Key highlights of this release encompass significant changes categorized under `
   * Fix the mode of custom dir to 0700 in docker-rootless (#20861) (#20867)
   * Fix UI mis-align for PR commit history (#20845) (#20859)

-## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/1.17.1) - 2022-08-17
+## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/v1.17.1) - 2022-08-17

 * SECURITY
   * Correctly escape within tribute.js (#20831) (#20832)
Makefile: 16 changes

@@ -47,6 +47,17 @@ ifeq ($(HAS_GO), yes)
   CGO_CFLAGS ?= $(shell $(GO) env CGO_CFLAGS) $(CGO_EXTRA_CFLAGS)
 endif

+CGO_ENABLED ?= 0
+ifneq (,$(findstring sqlite,$(TAGS))$(findstring pam,$(TAGS)))
+  CGO_ENABLED = 1
+endif
+
+STATIC ?=
+EXTLDFLAGS ?=
+ifneq ($(STATIC),)
+  EXTLDFLAGS = -extldflags "-static"
+endif
+
 ifeq ($(GOOS),windows)
   IS_WINDOWS := yes
 else ifeq ($(patsubst Windows%,Windows,$(OS)),Windows)
@@ -740,7 +751,10 @@ security-check:
   go run $(GOVULNCHECK_PACKAGE) -show color ./...

 $(EXECUTABLE): $(GO_SOURCES) $(TAGS_PREREQ)
-  CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@
+ifneq ($(and $(STATIC),$(findstring pam,$(TAGS))),)
+  $(error pam support set via TAGS doesn't support static builds)
+endif
+  CGO_ENABLED="$(CGO_ENABLED)" CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@

 .PHONY: release
 release: frontend generate release-windows release-linux release-darwin release-freebsd release-copy release-compress vendor release-sources release-check
cmd/serv.go: 27 changes

@@ -13,7 +13,6 @@ import (
     "path/filepath"
     "strconv"
     "strings"
-    "time"
     "unicode"

     asymkey_model "code.gitea.io/gitea/models/asymkey"
@@ -31,7 +30,6 @@ import (
     "code.gitea.io/gitea/modules/setting"
     "code.gitea.io/gitea/services/lfs"

-    "github.com/golang-jwt/jwt/v5"
     "github.com/kballard/go-shellquote"
     "github.com/urfave/cli/v2"
 )
@@ -131,27 +129,6 @@ func getAccessMode(verb, lfsVerb string) perm.AccessMode {
     return perm.AccessModeNone
 }

-func getLFSAuthToken(ctx context.Context, lfsVerb string, results *private.ServCommandResults) (string, error) {
-    now := time.Now()
-    claims := lfs.Claims{
-        RegisteredClaims: jwt.RegisteredClaims{
-            ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
-            NotBefore: jwt.NewNumericDate(now),
-        },
-        RepoID: results.RepoID,
-        Op:     lfsVerb,
-        UserID: results.UserID,
-    }
-    token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
-
-    // Sign and get the complete encoded token as a string using the secret
-    tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
-    if err != nil {
-        return "", fail(ctx, "Failed to sign JWT Token", "Failed to sign JWT token: %v", err)
-    }
-    return "Bearer " + tokenString, nil
-}
-
 func runServ(c *cli.Context) error {
     ctx, cancel := installSignals()
     defer cancel()
@@ -284,7 +261,7 @@ func runServ(c *cli.Context) error {

     // LFS SSH protocol
     if verb == git.CmdVerbLfsTransfer {
-        token, err := getLFSAuthToken(ctx, lfsVerb, results)
+        token, err := lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{Op: lfsVerb, UserID: results.UserID, RepoID: results.RepoID})
         if err != nil {
             return err
         }
@@ -295,7 +272,7 @@
     if verb == git.CmdVerbLfsAuthenticate {
         url := fmt.Sprintf("%s%s/%s.git/info/lfs", setting.AppURL, url.PathEscape(results.OwnerName), url.PathEscape(results.RepoName))

-        token, err := getLFSAuthToken(ctx, lfsVerb, results)
+        token, err := lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{Op: lfsVerb, UserID: results.UserID, RepoID: results.RepoID})
         if err != nil {
             return err
         }
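Both call sites above now delegate token creation to a shared helper in the lfs service package instead of the removed local getLFSAuthToken. That helper is not shown in this comparison; the following is only a minimal sketch of the shape such a function might take, reusing the same HS256 claims as the removed code. The standalone Claims and AuthTokenOptions types, the explicit secret/expiry parameters, and the JSON tags are illustrative assumptions, not the actual services/lfs implementation.

```go
package lfs

import (
	"time"

	"github.com/golang-jwt/jwt/v5"
)

// AuthTokenOptions mirrors the values the callers in cmd/serv.go pass in
// (hypothetical shape for this sketch).
type AuthTokenOptions struct {
	Op     string
	UserID int64
	RepoID int64
}

// Claims is assumed to embed jwt.RegisteredClaims plus the LFS-specific
// fields, as in the code removed from cmd/serv.go.
type Claims struct {
	jwt.RegisteredClaims
	RepoID int64  `json:"repo_id"`
	Op     string `json:"op"`
	UserID int64  `json:"user_id"`
}

// GetLFSAuthTokenWithBearer signs an HS256 token and returns it prefixed
// with "Bearer ". In Gitea the secret and expiry would come from setting.LFS;
// here they are explicit parameters so the sketch is self-contained.
func GetLFSAuthTokenWithBearer(opts AuthTokenOptions, secret []byte, expiry time.Duration) (string, error) {
	now := time.Now()
	claims := Claims{
		RegisteredClaims: jwt.RegisteredClaims{
			ExpiresAt: jwt.NewNumericDate(now.Add(expiry)),
			NotBefore: jwt.NewNumericDate(now),
		},
		RepoID: opts.RepoID,
		Op:     opts.Op,
		UserID: opts.UserID,
	}
	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(secret)
	if err != nil {
		return "", err
	}
	return "Bearer " + signed, nil
}
```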
flake.lock (generated): 6 changes

@@ -20,11 +20,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1747179050,
-        "narHash": "sha256-qhFMmDkeJX9KJwr5H32f1r7Prs7XbQWtO0h3V0a0rFY=",
+        "lastModified": 1752480373,
+        "narHash": "sha256-JHQbm+OcGp32wAsXTE/FLYGNpb+4GLi5oTvCxwSoBOA=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "adaa24fbf46737f3f1b5497bf64bae750f82942e",
+        "rev": "62e0f05ede1da0d54515d4ea8ce9c733f12d9f08",
         "type": "github"
       },
       "original": {
flake.nix: 60 changes

@@ -11,33 +11,45 @@
         pkgs = nixpkgs.legacyPackages.${system};
       in
       {
-        devShells.default = pkgs.mkShell {
-          buildInputs = with pkgs; [
-            # generic
-            git
-            git-lfs
-            gnumake
-            gnused
-            gnutar
-            gzip
+        devShells.default =
+          with pkgs;
+          let
+            # only bump toolchain versions here
+            go = go_1_24;
+            nodejs = nodejs_24;
+            python3 = python312;
+          in
+          pkgs.mkShell {
+            buildInputs = [
+              # generic
+              git
+              git-lfs
+              gnumake
+              gnused
+              gnutar
+              gzip

-            # frontend
-            nodejs_22
+              # frontend
+              nodejs

-            # linting
-            python312
-            poetry
+              # linting
+              python3
+              poetry

-            # backend
-            go_1_24
-            gofumpt
-            sqlite
-          ];
-          shellHook = ''
-            export GO="${pkgs.go_1_24}/bin/go"
-            export GOROOT="${pkgs.go_1_24}/share/go"
-          '';
-        };
+              # backend
+              go
+              glibc.static
+              gofumpt
+              sqlite
+            ];
+            CFLAGS = "-I${glibc.static.dev}/include";
+            LDFLAGS = "-L ${glibc.static}/lib";
+            GO = "${go}/bin/go";
+            GOROOT = "${go}/share/go";
+
+            TAGS = "sqlite sqlite_unlock_notify";
+            STATIC = "true";
+          };
       }
     );
 }
go.mod: 2 changes

@@ -109,7 +109,7 @@ require (
 	github.com/stretchr/testify v1.10.0
 	github.com/syndtr/goleveldb v1.0.0
 	github.com/tstranex/u2f v1.0.0
-	github.com/ulikunitz/xz v0.5.12
+	github.com/ulikunitz/xz v0.5.15
 	github.com/urfave/cli/v2 v2.27.6
 	github.com/wneessen/go-mail v0.6.2
 	github.com/xeipuuv/gojsonschema v1.2.0
go.sum: 4 changes

@@ -757,8 +757,8 @@ github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGB
 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
 github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
 github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
-github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
-github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
+github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
+github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
 github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
 github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
 github.com/urfave/cli/v2 v2.27.6 h1:VdRdS98FNhKZ8/Az8B7MTyGQmpIr36O1EHybx/LaZ4g=
@@ -67,13 +67,6 @@ func (key *PublicKey) OmitEmail() string {
     return strings.Join(strings.Split(key.Content, " ")[:2], " ")
 }

-// AuthorizedString returns formatted public key string for authorized_keys file.
-//
-// TODO: Consider dropping this function
-func (key *PublicKey) AuthorizedString() string {
-    return AuthorizedStringForKey(key)
-}
-
 func addKey(ctx context.Context, key *PublicKey) (err error) {
     if len(key.Fingerprint) == 0 {
         key.Fingerprint, err = CalcFingerprint(key.Content)
@@ -17,29 +17,13 @@ import (
     "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/setting"
     "code.gitea.io/gitea/modules/util"

+    "golang.org/x/crypto/ssh"
 )

-// (twelve lines of ASCII-art banner reading "Authorized Keys", removed)
-//
-// This file contains functions for creating authorized_keys files
-//
-// There is a dependence on the database within RegeneratePublicKeys however most of these functions probably belong in a module
-
-const (
-    tplCommentPrefix = `# gitea public key`
-    tplPublicKey     = tplCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s` + "\n"
-)
+// AuthorizedStringCommentPrefix is a magic tag
+// some functions like RegeneratePublicKeys needs this tag to skip the keys generated by Gitea, while keep other keys
+const AuthorizedStringCommentPrefix = `# gitea public key`

 var sshOpLocker sync.Mutex

@@ -50,17 +34,45 @@ func WithSSHOpLocker(f func() error) error {
 }

 // AuthorizedStringForKey creates the authorized keys string appropriate for the provided key
-func AuthorizedStringForKey(key *PublicKey) string {
+func AuthorizedStringForKey(key *PublicKey) (string, error) {
     sb := &strings.Builder{}
-    _ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]any{
+    _, err := writeAuthorizedStringForKey(key, sb)
+    return sb.String(), err
+}
+
+// WriteAuthorizedStringForValidKey writes the authorized key for the provided key. If the key is invalid, it does nothing.
+func WriteAuthorizedStringForValidKey(key *PublicKey, w io.Writer) error {
+    validKey, err := writeAuthorizedStringForKey(key, w)
+    if !validKey {
+        log.Debug("WriteAuthorizedStringForValidKey: key %s is not valid: %v", key, err)
+        return nil
+    }
+    return err
+}
+
+func writeAuthorizedStringForKey(key *PublicKey, w io.Writer) (keyValid bool, err error) {
+    const tpl = AuthorizedStringCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s %s` + "\n"
+    pubKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(key.Content))
+    if err != nil {
+        return false, err
+    }
+    // now the key is valid, the code below could only return template/IO related errors
+    sbCmd := &strings.Builder{}
+    err = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sbCmd, map[string]any{
         "AppPath":     util.ShellEscape(setting.AppPath),
         "AppWorkPath": util.ShellEscape(setting.AppWorkPath),
         "CustomConf":  util.ShellEscape(setting.CustomConf),
         "CustomPath":  util.ShellEscape(setting.CustomPath),
         "Key":         key,
     })

-    return fmt.Sprintf(tplPublicKey, util.ShellEscape(sb.String()), key.Content)
+    if err != nil {
+        return true, err
+    }
+    sshCommandEscaped := util.ShellEscape(sbCmd.String())
+    sshKeyMarshalled := strings.TrimSpace(string(ssh.MarshalAuthorizedKey(pubKey)))
+    sshKeyComment := fmt.Sprintf("user-%d", key.OwnerID)
+    _, err = fmt.Fprintf(w, tpl, sshCommandEscaped, sshKeyMarshalled, sshKeyComment)
+    return true, err
 }

 // appendAuthorizedKeysToFile appends new SSH keys' content to authorized_keys file.
@@ -112,7 +124,7 @@ func appendAuthorizedKeysToFile(keys ...*PublicKey) error {
         if key.Type == KeyTypePrincipal {
             continue
         }
-        if _, err = f.WriteString(key.AuthorizedString()); err != nil {
+        if err = WriteAuthorizedStringForValidKey(key, f); err != nil {
             return err
         }
     }
@@ -120,10 +132,9 @@
 }

 // RegeneratePublicKeys regenerates the authorized_keys file
-func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
+func RegeneratePublicKeys(ctx context.Context, t io.Writer) error {
     if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
-        _, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
-        return err
+        return WriteAuthorizedStringForValidKey(bean.(*PublicKey), t)
     }); err != nil {
         return err
     }
@@ -144,11 +155,11 @@ func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
     scanner := bufio.NewScanner(f)
     for scanner.Scan() {
         line := scanner.Text()
-        if strings.HasPrefix(line, tplCommentPrefix) {
+        if strings.HasPrefix(line, AuthorizedStringCommentPrefix) {
             scanner.Scan()
             continue
         }
-        _, err = t.WriteString(line + "\n")
+        _, err = io.WriteString(t, line+"\n")
         if err != nil {
             return err
         }
@@ -235,7 +235,7 @@ func (issue *Issue) verifyReferencedIssue(stdCtx context.Context, ctx *crossRefe

 // AddCrossReferences add cross references
 func (c *Comment) AddCrossReferences(stdCtx context.Context, doer *user_model.User, removeOld bool) error {
-    if c.Type != CommentTypeCode && c.Type != CommentTypeComment {
+    if !c.Type.HasContentSupport() {
         return nil
     }
     if err := c.LoadIssue(stdCtx); err != nil {
@@ -173,7 +173,7 @@ func GetReviewsByIssueID(ctx context.Context, issueID int64) (latestReviews, mig
     reviewersMap := make(map[int64][]*Review)         // key is reviewer id
     originalReviewersMap := make(map[int64][]*Review) // key is original author id
     reviewTeamsMap := make(map[int64][]*Review)       // key is reviewer team id
-    countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest}
+    countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest, ReviewTypeComment}
     for _, review := range reviews {
         if review.ReviewerTeamID == 0 && slices.Contains(countedReivewTypes, review.Type) && !review.Dismissed {
             if review.OriginalAuthorID != 0 {
@@ -123,6 +123,7 @@ func TestGetReviewersByIssueID(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())

     issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 3})
+    user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
     user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
     org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
     user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
@@ -130,6 +131,12 @@ func TestGetReviewersByIssueID(t *testing.T) {

     expectedReviews := []*issues_model.Review{}
     expectedReviews = append(expectedReviews,
+        &issues_model.Review{
+            ID:          5,
+            Reviewer:    user1,
+            Type:        issues_model.ReviewTypeComment,
+            UpdatedUnix: 946684810,
+        },
         &issues_model.Review{
             ID:       7,
             Reviewer: org3,
@@ -168,8 +175,9 @@ func TestGetReviewersByIssueID(t *testing.T) {
     for _, review := range allReviews {
         assert.NoError(t, review.LoadReviewer(db.DefaultContext))
     }
-    if assert.Len(t, allReviews, 5) {
+    if assert.Len(t, allReviews, 6) {
         for i, review := range allReviews {
             assert.Equal(t, expectedReviews[i].ID, review.ID)
             assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
             assert.Equal(t, expectedReviews[i].Type, review.Type)
+            assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
@@ -348,10 +348,8 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use

     for _, u := range repo.Units {
         for _, team := range teams {
-            unitAccessMode := minAccessMode
-            if teamMode, exist := team.UnitAccessModeEx(ctx, u.Type); exist {
-                unitAccessMode = max(perm.unitsMode[u.Type], unitAccessMode, teamMode)
-            }
+            teamMode, _ := team.UnitAccessModeEx(ctx, u.Type)
+            unitAccessMode := max(perm.unitsMode[u.Type], minAccessMode, teamMode)
             perm.unitsMode[u.Type] = unitAccessMode
         }
     }
@@ -197,4 +197,37 @@ func TestGetUserRepoPermission(t *testing.T) {
         assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeCode])
         assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues])
     })
+
+    repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) // org private repo, same org as repo 32
+    require.NoError(t, repo3.LoadOwner(ctx))
+    require.True(t, repo3.Owner.IsOrganization())
+    require.NoError(t, db.TruncateBeans(ctx, &organization.TeamUnit{}, &Access{})) // The user has access set of that repo, remove it, it is useless for our test
+    require.NoError(t, db.Insert(ctx, &organization.TeamRepo{OrgID: org.ID, TeamID: team.ID, RepoID: repo3.ID}))
+    t.Run("DoerWithNoopTeamOnPrivateRepo", func(t *testing.T) {
+        perm, err := GetUserRepoPermission(ctx, repo3, user)
+        require.NoError(t, err)
+        assert.Equal(t, perm_model.AccessModeNone, perm.AccessMode)
+        assert.Equal(t, perm_model.AccessModeNone, perm.unitsMode[unit.TypeCode])
+        assert.Equal(t, perm_model.AccessModeNone, perm.unitsMode[unit.TypeIssues])
+    })
+
+    require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeCode, AccessMode: perm_model.AccessModeNone}))
+    require.NoError(t, db.Insert(ctx, &organization.TeamUnit{OrgID: org.ID, TeamID: team.ID, Type: unit.TypeIssues, AccessMode: perm_model.AccessModeRead}))
+    t.Run("DoerWithReadIssueTeamOnPrivateRepo", func(t *testing.T) {
+        perm, err := GetUserRepoPermission(ctx, repo3, user)
+        require.NoError(t, err)
+        assert.Equal(t, perm_model.AccessModeNone, perm.AccessMode)
+        assert.Equal(t, perm_model.AccessModeNone, perm.unitsMode[unit.TypeCode])
+        assert.Equal(t, perm_model.AccessModeRead, perm.unitsMode[unit.TypeIssues])
+    })
+
+    require.NoError(t, db.Insert(ctx, repo_model.Collaboration{RepoID: repo3.ID, UserID: user.ID, Mode: perm_model.AccessModeWrite}))
+    require.NoError(t, db.Insert(ctx, Access{RepoID: repo3.ID, UserID: user.ID, Mode: perm_model.AccessModeWrite}))
+    t.Run("DoerWithReadIssueTeamAndWriteCollaboratorOnPrivateRepo", func(t *testing.T) {
+        perm, err := GetUserRepoPermission(ctx, repo3, user)
+        require.NoError(t, err)
+        assert.Equal(t, perm_model.AccessModeWrite, perm.AccessMode)
+        assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeCode])
+        assert.Equal(t, perm_model.AccessModeWrite, perm.unitsMode[unit.TypeIssues])
+    })
 }
@@ -85,8 +85,8 @@ func TestRepository_ChangeCollaborationAccessMode(t *testing.T) {

     assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, unittest.NonexistentID, perm.AccessModeAdmin))

-    // Disvard invalid input.
-    assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessMode(unittest.NonexistentID)))
+    // Discard invalid input.
+    assert.NoError(t, repo_model.ChangeCollaborationAccessMode(db.DefaultContext, repo, 4, perm.AccessMode(-1)))

     unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
 }
@@ -137,16 +137,9 @@ func DeleteUploads(ctx context.Context, uploads ...*Upload) (err error) {

     for _, upload := range uploads {
         localPath := upload.LocalPath()
-        isFile, err := util.IsFile(localPath)
-        if err != nil {
-            log.Error("Unable to check if %s is a file. Error: %v", localPath, err)
-        }
-        if !isFile {
-            continue
-        }
-
         if err := util.Remove(localPath); err != nil {
-            return fmt.Errorf("remove upload: %w", err)
+            // just continue, don't fail the whole operation if a file is missing (removed by others)
+            log.Error("unable to remove upload file %s: %v", localPath, err)
         }
     }

@@ -20,6 +20,7 @@ const (
     GitlabLanguage = "gitlab-language"
     Lockable       = "lockable"
     Filter         = "filter"
+    Diff           = "diff"
 )

 var LinguistAttributes = []string{
@@ -3,8 +3,6 @@

 package git

-import "path"
-
 // CommitInfo describes the first commit with the provided entry
 type CommitInfo struct {
     Entry *TreeEntry
@@ -12,11 +10,14 @@ type CommitInfo struct {
     SubmoduleFile *CommitSubmoduleFile
 }

-func getCommitInfoSubmoduleFile(repoLink string, entry *TreeEntry, commit *Commit, treePathDir string) (*CommitSubmoduleFile, error) {
-    fullPath := path.Join(treePathDir, entry.Name())
+func GetCommitInfoSubmoduleFile(repoLink, fullPath string, commit *Commit, refCommitID ObjectID) (*CommitSubmoduleFile, error) {
     submodule, err := commit.GetSubModule(fullPath)
     if err != nil {
         return nil, err
     }
-    return NewCommitSubmoduleFile(repoLink, fullPath, submodule.URL, entry.ID.String()), nil
+    if submodule == nil {
+        // unable to find submodule from ".gitmodules" file
+        return NewCommitSubmoduleFile(repoLink, fullPath, "", refCommitID.String()), nil
+    }
+    return NewCommitSubmoduleFile(repoLink, fullPath, submodule.URL, refCommitID.String()), nil
 }
@@ -73,7 +73,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, repoLink string, commit *

         // If the entry is a submodule, add a submodule file for this
         if entry.IsSubModule() {
-            commitsInfo[i].SubmoduleFile, err = getCommitInfoSubmoduleFile(repoLink, entry, commit, treePath)
+            commitsInfo[i].SubmoduleFile, err = GetCommitInfoSubmoduleFile(repoLink, path.Join(treePath, entry.Name()), commit, entry.ID)
             if err != nil {
                 return nil, nil, err
             }
@@ -67,7 +67,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, repoLink string, commit *

         // If the entry is a submodule, add a submodule file for this
         if entry.IsSubModule() {
-            commitsInfo[i].SubmoduleFile, err = getCommitInfoSubmoduleFile(repoLink, entry, commit, treePath)
+            commitsInfo[i].SubmoduleFile, err = GetCommitInfoSubmoduleFile(repoLink, path.Join(treePath, entry.Name()), commit, entry.ID)
             if err != nil {
                 return nil, nil, err
             }
@@ -9,6 +9,7 @@ import (
     "time"

     "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
 )

 const (
@@ -120,6 +121,23 @@ func TestEntries_GetCommitsInfo(t *testing.T) {
     defer clonedRepo1.Close()

     testGetCommitsInfo(t, clonedRepo1)
+
+    t.Run("NonExistingSubmoduleAsNil", func(t *testing.T) {
+        commit, err := bareRepo1.GetCommit("HEAD")
+        require.NoError(t, err)
+        treeEntry, err := commit.GetTreeEntryByPath("file1.txt")
+        require.NoError(t, err)
+        cisf, err := GetCommitInfoSubmoduleFile("/any/repo-link", "file1.txt", commit, treeEntry.ID)
+        require.NoError(t, err)
+        assert.Equal(t, &CommitSubmoduleFile{
+            repoLink: "/any/repo-link",
+            fullPath: "file1.txt",
+            refURL:   "",
+            refID:    "e2129701f1a4d54dc44f03c93bca0a2aec7c5449",
+        }, cisf)
+        // since there is no refURL, it means that the submodule info doesn't exist, so it won't have a web link
+        assert.Nil(t, cisf.SubmoduleWebLinkTree(t.Context()))
+    })
 }

 func BenchmarkEntries_GetCommitsInfo(b *testing.B) {
@@ -35,7 +35,8 @@ func (c *Commit) GetSubModules() (*ObjectCache[*SubModule], error) {
     return c.submoduleCache, nil
 }

-// GetSubModule get the submodule according entry name
+// GetSubModule gets the submodule by the entry name.
+// It returns "nil, nil" if the submodule does not exist, caller should always remember to check the "nil"
 func (c *Commit) GetSubModule(entryName string) (*SubModule, error) {
     modules, err := c.GetSubModules()
     if err != nil {
@@ -29,16 +29,20 @@ func NewCommitSubmoduleFile(repoLink, fullPath, refURL, refID string) *CommitSub
     return &CommitSubmoduleFile{repoLink: repoLink, fullPath: fullPath, refURL: refURL, refID: refID}
 }

+// RefID returns the commit ID of the submodule, it returns empty string for nil receiver
+func (sf *CommitSubmoduleFile) RefID() string {
+    if sf == nil {
+        return ""
+    }
+    return sf.refID
+}
+
 func (sf *CommitSubmoduleFile) getWebLinkInTargetRepo(ctx context.Context, moreLinkPath string) *SubmoduleWebLink {
-    if sf == nil {
+    if sf == nil || sf.refURL == "" {
         return nil
     }
     if strings.HasPrefix(sf.refURL, "../") {
-        targetLink := path.Join(sf.repoLink, path.Dir(sf.fullPath), sf.refURL)
+        targetLink := path.Join(sf.repoLink, sf.refURL)
         return &SubmoduleWebLink{RepoWebLink: targetLink, CommitWebLink: targetLink + moreLinkPath}
     }
     if !sf.parsed {
@@ -53,14 +57,13 @@
 }

 // SubmoduleWebLinkTree tries to make the submodule's tree link in its own repo, it also works on "nil" receiver
+// It returns nil if the submodule does not have a valid URL or is nil
 func (sf *CommitSubmoduleFile) SubmoduleWebLinkTree(ctx context.Context, optCommitID ...string) *SubmoduleWebLink {
-    if sf == nil {
-        return nil
-    }
-    return sf.getWebLinkInTargetRepo(ctx, "/tree/"+util.OptionalArg(optCommitID, sf.refID))
+    return sf.getWebLinkInTargetRepo(ctx, "/tree/"+util.OptionalArg(optCommitID, sf.RefID()))
 }

 // SubmoduleWebLinkCompare tries to make the submodule's compare link in its own repo, it also works on "nil" receiver
+// It returns nil if the submodule does not have a valid URL or is nil
 func (sf *CommitSubmoduleFile) SubmoduleWebLinkCompare(ctx context.Context, commitID1, commitID2 string) *SubmoduleWebLink {
     return sf.getWebLinkInTargetRepo(ctx, "/compare/"+commitID1+"..."+commitID2)
 }
@@ -12,6 +12,8 @@ import (
 func TestCommitSubmoduleLink(t *testing.T) {
     assert.Nil(t, (*CommitSubmoduleFile)(nil).SubmoduleWebLinkTree(t.Context()))
     assert.Nil(t, (*CommitSubmoduleFile)(nil).SubmoduleWebLinkCompare(t.Context(), "", ""))
+    assert.Nil(t, (&CommitSubmoduleFile{}).SubmoduleWebLinkTree(t.Context()))
+    assert.Nil(t, (&CommitSubmoduleFile{}).SubmoduleWebLinkCompare(t.Context(), "", ""))

     t.Run("GitHubRepo", func(t *testing.T) {
         sf := NewCommitSubmoduleFile("/any/repo-link", "full-path", "git@github.com:user/repo.git", "aaaa")
@@ -30,7 +32,7 @@ func TestCommitSubmoduleLink(t *testing.T) {
         assert.Equal(t, "/subpath/user/repo", wl.RepoWebLink)
         assert.Equal(t, "/subpath/user/repo/tree/aaaa", wl.CommitWebLink)

-        sf = NewCommitSubmoduleFile("/subpath/any/repo-home-link", "dir/submodule", "../../../user/repo", "aaaa")
+        sf = NewCommitSubmoduleFile("/subpath/any/repo-home-link", "dir/submodule", "../../user/repo", "aaaa")
         wl = sf.SubmoduleWebLinkCompare(t.Context(), "1111", "2222")
         assert.Equal(t, "/subpath/user/repo", wl.RepoWebLink)
         assert.Equal(t, "/subpath/user/repo/compare/1111...2222", wl.CommitWebLink)
@@ -30,6 +30,10 @@ type Parser struct {
 func NewParser(r io.Reader, format Format) *Parser {
     scanner := bufio.NewScanner(r)

+    // default MaxScanTokenSize = 64 kiB may be too small for some references,
+    // so allow the buffer to grow up to 4x if needed
+    scanner.Buffer(nil, 4*bufio.MaxScanTokenSize)
+
     // in addition to the reference delimiter we specified in the --format,
     // `git for-each-ref` will always add a newline after every reference.
     refDelim := make([]byte, 0, len(format.refDelim)+1)
@@ -70,6 +74,9 @@ func NewParser(r io.Reader, format Format) *Parser {
 // { "objecttype": "tag", "refname:short": "v1.16.4", "object": "f460b7543ed500e49c133c2cd85c8c55ee9dbe27" }
 func (p *Parser) Next() map[string]string {
     if !p.scanner.Scan() {
+        if err := p.scanner.Err(); err != nil {
+            p.err = err
+        }
         return nil
     }
     fields, err := p.parseRef(p.scanner.Text())
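The hunk above raises the scanner's maximum token size because a single `git for-each-ref` record can exceed bufio's 64 KiB default. As a standalone illustration of that bufio API (not Gitea code; the oversized record here is synthetic), a scanner configured this way consumes tokens up to the given maximum instead of failing with ErrTooLong:

```go
package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// A single record larger than bufio.MaxScanTokenSize (64 KiB) would make
	// a default Scanner stop with bufio.ErrTooLong.
	record := strings.Repeat("x", 100*1024)
	sc := bufio.NewScanner(strings.NewReader(record + "\n"))

	// Start with a nil buffer and let it grow on demand, up to 4x the default
	// cap, mirroring scanner.Buffer(nil, 4*bufio.MaxScanTokenSize) above.
	sc.Buffer(nil, 4*bufio.MaxScanTokenSize)

	for sc.Scan() {
		fmt.Println("token length:", len(sc.Text()))
	}
	if err := sc.Err(); err != nil {
		fmt.Println("scan error:", err) // would be ErrTooLong without the Buffer call
	}
}
```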
@@ -51,30 +51,16 @@ func GetHook(repoPath, name string) (*Hook, error) {
         name: name,
         path: filepath.Join(repoPath, "hooks", name+".d", name),
     }
-    isFile, err := util.IsFile(h.path)
-    if err != nil {
-        return nil, err
-    }
-    if isFile {
-        data, err := os.ReadFile(h.path)
-        if err != nil {
-            return nil, err
-        }
+    if data, err := os.ReadFile(h.path); err == nil {
         h.IsActive = true
         h.Content = string(data)
         return h, nil
+    } else if !os.IsNotExist(err) {
+        return nil, err
     }

     samplePath := filepath.Join(repoPath, "hooks", name+".sample")
-    isFile, err = util.IsFile(samplePath)
-    if err != nil {
-        return nil, err
-    }
-    if isFile {
-        data, err := os.ReadFile(samplePath)
-        if err != nil {
-            return nil, err
-        }
+    if data, err := os.ReadFile(samplePath); err == nil {
         h.Sample = string(data)
     }
     return h, nil
@@ -34,12 +34,12 @@ func TestParseGitURLs(t *testing.T) {
         },
     },
     {
-        kase: "git@[fe80:14fc:cec5:c174:d88%2510]:go-gitea/gitea.git",
+        kase: "git@[fe80::14fc:cec5:c174:d88%2510]:go-gitea/gitea.git",
         expected: &GitURL{
             URL: &url.URL{
                 Scheme: "ssh",
                 User:   url.User("git"),
-                Host:   "[fe80:14fc:cec5:c174:d88%10]",
+                Host:   "[fe80::14fc:cec5:c174:d88%10]",
                 Path:   "go-gitea/gitea.git",
             },
             extraMark: 1,
@@ -137,11 +137,11 @@ func TestParseGitURLs(t *testing.T) {
         },
     },
     {
-        kase: "https://[fe80:14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git",
+        kase: "https://[fe80::14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git",
         expected: &GitURL{
             URL: &url.URL{
                 Scheme: "https",
-                Host:   "[fe80:14fc:cec5:c174:d88%10]:20",
+                Host:   "[fe80::14fc:cec5:c174:d88%10]:20",
                 Path:   "/go-gitea/gitea.git",
             },
             extraMark: 0,
@@ -11,6 +11,8 @@ import (
     "strconv"
     "strings"
     "sync"
+
+    "code.gitea.io/gitea/modules/util"
 )

 // ObjectCache provides thread-safe cache operations.
@@ -106,3 +108,16 @@ func HashFilePathForWebUI(s string) string {
     _, _ = h.Write([]byte(s))
     return hex.EncodeToString(h.Sum(nil))
 }
+
+func SplitCommitTitleBody(commitMessage string, titleRuneLimit int) (title, body string) {
+    title, body, _ = strings.Cut(commitMessage, "\n")
+    title, title2 := util.EllipsisTruncateRunes(title, titleRuneLimit)
+    if title2 != "" {
+        if body == "" {
+            body = title2
+        } else {
+            body = title2 + "\n" + body
+        }
+    }
+    return title, body
+}
@@ -15,3 +15,17 @@ func TestHashFilePathForWebUI(t *testing.T) {
         HashFilePathForWebUI("foobar"),
     )
 }
+
+func TestSplitCommitTitleBody(t *testing.T) {
+    title, body := SplitCommitTitleBody("啊bcdefg", 4)
+    assert.Equal(t, "啊…", title)
+    assert.Equal(t, "…bcdefg", body)
+
+    title, body = SplitCommitTitleBody("abcdefg\n1234567", 4)
+    assert.Equal(t, "a…", title)
+    assert.Equal(t, "…bcdefg\n1234567", body)
+
+    title, body = SplitCommitTitleBody("abcdefg\n1234567", 100)
+    assert.Equal(t, "abcdefg", title)
+    assert.Equal(t, "1234567", body)
+}
@@ -4,7 +4,10 @@
 package hcaptcha

 import (
+    "errors"
+    "io"
     "net/http"
+    "net/url"
     "os"
     "strings"
     "testing"
@@ -21,6 +24,33 @@ func TestMain(m *testing.M) {
     os.Exit(m.Run())
 }

+type mockTransport struct{}
+
+func (mockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+    if req.URL.String() != verifyURL {
+        return nil, errors.New("unsupported url")
+    }
+
+    body, err := io.ReadAll(req.Body)
+    if err != nil {
+        return nil, err
+    }
+
+    bodyValues, err := url.ParseQuery(string(body))
+    if err != nil {
+        return nil, err
+    }
+
+    var responseText string
+    if bodyValues.Get("response") == dummyToken {
+        responseText = `{"success":true,"credit":false,"hostname":"dummy-key-pass","challenge_ts":"2025-10-08T16:02:56.136Z"}`
+    } else {
+        responseText = `{"success":false,"error-codes":["invalid-input-response"]}`
+    }
+
+    return &http.Response{Request: req, Body: io.NopCloser(strings.NewReader(responseText))}, nil
+}
+
 func TestCaptcha(t *testing.T) {
     tt := []struct {
         Name string
@@ -54,7 +84,8 @@ func TestCaptcha(t *testing.T) {
     for _, tc := range tt {
         t.Run(tc.Name, func(t *testing.T) {
             client, err := New(tc.Secret, WithHTTP(&http.Client{
-                Timeout: time.Second * 5,
+                Timeout:   time.Second * 5,
+                Transport: mockTransport{},
             }))
             if err != nil {
                 // The only error that can be returned from creating a client
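The mock above works by swapping the HTTP client's Transport, so TestCaptcha never talks to the real hCaptcha verify endpoint. The same pattern reduces to a generic sketch that is independent of the hcaptcha package; the URL and the canned JSON body here are placeholders, not values from the test above:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

// fakeTransport satisfies http.RoundTripper and returns a canned response,
// so no network traffic happens during a test.
type fakeTransport struct{}

func (fakeTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	return &http.Response{
		StatusCode: http.StatusOK,
		Request:    req,
		Body:       io.NopCloser(strings.NewReader(`{"success":true}`)),
	}, nil
}

func main() {
	client := &http.Client{Transport: fakeTransport{}}
	resp, err := client.Post("https://example.invalid/verify",
		"application/x-www-form-urlencoded", strings.NewReader("response=token"))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body)) // 200 {"success":true}
}
```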
@@ -22,6 +22,7 @@ import (
     "code.gitea.io/gitea/modules/process"
     "code.gitea.io/gitea/modules/queue"
     "code.gitea.io/gitea/modules/setting"
+    "code.gitea.io/gitea/modules/util"
 )

 var (
@@ -166,12 +167,12 @@ func Init() {
             log.Fatal("PID: %d Unable to initialize the bleve Repository Indexer at path: %s Error: %v", os.Getpid(), setting.Indexer.RepoPath, err)
         }
     case "elasticsearch":
-        log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), setting.Indexer.RepoConnStr)
+        log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr))
         defer func() {
             if err := recover(); err != nil {
                 log.Error("PANIC whilst initializing repository indexer: %v\nStacktrace: %s", err, log.Stack(2))
                 log.Error("The indexer files are likely corrupted and may need to be deleted")
-                log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", setting.Indexer.RepoConnStr)
+                log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr))
             }
         }()
@@ -181,7 +182,7 @@ func Init() {
             cancel()
             (*globalIndexer.Load()).Close()
             close(waitChannel)
-            log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), setting.Indexer.RepoConnStr, err)
+            log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr), err)
         }

     default:
@@ -25,6 +25,7 @@ import (
     "code.gitea.io/gitea/modules/process"
     "code.gitea.io/gitea/modules/queue"
     "code.gitea.io/gitea/modules/setting"
+    "code.gitea.io/gitea/modules/util"
 )

 // IndexerMetadata is used to send data to the queue, so it contains only the ids.
@@ -100,7 +101,7 @@ func InitIssueIndexer(syncReindex bool) {
         issueIndexer = elasticsearch.NewIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueIndexerName)
         existed, err = issueIndexer.Init(ctx)
         if err != nil {
-            log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
+            log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", util.SanitizeCredentialURLs(setting.Indexer.IssueConnStr), err)
         }
     case "db":
         issueIndexer = db.GetIndexer()
@@ -108,7 +109,7 @@ func InitIssueIndexer(syncReindex bool) {
         issueIndexer = meilisearch.NewIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueConnAuth, setting.Indexer.IssueIndexerName)
         existed, err = issueIndexer.Init(ctx)
         if err != nil {
-            log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
+            log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", util.SanitizeCredentialURLs(setting.Indexer.IssueConnStr), err)
         }
     default:
         log.Fatal("Unknown issue indexer type: %s", setting.Indexer.IssueType)
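The two indexer changes above wrap connection strings in util.SanitizeCredentialURLs before logging, so passwords embedded in an Elasticsearch or Meilisearch URL do not end up in log files. Gitea's helper itself is not part of this comparison; for a connection string that parses as a URL, the standard library's (*url.URL).Redacted captures the same idea, shown here only as a rough, assumption-labeled illustration:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Hypothetical connection string with embedded credentials.
	connStr := "http://elastic:supersecret@localhost:9200"

	u, err := url.Parse(connStr)
	if err != nil {
		fmt.Println("not a parsable URL; log a placeholder instead")
		return
	}
	// Redacted replaces the password with "xxxxx" while keeping the rest readable.
	fmt.Println(u.Redacted()) // http://elastic:xxxxx@localhost:9200
}
```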
@@ -82,6 +82,7 @@ type ProgrammingLanguage struct {
 // https://schema.org/Person
 type Person struct {
     Type       string `json:"@type,omitempty"`
+    Name       string `json:"name,omitempty"` // inherited from https://schema.org/Thing
     GivenName  string `json:"givenName,omitempty"`
     MiddleName string `json:"middleName,omitempty"`
     FamilyName string `json:"familyName,omitempty"`
@@ -184,11 +185,17 @@ func ParsePackage(sr io.ReaderAt, size int64, mr io.Reader) (*Package, error) {
     p.Metadata.Description = ssc.Description
     p.Metadata.Keywords = ssc.Keywords
     p.Metadata.License = ssc.License
-    p.Metadata.Author = Person{
+    author := Person{
+        Name:       ssc.Author.Name,
         GivenName:  ssc.Author.GivenName,
         MiddleName: ssc.Author.MiddleName,
         FamilyName: ssc.Author.FamilyName,
     }
+    // If Name is not provided, generate it from individual name components
+    if author.Name == "" {
+        author.Name = author.String()
+    }
+    p.Metadata.Author = author

     p.Metadata.RepositoryURL = ssc.CodeRepository
     if !validation.IsValidURL(p.Metadata.RepositoryURL) {
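ParsePackage now falls back to author.String() when the Swift metadata carries no explicit name. The String method itself is outside this excerpt; judging from the test expectations below ("John Q Doe", "Q Doe", empty string), it presumably joins the non-empty name components with single spaces. The following is a self-contained sketch of that inferred behavior, not the upstream implementation:

```go
package main

import (
	"fmt"
	"strings"
)

type Person struct {
	Name, GivenName, MiddleName, FamilyName string
}

// String joins the non-empty name parts with single spaces; inferred from the
// test cases in the next hunk, so treat it as an assumption.
func (p Person) String() string {
	parts := make([]string, 0, 3)
	for _, s := range []string{p.GivenName, p.MiddleName, p.FamilyName} {
		if s != "" {
			parts = append(parts, s)
		}
	}
	return strings.Join(parts, " ")
}

func main() {
	fmt.Println(Person{GivenName: "John", MiddleName: "Q", FamilyName: "Doe"}) // John Q Doe
	fmt.Println(Person{MiddleName: "Q", FamilyName: "Doe"})                    // Q Doe
}
```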
@@ -97,10 +97,49 @@ func TestParsePackage(t *testing.T) {
         assert.Equal(t, packageDescription, p.Metadata.Description)
         assert.ElementsMatch(t, []string{"swift", "package"}, p.Metadata.Keywords)
         assert.Equal(t, packageLicense, p.Metadata.License)
+        assert.Equal(t, packageAuthor, p.Metadata.Author.Name)
         assert.Equal(t, packageAuthor, p.Metadata.Author.GivenName)
         assert.Equal(t, packageRepositoryURL, p.Metadata.RepositoryURL)
         assert.ElementsMatch(t, []string{packageRepositoryURL}, p.RepositoryURLs)
     })
+
+    t.Run("WithExplicitNameField", func(t *testing.T) {
+        data := createArchive(map[string][]byte{
+            "Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"),
+        })
+
+        authorName := "John Doe"
+        p, err := ParsePackage(
+            data,
+            data.Size(),
+            strings.NewReader(`{"name":"`+packageName+`","version":"`+packageVersion+`","description":"`+packageDescription+`","author":{"name":"`+authorName+`","givenName":"John","familyName":"Doe"}}`),
+        )
+        assert.NotNil(t, p)
+        assert.NoError(t, err)
+
+        assert.Equal(t, authorName, p.Metadata.Author.Name)
+        assert.Equal(t, "John", p.Metadata.Author.GivenName)
+        assert.Equal(t, "Doe", p.Metadata.Author.FamilyName)
+    })
+
+    t.Run("NameFieldGeneration", func(t *testing.T) {
+        data := createArchive(map[string][]byte{
+            "Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"),
+        })
+
+        // Test with only individual name components - Name should be auto-generated
+        p, err := ParsePackage(
+            data,
+            data.Size(),
+            strings.NewReader(`{"author":{"givenName":"John","middleName":"Q","familyName":"Doe"}}`),
+        )
+        assert.NotNil(t, p)
+        assert.NoError(t, err)
+        assert.Equal(t, "John Q Doe", p.Metadata.Author.Name)
+        assert.Equal(t, "John", p.Metadata.Author.GivenName)
+        assert.Equal(t, "Q", p.Metadata.Author.MiddleName)
+        assert.Equal(t, "Doe", p.Metadata.Author.FamilyName)
+    })
 }

 func TestTrimmedVersionString(t *testing.T) {
@@ -142,3 +181,43 @@ func TestTrimmedVersionString(t *testing.T) {
         assert.Equal(t, c.Expected, TrimmedVersionString(c.Version))
     }
 }
+
+func TestPersonNameString(t *testing.T) {
+    cases := []struct {
+        Name     string
+        Person   Person
+        Expected string
+    }{
+        {
+            Name:     "GivenNameOnly",
+            Person:   Person{GivenName: "John"},
+            Expected: "John",
+        },
+        {
+            Name:     "GivenAndFamily",
+            Person:   Person{GivenName: "John", FamilyName: "Doe"},
+            Expected: "John Doe",
+        },
+        {
+            Name:     "FullName",
+            Person:   Person{GivenName: "John", MiddleName: "Q", FamilyName: "Doe"},
+            Expected: "John Q Doe",
+        },
+        {
+            Name:     "MiddleAndFamily",
+            Person:   Person{MiddleName: "Q", FamilyName: "Doe"},
+            Expected: "Q Doe",
+        },
+        {
+            Name:     "Empty",
+            Person:   Person{},
+            Expected: "",
+        },
+    }
+
+    for _, c := range cases {
+        t.Run(c.Name, func(t *testing.T) {
+            assert.Equal(t, c.Expected, c.Person.String())
+        })
+    }
+}
@@ -24,7 +24,7 @@ var (
     ZombieTaskTimeout   time.Duration `ini:"ZOMBIE_TASK_TIMEOUT"`
     EndlessTaskTimeout  time.Duration `ini:"ENDLESS_TASK_TIMEOUT"`
     AbandonedJobTimeout time.Duration `ini:"ABANDONED_JOB_TIMEOUT"`
-    SkipWorkflowStrings []string      `ìni:"SKIP_WORKFLOW_STRINGS"`
+    SkipWorkflowStrings []string      `ini:"SKIP_WORKFLOW_STRINGS"`
 }{
     Enabled:           true,
     DefaultActionsURL: defaultActionsURLGitHub,
@@ -202,11 +202,11 @@ func NewConfigProviderFromFile(file string) (ConfigProvider, error) {
     loadedFromEmpty := true

     if file != "" {
-        isFile, err := util.IsFile(file)
+        isExist, err := util.IsExist(file)
         if err != nil {
-            return nil, fmt.Errorf("unable to check if %q is a file. Error: %v", file, err)
+            return nil, fmt.Errorf("unable to check if %q exists: %v", file, err)
         }
-        if isFile {
+        if isExist {
             if err = cfg.Append(file); err != nil {
                 return nil, fmt.Errorf("failed to load config file %q: %v", file, err)
             }
@@ -41,3 +41,56 @@ EXTEND = true
     assert.Equal(t, "white rabbit", extended.Second)
     assert.True(t, extended.Extend)
 }
+
+// Test_getCronSettings2 tests that getCronSettings can not handle two levels of embedding
+func Test_getCronSettings2(t *testing.T) {
+    type BaseStruct struct {
+        Enabled    bool
+        RunAtStart bool
+        Schedule   string
+    }
+
+    type Extended struct {
+        BaseStruct
+        Extend bool
+    }
+    type Extended2 struct {
+        Extended
+        Third string
+    }
+
+    iniStr := `
+[cron.test]
+ENABLED = TRUE
+RUN_AT_START = TRUE
+SCHEDULE = @every 1h
+EXTEND = true
+THIRD = white rabbit
+`
+    cfg, err := NewConfigProviderFromData(iniStr)
+    assert.NoError(t, err)
+
+    extended := &Extended2{
+        Extended: Extended{
+            BaseStruct: BaseStruct{
+                Enabled:    false,
+                RunAtStart: false,
+                Schedule:   "@every 72h",
+            },
+            Extend: false,
+        },
+        Third: "black rabbit",
+    }
+
+    _, err = getCronSettings(cfg, "test", extended)
+    assert.NoError(t, err)
+
+    // This confirms the first level of embedding works
+    assert.Equal(t, "white rabbit", extended.Third)
+    assert.True(t, extended.Extend)
+
+    // This confirms 2 levels of embedding doesn't work
+    assert.False(t, extended.Enabled)
+    assert.False(t, extended.RunAtStart)
+    assert.Equal(t, "@every 72h", extended.Schedule)
+}
@@ -13,6 +13,6 @@ func TestCountFmt(t *testing.T) {
     assert.Equal(t, "125", countFmt(125))
     assert.Equal(t, "1.3k", countFmt(int64(1317)))
     assert.Equal(t, "21.3M", countFmt(21317675))
-    assert.Equal(t, "45.7G", countFmt(45721317675))
+    assert.Equal(t, "45.7G", countFmt(int64(45721317675)))
     assert.Empty(t, countFmt("test"))
 }
@@ -115,15 +115,10 @@ func IsDir(dir string) (bool, error) {
     return false, err
 }

-// IsFile returns true if given path is a file,
-// or returns false when it's a directory or does not exist.
-func IsFile(filePath string) (bool, error) {
-    f, err := os.Stat(filePath)
+func IsRegularFile(filePath string) (bool, error) {
+    f, err := os.Lstat(filePath)
     if err == nil {
-        return !f.IsDir(), nil
-    }
-    if os.IsNotExist(err) {
-        return false, nil
+        return f.Mode().IsRegular(), nil
     }
     return false, err
 }
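IsFile becomes IsRegularFile above and switches from os.Stat to os.Lstat, so a path is judged by the entry itself rather than by whatever a symlink points at. A small standalone sketch of that difference (the temporary paths are illustrative only; os.Symlink may need extra privileges on Windows):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir, _ := os.MkdirTemp("", "lstat-demo")
	defer os.RemoveAll(dir)

	file := filepath.Join(dir, "plain.txt")
	_ = os.WriteFile(file, []byte("hi"), 0o644)
	link := filepath.Join(dir, "link-to-file")
	_ = os.Symlink(file, link)

	st, _ := os.Stat(link)   // follows the symlink: reports the target
	lst, _ := os.Lstat(link) // does not follow: reports the link itself

	fmt.Println("Stat  -> regular:", st.Mode().IsRegular())  // true (the target file)
	fmt.Println("Lstat -> regular:", lst.Mode().IsRegular()) // false (it is a symlink)
}
```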
@@ -19,7 +19,7 @@ func IsLikelyEllipsisLeftPart(s string) bool {
     return strings.HasSuffix(s, utf8Ellipsis) || strings.HasSuffix(s, asciiEllipsis)
 }

-func ellipsisGuessDisplayWidth(r rune) int {
+func ellipsisDisplayGuessWidth(r rune) int {
     // To make the truncated string as long as possible,
     // CJK/emoji chars are considered as 2-ASCII width but not 3-4 bytes width.
     // Here we only make the best guess (better than counting them in bytes),
@@ -48,13 +48,17 @@ func ellipsisGuessDisplayWidth(r rune) int {
 // It appends "…" or "..." at the end of truncated string.
 // It guarantees the length of the returned runes doesn't exceed the limit.
 func EllipsisDisplayString(str string, limit int) string {
-    s, _, _, _ := ellipsisDisplayString(str, limit)
+    s, _, _, _ := ellipsisDisplayString(str, limit, ellipsisDisplayGuessWidth)
     return s
 }

 // EllipsisDisplayStringX works like EllipsisDisplayString while it also returns the right part
 func EllipsisDisplayStringX(str string, limit int) (left, right string) {
-    left, offset, truncated, encounterInvalid := ellipsisDisplayString(str, limit)
+    return ellipsisDisplayStringX(str, limit, ellipsisDisplayGuessWidth)
+}
+
+func ellipsisDisplayStringX(str string, limit int, widthGuess func(rune) int) (left, right string) {
+    left, offset, truncated, encounterInvalid := ellipsisDisplayString(str, limit, widthGuess)
     if truncated {
         right = str[offset:]
         r, _ := utf8.DecodeRune(UnsafeStringToBytes(right))
@@ -68,7 +72,7 @@ func EllipsisDisplayStringX(str string, limit int) (left, right string) {
     return left, right
 }

-func ellipsisDisplayString(str string, limit int) (res string, offset int, truncated, encounterInvalid bool) {
+func ellipsisDisplayString(str string, limit int, widthGuess func(rune) int) (res string, offset int, truncated, encounterInvalid bool) {
     if len(str) <= limit {
         return str, len(str), false, false
     }
@@ -81,7 +85,7 @@ func ellipsisDisplayString(str string, limit int) (res string, offset int, trunc
     for i, r := range str {
         encounterInvalid = encounterInvalid || r == utf8.RuneError
         pos = i
-        runeWidth := ellipsisGuessDisplayWidth(r)
+        runeWidth := widthGuess(r)
         if used+runeWidth+3 > limit {
             break
         }
@@ -96,7 +100,7 @@
         if nextCnt >= 4 {
             break
         }
-        nextWidth += ellipsisGuessDisplayWidth(r)
+        nextWidth += widthGuess(r)
         nextCnt++
     }
     if nextCnt <= 3 && used+nextWidth <= limit {
@@ -114,6 +118,10 @@
     return str[:offset] + ellipsis, offset, true, encounterInvalid
 }

+func EllipsisTruncateRunes(str string, limit int) (left, right string) {
+    return ellipsisDisplayStringX(str, limit, func(r rune) int { return 1 })
+}
+
 // TruncateRunes returns a truncated string with given rune limit,
 // it returns input string if its rune length doesn't exceed the limit.
 func TruncateRunes(str string, limit int) string {
@@ -29,7 +29,7 @@ func TestEllipsisGuessDisplayWidth(t *testing.T) {
|
||||
t.Run(c.r, func(t *testing.T) {
|
||||
w := 0
|
||||
for _, r := range c.r {
|
||||
w += ellipsisGuessDisplayWidth(r)
|
||||
w += ellipsisDisplayGuessWidth(r)
|
||||
}
|
||||
assert.Equal(t, c.want, w, "hex=% x", []byte(c.r))
|
||||
})
|
||||
|
||||
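The refactor threads a widthGuess callback through ellipsisDisplayString, so the same truncation core can count either guessed display columns (ellipsisDisplayGuessWidth) or plain runes (EllipsisTruncateRunes passes a constant 1). A stripped-down sketch of that callback pattern; the crude width guess below is an assumption for the demo only, and the real functions also track byte offsets, invalid UTF-8 and the width of the ellipsis itself:

package main

import "fmt"

// truncateByWidth keeps runes until the guessed width budget is spent,
// then appends an ellipsis at a rune boundary.
func truncateByWidth(s string, limit int, widthGuess func(rune) int) string {
	used := 0
	for i, r := range s {
		w := widthGuess(r)
		if used+w > limit {
			return s[:i] + "…"
		}
		used += w
	}
	return s
}

func main() {
	cjkGuess := func(r rune) int { // very rough stand-in for ellipsisDisplayGuessWidth
		if r >= 0x1100 {
			return 2
		}
		return 1
	}
	runeCount := func(r rune) int { return 1 } // what EllipsisTruncateRunes effectively uses

	fmt.Println(truncateByWidth("宽字符串很长", 6, cjkGuess))  // counts CJK as 2 columns, truncates sooner
	fmt.Println(truncateByWidth("宽字符串很长", 6, runeCount)) // counts plain runes, keeps the whole string
}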
36 package-lock.json (generated)
@@ -35,7 +35,7 @@
|
||||
"jquery": "3.7.1",
|
||||
"katex": "0.16.22",
|
||||
"license-checker-webpack-plugin": "0.2.1",
|
||||
"mermaid": "11.6.0",
|
||||
"mermaid": "11.10.0",
|
||||
"mini-css-extract-plugin": "2.9.2",
|
||||
"minimatch": "10.0.1",
|
||||
"monaco-editor": "0.52.2",
|
||||
@@ -1540,9 +1540,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@mermaid-js/parser": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.4.0.tgz",
|
||||
"integrity": "sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==",
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.2.tgz",
|
||||
"integrity": "sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"langium": "3.3.1"
|
||||
@@ -6154,9 +6154,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/dompurify": {
|
||||
"version": "3.2.4",
|
||||
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.4.tgz",
|
||||
"integrity": "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==",
|
||||
"version": "3.2.6",
|
||||
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.6.tgz",
|
||||
"integrity": "sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==",
|
||||
"license": "(MPL-2.0 OR Apache-2.0)",
|
||||
"optionalDependencies": {
|
||||
"@types/trusted-types": "^2.0.7"
|
||||
@@ -9249,14 +9249,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/mermaid": {
|
||||
"version": "11.6.0",
|
||||
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.6.0.tgz",
|
||||
"integrity": "sha512-PE8hGUy1LDlWIHWBP05SFdqUHGmRcCcK4IzpOKPE35eOw+G9zZgcnMpyunJVUEOgb//KBORPjysKndw8bFLuRg==",
|
||||
"version": "11.10.0",
|
||||
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.10.0.tgz",
|
||||
"integrity": "sha512-oQsFzPBy9xlpnGxUqLbVY8pvknLlsNIJ0NWwi8SUJjhbP1IT0E0o1lfhU4iYV3ubpy+xkzkaOyDUQMn06vQElQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@braintree/sanitize-url": "^7.0.4",
|
||||
"@iconify/utils": "^2.1.33",
|
||||
"@mermaid-js/parser": "^0.4.0",
|
||||
"@mermaid-js/parser": "^0.6.2",
|
||||
"@types/d3": "^7.4.3",
|
||||
"cytoscape": "^3.29.3",
|
||||
"cytoscape-cose-bilkent": "^4.1.0",
|
||||
@@ -9265,11 +9265,11 @@
|
||||
"d3-sankey": "^0.12.3",
|
||||
"dagre-d3-es": "7.0.11",
|
||||
"dayjs": "^1.11.13",
|
||||
"dompurify": "^3.2.4",
|
||||
"katex": "^0.16.9",
|
||||
"dompurify": "^3.2.5",
|
||||
"katex": "^0.16.22",
|
||||
"khroma": "^2.1.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"marked": "^15.0.7",
|
||||
"marked": "^16.0.0",
|
||||
"roughjs": "^4.6.6",
|
||||
"stylis": "^4.3.6",
|
||||
"ts-dedent": "^2.2.0",
|
||||
@@ -9277,15 +9277,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/mermaid/node_modules/marked": {
|
||||
"version": "15.0.7",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-15.0.7.tgz",
|
||||
"integrity": "sha512-dgLIeKGLx5FwziAnsk4ONoGwHwGPJzselimvlVskE9XLN4Orv9u2VA3GWw/lYUqjfA0rUT/6fqKwfZJapP9BEg==",
|
||||
"version": "16.2.0",
|
||||
"resolved": "https://registry.npmjs.org/marked/-/marked-16.2.0.tgz",
|
||||
"integrity": "sha512-LbbTuye+0dWRz2TS9KJ7wsnD4KAtpj0MVkWc90XvBa6AslXsT0hTBVH5k32pcSyHH1fst9XEFJunXHktVy0zlg==",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"marked": "bin/marked.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18"
|
||||
"node": ">= 20"
|
||||
}
|
||||
},
|
||||
"node_modules/micromark": {
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
"jquery": "3.7.1",
|
||||
"katex": "0.16.22",
|
||||
"license-checker-webpack-plugin": "0.2.1",
|
||||
"mermaid": "11.6.0",
|
||||
"mermaid": "11.10.0",
|
||||
"mini-css-extract-plugin": "2.9.2",
|
||||
"minimatch": "10.0.1",
|
||||
"monaco-editor": "0.52.2",
|
||||
|
||||
@@ -230,6 +230,7 @@ func PackageVersionMetadata(ctx *context.Context) {
		},
		Author: swift_module.Person{
			Type:       "Person",
			Name:       metadata.Author.String(),
			GivenName:  metadata.Author.GivenName,
			MiddleName: metadata.Author.MiddleName,
			FamilyName: metadata.Author.FamilyName,

@@ -721,8 +721,8 @@ func deleteIssueComment(ctx *context.APIContext) {
	if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
		ctx.Status(http.StatusForbidden)
		return
	} else if comment.Type != issues_model.CommentTypeComment {
		ctx.Status(http.StatusNoContent)
	} else if !comment.Type.HasContentSupport() {
		ctx.Status(http.StatusBadRequest)
		return
	}

@@ -45,7 +45,7 @@ func UpdatePublicKeyInRepo(ctx *context.PrivateContext) {
	ctx.PlainText(http.StatusOK, "success")
}

// AuthorizedPublicKeyByContent searches content as prefix (leak e-mail part)
// AuthorizedPublicKeyByContent searches content as prefix (without comment part)
// and returns public key found.
func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) {
	content := ctx.FormString("content")
@@ -57,5 +57,14 @@ func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) {
		})
		return
	}
	ctx.PlainText(http.StatusOK, publicKey.AuthorizedString())

	authorizedString, err := asymkey_model.AuthorizedStringForKey(publicKey)
	if err != nil {
		ctx.JSON(http.StatusInternalServerError, private.Response{
			Err:     err.Error(),
			UserMsg: "invalid public key",
		})
		return
	}
	ctx.PlainText(http.StatusOK, authorizedString)
}

@@ -639,6 +639,7 @@ func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm, s
		ErrorCode:        oauth2_provider.AccessTokenErrorCodeInvalidRequest,
		ErrorDescription: "cannot proceed your request",
	})
	return
}
resp, tokenErr := oauth2_provider.NewAccessTokenResponse(ctx, authorizationCode.Grant, serverKey, clientKey)
if tokenErr != nil {

@@ -8,6 +8,7 @@ import (
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/services/context"
|
||||
|
||||
"github.com/gorilla/feeds"
|
||||
@@ -15,10 +16,14 @@ import (
|
||||
|
||||
// ShowBranchFeed shows tags and/or releases on the repo as RSS / Atom feed
|
||||
func ShowBranchFeed(ctx *context.Context, repo *repo.Repository, formatType string) {
|
||||
commits, err := ctx.Repo.Commit.CommitsByRange(0, 10, "")
|
||||
if err != nil {
|
||||
ctx.ServerError("ShowBranchFeed", err)
|
||||
return
|
||||
var commits []*git.Commit
|
||||
var err error
|
||||
if ctx.Repo.Commit != nil {
|
||||
commits, err = ctx.Repo.Commit.CommitsByRange(0, 10, "")
|
||||
if err != nil {
|
||||
ctx.ServerError("ShowBranchFeed", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
title := "Latest commits for branch " + ctx.Repo.BranchName
|
||||
|
||||
@@ -8,11 +8,18 @@ import (
|
||||
)
|
||||
|
||||
// RenderBranchFeed render format for branch or file
|
||||
func RenderBranchFeed(ctx *context.Context) {
|
||||
_, showFeedType := GetFeedType(ctx.PathParam("reponame"), ctx.Req)
|
||||
func RenderBranchFeed(ctx *context.Context, feedType string) {
|
||||
if ctx.Repo.TreePath == "" {
|
||||
ShowBranchFeed(ctx, ctx.Repo.Repository, showFeedType)
|
||||
ShowBranchFeed(ctx, ctx.Repo.Repository, feedType)
|
||||
} else {
|
||||
ShowFileFeed(ctx, ctx.Repo.Repository, showFeedType)
|
||||
ShowFileFeed(ctx, ctx.Repo.Repository, feedType)
|
||||
}
|
||||
}
|
||||
|
||||
func RenderBranchFeedRSS(ctx *context.Context) {
|
||||
RenderBranchFeed(ctx, "rss")
|
||||
}
|
||||
|
||||
func RenderBranchFeedAtom(ctx *context.Context) {
|
||||
RenderBranchFeed(ctx, "atom")
|
||||
}
|
||||
|
||||
@@ -523,7 +523,7 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
|
||||
|
||||
// Treat as pull request if both references are branches
|
||||
if ctx.Data["PageIsComparePull"] == nil {
|
||||
ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch
|
||||
ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch && permBase.CanReadIssuesOrPulls(true)
|
||||
}
|
||||
|
||||
if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) {
|
||||
@@ -735,6 +735,7 @@ func CompareDiff(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Data["PageIsViewCode"] = true
|
||||
ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
|
||||
ctx.Data["DirectComparison"] = ci.DirectComparison
|
||||
ctx.Data["OtherCompareSeparator"] = ".."
|
||||
|
||||
@@ -643,8 +643,17 @@ func ViewPullCommits(ctx *context.Context) {
|
||||
ctx.HTML(http.StatusOK, tplPullCommits)
|
||||
}
|
||||
|
||||
func indexCommit(commits []*git.Commit, commitID string) *git.Commit {
|
||||
for i := range commits {
|
||||
if commits[i].ID.String() == commitID {
|
||||
return commits[i]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ViewPullFiles render pull request changed files list page
|
||||
func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommit string, willShowSpecifiedCommitRange, willShowSpecifiedCommit bool) {
|
||||
func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) {
|
||||
ctx.Data["PageIsPullList"] = true
|
||||
ctx.Data["PageIsPullFiles"] = true
|
||||
|
||||
@@ -654,11 +663,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
pull := issue.PullRequest
|
||||
|
||||
var (
|
||||
startCommitID string
|
||||
endCommitID string
|
||||
gitRepo = ctx.Repo.GitRepo
|
||||
)
|
||||
gitRepo := ctx.Repo.GitRepo
|
||||
|
||||
prInfo := preparePullViewPullInfo(ctx, issue)
|
||||
if ctx.Written() {
|
||||
@@ -668,77 +673,68 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
return
|
||||
}
|
||||
|
||||
// Validate the given commit sha to show (if any passed)
|
||||
if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
||||
foundStartCommit := len(specifiedStartCommit) == 0
|
||||
foundEndCommit := len(specifiedEndCommit) == 0
|
||||
|
||||
if !(foundStartCommit && foundEndCommit) {
|
||||
for _, commit := range prInfo.Commits {
|
||||
if commit.ID.String() == specifiedStartCommit {
|
||||
foundStartCommit = true
|
||||
}
|
||||
if commit.ID.String() == specifiedEndCommit {
|
||||
foundEndCommit = true
|
||||
}
|
||||
|
||||
if foundStartCommit && foundEndCommit {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !(foundStartCommit && foundEndCommit) {
|
||||
ctx.NotFound(nil)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
|
||||
|
||||
headCommitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
|
||||
if err != nil {
|
||||
ctx.ServerError("GetRefCommitID", err)
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Data["IsShowingOnlySingleCommit"] = willShowSpecifiedCommit
|
||||
isSingleCommit := beforeCommitID == "" && afterCommitID != ""
|
||||
ctx.Data["IsShowingOnlySingleCommit"] = isSingleCommit
|
||||
isShowAllCommits := (beforeCommitID == "" || beforeCommitID == prInfo.MergeBase) && (afterCommitID == "" || afterCommitID == headCommitID)
|
||||
ctx.Data["IsShowingAllCommits"] = isShowAllCommits
|
||||
|
||||
if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
|
||||
if len(specifiedEndCommit) > 0 {
|
||||
endCommitID = specifiedEndCommit
|
||||
if afterCommitID == "" || afterCommitID == headCommitID {
|
||||
afterCommitID = headCommitID
|
||||
}
|
||||
afterCommit := indexCommit(prInfo.Commits, afterCommitID)
|
||||
if afterCommit == nil {
|
||||
ctx.HTTPError(http.StatusBadRequest, "after commit not found in PR commits")
|
||||
return
|
||||
}
|
||||
|
||||
var beforeCommit *git.Commit
|
||||
if !isSingleCommit {
|
||||
if beforeCommitID == "" || beforeCommitID == prInfo.MergeBase {
|
||||
beforeCommitID = prInfo.MergeBase
|
||||
// mergebase commit is not in the list of the pull request commits
|
||||
beforeCommit, err = gitRepo.GetCommit(beforeCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
endCommitID = headCommitID
|
||||
beforeCommit = indexCommit(prInfo.Commits, beforeCommitID)
|
||||
if beforeCommit == nil {
|
||||
ctx.HTTPError(http.StatusBadRequest, "before commit not found in PR commits")
|
||||
return
|
||||
}
|
||||
}
|
||||
if len(specifiedStartCommit) > 0 {
|
||||
startCommitID = specifiedStartCommit
|
||||
} else {
|
||||
startCommitID = prInfo.MergeBase
|
||||
}
|
||||
ctx.Data["IsShowingAllCommits"] = false
|
||||
} else {
|
||||
endCommitID = headCommitID
|
||||
startCommitID = prInfo.MergeBase
|
||||
ctx.Data["IsShowingAllCommits"] = true
|
||||
beforeCommit, err = afterCommit.Parent(0)
|
||||
if err != nil {
|
||||
ctx.ServerError("Parent", err)
|
||||
return
|
||||
}
|
||||
beforeCommitID = beforeCommit.ID.String()
|
||||
}
|
||||
|
||||
ctx.Data["Username"] = ctx.Repo.Owner.Name
|
||||
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
|
||||
ctx.Data["AfterCommitID"] = endCommitID
|
||||
ctx.Data["BeforeCommitID"] = startCommitID
|
||||
|
||||
fileOnly := ctx.FormBool("file-only")
|
||||
ctx.Data["MergeBase"] = prInfo.MergeBase
|
||||
ctx.Data["AfterCommitID"] = afterCommitID
|
||||
ctx.Data["BeforeCommitID"] = beforeCommitID
|
||||
|
||||
maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
|
||||
files := ctx.FormStrings("files")
|
||||
fileOnly := ctx.FormBool("file-only")
|
||||
if fileOnly && (len(files) == 2 || len(files) == 1) {
|
||||
maxLines, maxFiles = -1, -1
|
||||
}
|
||||
|
||||
diffOptions := &gitdiff.DiffOptions{
|
||||
AfterCommitID: endCommitID,
|
||||
BeforeCommitID: beforeCommitID,
|
||||
AfterCommitID: afterCommitID,
|
||||
SkipTo: ctx.FormString("skip-to"),
|
||||
MaxLines: maxLines,
|
||||
MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
|
||||
@@ -746,10 +742,6 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
|
||||
}
|
||||
|
||||
if !willShowSpecifiedCommit {
|
||||
diffOptions.BeforeCommitID = startCommitID
|
||||
}
|
||||
|
||||
diff, err := gitdiff.GetDiffForRender(ctx, ctx.Repo.RepoLink, gitRepo, diffOptions, files...)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetDiff", err)
|
||||
@@ -761,7 +753,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
// as the viewed information is designed to be loaded only on latest PR
|
||||
// diff and if you're signed in.
|
||||
var reviewState *pull_model.ReviewState
|
||||
if ctx.IsSigned && !willShowSpecifiedCommit && !willShowSpecifiedCommitRange {
|
||||
if ctx.IsSigned && isShowAllCommits {
|
||||
reviewState, err = gitdiff.SyncUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diff, diffOptions)
|
||||
if err != nil {
|
||||
ctx.ServerError("SyncUserSpecificDiff", err)
|
||||
@@ -769,7 +761,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
}
|
||||
|
||||
diffShortStat, err := gitdiff.GetDiffShortStat(ctx.Repo.GitRepo, startCommitID, endCommitID)
|
||||
diffShortStat, err := gitdiff.GetDiffShortStat(ctx.Repo.GitRepo, beforeCommitID, afterCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetDiffShortStat", err)
|
||||
return
|
||||
@@ -816,7 +808,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
|
||||
if !fileOnly {
|
||||
// note: use mergeBase is set to false because we already have the merge base from the pull request info
|
||||
diffTree, err := gitdiff.GetDiffTree(ctx, gitRepo, false, startCommitID, endCommitID)
|
||||
diffTree, err := gitdiff.GetDiffTree(ctx, gitRepo, false, beforeCommitID, afterCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetDiffTree", err)
|
||||
return
|
||||
@@ -836,17 +828,6 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
ctx.Data["Diff"] = diff
|
||||
ctx.Data["DiffNotAvailable"] = diffShortStat.NumFiles == 0
|
||||
|
||||
baseCommit, err := ctx.Repo.GitRepo.GetCommit(startCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
commit, err := gitRepo.GetCommit(endCommitID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommit", err)
|
||||
return
|
||||
}
|
||||
|
||||
if ctx.IsSigned && ctx.Doer != nil {
|
||||
if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
|
||||
ctx.ServerError("CanMarkConversation", err)
|
||||
@@ -854,7 +835,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
}
|
||||
|
||||
setCompareContext(ctx, baseCommit, commit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
|
||||
setCompareContext(ctx, beforeCommit, afterCommit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
|
||||
|
||||
assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
|
||||
if err != nil {
|
||||
@@ -901,7 +882,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
ctx.Data["CanBlockUser"] = func(blocker, blockee *user_model.User) bool {
|
||||
return user_service.CanBlockUser(ctx, ctx.Doer, blocker, blockee)
|
||||
}
|
||||
if !willShowSpecifiedCommit && !willShowSpecifiedCommitRange && pull.Flow == issues_model.PullRequestFlowGithub {
|
||||
if isShowAllCommits && pull.Flow == issues_model.PullRequestFlowGithub {
|
||||
if err := pull.LoadHeadRepo(ctx); err != nil {
|
||||
ctx.ServerError("LoadHeadRepo", err)
|
||||
return
|
||||
@@ -930,19 +911,17 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
|
||||
}
|
||||
|
||||
func ViewPullFilesForSingleCommit(ctx *context.Context) {
|
||||
viewPullFiles(ctx, "", ctx.PathParam("sha"), true, true)
|
||||
// it doesn't support showing files from mergebase to the special commit
|
||||
// otherwise it will be ambiguous
|
||||
viewPullFiles(ctx, "", ctx.PathParam("sha"))
|
||||
}
|
||||
|
||||
func ViewPullFilesForRange(ctx *context.Context) {
|
||||
viewPullFiles(ctx, ctx.PathParam("shaFrom"), ctx.PathParam("shaTo"), true, false)
|
||||
}
|
||||
|
||||
func ViewPullFilesStartingFromCommit(ctx *context.Context) {
|
||||
viewPullFiles(ctx, "", ctx.PathParam("sha"), true, false)
|
||||
viewPullFiles(ctx, ctx.PathParam("shaFrom"), ctx.PathParam("shaTo"))
|
||||
}
|
||||
|
||||
func ViewPullFilesForAllCommitsOfPr(ctx *context.Context) {
|
||||
viewPullFiles(ctx, "", "", false, false)
|
||||
viewPullFiles(ctx, "", "")
|
||||
}
|
||||
|
||||
// UpdatePullRequest merge PR's baseBranch into headBranch
|
||||
|
||||
@@ -20,8 +20,8 @@ import (
|
||||
unit_model "code.gitea.io/gitea/models/unit"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
giturl "code.gitea.io/gitea/modules/git/url"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/htmlutil"
|
||||
"code.gitea.io/gitea/modules/httplib"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
@@ -309,34 +309,41 @@ func handleRepoEmptyOrBroken(ctx *context.Context) {
|
||||
ctx.Redirect(link)
|
||||
}
|
||||
|
||||
func handleRepoViewSubmodule(ctx *context.Context, submodule *git.SubModule) {
|
||||
submoduleRepoURL, err := giturl.ParseRepositoryURL(ctx, submodule.URL)
|
||||
if err != nil {
|
||||
HandleGitError(ctx, "prepareToRenderDirOrFile: ParseRepositoryURL", err)
|
||||
func isViewHomeOnlyContent(ctx *context.Context) bool {
|
||||
return ctx.FormBool("only_content")
|
||||
}
|
||||
|
||||
func handleRepoViewSubmodule(ctx *context.Context, commitSubmoduleFile *git.CommitSubmoduleFile) {
|
||||
submoduleWebLink := commitSubmoduleFile.SubmoduleWebLinkTree(ctx)
|
||||
if submoduleWebLink == nil {
|
||||
ctx.Data["NotFoundPrompt"] = ctx.Repo.TreePath
|
||||
ctx.NotFound(nil)
|
||||
return
|
||||
}
|
||||
submoduleURL := giturl.MakeRepositoryWebLink(submoduleRepoURL)
|
||||
if httplib.IsCurrentGiteaSiteURL(ctx, submoduleURL) {
|
||||
ctx.RedirectToCurrentSite(submoduleURL)
|
||||
} else {
|
||||
|
||||
redirectLink := submoduleWebLink.CommitWebLink
|
||||
if isViewHomeOnlyContent(ctx) {
|
||||
ctx.Resp.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||||
_, _ = ctx.Resp.Write([]byte(htmlutil.HTMLFormat(`<a href="%s">%s</a>`, redirectLink, redirectLink)))
|
||||
} else if !httplib.IsCurrentGiteaSiteURL(ctx, redirectLink) {
|
||||
// don't auto-redirect to external URL, to avoid open redirect or phishing
|
||||
ctx.Data["NotFoundPrompt"] = submoduleURL
|
||||
ctx.Data["NotFoundPrompt"] = redirectLink
|
||||
ctx.NotFound(nil)
|
||||
} else {
|
||||
ctx.Redirect(submoduleWebLink.CommitWebLink)
|
||||
}
|
||||
}
|
||||
|
||||
func prepareToRenderDirOrFile(entry *git.TreeEntry) func(ctx *context.Context) {
|
||||
return func(ctx *context.Context) {
|
||||
if entry.IsSubModule() {
|
||||
submodule, err := ctx.Repo.Commit.GetSubModule(entry.Name())
|
||||
commitSubmoduleFile, err := git.GetCommitInfoSubmoduleFile(ctx.Repo.RepoLink, ctx.Repo.TreePath, ctx.Repo.Commit, entry.ID)
|
||||
if err != nil {
|
||||
HandleGitError(ctx, "prepareToRenderDirOrFile: GetSubModule", err)
|
||||
HandleGitError(ctx, "prepareToRenderDirOrFile: GetCommitInfoSubmoduleFile", err)
|
||||
return
|
||||
}
|
||||
handleRepoViewSubmodule(ctx, submodule)
|
||||
return
|
||||
}
|
||||
if entry.IsDir() {
|
||||
handleRepoViewSubmodule(ctx, commitSubmoduleFile)
|
||||
} else if entry.IsDir() {
|
||||
prepareToRenderDirectory(ctx)
|
||||
} else {
|
||||
prepareToRenderFile(ctx, entry)
|
||||
@@ -472,7 +479,7 @@ func Home(ctx *context.Context) {
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.FormBool("only_content") {
|
||||
if isViewHomeOnlyContent(ctx) {
|
||||
ctx.HTML(http.StatusOK, tplRepoViewContent)
|
||||
} else if len(treeNames) != 0 {
|
||||
ctx.HTML(http.StatusOK, tplRepoView)
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
git_module "code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/services/contexttest"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -19,14 +18,20 @@ func TestViewHomeSubmoduleRedirect(t *testing.T) {
|
||||
unittest.PrepareTestEnv(t)
|
||||
|
||||
ctx, _ := contexttest.MockContext(t, "/user2/repo1/src/branch/master/test-submodule")
|
||||
submodule := &git_module.SubModule{Path: "test-submodule", URL: setting.AppURL + "user2/repo-other.git"}
|
||||
submodule := git_module.NewCommitSubmoduleFile("/user2/repo1", "test-submodule", "../repo-other", "any-ref-id")
|
||||
handleRepoViewSubmodule(ctx, submodule)
|
||||
assert.Equal(t, http.StatusSeeOther, ctx.Resp.WrittenStatus())
|
||||
assert.Equal(t, "/user2/repo-other", ctx.Resp.Header().Get("Location"))
|
||||
assert.Equal(t, "/user2/repo-other/tree/any-ref-id", ctx.Resp.Header().Get("Location"))
|
||||
|
||||
ctx, _ = contexttest.MockContext(t, "/user2/repo1/src/branch/master/test-submodule")
|
||||
submodule = &git_module.SubModule{Path: "test-submodule", URL: "https://other/user2/repo-other.git"}
|
||||
submodule = git_module.NewCommitSubmoduleFile("/user2/repo1", "test-submodule", "https://other/user2/repo-other.git", "any-ref-id")
|
||||
handleRepoViewSubmodule(ctx, submodule)
|
||||
// do not auto-redirect for external URLs, to avoid open redirect or phishing
|
||||
assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus())
|
||||
|
||||
ctx, respWriter := contexttest.MockContext(t, "/user2/repo1/src/branch/master/test-submodule?only_content=true")
|
||||
submodule = git_module.NewCommitSubmoduleFile("/user2/repo1", "test-submodule", "../repo-other", "any-ref-id")
|
||||
handleRepoViewSubmodule(ctx, submodule)
|
||||
assert.Equal(t, http.StatusOK, ctx.Resp.WrittenStatus())
|
||||
assert.Equal(t, `<a href="/user2/repo-other/tree/any-ref-id">/user2/repo-other/tree/any-ref-id</a>`, respWriter.Body.String())
|
||||
}
|
||||
|
||||
@@ -1217,10 +1217,11 @@ func registerWebRoutes(m *web.Router) {
|
||||
// end "/{username}/{reponame}": view milestone, label, issue, pull, etc
|
||||
|
||||
m.Group("/{username}/{reponame}/{type:issues}", func() {
|
||||
// these handlers also check unit permissions internally
|
||||
m.Get("", repo.Issues)
|
||||
m.Get("/{index}", repo.ViewIssue)
|
||||
}, optSignIn, context.RepoAssignment, context.RequireUnitReader(unit.TypeIssues, unit.TypeExternalTracker))
|
||||
// end "/{username}/{reponame}": issue/pull list, issue/pull view, external tracker
|
||||
m.Get("/{index}", repo.ViewIssue) // also do pull-request redirection (".../issues/{PR-number}" -> ".../pulls/{PR-number}")
|
||||
}, optSignIn, context.RepoAssignment, context.RequireUnitReader(unit.TypeIssues, unit.TypePullRequests, unit.TypeExternalTracker))
|
||||
// end "/{username}/{reponame}": issue list, issue view (pull-request redirection), external tracker
|
||||
|
||||
m.Group("/{username}/{reponame}", func() { // edit issues, pulls, labels, milestones, etc
|
||||
m.Group("/issues", func() {
|
||||
@@ -1509,7 +1510,7 @@ func registerWebRoutes(m *web.Router) {
			m.Group("/commits", func() {
				m.Get("", repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewPullCommits)
				m.Get("/list", repo.GetPullCommits)
				m.Get("/{sha:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
				m.Get("/{sha:[a-f0-9]{7,64}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
			})
			m.Post("/merge", context.RepoMustNotBeArchived(), web.Bind(forms.MergePullRequestForm{}), repo.MergePullRequest)
			m.Post("/cancel_auto_merge", context.RepoMustNotBeArchived(), repo.CancelAutoMergePullRequest)
@@ -1518,8 +1519,7 @@ func registerWebRoutes(m *web.Router) {
			m.Post("/cleanup", context.RepoMustNotBeArchived(), repo.CleanUpPullRequest)
			m.Group("/files", func() {
				m.Get("", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForAllCommitsOfPr)
				m.Get("/{sha:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesStartingFromCommit)
				m.Get("/{shaFrom:[a-f0-9]{7,40}}..{shaTo:[a-f0-9]{7,40}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
				m.Get("/{shaFrom:[a-f0-9]{7,64}}..{shaTo:[a-f0-9]{7,64}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
				m.Group("/reviews", func() {
					m.Get("/new_comment", repo.RenderNewCodeCommentForm)
					m.Post("/comments", web.Bind(forms.CodeCommentForm{}), repo.SetShowOutdatedComments, repo.CreateCodeComment)
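Widening the path constraints from {7,40} to {7,64} lets the same routes match abbreviated hashes, full SHA-1 hashes and full SHA-256 hashes. A quick standalone check of the pattern with the standard regexp package (this only mirrors the route constraint, it is not the router itself):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Same character class and bounds as the route constraint above.
	re := regexp.MustCompile(`^[a-f0-9]{7,64}$`)

	fmt.Println(re.MatchString("99053ce"))               // abbreviated hash: true
	fmt.Println(re.MatchString(strings.Repeat("a", 40))) // full SHA-1 length: true
	fmt.Println(re.MatchString(strings.Repeat("a", 64))) // full SHA-256 length: true
	fmt.Println(re.MatchString(strings.Repeat("a", 65))) // longer than SHA-256: false
}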
@@ -1593,8 +1593,8 @@ func registerWebRoutes(m *web.Router) {
|
||||
m.Get("/cherry-pick/{sha:([a-f0-9]{7,64})$}", repo.SetEditorconfigIfExists, context.RepoRefByDefaultBranch(), repo.CherryPick)
|
||||
}, repo.MustBeNotEmpty)
|
||||
|
||||
m.Get("/rss/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeed)
|
||||
m.Get("/atom/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeed)
|
||||
m.Get("/rss/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeedRSS)
|
||||
m.Get("/atom/branch/*", context.RepoRefByType(git.RefTypeBranch), feedEnabled, feed.RenderBranchFeedAtom)
|
||||
|
||||
m.Group("/src", func() {
|
||||
m.Get("", func(ctx *context.Context) { ctx.Redirect(ctx.Repo.RepoLink) }) // there is no "{owner}/{repo}/src" page, so redirect to "{owner}/{repo}" to avoid 404
|
||||
|
||||
@@ -53,7 +53,7 @@ func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {
|
||||
|
||||
claims := actionsClaims{
|
||||
RegisteredClaims: jwt.RegisteredClaims{
|
||||
ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
|
||||
ExpiresAt: jwt.NewNumericDate(now.Add(1*time.Hour + setting.Actions.EndlessTaskTimeout)),
|
||||
NotBefore: jwt.NewNumericDate(now),
|
||||
},
|
||||
Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
|
||||
|
||||
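The Actions token now expires one hour after issuance plus the configured endless-task timeout rather than a flat 24 hours. For reference, a hedged sketch of how such an expiry is attached and enforced with github.com/golang-jwt/jwt/v5, the library already used here; the secret and timeout below are placeholders, not Gitea settings:

package main

import (
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v5"
)

func main() {
	secret := []byte("placeholder-secret") // assumption: any HMAC secret
	timeout := 3 * time.Hour               // stands in for the endless-task timeout setting

	now := time.Now()
	claims := jwt.RegisteredClaims{
		ExpiresAt: jwt.NewNumericDate(now.Add(1*time.Hour + timeout)),
		NotBefore: jwt.NewNumericDate(now),
	}
	s, _ := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(secret)

	// Parsing rejects the token automatically once ExpiresAt has passed.
	tok, err := jwt.ParseWithClaims(s, &jwt.RegisteredClaims{}, func(t *jwt.Token) (any, error) {
		return secret, nil
	})
	fmt.Println(tok.Valid, err)
}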
@@ -260,11 +260,6 @@ func (n *actionsNotifier) CreateIssueComment(ctx context.Context, doer *user_mod
|
||||
func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
|
||||
ctx = withMethod(ctx, "UpdateComment")
|
||||
|
||||
if err := c.LoadIssue(ctx); err != nil {
|
||||
log.Error("LoadIssue: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if c.Issue.IsPull {
|
||||
notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventPullRequestComment, api.HookIssueCommentEdited)
|
||||
return
|
||||
@@ -275,11 +270,6 @@ func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.Us
|
||||
func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
|
||||
ctx = withMethod(ctx, "DeleteComment")
|
||||
|
||||
if err := comment.LoadIssue(ctx); err != nil {
|
||||
log.Error("LoadIssue: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if comment.Issue.IsPull {
|
||||
notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentDeleted)
|
||||
return
|
||||
@@ -288,6 +278,7 @@ func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.Us
|
||||
}
|
||||
|
||||
func notifyIssueCommentChange(ctx context.Context, doer *user_model.User, comment *issues_model.Comment, oldContent string, event webhook_module.HookEventType, action api.HookIssueCommentAction) {
|
||||
comment.Issue = nil // force issue to be loaded
|
||||
if err := comment.LoadIssue(ctx); err != nil {
|
||||
log.Error("LoadIssue: %v", err)
|
||||
return
|
||||
|
||||
@@ -250,7 +250,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load pull issue. Error: %w", err)
|
||||
}
|
||||
comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i])
|
||||
comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i], forcePush.Value())
|
||||
if err == nil && comment != nil {
|
||||
notify_service.PullRequestPushCommits(ctx, pusher, pr, comment)
|
||||
}
|
||||
|
||||
@@ -25,10 +25,7 @@ import (
|
||||
// There is a dependence on the database within RewriteAllPrincipalKeys & RegeneratePrincipalKeys
|
||||
// The sshOpLocker is used from ssh_key_authorized_keys.go
|
||||
|
||||
const (
|
||||
authorizedPrincipalsFile = "authorized_principals"
|
||||
tplCommentPrefix = `# gitea public key`
|
||||
)
|
||||
const authorizedPrincipalsFile = "authorized_principals"
|
||||
|
||||
// RewriteAllPrincipalKeys removes any authorized principal and rewrite all keys from database again.
|
||||
// Note: db.GetEngine(ctx).Iterate does not get latest data after insert/delete, so we have to call this function
|
||||
@@ -90,10 +87,9 @@ func rewriteAllPrincipalKeys(ctx context.Context) error {
|
||||
return util.Rename(tmpPath, fPath)
|
||||
}
|
||||
|
||||
func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
|
||||
func regeneratePrincipalKeys(ctx context.Context, t io.Writer) error {
|
||||
if err := db.GetEngine(ctx).Where("type = ?", asymkey_model.KeyTypePrincipal).Iterate(new(asymkey_model.PublicKey), func(idx int, bean any) (err error) {
|
||||
_, err = t.WriteString((bean.(*asymkey_model.PublicKey)).AuthorizedString())
|
||||
return err
|
||||
return asymkey_model.WriteAuthorizedStringForValidKey(bean.(*asymkey_model.PublicKey), t)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -114,11 +110,11 @@ func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
if strings.HasPrefix(line, tplCommentPrefix) {
|
||||
if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
|
||||
scanner.Scan()
|
||||
continue
|
||||
}
|
||||
_, err = t.WriteString(line + "\n")
|
||||
_, err = io.WriteString(t, line+"\n")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -171,34 +171,35 @@ func registerDeleteOldSystemNotices() {
|
||||
})
|
||||
}
|
||||
|
||||
type GCLFSConfig struct {
|
||||
BaseConfig
|
||||
OlderThan time.Duration
|
||||
LastUpdatedMoreThanAgo time.Duration
|
||||
NumberToCheckPerRepo int64
|
||||
ProportionToCheckPerRepo float64
|
||||
}
|
||||
|
||||
func registerGCLFS() {
|
||||
if !setting.LFS.StartServer {
|
||||
return
|
||||
}
|
||||
type GCLFSConfig struct {
|
||||
OlderThanConfig
|
||||
LastUpdatedMoreThanAgo time.Duration
|
||||
NumberToCheckPerRepo int64
|
||||
ProportionToCheckPerRepo float64
|
||||
}
|
||||
|
||||
RegisterTaskFatal("gc_lfs", &GCLFSConfig{
|
||||
OlderThanConfig: OlderThanConfig{
|
||||
BaseConfig: BaseConfig{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 24h",
|
||||
},
|
||||
// Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
|
||||
// and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
|
||||
// an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
|
||||
// changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
|
||||
// objects.
|
||||
//
|
||||
// It is likely that a week is potentially excessive but it should definitely be enough that any
|
||||
// unassociated LFS object is genuinely unassociated.
|
||||
OlderThan: 24 * time.Hour * 7,
|
||||
BaseConfig: BaseConfig{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 24h",
|
||||
},
|
||||
// Only attempt to garbage collect lfs meta objects older than a week as the order of git lfs upload
|
||||
// and git object upload is not necessarily guaranteed. It's possible to imagine a situation whereby
|
||||
// an LFS object is uploaded but the git branch is not uploaded immediately, or there are some rapid
|
||||
// changes in new branches that might lead to lfs objects becoming temporarily unassociated with git
|
||||
// objects.
|
||||
//
|
||||
// It is likely that a week is potentially excessive but it should definitely be enough that any
|
||||
// unassociated LFS object is genuinely unassociated.
|
||||
OlderThan: 24 * time.Hour * 7,
|
||||
|
||||
// Only GC things that haven't been looked at in the past 3 days
|
||||
LastUpdatedMoreThanAgo: 24 * time.Hour * 3,
|
||||
NumberToCheckPerRepo: 100,
|
||||
|
||||
51 services/cron/tasks_extended_test.go (new file)
@@ -0,0 +1,51 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package cron
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/test"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_GCLFSConfig(t *testing.T) {
|
||||
cfg, err := setting.NewConfigProviderFromData(`
|
||||
[cron.gc_lfs]
|
||||
ENABLED = true
|
||||
RUN_AT_START = true
|
||||
SCHEDULE = "@every 2h"
|
||||
OLDER_THAN = "1h"
|
||||
LAST_UPDATED_MORE_THAN_AGO = "7h"
|
||||
NUMBER_TO_CHECK_PER_REPO = 10
|
||||
PROPORTION_TO_CHECK_PER_REPO = 0.1
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
defer test.MockVariableValue(&setting.CfgProvider, cfg)()
|
||||
|
||||
config := &GCLFSConfig{
|
||||
BaseConfig: BaseConfig{
|
||||
Enabled: false,
|
||||
RunAtStart: false,
|
||||
Schedule: "@every 24h",
|
||||
},
|
||||
OlderThan: 24 * time.Hour * 7,
|
||||
LastUpdatedMoreThanAgo: 24 * time.Hour * 3,
|
||||
NumberToCheckPerRepo: 100,
|
||||
ProportionToCheckPerRepo: 0.6,
|
||||
}
|
||||
|
||||
_, err = setting.GetCronSettings("gc_lfs", config)
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, config.Enabled)
|
||||
assert.True(t, config.RunAtStart)
|
||||
assert.Equal(t, "@every 2h", config.Schedule)
|
||||
assert.Equal(t, 1*time.Hour, config.OlderThan)
|
||||
assert.Equal(t, 7*time.Hour, config.LastUpdatedMoreThanAgo)
|
||||
assert.Equal(t, int64(10), config.NumberToCheckPerRepo)
|
||||
assert.InDelta(t, 0.1, config.ProportionToCheckPerRepo, 0.001)
|
||||
}
|
||||
@@ -20,8 +20,6 @@ import (
|
||||
asymkey_service "code.gitea.io/gitea/services/asymkey"
|
||||
)
|
||||
|
||||
const tplCommentPrefix = `# gitea public key`
|
||||
|
||||
func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) error {
|
||||
if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
|
||||
return nil
|
||||
@@ -47,7 +45,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
if strings.HasPrefix(line, tplCommentPrefix) {
|
||||
if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
|
||||
continue
|
||||
}
|
||||
linesInAuthorizedKeys.Add(line)
|
||||
@@ -67,7 +65,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
|
||||
scanner = bufio.NewScanner(regenerated)
|
||||
for scanner.Scan() {
|
||||
line := scanner.Text()
|
||||
if strings.HasPrefix(line, tplCommentPrefix) {
|
||||
if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
|
||||
continue
|
||||
}
|
||||
if linesInAuthorizedKeys.Contains(line) {
|
||||
|
||||
@@ -1239,7 +1239,7 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
|
||||
return nil, err
|
||||
}
|
||||
|
||||
checker, err := attribute.NewBatchChecker(gitRepo, opts.AfterCommitID, []string{attribute.LinguistVendored, attribute.LinguistGenerated, attribute.LinguistLanguage, attribute.GitlabLanguage})
|
||||
checker, err := attribute.NewBatchChecker(gitRepo, opts.AfterCommitID, []string{attribute.LinguistVendored, attribute.LinguistGenerated, attribute.LinguistLanguage, attribute.GitlabLanguage, attribute.Diff})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1248,6 +1248,7 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
|
||||
for _, diffFile := range diff.Files {
|
||||
isVendored := optional.None[bool]()
|
||||
isGenerated := optional.None[bool]()
|
||||
attrDiff := optional.None[string]()
|
||||
attrs, err := checker.CheckPath(diffFile.Name)
|
||||
if err == nil {
|
||||
isVendored, isGenerated = attrs.GetVendored(), attrs.GetGenerated()
|
||||
@@ -1255,6 +1256,7 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
|
||||
if language.Has() {
|
||||
diffFile.Language = language.Value()
|
||||
}
|
||||
attrDiff = attrs.Get(attribute.Diff).ToString()
|
||||
}
|
||||
|
||||
// Populate Submodule URLs
|
||||
@@ -1276,7 +1278,8 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
|
||||
diffFile.Sections = append(diffFile.Sections, tailSection)
|
||||
}
|
||||
|
||||
if !setting.Git.DisableDiffHighlight {
|
||||
shouldFullFileHighlight := !setting.Git.DisableDiffHighlight && attrDiff.Value() == ""
|
||||
if shouldFullFileHighlight {
|
||||
if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize {
|
||||
diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.String())
|
||||
}
|
||||
|
||||
@@ -80,6 +80,12 @@ func CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_m
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// reload issue to ensure it has the latest data, especially the number of comments
|
||||
issue, err = issues_model.GetIssueByID(ctx, issue.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
notify_service.CreateIssueComment(ctx, doer, repo, issue, comment, mentions)
|
||||
|
||||
return comment, nil
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
actions_model "code.gitea.io/gitea/models/actions"
|
||||
auth_model "code.gitea.io/gitea/models/auth"
|
||||
@@ -51,6 +52,33 @@ type Claims struct {
|
||||
jwt.RegisteredClaims
|
||||
}
|
||||
|
||||
type AuthTokenOptions struct {
|
||||
Op string
|
||||
UserID int64
|
||||
RepoID int64
|
||||
}
|
||||
|
||||
func GetLFSAuthTokenWithBearer(opts AuthTokenOptions) (string, error) {
|
||||
now := time.Now()
|
||||
claims := Claims{
|
||||
RegisteredClaims: jwt.RegisteredClaims{
|
||||
ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
|
||||
NotBefore: jwt.NewNumericDate(now),
|
||||
},
|
||||
RepoID: opts.RepoID,
|
||||
Op: opts.Op,
|
||||
UserID: opts.UserID,
|
||||
}
|
||||
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
|
||||
|
||||
// Sign and get the complete encoded token as a string using the secret
|
||||
tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to sign LFS JWT token: %w", err)
|
||||
}
|
||||
return "Bearer " + tokenString, nil
|
||||
}
|
||||
|
||||
// DownloadLink builds a URL to download the object.
|
||||
func (rc *requestContext) DownloadLink(p lfs_module.Pointer) string {
|
||||
return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid))
|
||||
@@ -111,7 +139,7 @@ func DownloadHandler(ctx *context.Context) {
			}
		}

		ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size-fromByte))
		ctx.Resp.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", fromByte, toByte, meta.Size))
		ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Range")
	}
}
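The header fix matters because in Content-Range: bytes first-last/complete (RFC 7233) the final figure is the complete object length, not the bytes remaining after the offset; the old value broke clients resuming partial LFS downloads. A tiny illustration:

package main

import "fmt"

func main() {
	// A 100-byte object served from offset 10 through the end.
	size, from, to := int64(100), int64(10), int64(99)
	fmt.Printf("bytes %d-%d/%d\n", from, to, size-from) // "bytes 10-99/90"  (old, wrong total)
	fmt.Printf("bytes %d-%d/%d\n", from, to, size)      // "bytes 10-99/100" (fixed: complete length)
}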
@@ -557,9 +585,6 @@ func authenticate(ctx *context.Context, repository *repo_model.Repository, autho
|
||||
}
|
||||
|
||||
func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repository, mode perm_model.AccessMode) (*user_model.User, error) {
|
||||
if !strings.Contains(tokenSHA, ".") {
|
||||
return nil, nil
|
||||
}
|
||||
token, err := jwt.ParseWithClaims(tokenSHA, &Claims{}, func(t *jwt.Token) (any, error) {
|
||||
if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
|
||||
return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
|
||||
@@ -567,7 +592,7 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo
|
||||
return setting.LFS.JWTSecretBytes, nil
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
return nil, errors.New("invalid token")
|
||||
}
|
||||
|
||||
claims, claimsOk := token.Claims.(*Claims)
|
||||
|
||||
51 services/lfs/server_test.go (new file)
@@ -0,0 +1,51 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package lfs
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
perm_model "code.gitea.io/gitea/models/perm"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
"code.gitea.io/gitea/services/contexttest"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
unittest.MainTest(m)
|
||||
}
|
||||
|
||||
func TestAuthenticate(t *testing.T) {
|
||||
require.NoError(t, unittest.PrepareTestDatabase())
|
||||
repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
|
||||
|
||||
token2, _ := GetLFSAuthTokenWithBearer(AuthTokenOptions{Op: "download", UserID: 2, RepoID: 1})
|
||||
_, token2, _ = strings.Cut(token2, " ")
|
||||
ctx, _ := contexttest.MockContext(t, "/")
|
||||
|
||||
t.Run("handleLFSToken", func(t *testing.T) {
|
||||
u, err := handleLFSToken(ctx, "", repo1, perm_model.AccessModeRead)
|
||||
require.Error(t, err)
|
||||
assert.Nil(t, u)
|
||||
|
||||
u, err = handleLFSToken(ctx, "invalid", repo1, perm_model.AccessModeRead)
|
||||
require.Error(t, err)
|
||||
assert.Nil(t, u)
|
||||
|
||||
u, err = handleLFSToken(ctx, token2, repo1, perm_model.AccessModeRead)
|
||||
require.NoError(t, err)
|
||||
assert.EqualValues(t, 2, u.ID)
|
||||
})
|
||||
|
||||
t.Run("authenticate", func(t *testing.T) {
|
||||
const prefixBearer = "Bearer "
|
||||
assert.False(t, authenticate(ctx, repo1, "", true, false))
|
||||
assert.False(t, authenticate(ctx, repo1, prefixBearer+"invalid", true, false))
|
||||
assert.True(t, authenticate(ctx, repo1, prefixBearer+token2, true, false))
|
||||
})
|
||||
}
|
||||
@@ -354,7 +354,8 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith
|
||||
|
||||
// Prevent open redirect
|
||||
if !hasBaseURL(redirectURL, g.baseURL) &&
|
||||
!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") {
|
||||
!hasBaseURL(redirectURL, "https://objects.githubusercontent.com/") &&
|
||||
!hasBaseURL(redirectURL, "https://release-assets.githubusercontent.com/") {
|
||||
WarnAndNotice("Unexpected AssetURL for assetID[%d] in %s: %s", asset.GetID(), g, redirectURL)
|
||||
|
||||
return io.NopCloser(strings.NewReader(redirectURL)), nil
|
||||
|
||||
@@ -14,42 +14,28 @@ import (
|
||||
)
|
||||
|
||||
// getCommitIDsFromRepo get commit IDs from repo in between oldCommitID and newCommitID
|
||||
// isForcePush will be true if oldCommit isn't on the branch
|
||||
// Commit on baseBranch will skip
|
||||
func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, isForcePush bool, err error) {
|
||||
func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, err error) {
|
||||
gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
defer closer.Close()
|
||||
|
||||
oldCommit, err := gitRepo.GetCommit(oldCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newCommit, err := gitRepo.GetCommit(newCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
isForcePush, err = newCommit.IsForcePush(oldCommitID)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
if isForcePush {
|
||||
commitIDs = make([]string, 2)
|
||||
commitIDs[0] = oldCommitID
|
||||
commitIDs[1] = newCommitID
|
||||
|
||||
return commitIDs, isForcePush, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find commits between new and old commit excluding base branch commits
|
||||
commits, err := gitRepo.CommitsBetweenNotBase(newCommit, oldCommit, baseBranch)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
commitIDs = make([]string, 0, len(commits))
|
||||
@@ -57,38 +43,40 @@ func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldC
|
||||
commitIDs = append(commitIDs, commits[i].ID.String())
|
||||
}
|
||||
|
||||
return commitIDs, isForcePush, err
|
||||
return commitIDs, err
|
||||
}
|
||||
|
||||
// CreatePushPullComment create push code to pull base comment
|
||||
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (comment *issues_model.Comment, err error) {
|
||||
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string, isForcePush bool) (comment *issues_model.Comment, err error) {
|
||||
if pr.HasMerged || oldCommitID == "" || newCommitID == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
ops := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: pusher,
|
||||
Repo: pr.BaseRepo,
|
||||
opts := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: pusher,
|
||||
Repo: pr.BaseRepo,
|
||||
IsForcePush: isForcePush,
|
||||
Issue: pr.Issue,
|
||||
}
|
||||
|
||||
var data issues_model.PushActionContent
|
||||
|
||||
data.CommitIDs, data.IsForcePush, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if opts.IsForcePush {
|
||||
data.CommitIDs = []string{oldCommitID, newCommitID}
|
||||
} else {
|
||||
data.CommitIDs, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
ops.Issue = pr.Issue
|
||||
|
||||
dataJSON, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ops.Content = string(dataJSON)
|
||||
|
||||
comment, err = issues_model.CreateComment(ctx, ops)
|
||||
opts.Content = string(dataJSON)
|
||||
comment, err = issues_model.CreateComment(ctx, opts)
|
||||
|
||||
return comment, err
|
||||
}
|
||||
|
||||
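After this refactor the caller decides whether the push was forced, and CreatePushPullComment records only the old and new head IDs in that case. Conceptually, a push is a force push when the old head is no longer an ancestor of the new one; a standalone sketch of that check using the git CLI (Gitea itself uses its own git module, e.g. newCommit.IsForcePush, so this is an illustration only and assumes the git binary is on PATH):

package main

import (
	"fmt"
	"os/exec"
)

// isForcePush reports whether oldID is NOT an ancestor of newID in repoPath.
func isForcePush(repoPath, oldID, newID string) (bool, error) {
	cmd := exec.Command("git", "-C", repoPath, "merge-base", "--is-ancestor", oldID, newID)
	err := cmd.Run()
	if err == nil {
		return false, nil // old head is an ancestor: ordinary fast-forward push
	}
	if exitErr, ok := err.(*exec.ExitError); ok && exitErr.ExitCode() == 1 {
		return true, nil // not an ancestor: history was rewritten
	}
	return false, err // some other git failure
}

func main() {
	forced, err := isForcePush(".", "HEAD~1", "HEAD")
	fmt.Println(forced, err) // false <nil> in a normal repository
}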
@@ -28,7 +28,6 @@ import (
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/globallock"
|
||||
"code.gitea.io/gitea/modules/graceful"
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
@@ -142,36 +141,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
|
||||
return err
|
||||
}
|
||||
|
||||
compareInfo, err := baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(),
|
||||
git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(compareInfo.Commits) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
data := issues_model.PushActionContent{IsForcePush: false}
|
||||
data.CommitIDs = make([]string, 0, len(compareInfo.Commits))
|
||||
for i := len(compareInfo.Commits) - 1; i >= 0; i-- {
|
||||
data.CommitIDs = append(data.CommitIDs, compareInfo.Commits[i].ID.String())
|
||||
}
|
||||
|
||||
dataJSON, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ops := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypePullRequestPush,
|
||||
Doer: issue.Poster,
|
||||
Repo: repo,
|
||||
Issue: pr.Issue,
|
||||
IsForcePush: false,
|
||||
Content: string(dataJSON),
|
||||
}
|
||||
|
||||
if _, err = issues_model.CreateComment(ctx, ops); err != nil {
|
||||
if _, err := CreatePushPullComment(ctx, issue.Poster, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -193,6 +163,20 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
|
||||
|
||||
issue_service.ReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifiers)
|
||||
|
||||
// Request reviews, these should be requested before other notifications because they will add request reviews record
|
||||
// on database
|
||||
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
|
||||
for _, reviewer := range opts.Reviewers {
|
||||
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for _, teamReviewer := range opts.TeamReviewers {
|
||||
if _, err = issue_service.TeamReviewRequest(ctx, pr.Issue, issue.Poster, teamReviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, issue.Poster, issue.Content)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -211,17 +195,7 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
|
||||
}
|
||||
notify_service.IssueChangeAssignee(ctx, issue.Poster, issue, assignee, false, assigneeCommentMap[assigneeID])
|
||||
}
|
||||
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
|
||||
for _, reviewer := range opts.Reviewers {
|
||||
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for _, teamReviewer := range opts.TeamReviewers {
|
||||
if _, err = issue_service.TeamReviewRequest(ctx, pr.Issue, issue.Poster, teamReviewer, true); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -332,24 +306,42 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer
|
||||
pr.CommitsAhead = divergence.Ahead
|
||||
pr.CommitsBehind = divergence.Behind
|
||||
|
||||
if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
|
||||
// add first push codes comment
|
||||
baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer baseGitRepo.Close()
|
||||
|
||||
// Create comment
|
||||
options := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypeChangeTargetBranch,
|
||||
Doer: doer,
|
||||
Repo: pr.Issue.Repo,
|
||||
Issue: pr.Issue,
|
||||
OldRef: oldBranch,
|
||||
NewRef: targetBranch,
|
||||
}
|
||||
if _, err = issues_model.CreateComment(ctx, options); err != nil {
|
||||
return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
|
||||
}
|
||||
return db.WithTx(ctx, func(ctx context.Context) error {
|
||||
if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
// Create comment
|
||||
options := &issues_model.CreateCommentOptions{
|
||||
Type: issues_model.CommentTypeChangeTargetBranch,
|
||||
Doer: doer,
|
||||
Repo: pr.Issue.Repo,
|
||||
Issue: pr.Issue,
|
||||
OldRef: oldBranch,
|
||||
NewRef: targetBranch,
|
||||
}
|
||||
if _, err = issues_model.CreateComment(ctx, options); err != nil {
|
||||
return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
|
||||
}
|
||||
|
||||
// Delete all old push comments and insert new push comments
|
||||
if _, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID).
|
||||
And("type = ?", issues_model.CommentTypePullRequestPush).
|
||||
NoAutoCondition().
|
||||
Delete(new(issues_model.Comment)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = CreatePushPullComment(ctx, doer, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
func checkForInvalidation(ctx context.Context, requests issues_model.PullRequestList, repoID int64, doer *user_model.User, branch string) error {
|
||||
@@ -410,7 +402,7 @@ func AddTestPullRequestTask(opts TestPullRequestOptions) {
|
||||
}
|
||||
|
||||
StartPullRequestCheckImmediately(ctx, pr)
|
||||
comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID)
|
||||
comment, err := CreatePushPullComment(ctx, opts.Doer, pr, opts.OldCommitID, opts.NewCommitID, opts.IsForcePush)
|
||||
if err == nil && comment != nil {
|
||||
notify_service.PullRequestPushCommits(ctx, opts.Doer, pr, comment)
|
||||
}
|
||||
|
||||
@@ -90,15 +90,8 @@ func GetTreeBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git
if rangeStart >= len(entries) {
return tree, nil
}
var rangeEnd int
if len(entries) > perPage {
tree.Truncated = true
}
if rangeStart+perPage < len(entries) {
rangeEnd = rangeStart + perPage
} else {
rangeEnd = len(entries)
}
rangeEnd := min(rangeStart+perPage, len(entries))
tree.Truncated = rangeEnd < len(entries)
tree.Entries = make([]api.GitEntry, rangeEnd-rangeStart)
for e := rangeStart; e < rangeEnd; e++ {
i := e - rangeStart
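
The replacement above folds the old truncation and range branches into a single min call. A minimal standalone sketch of the same arithmetic, with illustrative names that are not part of Gitea's API:

// paginate returns the half-open index range [start, end) for a 1-based page,
// plus whether more entries remain after this page (i.e. the listing is truncated).
func paginate(total, page, perPage int) (start, end int, truncated bool) {
	start = (page - 1) * perPage
	if start >= total {
		return total, total, false
	}
	end = min(start+perPage, total) // built-in min, Go 1.21+
	return start, end, end < total
}

Page numbers past the end yield an empty range rather than an error, matching the early return in the hunk above.
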
@@ -178,7 +171,9 @@ func newTreeViewNodeFromEntry(ctx context.Context, repoLink string, renderedIcon
} else if subModule != nil {
submoduleFile := git.NewCommitSubmoduleFile(repoLink, node.FullPath, subModule.URL, entry.ID.String())
webLink := submoduleFile.SubmoduleWebLinkTree(ctx)
node.SubmoduleURL = webLink.CommitWebLink
if webLink != nil {
node.SubmoduleURL = webLink.CommitWebLink
}
}
}

@@ -13,6 +13,7 @@ import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
git_model "code.gitea.io/gitea/models/git"
|
||||
@@ -39,29 +40,41 @@ type expansion struct {
|
||||
Transformers []transformer
|
||||
}
|
||||
|
||||
var defaultTransformers = []transformer{
|
||||
{Name: "SNAKE", Transform: xstrings.ToSnakeCase},
|
||||
{Name: "KEBAB", Transform: xstrings.ToKebabCase},
|
||||
{Name: "CAMEL", Transform: xstrings.ToCamelCase},
|
||||
{Name: "PASCAL", Transform: xstrings.ToPascalCase},
|
||||
{Name: "LOWER", Transform: strings.ToLower},
|
||||
{Name: "UPPER", Transform: strings.ToUpper},
|
||||
{Name: "TITLE", Transform: util.ToTitleCase},
|
||||
}
|
||||
var globalVars = sync.OnceValue(func() (ret struct {
|
||||
defaultTransformers []transformer
|
||||
fileNameSanitizeRegexp *regexp.Regexp
|
||||
},
|
||||
) {
|
||||
ret.defaultTransformers = []transformer{
|
||||
{Name: "SNAKE", Transform: xstrings.ToSnakeCase},
|
||||
{Name: "KEBAB", Transform: xstrings.ToKebabCase},
|
||||
{Name: "CAMEL", Transform: xstrings.ToCamelCase},
|
||||
{Name: "PASCAL", Transform: xstrings.ToPascalCase},
|
||||
{Name: "LOWER", Transform: strings.ToLower},
|
||||
{Name: "UPPER", Transform: strings.ToUpper},
|
||||
{Name: "TITLE", Transform: util.ToTitleCase},
|
||||
}
|
||||
|
||||
func generateExpansion(ctx context.Context, src string, templateRepo, generateRepo *repo_model.Repository, sanitizeFileName bool) string {
|
||||
// invalid filename contents, based on https://github.com/sindresorhus/filename-reserved-regex
|
||||
// "COM10" needs to be opened with UNC "\\.\COM10" on Windows, so itself is valid
|
||||
ret.fileNameSanitizeRegexp = regexp.MustCompile(`(?i)[<>:"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`)
|
||||
return ret
|
||||
})
|
||||
|
||||
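
globalVars above wraps the transformers and the sanitize regexp in sync.OnceValue, so they are built lazily, exactly once, on first use rather than at package init. A reduced sketch of that pattern, assuming Go 1.21+ (the regexp here is only the reserved-name part of the real one):

package main

import (
	"fmt"
	"regexp"
	"sync"
)

// the function runs at most once; every later call returns the cached result
var reservedNameRegexp = sync.OnceValue(func() *regexp.Regexp {
	return regexp.MustCompile(`(?i)^(con|prn|aux|nul|com\d|lpt\d)$`)
})

func main() {
	fmt.Println(reservedNameRegexp().MatchString("COM3"))   // true
	fmt.Println(reservedNameRegexp().MatchString("readme")) // false
}
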
func generateExpansion(ctx context.Context, src string, templateRepo, generateRepo *repo_model.Repository) string {
|
||||
transformers := globalVars().defaultTransformers
|
||||
year, month, day := time.Now().Date()
|
||||
expansions := []expansion{
|
||||
{Name: "YEAR", Value: strconv.Itoa(year), Transformers: nil},
|
||||
{Name: "MONTH", Value: fmt.Sprintf("%02d", int(month)), Transformers: nil},
|
||||
{Name: "MONTH_ENGLISH", Value: month.String(), Transformers: defaultTransformers},
|
||||
{Name: "MONTH_ENGLISH", Value: month.String(), Transformers: transformers},
|
||||
{Name: "DAY", Value: fmt.Sprintf("%02d", day), Transformers: nil},
|
||||
{Name: "REPO_NAME", Value: generateRepo.Name, Transformers: defaultTransformers},
|
||||
{Name: "TEMPLATE_NAME", Value: templateRepo.Name, Transformers: defaultTransformers},
|
||||
{Name: "REPO_NAME", Value: generateRepo.Name, Transformers: transformers},
|
||||
{Name: "TEMPLATE_NAME", Value: templateRepo.Name, Transformers: transformers},
|
||||
{Name: "REPO_DESCRIPTION", Value: generateRepo.Description, Transformers: nil},
|
||||
{Name: "TEMPLATE_DESCRIPTION", Value: templateRepo.Description, Transformers: nil},
|
||||
{Name: "REPO_OWNER", Value: generateRepo.OwnerName, Transformers: defaultTransformers},
|
||||
{Name: "TEMPLATE_OWNER", Value: templateRepo.OwnerName, Transformers: defaultTransformers},
|
||||
{Name: "REPO_OWNER", Value: generateRepo.OwnerName, Transformers: transformers},
|
||||
{Name: "TEMPLATE_OWNER", Value: templateRepo.OwnerName, Transformers: transformers},
|
||||
{Name: "REPO_LINK", Value: generateRepo.Link(), Transformers: nil},
|
||||
{Name: "TEMPLATE_LINK", Value: templateRepo.Link(), Transformers: nil},
|
||||
{Name: "REPO_HTTPS_URL", Value: generateRepo.CloneLinkGeneral(ctx).HTTPS, Transformers: nil},
|
||||
@@ -79,32 +92,23 @@ func generateExpansion(ctx context.Context, src string, templateRepo, generateRe
|
||||
}
|
||||
|
||||
return os.Expand(src, func(key string) string {
|
||||
if expansion, ok := expansionMap[key]; ok {
|
||||
if sanitizeFileName {
|
||||
return fileNameSanitize(expansion)
|
||||
}
|
||||
return expansion
|
||||
if val, ok := expansionMap[key]; ok {
|
||||
return val
|
||||
}
|
||||
return key
|
||||
})
|
||||
}
|
||||
|
||||
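
generateExpansion above is built on os.Expand, which calls the mapping function for every ${KEY} it finds; for unknown keys the code returns the key itself, so ${UNKNOWN} collapses to its bare name. A small self-contained sketch of that behavior (the variable names are made up for the example):

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	vars := map[string]string{
		"REPO_NAME":       "my-repo",
		"REPO_NAME_UPPER": strings.ToUpper("my-repo"), // a transformer applied ahead of time
	}
	src := "cloned ${REPO_NAME} (${REPO_NAME_UPPER}); ${UNKNOWN} stays as its key"
	out := os.Expand(src, func(key string) string {
		if val, ok := vars[key]; ok {
			return val
		}
		return key // unknown keys fall back to the key text, matching the code above
	})
	fmt.Println(out) // cloned my-repo (MY-REPO); UNKNOWN stays as its key
}
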
// GiteaTemplate holds information about a .gitea/template file
|
||||
type GiteaTemplate struct {
|
||||
Path string
|
||||
Content []byte
|
||||
|
||||
globs []glob.Glob
|
||||
// giteaTemplateFileMatcher holds information about a .gitea/template file
|
||||
type giteaTemplateFileMatcher struct {
|
||||
LocalFullPath string
|
||||
globs []glob.Glob
|
||||
}
|
||||
|
||||
// Globs parses the .gitea/template globs or returns them if they were already parsed
|
||||
func (gt *GiteaTemplate) Globs() []glob.Glob {
|
||||
if gt.globs != nil {
|
||||
return gt.globs
|
||||
}
|
||||
|
||||
func newGiteaTemplateFileMatcher(fullPath string, content []byte) *giteaTemplateFileMatcher {
|
||||
gt := &giteaTemplateFileMatcher{LocalFullPath: fullPath}
|
||||
gt.globs = make([]glob.Glob, 0)
|
||||
scanner := bufio.NewScanner(bytes.NewReader(gt.Content))
|
||||
scanner := bufio.NewScanner(bytes.NewReader(content))
|
||||
for scanner.Scan() {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
if line == "" || strings.HasPrefix(line, "#") {
|
||||
@@ -112,73 +116,91 @@ func (gt *GiteaTemplate) Globs() []glob.Glob {
|
||||
}
|
||||
g, err := glob.Compile(line, '/')
|
||||
if err != nil {
|
||||
log.Info("Invalid glob expression '%s' (skipped): %v", line, err)
|
||||
log.Debug("Invalid glob expression '%s' (skipped): %v", line, err)
|
||||
continue
|
||||
}
|
||||
gt.globs = append(gt.globs, g)
|
||||
}
|
||||
return gt.globs
|
||||
return gt
|
||||
}
|
||||
|
||||
func readGiteaTemplateFile(tmpDir string) (*GiteaTemplate, error) {
|
||||
gtPath := filepath.Join(tmpDir, ".gitea", "template")
|
||||
if _, err := os.Stat(gtPath); os.IsNotExist(err) {
|
||||
func (gt *giteaTemplateFileMatcher) HasRules() bool {
|
||||
return len(gt.globs) != 0
|
||||
}
|
||||
|
||||
func (gt *giteaTemplateFileMatcher) Match(s string) bool {
|
||||
for _, g := range gt.globs {
|
||||
if g.Match(s) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
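
The matcher compiles each non-comment line with github.com/gobwas/glob, passing '/' as the separator, which is what makes * stop at directory boundaries while ** crosses them. A minimal sketch (the pattern is taken from the test file further below):

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// with '/' as a separator, '*' must not cross a directory boundary, '**' may
	g := glob.MustCompile("**/modules/*", '/')
	fmt.Println(g.Match("a/b/c/modules/README.md"))   // true
	fmt.Println(g.Match("a/b/c/modules/d/README.md")) // false, '*' stops at '/'
}
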
func readGiteaTemplateFile(tmpDir string) (*giteaTemplateFileMatcher, error) {
|
||||
localPath := filepath.Join(tmpDir, ".gitea", "template")
|
||||
if _, err := os.Stat(localPath); os.IsNotExist(err) {
|
||||
return nil, nil
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
content, err := os.ReadFile(gtPath)
|
||||
content, err := os.ReadFile(localPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &GiteaTemplate{Path: gtPath, Content: content}, nil
|
||||
return newGiteaTemplateFileMatcher(localPath, content), nil
|
||||
}
|
||||
|
||||
func processGiteaTemplateFile(ctx context.Context, tmpDir string, templateRepo, generateRepo *repo_model.Repository, giteaTemplateFile *GiteaTemplate) error {
|
||||
if err := util.Remove(giteaTemplateFile.Path); err != nil {
|
||||
return fmt.Errorf("remove .giteatemplate: %w", err)
|
||||
func substGiteaTemplateFile(ctx context.Context, tmpDir, tmpDirSubPath string, templateRepo, generateRepo *repo_model.Repository) error {
|
||||
tmpFullPath := filepath.Join(tmpDir, tmpDirSubPath)
|
||||
if ok, err := util.IsRegularFile(tmpFullPath); !ok {
|
||||
return err
|
||||
}
|
||||
if len(giteaTemplateFile.Globs()) == 0 {
|
||||
|
||||
content, err := os.ReadFile(tmpFullPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := util.Remove(tmpFullPath); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
generatedContent := generateExpansion(ctx, string(content), templateRepo, generateRepo)
|
||||
substSubPath := filepath.Clean(filePathSanitize(generateExpansion(ctx, tmpDirSubPath, templateRepo, generateRepo)))
|
||||
newLocalPath := filepath.Join(tmpDir, substSubPath)
|
||||
regular, err := util.IsRegularFile(newLocalPath)
|
||||
if canWrite := regular || os.IsNotExist(err); !canWrite {
|
||||
return nil
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Dir(newLocalPath), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.WriteFile(newLocalPath, []byte(generatedContent), 0o644)
|
||||
}
|
||||
|
||||
func processGiteaTemplateFile(ctx context.Context, tmpDir string, templateRepo, generateRepo *repo_model.Repository, fileMatcher *giteaTemplateFileMatcher) error {
|
||||
if err := util.Remove(fileMatcher.LocalFullPath); err != nil {
|
||||
return fmt.Errorf("unable to remove .gitea/template: %w", err)
|
||||
}
|
||||
if !fileMatcher.HasRules() {
|
||||
return nil // Avoid walking tree if there are no globs
|
||||
}
|
||||
tmpDirSlash := strings.TrimSuffix(filepath.ToSlash(tmpDir), "/") + "/"
|
||||
return filepath.WalkDir(tmpDirSlash, func(path string, d os.DirEntry, walkErr error) error {
|
||||
|
||||
return filepath.WalkDir(tmpDir, func(fullPath string, d os.DirEntry, walkErr error) error {
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
|
||||
if d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
base := strings.TrimPrefix(filepath.ToSlash(path), tmpDirSlash)
|
||||
for _, g := range giteaTemplateFile.Globs() {
|
||||
if g.Match(base) {
|
||||
content, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
generatedContent := []byte(generateExpansion(ctx, string(content), templateRepo, generateRepo, false))
|
||||
if err := os.WriteFile(path, generatedContent, 0o644); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
substPath := filepath.FromSlash(filepath.Join(tmpDirSlash, generateExpansion(ctx, base, templateRepo, generateRepo, true)))
|
||||
|
||||
// Create parent subdirectories if needed or continue silently if it exists
|
||||
if err = os.MkdirAll(filepath.Dir(substPath), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Substitute filename variables
|
||||
if err = os.Rename(path, substPath); err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
tmpDirSubPath, err := filepath.Rel(tmpDir, fullPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fileMatcher.Match(filepath.ToSlash(tmpDirSubPath)) {
|
||||
return substGiteaTemplateFile(ctx, tmpDir, tmpDirSubPath, templateRepo, generateRepo)
|
||||
}
|
||||
return nil
|
||||
}) // end: WalkDir
|
||||
@@ -218,13 +240,13 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r
|
||||
}
|
||||
|
||||
// Variable expansion
|
||||
giteaTemplateFile, err := readGiteaTemplateFile(tmpDir)
|
||||
fileMatcher, err := readGiteaTemplateFile(tmpDir)
|
||||
if err != nil {
|
||||
return fmt.Errorf("readGiteaTemplateFile: %w", err)
|
||||
}
|
||||
|
||||
if giteaTemplateFile != nil {
|
||||
err = processGiteaTemplateFile(ctx, tmpDir, templateRepo, generateRepo, giteaTemplateFile)
|
||||
if fileMatcher != nil {
|
||||
err = processGiteaTemplateFile(ctx, tmpDir, templateRepo, generateRepo, fileMatcher)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -321,12 +343,17 @@ func (gro GenerateRepoOptions) IsValid() bool {
|
||||
gro.IssueLabels || gro.ProtectedBranch // or other items as they are added
|
||||
}
|
||||
|
||||
var fileNameSanitizeRegexp = regexp.MustCompile(`(?i)\.\.|[<>:\"/\\|?*\x{0000}-\x{001F}]|^(con|prn|aux|nul|com\d|lpt\d)$`)
|
||||
|
||||
// Sanitize user input to valid OS filenames
|
||||
//
|
||||
// Based on https://github.com/sindresorhus/filename-reserved-regex
|
||||
// Adds ".." to prevent directory traversal
|
||||
func fileNameSanitize(s string) string {
|
||||
return strings.TrimSpace(fileNameSanitizeRegexp.ReplaceAllString(s, "_"))
|
||||
func filePathSanitize(s string) string {
|
||||
fields := strings.Split(filepath.ToSlash(s), "/")
|
||||
for i, field := range fields {
|
||||
field = strings.TrimSpace(strings.TrimSpace(globalVars().fileNameSanitizeRegexp.ReplaceAllString(field, "_")))
|
||||
if strings.HasPrefix(field, "..") {
|
||||
field = "__" + field[2:]
|
||||
}
|
||||
if strings.EqualFold(field, ".git") {
|
||||
field = "_" + field[1:]
|
||||
}
|
||||
fields[i] = field
|
||||
}
|
||||
return filepath.FromSlash(strings.Join(fields, "/"))
|
||||
}
|
||||
|
||||
@@ -4,13 +4,18 @@
|
||||
package repository
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var giteaTemplate = []byte(`
|
||||
func TestGiteaTemplate(t *testing.T) {
|
||||
giteaTemplate := []byte(`
|
||||
# Header
|
||||
|
||||
# All .go files
|
||||
@@ -23,48 +28,153 @@ text/*.txt
|
||||
**/modules/*
|
||||
`)
|
||||
|
||||
func TestGiteaTemplate(t *testing.T) {
|
||||
gt := GiteaTemplate{Content: giteaTemplate}
|
||||
assert.Len(t, gt.Globs(), 3)
|
||||
gt := newGiteaTemplateFileMatcher("", giteaTemplate)
|
||||
assert.Len(t, gt.globs, 3)
|
||||
|
||||
tt := []struct {
|
||||
Path string
|
||||
Match bool
|
||||
}{
|
||||
{Path: "main.go", Match: true},
|
||||
{Path: "a/b/c/d/e.go", Match: true},
|
||||
{Path: "main.txt", Match: false},
|
||||
{Path: "a/b.txt", Match: false},
|
||||
{Path: "sub/sub/foo.go", Match: true},
|
||||
|
||||
{Path: "a.txt", Match: false},
|
||||
{Path: "text/a.txt", Match: true},
|
||||
{Path: "text/b.txt", Match: true},
|
||||
{Path: "text/c.json", Match: false},
|
||||
{Path: "sub/text/a.txt", Match: false},
|
||||
{Path: "text/a.json", Match: false},
|
||||
|
||||
{Path: "a/b/c/modules/README.md", Match: true},
|
||||
{Path: "a/b/c/modules/d/README.md", Match: false},
|
||||
}
|
||||
|
||||
for _, tc := range tt {
|
||||
t.Run(tc.Path, func(t *testing.T) {
|
||||
match := false
|
||||
for _, g := range gt.Globs() {
|
||||
if g.Match(tc.Path) {
|
||||
match = true
|
||||
break
|
||||
}
|
||||
}
|
||||
assert.Equal(t, tc.Match, match)
|
||||
})
|
||||
assert.Equal(t, tc.Match, gt.Match(tc.Path), "path: %s", tc.Path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileNameSanitize(t *testing.T) {
|
||||
assert.Equal(t, "test_CON", fileNameSanitize("test_CON"))
|
||||
assert.Equal(t, "test CON", fileNameSanitize("test CON "))
|
||||
assert.Equal(t, "__traverse__", fileNameSanitize("../traverse/.."))
|
||||
assert.Equal(t, "http___localhost_3003_user_test.git", fileNameSanitize("http://localhost:3003/user/test.git"))
|
||||
assert.Equal(t, "_", fileNameSanitize("CON"))
|
||||
assert.Equal(t, "_", fileNameSanitize("con"))
|
||||
assert.Equal(t, "_", fileNameSanitize("\u0000"))
|
||||
assert.Equal(t, "目标", fileNameSanitize("目标"))
|
||||
func TestFilePathSanitize(t *testing.T) {
|
||||
assert.Equal(t, "test_CON", filePathSanitize("test_CON"))
|
||||
assert.Equal(t, "test CON", filePathSanitize("test CON "))
|
||||
assert.Equal(t, "__/traverse/__", filePathSanitize(".. /traverse/ .."))
|
||||
assert.Equal(t, "./__/a/_git/b_", filePathSanitize("./../a/.git/ b: "))
|
||||
assert.Equal(t, "_", filePathSanitize("CoN"))
|
||||
assert.Equal(t, "_", filePathSanitize("LpT1"))
|
||||
assert.Equal(t, "_", filePathSanitize("CoM1"))
|
||||
assert.Equal(t, "_", filePathSanitize("\u0000"))
|
||||
assert.Equal(t, "目标", filePathSanitize("目标"))
|
||||
// unlike filepath.Clean, it only sanitizes, doesn't change the separator layout
|
||||
assert.Equal(t, "", filePathSanitize("")) //nolint:testifylint // for easy reading
|
||||
assert.Equal(t, ".", filePathSanitize("."))
|
||||
assert.Equal(t, "/", filePathSanitize("/"))
|
||||
}
|
||||
|
||||
func TestProcessGiteaTemplateFile(t *testing.T) {
|
||||
tmpDir := filepath.Join(t.TempDir(), "gitea-template-test")
|
||||
|
||||
assertFileContent := func(path, expected string) {
|
||||
data, err := os.ReadFile(filepath.Join(tmpDir, path))
|
||||
if expected == "" {
|
||||
assert.ErrorIs(t, err, os.ErrNotExist)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, expected, string(data), "file content mismatch for %s", path)
|
||||
}
|
||||
|
||||
assertSymLink := func(path, expected string) {
|
||||
link, err := os.Readlink(filepath.Join(tmpDir, path))
|
||||
if expected == "" {
|
||||
assert.ErrorIs(t, err, os.ErrNotExist)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, expected, link, "symlink target mismatch for %s", path)
|
||||
}
|
||||
|
||||
require.NoError(t, os.MkdirAll(tmpDir+"/.gitea", 0o755))
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/.gitea/template", []byte("*\ninclude/**"), 0o644))
|
||||
require.NoError(t, os.MkdirAll(tmpDir+"/sub", 0o755))
|
||||
require.NoError(t, os.MkdirAll(tmpDir+"/include/foo/bar", 0o755))
|
||||
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/sub/link-target", []byte("link target content from ${TEMPLATE_NAME}"), 0o644))
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/include/foo/bar/test.txt", []byte("include subdir ${TEMPLATE_NAME}"), 0o644))
|
||||
|
||||
// case-1
|
||||
{
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/normal", []byte("normal content"), 0o644))
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/template", []byte("template from ${TEMPLATE_NAME}"), 0o644))
|
||||
}
|
||||
|
||||
// case-2
|
||||
{
|
||||
require.NoError(t, os.Symlink(tmpDir+"/sub/link-target", tmpDir+"/link"))
|
||||
}
|
||||
|
||||
// case-3
|
||||
{
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/subst-${REPO_NAME}", []byte("dummy subst repo name"), 0o644))
|
||||
}
|
||||
|
||||
// case-4
|
||||
assertSubstTemplateName := func(normalContent, toLinkContent, fromLinkContent string) {
|
||||
assertFileContent("subst-${TEMPLATE_NAME}-normal", normalContent)
|
||||
assertFileContent("subst-${TEMPLATE_NAME}-to-link", toLinkContent)
|
||||
assertFileContent("subst-${TEMPLATE_NAME}-from-link", fromLinkContent)
|
||||
}
|
||||
{
|
||||
// will succeed
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/subst-${TEMPLATE_NAME}-normal", []byte("dummy subst template name normal"), 0o644))
|
||||
// will skip if the path subst result is a link
|
||||
require.NoError(t, os.WriteFile(tmpDir+"/subst-${TEMPLATE_NAME}-to-link", []byte("dummy subst template name to link"), 0o644))
|
||||
require.NoError(t, os.Symlink(tmpDir+"/sub/link-target", tmpDir+"/subst-TemplateRepoName-to-link"))
|
||||
// will be skipped since the source is a symlink
|
||||
require.NoError(t, os.Symlink(tmpDir+"/sub/link-target", tmpDir+"/subst-${TEMPLATE_NAME}-from-link"))
|
||||
// pre-check
|
||||
assertSubstTemplateName("dummy subst template name normal", "dummy subst template name to link", "link target content from ${TEMPLATE_NAME}")
|
||||
}
|
||||
|
||||
// process the template files
|
||||
{
|
||||
templateRepo := &repo_model.Repository{Name: "TemplateRepoName"}
|
||||
generatedRepo := &repo_model.Repository{Name: "/../.gIt/name"}
|
||||
fileMatcher, _ := readGiteaTemplateFile(tmpDir)
|
||||
err := processGiteaTemplateFile(t.Context(), tmpDir, templateRepo, generatedRepo, fileMatcher)
|
||||
require.NoError(t, err)
|
||||
assertFileContent("include/foo/bar/test.txt", "include subdir TemplateRepoName")
|
||||
}
|
||||
|
||||
// the link target should never be modified, and since it is in a subdirectory, it is not affected by the template either
|
||||
assertFileContent("sub/link-target", "link target content from ${TEMPLATE_NAME}")
|
||||
|
||||
// case-1
|
||||
{
|
||||
assertFileContent("no-such", "")
|
||||
assertFileContent("normal", "normal content")
|
||||
assertFileContent("template", "template from TemplateRepoName")
|
||||
}
|
||||
|
||||
// case-2
|
||||
{
|
||||
// symlinks with templates should be preserved (neither read nor written)
|
||||
assertSymLink("link", tmpDir+"/sub/link-target")
|
||||
}
|
||||
|
||||
// case-3
|
||||
{
|
||||
assertFileContent("subst-${REPO_NAME}", "")
|
||||
assertFileContent("subst-/__/_gIt/name", "dummy subst repo name")
|
||||
}
|
||||
|
||||
// case-4
|
||||
{
|
||||
// the paths with template variables should have been removed; substituting to a regular file succeeds, and the link is preserved
|
||||
assertSubstTemplateName("", "", "link target content from ${TEMPLATE_NAME}")
|
||||
assertFileContent("subst-TemplateRepoName-normal", "dummy subst template name normal")
|
||||
// subst to a link, skip, and the target is unchanged
|
||||
assertSymLink("subst-TemplateRepoName-to-link", tmpDir+"/sub/link-target")
|
||||
// subst from a link, skip, and the target is unchanged
|
||||
assertSymLink("subst-${TEMPLATE_NAME}-from-link", tmpDir+"/sub/link-target")
|
||||
}
|
||||
}
|
||||
|
||||
func TestTransformers(t *testing.T) {
|
||||
@@ -82,9 +192,9 @@ func TestTransformers(t *testing.T) {
|
||||
}
|
||||
|
||||
input := "Abc_Def-XYZ"
|
||||
assert.Len(t, defaultTransformers, len(cases))
|
||||
assert.Len(t, globalVars().defaultTransformers, len(cases))
|
||||
for i, c := range cases {
|
||||
tf := defaultTransformers[i]
|
||||
tf := globalVars().defaultTransformers[i]
|
||||
require.Equal(t, c.name, tf.Name)
|
||||
assert.Equal(t, c.expected, tf.Transform(input), "case %s", c.name)
|
||||
}
|
||||
|
||||
@@ -402,16 +402,11 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo
}

rel, has := relMap[lowerTag]

parts := strings.SplitN(tag.Message, "\n", 2)
note := ""
if len(parts) > 1 {
note = parts[1]
}
title, note := git.SplitCommitTitleBody(tag.Message, 255)
if !has {
rel = &repo_model.Release{
RepoID: repo.ID,
Title: parts[0],
Title: title,
TagName: tags[i],
LowerTagName: lowerTag,
Target: "",
@@ -430,7 +425,7 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo
rel.Sha1 = commit.ID.String()
rel.CreatedUnix = timeutil.TimeStamp(createdAt.Unix())
if rel.IsTag {
rel.Title = parts[0]
rel.Title = title
rel.Note = note
} else {
rel.IsDraft = false

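
The old code split the tag message manually with strings.SplitN; git.SplitCommitTitleBody folds that into one call and also bounds the title length. A rough standalone equivalent of the removed logic (the 255-character cap is an assumption about the helper, shown here as a naive byte truncation):

package main

import (
	"fmt"
	"strings"
)

// splitTitleBody is an illustrative stand-in, not Gitea's git.SplitCommitTitleBody.
func splitTitleBody(message string, maxTitleLen int) (title, note string) {
	title, note, _ = strings.Cut(message, "\n")
	if len(title) > maxTitleLen {
		title = title[:maxTitleLen] // byte-based cut for illustration; the real helper may differ
	}
	return title, note
}

func main() {
	title, note := splitTitleBody("v1.2.3\nBug fixes and minor improvements.", 255)
	fmt.Printf("title=%q note=%q\n", title, note)
}
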
@@ -57,7 +57,7 @@ type (
DiscordPayload struct {
Wait bool `json:"wait"`
Content string `json:"content"`
Username string `json:"username"`
Username string `json:"username,omitempty"`
AvatarURL string `json:"avatar_url,omitempty"`
TTS bool `json:"tts"`
Embeds []DiscordEmbed `json:"embeds"`

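
The only change in the struct above is the omitempty option: an empty Username is now omitted from the serialized JSON instead of being sent as an empty string. A tiny illustration with a stand-in struct:

package main

import (
	"encoding/json"
	"fmt"
)

type payload struct {
	Content  string `json:"content"`
	Username string `json:"username,omitempty"`
}

func main() {
	b, _ := json.Marshal(payload{Content: "hello"})
	fmt.Println(string(b)) // {"content":"hello"} with no "username" key at all
}
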
@@ -445,6 +445,7 @@ func (m *webhookNotifier) DeleteComment(ctx context.Context, doer *user_model.Us
log.Error("LoadPoster: %v", err)
return
}
comment.Issue = nil // reload issue to ensure it has the latest data, especially the number of comments
if err = comment.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return

@@ -35,7 +35,7 @@
|
||||
{{template "repo/diff/whitespace_dropdown" .}}
|
||||
{{template "repo/diff/options_dropdown" .}}
|
||||
{{if .PageIsPullFiles}}
|
||||
<div id="diff-commit-select" data-issuelink="{{$.Issue.Link}}" data-queryparams="?style={{if $.IsSplitStyle}}split{{else}}unified{{end}}&whitespace={{$.WhitespaceBehavior}}&show-outdated={{$.ShowOutdatedComments}}" data-filter_changes_by_commit="{{ctx.Locale.Tr "repo.pulls.filter_changes_by_commit"}}">
|
||||
<div id="diff-commit-select" data-merge-base="{{.MergeBase}}" data-issuelink="{{$.Issue.Link}}" data-queryparams="?style={{if $.IsSplitStyle}}split{{else}}unified{{end}}&whitespace={{$.WhitespaceBehavior}}&show-outdated={{$.ShowOutdatedComments}}" data-filter_changes_by_commit="{{ctx.Locale.Tr "repo.pulls.filter_changes_by_commit"}}">
|
||||
{{/* the following will be replaced by vue component, but this avoids any loading artifacts till the vue component is initialized */}}
|
||||
<div class="ui jump dropdown tiny basic button custom">
|
||||
{{svg "octicon-git-commit"}}
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
</span>
|
||||
{{end}}
|
||||
</div>
|
||||
<div>
|
||||
<div class="tw-flex tw-items-center">
|
||||
{{if or $invalid $resolved}}
|
||||
<button id="show-outdated-{{$comment.ID}}" data-comment="{{$comment.ID}}" class="{{if not $resolved}}tw-hidden{{end}} btn tiny show-outdated">
|
||||
{{svg "octicon-unfold" 16 "tw-mr-2"}}
|
||||
|
||||
@@ -119,7 +119,7 @@
|
||||
{{range $idx, $code := .FileContent}}
|
||||
{{$line := Eval $idx "+" 1}}
|
||||
<tr>
|
||||
<td id="L{{$line}}" class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
|
||||
<td class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
|
||||
{{if $.EscapeStatus.Escaped}}
|
||||
<td class="lines-escape">{{if (index $.LineEscapeStatus $idx).Escaped}}<button class="toggle-escape-button btn interact-bg" title="{{if (index $.LineEscapeStatus $idx).HasInvisible}}{{ctx.Locale.Tr "repo.invisible_runes_line"}} {{end}}{{if (index $.LineEscapeStatus $idx).HasAmbiguous}}{{ctx.Locale.Tr "repo.ambiguous_runes_line"}}{{end}}"></button>{{end}}</td>
|
||||
{{end}}
|
||||
|
||||
@@ -355,6 +355,7 @@ func TestPackageSwift(t *testing.T) {
|
||||
assert.Equal(t, packageVersion, result.Metadata.Version)
|
||||
assert.Equal(t, packageDescription, result.Metadata.Description)
|
||||
assert.Equal(t, "Swift", result.Metadata.ProgrammingLanguage.Name)
|
||||
assert.Equal(t, packageAuthor, result.Metadata.Author.Name)
|
||||
assert.Equal(t, packageAuthor, result.Metadata.Author.GivenName)
|
||||
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s/%s.json", url, packageScope, packageName, packageVersion)).
|
||||
|
||||
@@ -11,7 +11,11 @@ import (
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/tests"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestAPIReposGitTrees(t *testing.T) {
|
||||
@@ -32,13 +36,21 @@ func TestAPIReposGitTrees(t *testing.T) {
|
||||
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeReadRepository)
|
||||
|
||||
// Test a public repo that anyone can GET the tree of
|
||||
for _, ref := range [...]string{
|
||||
"master", // Branch
|
||||
repo1TreeSHA, // Tree SHA
|
||||
} {
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/git/trees/%s", user2.Name, repo1.Name, ref)
|
||||
MakeRequest(t, req, http.StatusOK)
|
||||
}
|
||||
_ = MakeRequest(t, NewRequest(t, "GET", "/api/v1/repos/user2/repo1/git/trees/master"), http.StatusOK)
|
||||
|
||||
resp := MakeRequest(t, NewRequest(t, "GET", "/api/v1/repos/user2/repo1/git/trees/62fb502a7172d4453f0322a2cc85bddffa57f07a?per_page=1"), http.StatusOK)
|
||||
var respGitTree api.GitTreeResponse
|
||||
DecodeJSON(t, resp, &respGitTree)
|
||||
assert.True(t, respGitTree.Truncated)
|
||||
require.Len(t, respGitTree.Entries, 1)
|
||||
assert.Equal(t, "File-WoW", respGitTree.Entries[0].Path)
|
||||
|
||||
resp = MakeRequest(t, NewRequest(t, "GET", "/api/v1/repos/user2/repo1/git/trees/62fb502a7172d4453f0322a2cc85bddffa57f07a?page=2&per_page=1"), http.StatusOK)
|
||||
respGitTree = api.GitTreeResponse{}
|
||||
DecodeJSON(t, resp, &respGitTree)
|
||||
assert.False(t, respGitTree.Truncated)
|
||||
require.Len(t, respGitTree.Entries, 1)
|
||||
assert.Equal(t, "README.md", respGitTree.Entries[0].Path)
|
||||
|
||||
// Tests a private repo with no token so will fail
|
||||
for _, ref := range [...]string{
|
||||
|
||||
@@ -30,7 +30,7 @@ func Test_CmdKeys(t *testing.T) {
|
||||
"with_key",
|
||||
[]string{"keys", "-e", "git", "-u", "git", "-t", "ssh-rsa", "-k", "AAAAB3NzaC1yc2EAAAADAQABAAABgQDWVj0fQ5N8wNc0LVNA41wDLYJ89ZIbejrPfg/avyj3u/ZohAKsQclxG4Ju0VirduBFF9EOiuxoiFBRr3xRpqzpsZtnMPkWVWb+akZwBFAx8p+jKdy4QXR/SZqbVobrGwip2UjSrri1CtBxpJikojRIZfCnDaMOyd9Jp6KkujvniFzUWdLmCPxUE9zhTaPu0JsEP7MW0m6yx7ZUhHyfss+NtqmFTaDO+QlMR7L2QkDliN2Jl3Xa3PhuWnKJfWhdAq1Cw4oraKUOmIgXLkuiuxVQ6mD3AiFupkmfqdHq6h+uHHmyQqv3gU+/sD8GbGAhf6ftqhTsXjnv1Aj4R8NoDf9BS6KRkzkeun5UisSzgtfQzjOMEiJtmrep2ZQrMGahrXa+q4VKr0aKJfm+KlLfwm/JztfsBcqQWNcTURiCFqz+fgZw0Ey/de0eyMzldYTdXXNRYCKjs9bvBK+6SSXRM7AhftfQ0ZuoW5+gtinPrnmoOaSCEJbAiEiTO/BzOHgowiM="},
|
||||
false,
|
||||
"# gitea public key\ncommand=\"" + setting.AppPath + " --config=" + util.ShellEscape(setting.CustomConf) + " serv key-1\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDWVj0fQ5N8wNc0LVNA41wDLYJ89ZIbejrPfg/avyj3u/ZohAKsQclxG4Ju0VirduBFF9EOiuxoiFBRr3xRpqzpsZtnMPkWVWb+akZwBFAx8p+jKdy4QXR/SZqbVobrGwip2UjSrri1CtBxpJikojRIZfCnDaMOyd9Jp6KkujvniFzUWdLmCPxUE9zhTaPu0JsEP7MW0m6yx7ZUhHyfss+NtqmFTaDO+QlMR7L2QkDliN2Jl3Xa3PhuWnKJfWhdAq1Cw4oraKUOmIgXLkuiuxVQ6mD3AiFupkmfqdHq6h+uHHmyQqv3gU+/sD8GbGAhf6ftqhTsXjnv1Aj4R8NoDf9BS6KRkzkeun5UisSzgtfQzjOMEiJtmrep2ZQrMGahrXa+q4VKr0aKJfm+KlLfwm/JztfsBcqQWNcTURiCFqz+fgZw0Ey/de0eyMzldYTdXXNRYCKjs9bvBK+6SSXRM7AhftfQ0ZuoW5+gtinPrnmoOaSCEJbAiEiTO/BzOHgowiM= user2@localhost\n",
|
||||
"# gitea public key\ncommand=\"" + setting.AppPath + " --config=" + util.ShellEscape(setting.CustomConf) + " serv key-1\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDWVj0fQ5N8wNc0LVNA41wDLYJ89ZIbejrPfg/avyj3u/ZohAKsQclxG4Ju0VirduBFF9EOiuxoiFBRr3xRpqzpsZtnMPkWVWb+akZwBFAx8p+jKdy4QXR/SZqbVobrGwip2UjSrri1CtBxpJikojRIZfCnDaMOyd9Jp6KkujvniFzUWdLmCPxUE9zhTaPu0JsEP7MW0m6yx7ZUhHyfss+NtqmFTaDO+QlMR7L2QkDliN2Jl3Xa3PhuWnKJfWhdAq1Cw4oraKUOmIgXLkuiuxVQ6mD3AiFupkmfqdHq6h+uHHmyQqv3gU+/sD8GbGAhf6ftqhTsXjnv1Aj4R8NoDf9BS6KRkzkeun5UisSzgtfQzjOMEiJtmrep2ZQrMGahrXa+q4VKr0aKJfm+KlLfwm/JztfsBcqQWNcTURiCFqz+fgZw0Ey/de0eyMzldYTdXXNRYCKjs9bvBK+6SSXRM7AhftfQ0ZuoW5+gtinPrnmoOaSCEJbAiEiTO/BzOHgowiM= user-2\n",
|
||||
},
|
||||
{"invalid", []string{"keys", "--not-a-flag=git"}, true, "Incorrect Usage: flag provided but not defined: -not-a-flag\n\n"},
|
||||
}
|
||||
|
||||
@@ -75,6 +75,11 @@ func TestEmptyRepoAddFile(t *testing.T) {
|
||||
req = NewRequest(t, "GET", "/api/v1/repos/user30/empty/raw/main/README.md").AddTokenAuth(token)
|
||||
session.MakeRequest(t, req, http.StatusNotFound)
|
||||
|
||||
// test feed
|
||||
req = NewRequest(t, "GET", "/user30/empty/rss/branch/main/README.md").AddTokenAuth(token).SetHeader("Accept", "application/rss+xml")
|
||||
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||
assert.Contains(t, resp.Body.String(), "</rss>")
|
||||
|
||||
// create a new file
|
||||
req = NewRequest(t, "GET", "/user30/empty/_new/"+setting.Repository.DefaultBranch)
|
||||
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"code.gitea.io/gitea/models/db"
|
||||
issues_model "code.gitea.io/gitea/models/issues"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/indexer/issues"
|
||||
@@ -471,19 +472,38 @@ func TestIssueRedirect(t *testing.T) {
|
||||
session := loginUser(t, "user2")
|
||||
|
||||
// Test external tracker where style not set (shall default to numeric)
|
||||
req := NewRequest(t, "GET", path.Join("org26", "repo_external_tracker", "issues", "1"))
|
||||
req := NewRequest(t, "GET", "/org26/repo_external_tracker/issues/1")
|
||||
resp := session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "https://tracker.com/org26/repo_external_tracker/issues/1", test.RedirectURL(resp))
|
||||
|
||||
// Test external tracker with numeric style
|
||||
req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_numeric", "issues", "1"))
|
||||
req = NewRequest(t, "GET", "/org26/repo_external_tracker_numeric/issues/1")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "https://tracker.com/org26/repo_external_tracker_numeric/issues/1", test.RedirectURL(resp))
|
||||
|
||||
// Test external tracker with alphanumeric style (for a pull request)
|
||||
req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_alpha", "issues", "1"))
|
||||
req = NewRequest(t, "GET", "/org26/repo_external_tracker_alpha/issues/1")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/"+path.Join("org26", "repo_external_tracker_alpha", "pulls", "1"), test.RedirectURL(resp))
|
||||
assert.Equal(t, "/org26/repo_external_tracker_alpha/pulls/1", test.RedirectURL(resp))
|
||||
|
||||
// test to check that the PR redirection works if the issue unit is disabled
|
||||
// repo1 is a normal repository with issue unit enabled, visit issue 2(which is a pull request)
|
||||
// will redirect to pulls
|
||||
req = NewRequest(t, "GET", "/user2/repo1/issues/2")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/user2/repo1/pulls/2", test.RedirectURL(resp))
|
||||
|
||||
repoUnit := unittest.AssertExistsAndLoadBean(t, &repo_model.RepoUnit{RepoID: 1, Type: unit.TypeIssues})
|
||||
|
||||
// disable issue unit, it will be reset
|
||||
_, err := db.DeleteByID[repo_model.RepoUnit](t.Context(), repoUnit.ID)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// even if the issue unit is disabled, visiting an issue which is a pull request
|
||||
// will still redirect to pull request
|
||||
req = NewRequest(t, "GET", "/user2/repo1/issues/2")
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/user2/repo1/pulls/2", test.RedirectURL(resp))
|
||||
}
|
||||
|
||||
func TestSearchIssues(t *testing.T) {
|
||||
|
||||
@@ -105,7 +105,15 @@ func TestPullCompare_EnableAllowEditsFromMaintainer(t *testing.T) {
|
||||
|
||||
// user4 creates a new branch and a PR
|
||||
testEditFileToNewBranch(t, user4Session, "user4", forkedRepoName, "master", "user4/update-readme", "README.md", "Hello, World\n(Edited by user4)\n")
|
||||
resp := testPullCreateDirectly(t, user4Session, repo3.OwnerName, repo3.Name, "master", "user4", forkedRepoName, "user4/update-readme", "PR for user4 forked repo3")
|
||||
resp := testPullCreateDirectly(t, user4Session, createPullRequestOptions{
|
||||
BaseRepoOwner: repo3.OwnerName,
|
||||
BaseRepoName: repo3.Name,
|
||||
BaseBranch: "master",
|
||||
HeadRepoOwner: "user4",
|
||||
HeadRepoName: forkedRepoName,
|
||||
HeadBranch: "user4/update-readme",
|
||||
Title: "PR for user4 forked repo3",
|
||||
})
|
||||
prURL := test.RedirectURL(resp)
|
||||
|
||||
// user2 (admin of repo3) goes to the PR files page
|
||||
|
||||
@@ -60,26 +60,50 @@ func testPullCreate(t *testing.T, session *TestSession, user, repo string, toSel
|
||||
return resp
|
||||
}
|
||||
|
||||
func testPullCreateDirectly(t *testing.T, session *TestSession, baseRepoOwner, baseRepoName, baseBranch, headRepoOwner, headRepoName, headBranch, title string) *httptest.ResponseRecorder {
|
||||
headCompare := headBranch
|
||||
if headRepoOwner != "" {
|
||||
if headRepoName != "" {
|
||||
headCompare = fmt.Sprintf("%s/%s:%s", headRepoOwner, headRepoName, headBranch)
|
||||
type createPullRequestOptions struct {
|
||||
BaseRepoOwner string
|
||||
BaseRepoName string
|
||||
BaseBranch string
|
||||
HeadRepoOwner string
|
||||
HeadRepoName string
|
||||
HeadBranch string
|
||||
Title string
|
||||
ReviewerIDs string // comma-separated list of user IDs
|
||||
}
|
||||
|
||||
func (opts createPullRequestOptions) IsValid() bool {
|
||||
return opts.BaseRepoOwner != "" && opts.BaseRepoName != "" && opts.BaseBranch != "" &&
|
||||
opts.HeadBranch != "" && opts.Title != ""
|
||||
}
|
||||
|
||||
func testPullCreateDirectly(t *testing.T, session *TestSession, opts createPullRequestOptions) *httptest.ResponseRecorder {
|
||||
if !opts.IsValid() {
|
||||
t.Fatal("Invalid pull request options")
|
||||
}
|
||||
|
||||
headCompare := opts.HeadBranch
|
||||
if opts.HeadRepoOwner != "" {
|
||||
if opts.HeadRepoName != "" {
|
||||
headCompare = fmt.Sprintf("%s/%s:%s", opts.HeadRepoOwner, opts.HeadRepoName, opts.HeadBranch)
|
||||
} else {
|
||||
headCompare = fmt.Sprintf("%s:%s", headRepoOwner, headBranch)
|
||||
headCompare = fmt.Sprintf("%s:%s", opts.HeadRepoOwner, opts.HeadBranch)
|
||||
}
|
||||
}
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/%s...%s", baseRepoOwner, baseRepoName, baseBranch, headCompare))
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/%s...%s", opts.BaseRepoOwner, opts.BaseRepoName, opts.BaseBranch, headCompare))
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
// Submit the form for creating the pull
|
||||
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||
link, exists := htmlDoc.doc.Find("form.ui.form").Attr("action")
|
||||
assert.True(t, exists, "The template has changed")
|
||||
req = NewRequestWithValues(t, "POST", link, map[string]string{
|
||||
params := map[string]string{
|
||||
"_csrf": htmlDoc.GetCSRF(),
|
||||
"title": title,
|
||||
})
|
||||
"title": opts.Title,
|
||||
}
|
||||
if opts.ReviewerIDs != "" {
|
||||
params["reviewer_ids"] = opts.ReviewerIDs
|
||||
}
|
||||
req = NewRequestWithValues(t, "POST", link, params)
|
||||
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||
return resp
|
||||
}
|
||||
@@ -246,7 +270,15 @@ func TestPullCreatePrFromBaseToFork(t *testing.T) {
|
||||
testEditFile(t, sessionBase, "user2", "repo1", "master", "README.md", "Hello, World (Edited)\n")
|
||||
|
||||
// Create a PR
|
||||
resp := testPullCreateDirectly(t, sessionFork, "user1", "repo1", "master", "user2", "repo1", "master", "This is a pull title")
|
||||
resp := testPullCreateDirectly(t, sessionFork, createPullRequestOptions{
|
||||
BaseRepoOwner: "user1",
|
||||
BaseRepoName: "repo1",
|
||||
BaseBranch: "master",
|
||||
HeadRepoOwner: "user2",
|
||||
HeadRepoName: "repo1",
|
||||
HeadBranch: "master",
|
||||
Title: "This is a pull title",
|
||||
})
|
||||
// check the redirected URL
|
||||
url := test.RedirectURL(resp)
|
||||
assert.Regexp(t, "^/user1/repo1/pulls/[0-9]*$", url)
|
||||
|
||||
@@ -25,10 +25,6 @@ func TestPullDiff_CommitRangePRDiff(t *testing.T) {
|
||||
doTestPRDiff(t, "/user2/commitsonpr/pulls/1/files/4ca8bcaf27e28504df7bf996819665986b01c847..23576dd018294e476c06e569b6b0f170d0558705", true, []string{"test2.txt", "test3.txt", "test4.txt"})
|
||||
}
|
||||
|
||||
func TestPullDiff_StartingFromBaseToCommitPRDiff(t *testing.T) {
|
||||
doTestPRDiff(t, "/user2/commitsonpr/pulls/1/files/c5626fc9eff57eb1bb7b796b01d4d0f2f3f792a2", true, []string{"test1.txt", "test2.txt", "test3.txt"})
|
||||
}
|
||||
|
||||
func doTestPRDiff(t *testing.T, prDiffURL string, reviewBtnDisabled bool, expectedFilenames []string) {
|
||||
defer tests.PrepareTestEnv(t)()
|
||||
|
||||
|
||||
@@ -184,13 +184,29 @@ func TestPullView_CodeOwner(t *testing.T) {
|
||||
session := loginUser(t, "user5")
|
||||
|
||||
// create a pull request on the forked repository, code reviewers should not be mentioned
|
||||
testPullCreateDirectly(t, session, "user5", "test_codeowner", forkedRepo.DefaultBranch, "", "", "codeowner-basebranch-forked", "Test Pull Request on Forked Repository")
|
||||
testPullCreateDirectly(t, session, createPullRequestOptions{
|
||||
BaseRepoOwner: "user5",
|
||||
BaseRepoName: "test_codeowner",
|
||||
BaseBranch: forkedRepo.DefaultBranch,
|
||||
HeadRepoOwner: "",
|
||||
HeadRepoName: "",
|
||||
HeadBranch: "codeowner-basebranch-forked",
|
||||
Title: "Test Pull Request on Forked Repository",
|
||||
})
|
||||
|
||||
pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: forkedRepo.ID, HeadBranch: "codeowner-basebranch-forked"})
|
||||
unittest.AssertNotExistsBean(t, &issues_model.Review{IssueID: pr.IssueID, Type: issues_model.ReviewTypeRequest, ReviewerID: 8})
|
||||
|
||||
// create a pull request to base repository, code reviewers should be mentioned
|
||||
testPullCreateDirectly(t, session, repo.OwnerName, repo.Name, repo.DefaultBranch, forkedRepo.OwnerName, forkedRepo.Name, "codeowner-basebranch-forked", "Test Pull Request3")
|
||||
testPullCreateDirectly(t, session, createPullRequestOptions{
|
||||
BaseRepoOwner: repo.OwnerName,
|
||||
BaseRepoName: repo.Name,
|
||||
BaseBranch: repo.DefaultBranch,
|
||||
HeadRepoOwner: forkedRepo.OwnerName,
|
||||
HeadRepoName: forkedRepo.Name,
|
||||
HeadBranch: "codeowner-basebranch-forked",
|
||||
Title: "Test Pull Request3",
|
||||
})
|
||||
|
||||
pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: repo.ID, HeadRepoID: forkedRepo.ID, HeadBranch: "codeowner-basebranch-forked"})
|
||||
unittest.AssertExistsAndLoadBean(t, &issues_model.Review{IssueID: pr.IssueID, Type: issues_model.ReviewTypeRequest, ReviewerID: 8})
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
"time"
|
||||
|
||||
auth_model "code.gitea.io/gitea/models/auth"
|
||||
"code.gitea.io/gitea/models/perm"
|
||||
"code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
@@ -529,15 +530,30 @@ func Test_WebhookPullRequest(t *testing.T) {
|
||||
}, http.StatusOK)
|
||||
defer provider.Close()
|
||||
|
||||
testCtx := NewAPITestContext(t, "user2", "repo1", auth_model.AccessTokenScopeAll)
|
||||
// add user4 as a collaborator so that it can be a reviewer
|
||||
doAPIAddCollaborator(testCtx, "user4", perm.AccessModeWrite)(t)
|
||||
|
||||
// 1. create a new webhook with special webhook for repo1
|
||||
session := loginUser(t, "user2")
|
||||
sessionUser2 := loginUser(t, "user2")
|
||||
sessionUser4 := loginUser(t, "user4")
|
||||
|
||||
testAPICreateWebhookForRepo(t, session, "user2", "repo1", provider.URL(), "pull_request")
|
||||
// ignore the possible review_requested event to keep the test deterministic
|
||||
testAPICreateWebhookForRepo(t, sessionUser2, "user2", "repo1", provider.URL(), "pull_request_only")
|
||||
|
||||
testAPICreateBranch(t, session, "user2", "repo1", "master", "master2", http.StatusCreated)
|
||||
testAPICreateBranch(t, sessionUser2, "user2", "repo1", "master", "master2", http.StatusCreated)
|
||||
// 2. trigger the webhook
|
||||
repo1 := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 1})
|
||||
testCreatePullToDefaultBranch(t, session, repo1, repo1, "master2", "first pull request")
|
||||
testPullCreateDirectly(t, sessionUser4, createPullRequestOptions{
|
||||
BaseRepoOwner: repo1.OwnerName,
|
||||
BaseRepoName: repo1.Name,
|
||||
BaseBranch: repo1.DefaultBranch,
|
||||
HeadRepoOwner: "",
|
||||
HeadRepoName: "",
|
||||
HeadBranch: "master2",
|
||||
Title: "first pull request",
|
||||
ReviewerIDs: "2", // add user2 as reviewer
|
||||
})
|
||||
|
||||
// 3. validate the webhook is triggered
|
||||
assert.Equal(t, "pull_request", triggeredEvent)
|
||||
@@ -549,6 +565,8 @@ func Test_WebhookPullRequest(t *testing.T) {
|
||||
assert.Equal(t, 0, *payloads[0].PullRequest.Additions)
|
||||
assert.Equal(t, 0, *payloads[0].PullRequest.ChangedFiles)
|
||||
assert.Equal(t, 0, *payloads[0].PullRequest.Deletions)
|
||||
assert.Len(t, payloads[0].PullRequest.RequestedReviewers, 1)
|
||||
assert.Equal(t, int64(2), payloads[0].PullRequest.RequestedReviewers[0].ID)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -130,7 +130,7 @@ td .commit-summary {
align-items: center;
overflow: hidden;
text-overflow: ellipsis;
gap: 0.25em;
gap: 0.5em;
}

@media (max-width: 767.98px) {

@@ -32,6 +32,7 @@ export default defineComponent({
|
||||
locale: {
|
||||
filter_changes_by_commit: el.getAttribute('data-filter_changes_by_commit'),
|
||||
} as Record<string, string>,
|
||||
mergeBase: el.getAttribute('data-merge-base'),
|
||||
commits: [] as Array<Commit>,
|
||||
hoverActivated: false,
|
||||
lastReviewCommitSha: '',
|
||||
@@ -176,32 +177,38 @@ export default defineComponent({
|
||||
}
|
||||
},
|
||||
/**
|
||||
* When a commit is clicked with shift this enables the range
|
||||
* selection. Second click (with shift) defines the end of the
|
||||
* range. This opens the diff of this range
|
||||
* Exception: first commit is the first commit of this PR. Then
|
||||
* the diff from beginning of PR up to the second clicked commit is
|
||||
* opened
|
||||
* When a commit is clicked while holding Shift, it enables range selection.
|
||||
* - The range selection is a half-open, half-closed range, meaning it excludes the start commit but includes the end commit.
|
||||
* - The start of the commit range is always the previous commit of the first clicked commit.
|
||||
* - If the first commit in the list is clicked, the mergeBase will be used as the start of the range instead.
|
||||
* - The second Shift-click defines the end of the range.
|
||||
* - Once both are selected, the diff view for the selected commit range will open.
|
||||
*/
|
||||
commitClickedShift(commit: Commit) {
|
||||
this.hoverActivated = !this.hoverActivated;
|
||||
commit.selected = true;
|
||||
// Second click -> determine our range and open links accordingly
|
||||
if (!this.hoverActivated) {
|
||||
// since at least one commit is selected, we can determine the range
|
||||
// find all selected commits and generate a link
|
||||
if (this.commits[0].selected) {
|
||||
// first commit is selected - generate a short url with only target sha
|
||||
const lastCommitIdx = this.commits.findLastIndex((x) => x.selected);
|
||||
if (lastCommitIdx === this.commits.length - 1) {
|
||||
// user selected all commits - just show the normal diff page
|
||||
window.location.assign(`${this.issueLink}/files${this.queryParams}`);
|
||||
} else {
|
||||
window.location.assign(`${this.issueLink}/files/${this.commits[lastCommitIdx].id}${this.queryParams}`);
|
||||
}
|
||||
const firstSelected = this.commits.findIndex((x) => x.selected);
|
||||
const lastSelected = this.commits.findLastIndex((x) => x.selected);
|
||||
let beforeCommitID: string;
|
||||
if (firstSelected === 0) {
|
||||
beforeCommitID = this.mergeBase;
|
||||
} else {
|
||||
const start = this.commits[this.commits.findIndex((x) => x.selected) - 1].id;
|
||||
const end = this.commits.findLast((x) => x.selected).id;
|
||||
window.location.assign(`${this.issueLink}/files/${start}..${end}${this.queryParams}`);
|
||||
beforeCommitID = this.commits[firstSelected - 1].id;
|
||||
}
|
||||
const afterCommitID = this.commits[lastSelected].id;
|
||||
|
||||
if (firstSelected === lastSelected) {
|
||||
// if the start and end are the same, we show this single commit
|
||||
window.location.assign(`${this.issueLink}/commits/${afterCommitID}${this.queryParams}`);
|
||||
} else if (beforeCommitID === this.mergeBase && afterCommitID === this.commits.at(-1).id) {
|
||||
// if the first commit is selected and the last commit is selected, we show all commits
|
||||
window.location.assign(`${this.issueLink}/files${this.queryParams}`);
|
||||
} else {
|
||||
window.location.assign(`${this.issueLink}/files/${beforeCommitID}..${afterCommitID}${this.queryParams}`);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
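
The rewritten handler treats the selection as a half-open range: the diff starts just after the commit preceding the first selected one, or at the merge base when the first commit in the list is selected, and ends at the last selected commit. The same index arithmetic, sketched in Go for consistency with the rest of this page (names are illustrative, not part of the component):

// commitRange returns the before/after IDs for a shift-click selection.
// commits are ordered oldest first; selected marks the clicked commits,
// and at least one entry is assumed to be selected.
func commitRange(commits []string, selected []bool, mergeBase string) (before, after string) {
	first, last := -1, -1
	for i, isSel := range selected {
		if isSel {
			if first == -1 {
				first = i
			}
			last = i
		}
	}
	if first == 0 {
		before = mergeBase // first commit selected: start from the merge base
	} else {
		before = commits[first-1]
	}
	return before, commits[last]
}
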
@@ -547,7 +547,12 @@ function initIssueTemplateCommentEditors(commentForm: HTMLFormElement) {
|
||||
// deactivate all markdown editors
|
||||
showElem(commentForm.querySelectorAll('.combo-editor-dropzone .form-field-real'));
|
||||
hideElem(commentForm.querySelectorAll('.combo-editor-dropzone .combo-markdown-editor'));
|
||||
hideElem(commentForm.querySelectorAll('.combo-editor-dropzone .form-field-dropzone'));
|
||||
queryElems(commentForm, '.combo-editor-dropzone .form-field-dropzone', (dropzoneContainer) => {
|
||||
// if "form-field-dropzone" exists, then "dropzone" must also exist
|
||||
const dropzone = dropzoneContainer.querySelector<HTMLElement>('.dropzone').dropzone;
|
||||
const hasUploadedFiles = dropzone.files.length !== 0;
|
||||
toggleElem(dropzoneContainer, hasUploadedFiles);
|
||||
});
|
||||
|
||||
// activate this markdown editor
|
||||
hideElem(fieldTextarea);
|
||||
|
||||
@@ -34,8 +34,12 @@ export function initRepoMigration() {
|
||||
elCloneAddr.addEventListener('input', () => {
|
||||
if (repoNameChanged) return;
|
||||
let repoNameFromUrl = elCloneAddr.value.split(/[?#]/)[0];
|
||||
repoNameFromUrl = /^(.*\/)?((.+?)\/?)$/.exec(repoNameFromUrl)[3];
|
||||
repoNameFromUrl = repoNameFromUrl.split(/[?#]/)[0];
|
||||
const parts = /^(.*\/)?((.+?)\/?)$/.exec(repoNameFromUrl);
|
||||
if (!parts || parts.length < 4) {
|
||||
elRepoName.value = '';
|
||||
return;
|
||||
}
|
||||
repoNameFromUrl = parts[3].split(/[?#]/)[0];
|
||||
elRepoName.value = sanitizeRepoName(repoNameFromUrl);
|
||||
});
|
||||
}