Compare commits

...

18 Commits

Author SHA1 Message Date
Lunny Xiao
7758df4264 Add changelog for 1.23.6 (#33975) (#34000)
Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
Co-authored-by: techknowlogick <techknowlogick@gitea.io>
2025-03-24 13:08:00 -07:00
Giteabot
f994f3cac6 Fix incorrect code search indexer options (#33992) (#33999)
Backport #33992 by @wxiaoguang

Fix #33798

Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
2025-03-24 10:02:05 -07:00
wxiaoguang
f514b2651e Update golang crypto and net for 1.23 (#33989) 2025-03-24 01:18:29 +08:00
TheFox0x7
347101f2a8 update jwt and redis packages (#33984) (#33987) 2025-03-23 15:35:27 +00:00
Giteabot
b5f8c4a510 Drop timeout for requests made to the internal hook api (#33947) (#33970)
Backport #33947 by Mik4sa

Co-authored-by: Kai Leonhardt <8343141+Mik4sa@users.noreply.github.com>
Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
2025-03-22 08:46:54 +08:00
wxiaoguang
d6cee7c596 Fix oauth2 auth (#33961) (#33962)
Backport #33961 

UI fix is not needed.
2025-03-21 20:50:44 +08:00
wxiaoguang
987219ab3c Fix incorrect 1.23 translations (#33932)
Fix #33931
2025-03-18 11:13:14 -04:00
wxiaoguang
92280637a4 Try to figure out attribute checker problem (#33901) (#33902)
Backport #33901
2025-03-17 11:59:51 -07:00
Giteabot
5fadcf997e Fix maven panic when no package exists (#33888) (#33889)
Backport #33888 by @wxiaoguang

Fix #33886

Restore the old logic from #16510, which was incorrectly removed by
#33678

Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
2025-03-14 11:11:41 -07:00
Giteabot
be94f7bc07 Ignore trivial errors when updating push data (#33864) (#33887)
Backport #33864 by wxiaoguang

Fix #23213

Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
2025-03-14 15:37:00 +00:00
Giteabot
9054a6670c Fix markdown render (#33870) (#33875)
Backport #33870 by wxiaoguang

Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
2025-03-14 00:28:10 +00:00
ChristopherHX
fc82204fca Fix auto concurrency cancellation skips commit status updates (#33764) (#33849)
Backport #33764
* add missing commit status
* conflicts with concurrency support
2025-03-11 16:51:58 +00:00
wxiaoguang
6f8e62fa9c Fix some UI problems for 1.23 (#33856)
Partially backport #32927 #33851
2025-03-11 23:16:33 +08:00
Giteabot
a2c6ecc093 Fix LFS URL (#33840) (#33843)
Backport #33840 by wxiaoguang

Fix #33839

---------

Co-authored-by: wxiaoguang <wxiaoguang@gmail.com>
2025-03-10 11:26:31 +01:00
Giteabot
523a84e5d0 Removing unwanted ui container (#33833) (#33835)
Backport #33833 by Vinoth-kumar-Ganesan

When passkey auth and registration were disabled,
the unwanted UI container was still shown

Co-authored-by: Vinoth Kumar <103478407+Vinoth-kumar-Ganesan@users.noreply.github.com>
Co-authored-by: Vinoth414 <103478407+Vinoth414@users.noreply.github.com>
2025-03-09 17:02:45 +00:00
wxiaoguang
869ee4fc38 Do not call "git diff" when listing PRs (#33817)
Fix #31492
2025-03-08 07:41:51 +00:00
wxiaoguang
16a332464d Try to fix ACME (3rd) (#33807) (#33808)
Backport #33807
2025-03-08 15:16:54 +08:00
wxiaoguang
d03e7fd65e Support disable passkey auth (#33348) (#33819)
* Backport #33348
* Backport #33820

---------

Co-authored-by: yp05327 <576951401@qq.com>
2025-03-07 21:31:25 +02:00
54 changed files with 496 additions and 196 deletions

View File

@@ -4,6 +4,28 @@ This changelog goes through the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.com).

+## [1.23.6](https://github.com/go-gitea/gitea/releases/tag/v1.23.6) - 2025-03-24
+
+* SECURITY
+  * Fix LFS URL (#33840) (#33843)
+  * Update jwt and redis packages (#33984) (#33987)
+  * Update golang crypto and net (#33989)
+* BUGFIXES
+  * Drop timeout for requests made to the internal hook api (#33947) (#33970)
+  * Fix maven panic when no package exists (#33888) (#33889)
+  * Fix markdown render (#33870) (#33875)
+  * Fix auto concurrency cancellation skips commit status updates (#33764) (#33849)
+  * Fix oauth2 auth (#33961) (#33962)
+  * Fix incorrect 1.23 translations (#33932)
+  * Try to figure out attribute checker problem (#33901) (#33902)
+  * Ignore trivial errors when updating push data (#33864) (#33887)
+  * Fix some UI problems for 1.23 (#33856)
+  * Removing unwanted ui container (#33833) (#33835)
+  * Support disable passkey auth (#33348) (#33819)
+  * Do not call "git diff" when listing PRs (#33817)
+  * Try to fix ACME (3rd) (#33807) (#33808)
+  * Fix incorrect code search indexer options (#33992) (#33999)
+
 ## [1.23.5](https://github.com/go-gitea/gitea/releases/tag/v1.23.5) - 2025-03-03

 * SECURITY

View File

@@ -16,6 +16,7 @@ import (
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/process"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"

 	"github.com/caddyserver/certmagic"
 )
@@ -68,9 +69,15 @@ func runACME(listenAddr string, m http.Handler) error {
 	// And one more thing, no idea why we should set the global default variables here
 	// But it seems that the current ACME code needs these global variables to make renew work.
 	// Otherwise, "renew" will use incorrect storage path
+	oldDefaultACME := certmagic.DefaultACME
 	certmagic.Default.Storage = &certmagic.FileStorage{Path: setting.AcmeLiveDirectory}
 	certmagic.DefaultACME = certmagic.ACMEIssuer{
-		CA:           setting.AcmeURL,
+		// try to use the default values provided by DefaultACME
+		CA:           util.IfZero(setting.AcmeURL, oldDefaultACME.CA),
+		TestCA:       oldDefaultACME.TestCA,
+		Logger:       oldDefaultACME.Logger,
+		HTTPProxy:    oldDefaultACME.HTTPProxy,
 		TrustedRoots: certPool,
 		Email:        setting.AcmeEmail,
 		Agreed:       setting.AcmeTOS,
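
A rough standalone sketch of the fallback pattern used in this hunk, assuming a generic helper that behaves like Gitea's util.IfZero (return the default when the configured value is the zero value); the CA URL below is only a placeholder:

```go
package main

import "fmt"

// ifZero returns def when v is the zero value of its type, otherwise v.
// It mirrors the behaviour assumed for util.IfZero in the hunk above.
func ifZero[T comparable](v, def T) T {
	var zero T
	if v == zero {
		return def
	}
	return v
}

func main() {
	defaultCA := "https://acme-v02.api.letsencrypt.org/directory" // placeholder default
	fmt.Println(ifZero("", defaultCA))                  // empty setting -> falls back to the default
	fmt.Println(ifZero("https://my-ca/dir", defaultCA)) // explicit setting wins
}
```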

View File

@@ -784,10 +784,13 @@ LEVEL = Info
 ;; Please note that setting this to false will not disable OAuth Basic or Basic authentication using a token
 ;ENABLE_BASIC_AUTHENTICATION = true
 ;;
-;; Show the password sign-in form (for password-based login), otherwise, only show OAuth2 login methods.
+;; Show the password sign-in form (for password-based login), otherwise, only show OAuth2 or passkey login methods if they are enabled.
 ;; If you set it to false, maybe it also needs to set ENABLE_BASIC_AUTHENTICATION to false to completely disable password-based authentication.
 ;ENABLE_PASSWORD_SIGNIN_FORM = true
 ;;
+;; Allow users to sign-in with a passkey
+;ENABLE_PASSKEY_AUTHENTICATION = true
+;;
 ;; More detail: https://github.com/gogits/gogs/issues/165
 ;ENABLE_REVERSE_PROXY_AUTHENTICATION = false
 ; Enable this to allow reverse proxy authentication for API requests, the reverse proxy is responsible for ensuring that no CSRF is possible.

go.mod (16 changed lines)
View File

@@ -64,7 +64,7 @@ require (
 	github.com/gobwas/glob v0.2.3
 	github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f
 	github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
-	github.com/golang-jwt/jwt/v5 v5.2.1
+	github.com/golang-jwt/jwt/v5 v5.2.2
 	github.com/google/go-github/v61 v61.0.0
 	github.com/google/licenseclassifier/v2 v2.0.0
 	github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db
@@ -100,7 +100,7 @@ require (
 	github.com/pquerna/otp v1.4.0
 	github.com/prometheus/client_golang v1.20.5
 	github.com/quasoft/websspi v1.1.2
-	github.com/redis/go-redis/v9 v9.7.0
+	github.com/redis/go-redis/v9 v9.7.3
 	github.com/robfig/cron/v3 v3.0.1
 	github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
 	github.com/sassoftware/go-rpmutils v0.4.0
@@ -118,13 +118,13 @@ require (
 	github.com/yuin/goldmark v1.7.8
 	github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
 	github.com/yuin/goldmark-meta v1.1.0
-	golang.org/x/crypto v0.35.0
+	golang.org/x/crypto v0.36.0
 	golang.org/x/image v0.21.0
-	golang.org/x/net v0.36.0
+	golang.org/x/net v0.37.0
 	golang.org/x/oauth2 v0.27.0
-	golang.org/x/sync v0.11.0
-	golang.org/x/sys v0.30.0
-	golang.org/x/text v0.22.0
+	golang.org/x/sync v0.12.0
+	golang.org/x/sys v0.31.0
+	golang.org/x/text v0.23.0
 	golang.org/x/tools v0.29.0
 	google.golang.org/grpc v1.67.1
 	google.golang.org/protobuf v1.35.1
@@ -215,7 +215,7 @@ require (
 	github.com/go-openapi/validate v0.24.0 // indirect
 	github.com/go-webauthn/x v0.1.15 // indirect
 	github.com/goccy/go-json v0.10.3 // indirect
-	github.com/golang-jwt/jwt/v4 v4.5.1 // indirect
+	github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
 	github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
 	github.com/golang-sql/sqlexp v0.1.0 // indirect
 	github.com/golang/geo v0.0.0-20230421003525-6adc56603217 // indirect

go.sum (35 changed lines)
View File

@@ -373,10 +373,11 @@ github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f h1:3BSP1Tbs2djlpprl7w
 github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14=
 github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 h1:UjoPNDAQ5JPCjlxoJd6K8ALZqSDDhk2ymieAZOVaDg0=
 github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85/go.mod h1:fR6z1Ie6rtF7kl/vBYMfgD5/G5B1blui7z426/sj2DU=
-github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo=
 github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
-github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
-github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI=
+github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
+github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
+github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
 github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
 github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
 github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
@@ -658,8 +659,8 @@ github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoG
 github.com/quasoft/websspi v1.1.2 h1:/mA4w0LxWlE3novvsoEL6BBA1WnjJATbjkh1kFrTidw=
 github.com/quasoft/websspi v1.1.2/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKcgpFmewk=
 github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E=
-github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw=
+github.com/redis/go-redis/v9 v9.7.3 h1:YpPyAayJV+XErNsatSElgRZZVCwXX9QzkKYNvO7x0wM=
+github.com/redis/go-redis/v9 v9.7.3/go.mod h1:bGUrSggJ9X9GUmZpZNEOQKaANxSGgOEBRltRTZHSvrA=
 github.com/redis/rueidis v1.0.19 h1:s65oWtotzlIFN8eMPhyYwxlwLR1lUdhza2KtWprKYSo=
 github.com/redis/rueidis v1.0.19/go.mod h1:8B+r5wdnjwK3lTFml5VtxjzGOQAC+5UmujoD12pDrEo=
 github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
@@ -833,8 +834,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf
 golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
 golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
 golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
-golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
-golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
+golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
+golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
 golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY=
 golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8=
 golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
@@ -869,8 +870,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
 golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
 golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
 golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
-golang.org/x/net v0.36.0 h1:vWF2fRbw4qslQsQzgFqZff+BItCvGFQqKzKIzx1rmoA=
-golang.org/x/net v0.36.0/go.mod h1:bFmbeoIPfrw4sMHNhb4J9f6+tPziuGjq7Jk/38fxi1I=
+golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
+golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
 golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
 golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -884,8 +885,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
 golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
-golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
-golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
+golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
 golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -919,8 +920,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
-golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
+golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
 golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -932,8 +933,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
 golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
 golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
 golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
-golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
-golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
+golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
+golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@@ -944,8 +945,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
-golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
-golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
+golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
+golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
 golang.org/x/time v0.7.0 h1:ntUhktv3OPE6TgYxXWv9vKvUSJyIFJlyohwbkEwPrKQ=
 golang.org/x/time v0.7.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

View File

@@ -194,7 +194,7 @@ func updateRepoRunsNumbers(ctx context.Context, repo *repo_model.Repository) err
 // CancelPreviousJobs cancels all previous jobs of the same repository, reference, workflow, and event.
 // It's useful when a new run is triggered, and all previous runs needn't be continued anymore.
-func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
+func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) ([]*ActionRunJob, error) {
 	// Find all runs in the specified repository, reference, and workflow with non-final status
 	runs, total, err := db.FindAndCount[ActionRun](ctx, FindRunOptions{
 		RepoID: repoID,
@@ -204,14 +204,16 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin
 		Status: []Status{StatusRunning, StatusWaiting, StatusBlocked},
 	})
 	if err != nil {
-		return err
+		return nil, err
 	}

 	// If there are no runs found, there's no need to proceed with cancellation, so return nil.
 	if total == 0 {
-		return nil
+		return nil, nil
 	}

+	cancelledJobs := make([]*ActionRunJob, 0, total)
+
 	// Iterate over each found run and cancel its associated jobs.
 	for _, run := range runs {
 		// Find all jobs associated with the current run.
@@ -219,7 +221,7 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin
 			RunID: run.ID,
 		})
 		if err != nil {
-			return err
+			return cancelledJobs, err
 		}

 		// Iterate over each job and attempt to cancel it.
@@ -238,27 +240,29 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin
 				// Update the job's status and stopped time in the database.
 				n, err := UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped")
 				if err != nil {
-					return err
+					return cancelledJobs, err
 				}

 				// If the update affected 0 rows, it means the job has changed in the meantime, so we need to try again.
 				if n == 0 {
-					return fmt.Errorf("job has changed, try again")
+					return cancelledJobs, fmt.Errorf("job has changed, try again")
 				}

+				cancelledJobs = append(cancelledJobs, job)
 				// Continue with the next job.
 				continue
 			}

 			// If the job has an associated task, try to stop the task, effectively cancelling the job.
 			if err := StopTask(ctx, job.TaskID, StatusCancelled); err != nil {
-				return err
+				return cancelledJobs, err
 			}
+			cancelledJobs = append(cancelledJobs, job)
 		}
 	}

 	// Return nil to indicate successful cancellation of all running and waiting jobs.
-	return nil
+	return cancelledJobs, nil
 }

 // InsertRun inserts a run
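
A hedged sketch of how a caller might consume the new return value; the wrapper function, the logging, and the comment about refreshing commit status are illustrative, not taken from this diff:

```go
package example

import (
	"context"

	actions_model "code.gitea.io/gitea/models/actions"
	"code.gitea.io/gitea/modules/log"
	webhook_module "code.gitea.io/gitea/modules/webhook"
)

// cancelAndReport cancels previous jobs of a workflow and logs what was cancelled.
// CancelPreviousJobs and ActionRunJob come from the hunk above.
func cancelAndReport(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
	jobs, err := actions_model.CancelPreviousJobs(ctx, repoID, ref, workflowID, event)
	if err != nil {
		return err
	}
	for _, job := range jobs {
		log.Trace("cancelled job %d of run %d", job.ID, job.RunID)
		// a caller could now refresh the commit status for the cancelled run here,
		// which is the gap the "missing commit status" fix targets
	}
	return nil
}
```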

View File

@@ -120,21 +120,22 @@ func DeleteScheduleTaskByRepo(ctx context.Context, id int64) error {
 	return committer.Commit()
 }

-func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) error {
+func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) ([]*ActionRunJob, error) {
 	// If actions disabled when there is schedule task, this will remove the outdated schedule tasks
 	// There is no other place we can do this because the app.ini will be changed manually
 	if err := DeleteScheduleTaskByRepo(ctx, repo.ID); err != nil {
-		return fmt.Errorf("DeleteCronTaskByRepo: %v", err)
+		return nil, fmt.Errorf("DeleteCronTaskByRepo: %v", err)
 	}
 	// cancel running cron jobs of this repository and delete old schedules
-	if err := CancelPreviousJobs(
+	jobs, err := CancelPreviousJobs(
 		ctx,
 		repo.ID,
 		repo.DefaultBranch,
 		"",
 		webhook_module.HookEventSchedule,
-	); err != nil {
-		return fmt.Errorf("CancelPreviousJobs: %v", err)
+	)
+	if err != nil {
+		return jobs, fmt.Errorf("CancelPreviousJobs: %v", err)
 	}
-	return nil
+	return jobs, nil
 }

View File

@@ -46,6 +46,7 @@ type Command struct {
 	desc             string
 	globalArgsLength int
 	brokenArgs       []string
+	cmd              *exec.Cmd // for debug purpose only
 }

 func (c *Command) String() string {
@@ -314,6 +315,7 @@ func (c *Command) Run(opts *RunOpts) error {
 	startTime := time.Now()

 	cmd := exec.CommandContext(ctx, c.prog, c.args...)
+	c.cmd = cmd // for debug purpose only
 	if opts.Env == nil {
 		cmd.Env = os.Environ()
 	} else {

View File

@@ -9,6 +9,8 @@ import (
 	"fmt"
 	"io"
 	"os"
+	"path/filepath"
+	"time"

 	"code.gitea.io/gitea/modules/log"
 )
@@ -102,7 +104,7 @@ type CheckAttributeReader struct {
 	stdinReader io.ReadCloser
 	stdinWriter *os.File

-	stdOut attributeWriter
+	stdOut *nulSeparatedAttributeWriter
 	cmd    *Command
 	env    []string
 	ctx    context.Context
@@ -152,7 +154,6 @@ func (c *CheckAttributeReader) Init(ctx context.Context) error {
 	return nil
 }

-// Run run cmd
 func (c *CheckAttributeReader) Run() error {
 	defer func() {
 		_ = c.stdinReader.Close()
@@ -176,7 +177,7 @@ func (c *CheckAttributeReader) Run() error {
 func (c *CheckAttributeReader) CheckPath(path string) (rs map[string]string, err error) {
 	defer func() {
 		if err != nil && err != c.ctx.Err() {
-			log.Error("Unexpected error when checking path %s in %s. Error: %v", path, c.Repo.Path, err)
+			log.Error("Unexpected error when checking path %s in %s, error: %v", path, filepath.Base(c.Repo.Path), err)
 		}
 	}()

@@ -191,9 +192,31 @@ func (c *CheckAttributeReader) CheckPath(path string) (rs map[string]string, err
 		return nil, err
 	}

+	reportTimeout := func() error {
+		stdOutClosed := false
+		select {
+		case <-c.stdOut.closed:
+			stdOutClosed = true
+		default:
+		}
+		debugMsg := fmt.Sprintf("check path %q in repo %q", path, filepath.Base(c.Repo.Path))
+		debugMsg += fmt.Sprintf(", stdOut: tmp=%q, pos=%d, closed=%v", string(c.stdOut.tmp), c.stdOut.pos, stdOutClosed)
+		if c.cmd.cmd != nil {
+			debugMsg += fmt.Sprintf(", process state: %q", c.cmd.cmd.ProcessState.String())
+		}
+		_ = c.Close()
+		return fmt.Errorf("CheckPath timeout: %s", debugMsg)
+	}
+
 	rs = make(map[string]string)
 	for range c.Attributes {
 		select {
+		case <-time.After(5 * time.Second):
+			// There is a strange "hang" problem in gitdiff.GetDiff -> CheckPath
+			// So add a timeout here to mitigate the problem, and output more logs for debug purpose
+			// In real world, if CheckPath runs long than seconds, it blocks the end user's operation,
+			// and at the moment the CheckPath result is not so important, so we can just ignore it.
+			return nil, reportTimeout()
 		case attr, ok := <-c.stdOut.ReadAttribute():
 			if !ok {
 				return nil, c.ctx.Err()
@@ -206,18 +229,12 @@ func (c *CheckAttributeReader) CheckPath(path string) (rs map[string]string, err
 	return rs, nil
 }

-// Close close pip after use
 func (c *CheckAttributeReader) Close() error {
 	c.cancel()
 	err := c.stdinWriter.Close()
 	return err
 }

-type attributeWriter interface {
-	io.WriteCloser
-	ReadAttribute() <-chan attributeTriple
-}
-
 type attributeTriple struct {
 	Filename  string
 	Attribute string
@@ -281,7 +298,7 @@ func (wr *nulSeparatedAttributeWriter) Close() error {
 	return nil
 }

-// Create a check attribute reader for the current repository and provided commit ID
+// CheckAttributeReader creates a check attribute reader for the current repository and provided commit ID
 func (repo *Repository) CheckAttributeReader(commitID string) (*CheckAttributeReader, context.CancelFunc) {
 	indexFilename, worktree, deleteTemporaryFile, err := repo.ReadTreeToTemporaryIndex(commitID)
 	if err != nil {
@@ -303,21 +320,21 @@ func (repo *Repository) CheckAttributeReader(commitID string) (*CheckAttributeRe
 	}

 	ctx, cancel := context.WithCancel(repo.Ctx)
 	if err := checker.Init(ctx); err != nil {
-		log.Error("Unable to open checker for %s. Error: %v", commitID, err)
+		log.Error("Unable to open attribute checker for commit %s, error: %v", commitID, err)
 	} else {
 		go func() {
 			err := checker.Run()
-			if err != nil && err != ctx.Err() {
-				log.Error("Unable to open checker for %s. Error: %v", commitID, err)
+			if err != nil && !IsErrCanceledOrKilled(err) {
+				log.Error("Attribute checker for commit %s exits with error: %v", commitID, err)
 			}
 			cancel()
 		}()
 	}

-	deferable := func() {
+	deferrable := func() {
 		_ = checker.Close()
 		cancel()
 		deleteTemporaryFile()
 	}

-	return checker, deferable
+	return checker, deferrable
 }
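
The mitigation above is the standard select-with-timeout pattern on a channel read; a minimal standalone sketch of the same idea (all names here are illustrative):

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// readWithTimeout waits for one value from ch, giving up after d
// instead of blocking forever, like the CheckPath change above.
func readWithTimeout(ch <-chan string, d time.Duration) (string, error) {
	select {
	case v, ok := <-ch:
		if !ok {
			return "", errors.New("channel closed")
		}
		return v, nil
	case <-time.After(d):
		return "", errors.New("timed out waiting for attribute")
	}
}

func main() {
	ch := make(chan string, 1)
	ch <- "text=auto"
	v, err := readWithTimeout(ch, 5*time.Second)
	fmt.Println(v, err)
}
```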

View File

@@ -4,10 +4,16 @@
 package git

 import (
+	"context"
+	mathRand "math/rand/v2"
+	"path/filepath"
+	"slices"
+	"sync"
 	"testing"
 	"time"

 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )

 func Test_nulSeparatedAttributeWriter_ReadAttribute(t *testing.T) {
@@ -95,3 +101,57 @@ func Test_nulSeparatedAttributeWriter_ReadAttribute(t *testing.T) {
 		Value:     "unspecified",
 	}, attr)
 }
+
+func TestAttributeReader(t *testing.T) {
+	t.Skip() // for debug purpose only, do not run in CI
+
+	ctx := context.Background()
+
+	timeout := 1 * time.Second
+	repoPath := filepath.Join(testReposDir, "language_stats_repo")
+	commitRef := "HEAD"
+
+	oneRound := func(t *testing.T, roundIdx int) {
+		ctx, cancel := context.WithTimeout(ctx, timeout)
+		_ = cancel
+		gitRepo, err := OpenRepository(ctx, repoPath)
+		require.NoError(t, err)
+		defer gitRepo.Close()
+
+		commit, err := gitRepo.GetCommit(commitRef)
+		require.NoError(t, err)
+
+		files, err := gitRepo.LsFiles()
+		require.NoError(t, err)
+
+		randomFiles := slices.Clone(files)
+		randomFiles = append(randomFiles, "any-file-1", "any-file-2")
+		t.Logf("Round %v with %d files", roundIdx, len(randomFiles))
+
+		attrReader, deferrable := gitRepo.CheckAttributeReader(commit.ID.String())
+		defer deferrable()
+
+		wg := sync.WaitGroup{}
+		wg.Add(1)
+
+		go func() {
+			for {
+				file := randomFiles[mathRand.IntN(len(randomFiles))]
+				_, err := attrReader.CheckPath(file)
+				if err != nil {
+					for i := 0; i < 10; i++ {
+						_, _ = attrReader.CheckPath(file)
+					}
+					break
+				}
+			}
+			wg.Done()
+		}()
+
+		wg.Wait()
+	}
+
+	for i := 0; i < 100; i++ {
+		oneRound(t, i)
+	}
+}

View File

@@ -8,6 +8,7 @@ import (
 	"bytes"
 	"context"
 	"crypto/tls"
+	"errors"
 	"fmt"
 	"io"
 	"net"
@@ -101,6 +102,9 @@ func (r *Request) Param(key, value string) *Request {
 // Body adds request raw body. It supports string, []byte and io.Reader as body.
 func (r *Request) Body(data any) *Request {
+	if r == nil {
+		return nil
+	}
 	switch t := data.(type) {
 	case nil: // do nothing
 	case string:
@@ -193,6 +197,9 @@ func (r *Request) getResponse() (*http.Response, error) {
 // Response executes request client gets response manually.
 // Caller MUST close the response body if no error occurs
 func (r *Request) Response() (*http.Response, error) {
+	if r == nil {
+		return nil, errors.New("invalid request")
+	}
 	return r.getResponse()
 }
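
These guards lean on Go's rule that methods can be called on a nil pointer receiver, so a nil *Request produced by a refusing constructor fails cleanly at Body()/Response() instead of panicking. A small standalone sketch of the idiom (type and method names are illustrative):

```go
package main

import (
	"errors"
	"fmt"
)

type request struct{ url string }

// do tolerates a nil receiver, turning it into an error instead of a panic.
func (r *request) do() (string, error) {
	if r == nil {
		return "", errors.New("invalid request")
	}
	return "GET " + r.url, nil
}

func main() {
	var r *request // e.g. a constructor refused to build the request
	_, err := r.do()
	fmt.Println(err) // invalid request
}
```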

View File

@@ -28,7 +28,6 @@ import (
 	"github.com/blevesearch/bleve/v2"
 	analyzer_custom "github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
 	analyzer_keyword "github.com/blevesearch/bleve/v2/analysis/analyzer/keyword"
-	"github.com/blevesearch/bleve/v2/analysis/token/camelcase"
 	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
 	"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
 	"github.com/blevesearch/bleve/v2/analysis/tokenizer/letter"
@@ -70,7 +69,7 @@ const (
 	filenameIndexerAnalyzer  = "filenameIndexerAnalyzer"
 	filenameIndexerTokenizer = "filenameIndexerTokenizer"
 	repoIndexerDocType       = "repoIndexerDocType"
-	repoIndexerLatestVersion = 8
+	repoIndexerLatestVersion = 9
 )

 // generateBleveIndexMapping generates a bleve index mapping for the repo indexer
@@ -107,7 +106,7 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 		"type":          analyzer_custom.Name,
 		"char_filters":  []string{},
 		"tokenizer":     letter.Name,
-		"token_filters": []string{unicodeNormalizeName, camelcase.Name, lowercase.Name},
+		"token_filters": []string{unicodeNormalizeName, lowercase.Name},
 	}); err != nil {
 		return nil, err
 	}

View File

@@ -70,14 +70,13 @@ func (g *GiteaBackend) Batch(_ string, pointers []transfer.BatchItem, args trans
 		g.logger.Log("json marshal error", err)
 		return nil, err
 	}
-	url := g.server.JoinPath("objects/batch").String()
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
 		headerAccept:            mimeGitLFS,
 		headerContentType:       mimeGitLFS,
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
+	req := newInternalRequestLFS(g.ctx, g.server.JoinPath("objects/batch").String(), http.MethodPost, headers, bodyBytes)
 	resp, err := req.Response()
 	if err != nil {
 		g.logger.Log("http request error", err)
@@ -179,13 +178,12 @@ func (g *GiteaBackend) Download(oid string, args transfer.Args) (io.ReadCloser,
 		g.logger.Log("argument id incorrect")
 		return nil, 0, transfer.ErrCorruptData
 	}
-	url := action.Href
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
 		headerAccept:            mimeOctetStream,
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodGet, headers, nil)
+	req := newInternalRequestLFS(g.ctx, toInternalLFSURL(action.Href), http.MethodGet, headers, nil)
 	resp, err := req.Response()
 	if err != nil {
 		return nil, 0, fmt.Errorf("failed to get response: %w", err)
@@ -225,7 +223,6 @@ func (g *GiteaBackend) Upload(oid string, size int64, r io.Reader, args transfer
 		g.logger.Log("argument id incorrect")
 		return transfer.ErrCorruptData
 	}
-	url := action.Href
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
@@ -233,7 +230,7 @@ func (g *GiteaBackend) Upload(oid string, size int64, r io.Reader, args transfer
 		headerContentLength:     strconv.FormatInt(size, 10),
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodPut, headers, nil)
+	req := newInternalRequestLFS(g.ctx, toInternalLFSURL(action.Href), http.MethodPut, headers, nil)
 	req.Body(r)
 	resp, err := req.Response()
 	if err != nil {
@@ -274,14 +271,13 @@ func (g *GiteaBackend) Verify(oid string, size int64, args transfer.Args) (trans
 		// the server sent no verify action
 		return transfer.SuccessStatus(), nil
 	}
-	url := action.Href
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
 		headerAccept:            mimeGitLFS,
 		headerContentType:       mimeGitLFS,
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
+	req := newInternalRequestLFS(g.ctx, toInternalLFSURL(action.Href), http.MethodPost, headers, bodyBytes)
 	resp, err := req.Response()
 	if err != nil {
 		return transfer.NewStatus(transfer.StatusInternalServerError), err

View File

@@ -43,14 +43,13 @@ func (g *giteaLockBackend) Create(path, refname string) (transfer.Lock, error) {
 		g.logger.Log("json marshal error", err)
 		return nil, err
 	}
-	url := g.server.String()
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
 		headerAccept:            mimeGitLFS,
 		headerContentType:       mimeGitLFS,
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
+	req := newInternalRequestLFS(g.ctx, g.server.String(), http.MethodPost, headers, bodyBytes)
 	resp, err := req.Response()
 	if err != nil {
 		g.logger.Log("http request error", err)
@@ -95,14 +94,13 @@ func (g *giteaLockBackend) Unlock(lock transfer.Lock) error {
 		g.logger.Log("json marshal error", err)
 		return err
 	}
-	url := g.server.JoinPath(lock.ID(), "unlock").String()
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
 		headerAccept:            mimeGitLFS,
 		headerContentType:       mimeGitLFS,
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
+	req := newInternalRequestLFS(g.ctx, g.server.JoinPath(lock.ID(), "unlock").String(), http.MethodPost, headers, bodyBytes)
 	resp, err := req.Response()
 	if err != nil {
 		g.logger.Log("http request error", err)
@@ -176,16 +174,15 @@
 	}

 func (g *giteaLockBackend) queryLocks(v url.Values) ([]transfer.Lock, string, error) {
-	urlq := g.server.JoinPath() // get a copy
-	urlq.RawQuery = v.Encode()
-	url := urlq.String()
+	serverURLWithQuery := g.server.JoinPath() // get a copy
+	serverURLWithQuery.RawQuery = v.Encode()
 	headers := map[string]string{
 		headerAuthorization:     g.authToken,
 		headerGiteaInternalAuth: g.internalAuth,
 		headerAccept:            mimeGitLFS,
 		headerContentType:       mimeGitLFS,
 	}
-	req := newInternalRequestLFS(g.ctx, url, http.MethodGet, headers, nil)
+	req := newInternalRequestLFS(g.ctx, serverURLWithQuery.String(), http.MethodGet, headers, nil)
 	resp, err := req.Response()
 	if err != nil {
 		g.logger.Log("http request error", err)

View File

@@ -8,9 +8,13 @@ import (
 	"fmt"
 	"io"
 	"net/http"
+	"net/url"
+	"strings"

 	"code.gitea.io/gitea/modules/httplib"
 	"code.gitea.io/gitea/modules/private"
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"

 	"github.com/charmbracelet/git-lfs-transfer/transfer"
 )
@@ -57,8 +61,7 @@
 // Operations enum
 const (
-	opNone = iota
-	opDownload
+	opDownload = iota + 1
 	opUpload
 )
@@ -86,8 +89,49 @@ func statusCodeToErr(code int) error {
 	}
 }

-func newInternalRequestLFS(ctx context.Context, url, method string, headers map[string]string, body any) *httplib.Request {
-	req := private.NewInternalRequest(ctx, url, method)
+func toInternalLFSURL(s string) string {
+	pos1 := strings.Index(s, "://")
+	if pos1 == -1 {
+		return ""
+	}
+	appSubURLWithSlash := setting.AppSubURL + "/"
+	pos2 := strings.Index(s[pos1+3:], appSubURLWithSlash)
+	if pos2 == -1 {
+		return ""
+	}
+	routePath := s[pos1+3+pos2+len(appSubURLWithSlash):]
+	fields := strings.SplitN(routePath, "/", 3)
+	if len(fields) < 3 || !strings.HasPrefix(fields[2], "info/lfs") {
+		return ""
+	}
+	return setting.LocalURL + "api/internal/repo/" + routePath
+}
+
+func isInternalLFSURL(s string) bool {
+	if !strings.HasPrefix(s, setting.LocalURL) {
+		return false
+	}
+	u, err := url.Parse(s)
+	if err != nil {
+		return false
+	}
+	routePath := util.PathJoinRelX(u.Path)
+	subRoutePath, cut := strings.CutPrefix(routePath, "api/internal/repo/")
+	if !cut {
+		return false
+	}
+	fields := strings.SplitN(subRoutePath, "/", 3)
+	if len(fields) < 3 || !strings.HasPrefix(fields[2], "info/lfs") {
+		return false
+	}
+	return true
+}
+
+func newInternalRequestLFS(ctx context.Context, internalURL, method string, headers map[string]string, body any) *httplib.Request {
+	if !isInternalLFSURL(internalURL) {
+		return nil
+	}
+	req := private.NewInternalRequest(ctx, internalURL, method)
 	for k, v := range headers {
 		req.Header(k, v)
 	}

View File

@@ -0,0 +1,54 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package backend
+
+import (
+	"context"
+	"testing"
+
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/test"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestToInternalLFSURL(t *testing.T) {
+	defer test.MockVariableValue(&setting.LocalURL, "http://localurl/")()
+	defer test.MockVariableValue(&setting.AppSubURL, "/sub")()
+	cases := []struct {
+		url      string
+		expected string
+	}{
+		{"http://appurl/any", ""},
+		{"http://appurl/sub/any", ""},
+		{"http://appurl/sub/owner/repo/any", ""},
+		{"http://appurl/sub/owner/repo/info/any", ""},
+		{"http://appurl/sub/owner/repo/info/lfs/any", "http://localurl/api/internal/repo/owner/repo/info/lfs/any"},
+	}
+	for _, c := range cases {
+		assert.Equal(t, c.expected, toInternalLFSURL(c.url), c.url)
+	}
+}
+
+func TestIsInternalLFSURL(t *testing.T) {
+	defer test.MockVariableValue(&setting.LocalURL, "http://localurl/")()
+	defer test.MockVariableValue(&setting.InternalToken, "mock-token")()
+	cases := []struct {
+		url      string
+		expected bool
+	}{
+		{"", false},
+		{"http://otherurl/api/internal/repo/owner/repo/info/lfs/any", false},
+		{"http://localurl/api/internal/repo/owner/repo/info/lfs/any", true},
+		{"http://localurl/api/internal/repo/owner/repo/info", false},
+		{"http://localurl/api/internal/misc/owner/repo/info/lfs/any", false},
+		{"http://localurl/api/internal/owner/repo/info/lfs/any", false},
+		{"http://localurl/api/internal/foo/bar", false},
+	}
+	for _, c := range cases {
+		req := newInternalRequestLFS(context.Background(), c.url, "GET", nil, nil)
+		assert.Equal(t, c.expected, req != nil, c.url)
+		assert.Equal(t, c.expected, isInternalLFSURL(c.url), c.url)
+	}
+}

View File

@@ -159,6 +159,14 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
 		limit: setting.UI.MaxDisplayFileSize * 3,
 	}

+	// FIXME: Don't read all to memory, but goldmark doesn't support
+	buf, err := io.ReadAll(input)
+	if err != nil {
+		log.Error("Unable to ReadAll: %v", err)
+		return err
+	}
+	buf = giteautil.NormalizeEOL(buf)
+
 	// FIXME: should we include a timeout to abort the renderer if it takes too long?
 	defer func() {
 		err := recover()
@@ -166,20 +174,12 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
 			return
 		}

-		log.Warn("Unable to render markdown due to panic in goldmark: %v", err)
-		if (!setting.IsProd && !setting.IsInTesting) || log.IsDebug() {
-			log.Error("Panic in markdown: %v\n%s", err, log.Stack(2))
-		}
+		log.Error("Panic in markdown: %v\n%s", err, log.Stack(2))
+		escapedHTML := template.HTMLEscapeString(giteautil.UnsafeBytesToString(buf))
+		_, _ = output.Write(giteautil.UnsafeStringToBytes(escapedHTML))
 	}()

-	// FIXME: Don't read all to memory, but goldmark doesn't support
 	pc := newParserContext(ctx)
-	buf, err := io.ReadAll(input)
-	if err != nil {
-		log.Error("Unable to ReadAll: %v", err)
-		return err
-	}
-	buf = giteautil.NormalizeEOL(buf)

 	// Preserve original length.
 	bufWithMetadataLength := len(buf)
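
A minimal standalone sketch of the recover-and-degrade approach taken above: on a renderer panic, log and emit the escaped source rather than failing the whole request (function and trigger names are illustrative):

```go
package main

import (
	"fmt"
	"html/template"
	"strings"
)

// renderOrEscape pretends to render markdown but falls back to escaped
// plain text if the renderer panics, similar in spirit to the hunk above.
func renderOrEscape(src string, out *strings.Builder) {
	defer func() {
		if r := recover(); r != nil {
			out.Reset()
			out.WriteString(template.HTMLEscapeString(src)) // degrade gracefully
		}
	}()
	if strings.Contains(src, "boom") {
		panic("renderer bug") // stand-in for a goldmark panic
	}
	out.WriteString("<p>" + src + "</p>")
}

func main() {
	var out strings.Builder
	renderOrEscape("<b>boom</b>", &out)
	fmt.Println(out.String()) // &lt;b&gt;boom&lt;/b&gt;
}
```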

View File

@@ -23,6 +23,11 @@ func TestAttention(t *testing.T) {
 	defer svg.MockIcon("octicon-alert")()
 	defer svg.MockIcon("octicon-stop")()

+	test := func(input, expected string) {
+		result, err := markdown.RenderString(markup.NewTestRenderContext(), input)
+		assert.NoError(t, err)
+		assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(result)))
+	}
 	renderAttention := func(attention, icon string) string {
 		tmpl := `<blockquote class="attention-header attention-{attention}"><p><svg class="attention-icon attention-{attention} svg {icon}" width="16" height="16"></svg><strong class="attention-{attention}">{Attention}</strong></p>`
 		tmpl = strings.ReplaceAll(tmpl, "{attention}", attention)
@@ -31,12 +36,6 @@ func TestAttention(t *testing.T) {
 		return tmpl
 	}

-	test := func(input, expected string) {
-		result, err := markdown.RenderString(markup.NewTestRenderContext(), input)
-		assert.NoError(t, err)
-		assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(result)))
-	}
-
 	test(`
 > [!NOTE]
 > text
@@ -53,4 +52,7 @@ func TestAttention(t *testing.T) {

 	// legacy GitHub style
 	test(`> **warning**`, renderAttention("warning", "octicon-alert")+"\n</blockquote>")
+
+	// edge case (it used to cause panic)
+	test(">\ntext", "<blockquote>\n</blockquote>\n<p>text</p>")
 }

View File

@@ -115,6 +115,9 @@ func (g *ASTTransformer) transformBlockquote(v *ast.Blockquote, reader text.Read
 	// grab these nodes and make sure we adhere to the attention blockquote structure
 	firstParagraph := v.FirstChild()
+	if firstParagraph == nil {
+		return ast.WalkContinue, nil
+	}
 	g.applyElementDir(firstParagraph)
 	attentionType, processedNodes := g.extractBlockquoteAttentionEmphasis(firstParagraph, reader)

View File

@@ -1,4 +0,0 @@
-// Copyright 2017 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package markup_test

View File

@@ -7,9 +7,9 @@ import (
"context" "context"
"fmt" "fmt"
"net/url" "net/url"
"time"
"code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
) )
@@ -82,29 +82,32 @@ type HookProcReceiveRefResult struct {
 	HeadBranch string
 }
 
+func newInternalRequestAPIForHooks(ctx context.Context, hookName, ownerName, repoName string, opts HookOptions) *httplib.Request {
+	reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/%s/%s/%s", hookName, url.PathEscape(ownerName), url.PathEscape(repoName))
+	req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
+	// This "timeout" applies to http.Client's timeout: A Timeout of zero means no timeout.
+	// This "timeout" was previously set to `time.Duration(60+len(opts.OldCommitIDs))` seconds, but it caused unnecessary timeout failures.
+	// It should be good enough to remove the client side timeout, only respect the "ctx" and server side timeout.
+	req.SetReadWriteTimeout(0)
+	return req
+}
+
 // HookPreReceive check whether the provided commits are allowed
 func HookPreReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) ResponseExtra {
-	reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/pre-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName))
-	req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
-	req.SetReadWriteTimeout(time.Duration(60+len(opts.OldCommitIDs)) * time.Second)
+	req := newInternalRequestAPIForHooks(ctx, "pre-receive", ownerName, repoName, opts)
 	_, extra := requestJSONResp(req, &ResponseText{})
 	return extra
 }
 
 // HookPostReceive updates services and users
 func HookPostReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) (*HookPostReceiveResult, ResponseExtra) {
-	reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/post-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName))
-	req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
-	req.SetReadWriteTimeout(time.Duration(60+len(opts.OldCommitIDs)) * time.Second)
+	req := newInternalRequestAPIForHooks(ctx, "post-receive", ownerName, repoName, opts)
 	return requestJSONResp(req, &HookPostReceiveResult{})
 }
 
 // HookProcReceive proc-receive hook
 func HookProcReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) (*HookProcReceiveResult, ResponseExtra) {
-	reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/proc-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName))
-	req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
-	req.SetReadWriteTimeout(time.Duration(60+len(opts.OldCommitIDs)) * time.Second)
+	req := newInternalRequestAPIForHooks(ctx, "proc-receive", ownerName, repoName, opts)
 	return requestJSONResp(req, &HookProcReceiveResult{})
 }
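For readers unfamiliar with the Go semantics the comments above rely on: a zero timeout on http.Client simply disables the client-side deadline, so cancellation is left to the request context and the server. The following standalone Go sketch illustrates that behavior; it is not part of the change, and the endpoint URL is an illustrative assumption only.

package main

import (
	"context"
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Timeout of zero means "no client-side timeout", mirroring SetReadWriteTimeout(0) above.
	client := &http.Client{Timeout: 0}

	// The caller's context still bounds the request, so a slow hook is limited by the
	// server and by this deadline rather than by a fixed 60-second client cap.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Hypothetical URL, only for illustration.
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://127.0.0.1:3000/api/internal/example", nil)
	if err != nil {
		fmt.Println("build request:", err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}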

View File

@@ -40,6 +40,10 @@ func NewInternalRequest(ctx context.Context, url, method string) *httplib.Reques
 Ensure you are running in the correct environment or set the correct configuration file with -c.`, setting.CustomConf)
 	}
 
+	if !strings.HasPrefix(url, setting.LocalURL) {
+		log.Fatal("Invalid internal request URL: %q", url)
+	}
+
 	req := httplib.NewRequest(url, method).
 		SetContext(ctx).
 		Header("X-Real-IP", getClientIP()).

View File

@@ -46,6 +46,7 @@ var Service = struct {
 	RequireSignInView             bool
 	EnableNotifyMail              bool
 	EnableBasicAuth               bool
+	EnablePasskeyAuth             bool
 	EnableReverseProxyAuth        bool
 	EnableReverseProxyAuthAPI     bool
 	EnableReverseProxyAutoRegister bool
@@ -161,6 +162,7 @@ func loadServiceFrom(rootCfg ConfigProvider) {
 	Service.RequireSignInView = sec.Key("REQUIRE_SIGNIN_VIEW").MustBool()
 	Service.EnableBasicAuth = sec.Key("ENABLE_BASIC_AUTHENTICATION").MustBool(true)
 	Service.EnablePasswordSignInForm = sec.Key("ENABLE_PASSWORD_SIGNIN_FORM").MustBool(true)
+	Service.EnablePasskeyAuth = sec.Key("ENABLE_PASSKEY_AUTHENTICATION").MustBool(true)
 	Service.EnableReverseProxyAuth = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION").MustBool()
 	Service.EnableReverseProxyAuthAPI = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION_API").MustBool()
 	Service.EnableReverseProxyAutoRegister = sec.Key("ENABLE_REVERSE_PROXY_AUTO_REGISTRATION").MustBool()

View File

@@ -27,9 +27,10 @@ type PullRequest struct {
 	Comments int `json:"comments"`
 	// number of review comments made on the diff of a PR review (not including comments on commits or issues in a PR)
 	ReviewComments int `json:"review_comments"`
-	Additions    int `json:"additions"`
-	Deletions    int `json:"deletions"`
-	ChangedFiles int `json:"changed_files"`
+	Additions    *int `json:"additions,omitempty"`
+	Deletions    *int `json:"deletions,omitempty"`
+	ChangedFiles *int `json:"changed_files,omitempty"`
 	HTMLURL string `json:"html_url"`
 	DiffURL string `json:"diff_url"`
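The switch from int to *int with omitempty means the three counters disappear from the JSON payload when the diff stats could not be computed, instead of reporting a misleading 0. A small self-contained Go sketch of that behavior, using a simplified stand-in type rather than the full Gitea struct:

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for the changed fields.
type pullStats struct {
	Additions    *int `json:"additions,omitempty"`
	Deletions    *int `json:"deletions,omitempty"`
	ChangedFiles *int `json:"changed_files,omitempty"`
}

func main() {
	// When the diff stats could not be computed, the pointers stay nil
	// and the keys are omitted from the payload entirely.
	unknown, _ := json.Marshal(pullStats{})
	fmt.Println(string(unknown)) // {}

	// When they are known, the values are emitted as before.
	add, del, files := 4, 1, 1
	known, _ := json.Marshal(pullStats{Additions: &add, Deletions: &del, ChangedFiles: &files})
	fmt.Println(string(known)) // {"additions":4,"deletions":1,"changed_files":1}
}

Consumers of the API and webhook payloads should therefore treat a missing key as "unknown" rather than as zero.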

View File

@@ -1943,8 +1943,8 @@ pulls.delete.title=删除此合并请求?
 pulls.delete.text=你真的要删除这个合并请求吗? (这将永久删除所有内容。如果你打算将内容存档,请考虑关闭它)
 pulls.recently_pushed_new_branches=您已经于%[2]s推送了分支 <strong>%[1]s</strong>
-pulls.upstream_diverging_prompt_behind_1=该分支落后于 %s %d 个提交
-pulls.upstream_diverging_prompt_behind_n=该分支落后于 %s %d 个提交
+pulls.upstream_diverging_prompt_behind_1=该分支落后于 %[2]s %[1]d 个提交
+pulls.upstream_diverging_prompt_behind_n=该分支落后于 %[2]s %[1]d 个提交
 pulls.upstream_diverging_prompt_base_newer=基础分支 %s 有新的更改
 pulls.upstream_diverging_merge=同步派生
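The corrected strings use Go's indexed format verbs, which let a translation reorder the arguments (here: commit count and branch name) without changing the calling code. A tiny standalone Go illustration follows; the English sentence is only an example, not a Gitea locale string.

package main

import "fmt"

func main() {
	commits, branch := 3, "upstream/main"

	// Explicit argument indexes let the translated sentence mention the branch
	// before the count even though the code always passes (count, branch) in that order.
	fmt.Println(fmt.Sprintf("This branch is %[1]d commits behind %[2]s", commits, branch))
	fmt.Println(fmt.Sprintf("该分支落后于 %[2]s %[1]d 个提交", commits, branch))
}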

View File

@@ -98,6 +98,11 @@ func serveMavenMetadata(ctx *context.Context, params parameters) {
 	}
 	pvs = append(pvsLegacy, pvs...)
 
+	if len(pvs) == 0 {
+		apiError(ctx, http.StatusNotFound, packages_model.ErrPackageNotExist)
+		return
+	}
+
 	pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
 	if err != nil {
 		apiError(ctx, http.StatusInternalServerError, err)

View File

@@ -12,7 +12,6 @@ import (
"strings" "strings"
"time" "time"
actions_model "code.gitea.io/gitea/models/actions"
activities_model "code.gitea.io/gitea/models/activities" activities_model "code.gitea.io/gitea/models/activities"
"code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/organization"
@@ -1050,7 +1049,7 @@ func updateRepoArchivedState(ctx *context.APIContext, opts api.EditRepoOption) e
 		ctx.Error(http.StatusInternalServerError, "ArchiveRepoState", err)
 		return err
 	}
-	if err := actions_model.CleanRepoScheduleTasks(ctx, repo); err != nil {
+	if err := actions_service.CleanRepoScheduleTasks(ctx, repo); err != nil {
 		log.Error("CleanRepoScheduleTasks for archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
 	}
 	log.Trace("Repository was archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)

View File

@@ -169,6 +169,7 @@ func prepareSignInPageData(ctx *context.Context) {
ctx.Data["PageIsLogin"] = true ctx.Data["PageIsLogin"] = true
ctx.Data["EnableSSPI"] = auth.IsSSPIEnabled(ctx) ctx.Data["EnableSSPI"] = auth.IsSSPIEnabled(ctx)
ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm
ctx.Data["EnablePasskeyAuth"] = setting.Service.EnablePasskeyAuth
if setting.Service.EnableCaptcha && setting.Service.RequireCaptchaForLogin { if setting.Service.EnableCaptcha && setting.Service.RequireCaptchaForLogin {
context.SetCaptchaData(ctx) context.SetCaptchaData(ctx)

View File

@@ -46,6 +46,7 @@ func LinkAccount(ctx *context.Context) {
ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration
ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm
ctx.Data["ShowRegistrationButton"] = false ctx.Data["ShowRegistrationButton"] = false
ctx.Data["EnablePasskeyAuth"] = setting.Service.EnablePasskeyAuth
// use this to set the right link into the signIn and signUp templates in the link_account template // use this to set the right link into the signIn and signUp templates in the link_account template
ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin" ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"
@@ -145,6 +146,7 @@ func LinkAccountPostSignIn(ctx *context.Context) {
ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration
ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm
ctx.Data["ShowRegistrationButton"] = false ctx.Data["ShowRegistrationButton"] = false
ctx.Data["EnablePasskeyAuth"] = setting.Service.EnablePasskeyAuth
// use this to set the right link into the signIn and signUp templates in the link_account template // use this to set the right link into the signIn and signUp templates in the link_account template
ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin" ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"
@@ -235,6 +237,7 @@ func LinkAccountPostRegister(ctx *context.Context) {
ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration
ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm ctx.Data["EnablePasswordSignInForm"] = setting.Service.EnablePasswordSignInForm
ctx.Data["ShowRegistrationButton"] = false ctx.Data["ShowRegistrationButton"] = false
ctx.Data["EnablePasskeyAuth"] = setting.Service.EnablePasskeyAuth
// use this to set the right link into the signIn and signUp templates in the link_account template // use this to set the right link into the signIn and signUp templates in the link_account template
ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin" ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"

View File

@@ -248,7 +248,7 @@ func AuthorizeOAuth(ctx *context.Context) {
 		}, form.RedirectURI)
 		return
 	}
-	if err := ctx.Session.Set("CodeChallengeMethod", form.CodeChallenge); err != nil {
+	if err := ctx.Session.Set("CodeChallenge", form.CodeChallenge); err != nil {
 		handleAuthorizeError(ctx, AuthorizeError{
 			ErrorCode:        ErrorCodeServerError,
 			ErrorDescription: "cannot set code challenge",
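For context on why the session key matters: the stored value is the PKCE code challenge, which the token endpoint later checks against the SHA-256 of the client's code verifier (RFC 7636, S256 method). A standalone Go sketch of that verification step, not Gitea's own implementation:

package main

import (
	"crypto/sha256"
	"crypto/subtle"
	"encoding/base64"
	"fmt"
)

// verifyPKCE checks an S256 code challenge: the challenge must equal the
// base64url (unpadded) encoding of SHA-256(code_verifier).
func verifyPKCE(codeVerifier, codeChallenge string) bool {
	sum := sha256.Sum256([]byte(codeVerifier))
	computed := base64.RawURLEncoding.EncodeToString(sum[:])
	return subtle.ConstantTimeCompare([]byte(computed), []byte(codeChallenge)) == 1
}

func main() {
	verifier := "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk" // example verifier
	sum := sha256.Sum256([]byte(verifier))
	challenge := base64.RawURLEncoding.EncodeToString(sum[:]) // what the client sends up front
	fmt.Println(verifyPKCE(verifier, challenge))              // true
}

Storing the challenge under the wrong session key would make this comparison run against the challenge method string instead of the challenge itself, which is exactly the bug the hunk above corrects.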

View File

@@ -50,6 +50,11 @@ func WebAuthn(ctx *context.Context) {
 // WebAuthnPasskeyAssertion submits a WebAuthn challenge for the passkey login to the browser
 func WebAuthnPasskeyAssertion(ctx *context.Context) {
+	if !setting.Service.EnablePasskeyAuth {
+		ctx.Error(http.StatusForbidden)
+		return
+	}
+
 	assertion, sessionData, err := wa.WebAuthn.BeginDiscoverableLogin()
 	if err != nil {
 		ctx.ServerError("webauthn.BeginDiscoverableLogin", err)
@@ -66,6 +71,11 @@ func WebAuthnPasskeyAssertion(ctx *context.Context) {
 // WebAuthnPasskeyLogin handles the WebAuthn login process using a Passkey
 func WebAuthnPasskeyLogin(ctx *context.Context) {
+	if !setting.Service.EnablePasskeyAuth {
+		ctx.Error(http.StatusForbidden)
+		return
+	}
+
 	sessionData, okData := ctx.Session.Get("webauthnPasskeyAssertion").(*webauthn.SessionData)
 	if !okData || sessionData == nil {
 		ctx.ServerError("ctx.Session.Get", errors.New("not in WebAuthn session"))

View File

@@ -903,7 +903,7 @@ func Run(ctx *context_module.Context) {
 	}
 
 	// cancel running jobs of the same workflow
-	if err := actions_model.CancelPreviousJobs(
+	if err := actions_service.CancelPreviousJobs(
 		ctx,
 		run.RepoID,
 		run.Ref,

View File

@@ -12,7 +12,6 @@ import (
"time" "time"
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/organization"
"code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/perm"
@@ -902,7 +901,7 @@ func SettingsPost(ctx *context.Context) {
 		return
 	}
 
-	if err := actions_model.CleanRepoScheduleTasks(ctx, repo); err != nil {
+	if err := actions_service.CleanRepoScheduleTasks(ctx, repo); err != nil {
 		log.Error("CleanRepoScheduleTasks for archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
 	}

View File

@@ -10,10 +10,12 @@ import (
 	actions_model "code.gitea.io/gitea/models/actions"
 	"code.gitea.io/gitea/models/db"
+	repo_model "code.gitea.io/gitea/models/repo"
 	"code.gitea.io/gitea/modules/actions"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/timeutil"
+	webhook_module "code.gitea.io/gitea/modules/webhook"
 )
 
 // StopZombieTasks stops the task which have running status, but haven't been updated for a long time
@@ -32,6 +34,24 @@ func StopEndlessTasks(ctx context.Context) error {
 	})
 }
 
+func notifyWorkflowJobStatusUpdate(ctx context.Context, jobs []*actions_model.ActionRunJob) {
+	if len(jobs) > 0 {
+		CreateCommitStatus(ctx, jobs...)
+	}
+}
+
+func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
+	jobs, err := actions_model.CancelPreviousJobs(ctx, repoID, ref, workflowID, event)
+	notifyWorkflowJobStatusUpdate(ctx, jobs)
+	return err
+}
+
+func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) error {
+	jobs, err := actions_model.CleanRepoScheduleTasks(ctx, repo)
+	notifyWorkflowJobStatusUpdate(ctx, jobs)
+	return err
+}
+
 func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
 	tasks, err := db.Find[actions_model.ActionTask](ctx, opts)
 	if err != nil {
@@ -67,7 +87,7 @@ func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
 		remove()
 	}
 
-	CreateCommitStatus(ctx, jobs...)
+	notifyWorkflowJobStatusUpdate(ctx, jobs)
 
 	return nil
 }

View File

@@ -136,7 +136,7 @@ func notify(ctx context.Context, input *notifyInput) error {
 		return nil
 	}
 	if unit_model.TypeActions.UnitGlobalDisabled() {
-		if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
+		if err := CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
 			log.Error("CleanRepoScheduleTasks: %v", err)
 		}
 		return nil
@@ -341,7 +341,7 @@ func handleWorkflows(
 		// cancel running jobs if the event is push or pull_request_sync
 		if run.Event == webhook_module.HookEventPush ||
 			run.Event == webhook_module.HookEventPullRequestSync {
-			if err := actions_model.CancelPreviousJobs(
+			if err := CancelPreviousJobs(
 				ctx,
 				run.RepoID,
 				run.Ref,
@@ -472,7 +472,7 @@ func handleSchedules(
log.Error("CountSchedules: %v", err) log.Error("CountSchedules: %v", err)
return err return err
} else if count > 0 { } else if count > 0 {
if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo); err != nil { if err := CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err) log.Error("CleanRepoScheduleTasks: %v", err)
} }
} }

View File

@@ -55,7 +55,7 @@ func startTasks(ctx context.Context) error {
 		// cancel running jobs if the event is push
 		if row.Schedule.Event == webhook_module.HookEventPush {
 			// cancel running jobs of the same workflow
-			if err := actions_model.CancelPreviousJobs(
+			if err := CancelPreviousJobs(
 				ctx,
 				row.RepoID,
 				row.Schedule.Ref,

View File

@@ -239,9 +239,11 @@ func ToAPIPullRequest(ctx context.Context, pr *issues_model.PullRequest, doer *u
 		// Calculate diff
 		startCommitID = pr.MergeBase
-		apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions, err = gitRepo.GetDiffShortStat(startCommitID, endCommitID)
+		diffChangedFiles, diffAdditions, diffDeletions, err := gitRepo.GetDiffShortStat(startCommitID, endCommitID)
 		if err != nil {
 			log.Error("GetDiffShortStat: %v", err)
+		} else {
+			apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions = &diffChangedFiles, &diffAdditions, &diffDeletions
 		}
 	}
@@ -459,12 +461,6 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
 			return nil, err
 		}
 
-		// Outer scope variables to be used in diff calculation
-		var (
-			startCommitID string
-			endCommitID   string
-		)
-
 		if git.IsErrBranchNotExist(err) {
 			headCommitID, err := headGitRepo.GetRefCommitID(apiPullRequest.Head.Ref)
 			if err != nil && !git.IsErrNotExist(err) {
@@ -473,7 +469,6 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
 			}
 			if err == nil {
 				apiPullRequest.Head.Sha = headCommitID
-				endCommitID = headCommitID
 			}
 		} else {
 			commit, err := headBranch.GetCommit()
@@ -484,17 +479,8 @@ func ToAPIPullRequests(ctx context.Context, baseRepo *repo_model.Repository, prs
 			if err == nil {
 				apiPullRequest.Head.Ref = pr.HeadBranch
 				apiPullRequest.Head.Sha = commit.ID.String()
-				endCommitID = commit.ID.String()
 			}
 		}
-
-		// Calculate diff
-		startCommitID = pr.MergeBase
-		apiPullRequest.ChangedFiles, apiPullRequest.Additions, apiPullRequest.Deletions, err = gitRepo.GetDiffShortStat(startCommitID, endCommitID)
-		if err != nil {
-			log.Error("GetDiffShortStat: %v", err)
-		}
 	}
 
 	if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 {

View File

@@ -1193,6 +1193,8 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
 			if language.Has() {
 				diffFile.Language = language.Value()
 			}
+		} else {
+			checker = nil // CheckPath fails, it's not impossible to "check" anymore
 		}
 	}
@@ -1377,10 +1379,8 @@ func GetWhitespaceFlag(whitespaceBehavior string) git.TrustedCmdArgs {
"ignore-eol": {"--ignore-space-at-eol"}, "ignore-eol": {"--ignore-space-at-eol"},
"show-all": nil, "show-all": nil,
} }
if flag, ok := whitespaceFlags[whitespaceBehavior]; ok { if flag, ok := whitespaceFlags[whitespaceBehavior]; ok {
return flag return flag
} }
log.Warn("unknown whitespace behavior: %q, default to 'show-all'", whitespaceBehavior)
return nil return nil
} }

View File

@@ -30,6 +30,7 @@ import (
"code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
webhook_module "code.gitea.io/gitea/modules/webhook" webhook_module "code.gitea.io/gitea/modules/webhook"
actions_service "code.gitea.io/gitea/services/actions"
notify_service "code.gitea.io/gitea/services/notify" notify_service "code.gitea.io/gitea/services/notify"
files_service "code.gitea.io/gitea/services/repository/files" files_service "code.gitea.io/gitea/services/repository/files"
@@ -428,7 +429,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_m
log.Error("DeleteCronTaskByRepo: %v", err) log.Error("DeleteCronTaskByRepo: %v", err)
} }
// cancel running cron jobs of this repository and delete old schedules // cancel running cron jobs of this repository and delete old schedules
if err := actions_model.CancelPreviousJobs( if err := actions_service.CancelPreviousJobs(
ctx, ctx,
repo.ID, repo.ID,
from, from,
@@ -609,7 +610,7 @@ func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, gitR
log.Error("DeleteCronTaskByRepo: %v", err) log.Error("DeleteCronTaskByRepo: %v", err)
} }
// cancel running cron jobs of this repository and delete old schedules // cancel running cron jobs of this repository and delete old schedules
if err := actions_model.CancelPreviousJobs( if err := actions_service.CancelPreviousJobs(
ctx, ctx,
repo.ID, repo.ID,
oldDefaultBranchName, oldDefaultBranchName,

View File

@@ -23,6 +23,7 @@ import (
 	repo_module "code.gitea.io/gitea/modules/repository"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/timeutil"
+	"code.gitea.io/gitea/modules/util"
 	issue_service "code.gitea.io/gitea/services/issue"
 	notify_service "code.gitea.io/gitea/services/notify"
 	pull_service "code.gitea.io/gitea/services/pull"
@@ -133,23 +134,26 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error {
 		} else { // is new tag
 			newCommit, err := gitRepo.GetCommit(opts.NewCommitID)
 			if err != nil {
-				return fmt.Errorf("gitRepo.GetCommit(%s) in %s/%s[%d]: %w", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
+				// in case there is dirty data, for example, the "github.com/git/git" repository has tags pointing to non-existing commits
+				if !errors.Is(err, util.ErrNotExist) {
+					log.Error("Unable to get tag commit: gitRepo.GetCommit(%s) in %s/%s[%d]: %v", opts.NewCommitID, repo.OwnerName, repo.Name, repo.ID, err)
+				}
+			} else {
+				commits := repo_module.NewPushCommits()
+				commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
+				commits.CompareURL = repo.ComposeCompareURL(objectFormat.EmptyObjectID().String(), opts.NewCommitID)
+
+				notify_service.PushCommits(
+					ctx, pusher, repo,
+					&repo_module.PushUpdateOptions{
+						RefFullName: opts.RefFullName,
+						OldCommitID: objectFormat.EmptyObjectID().String(),
+						NewCommitID: opts.NewCommitID,
+					}, commits)
+
+				addTags = append(addTags, tagName)
+				notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
 			}
-
-			commits := repo_module.NewPushCommits()
-			commits.HeadCommit = repo_module.CommitToPushCommit(newCommit)
-			commits.CompareURL = repo.ComposeCompareURL(objectFormat.EmptyObjectID().String(), opts.NewCommitID)
-
-			notify_service.PushCommits(
-				ctx, pusher, repo,
-				&repo_module.PushUpdateOptions{
-					RefFullName: opts.RefFullName,
-					OldCommitID: objectFormat.EmptyObjectID().String(),
-					NewCommitID: opts.NewCommitID,
-				}, commits)
-
-			addTags = append(addTags, tagName)
-			notify_service.CreateRef(ctx, pusher, repo, opts.RefFullName, opts.NewCommitID)
 		}
 	} else if opts.RefFullName.IsBranch() {
 		if pusher == nil || pusher.ID != opts.PusherID {
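The new branch only ignores the specific "not exist" case and still logs anything unexpected, so one dangling tag no longer aborts the whole push update. A minimal standalone Go sketch of that errors.Is pattern; the sentinel below is a hypothetical stand-in for util.ErrNotExist, not the real Gitea value.

package main

import (
	"errors"
	"fmt"
)

// Hypothetical sentinel standing in for util.ErrNotExist.
var errNotExist = errors.New("does not exist")

// handleTagCommitErr skips the expected "missing object" case and surfaces everything else,
// instead of returning the error and failing the entire batch.
func handleTagCommitErr(err error) {
	if err == nil {
		return
	}
	if errors.Is(err, errNotExist) {
		return // dirty data such as a tag pointing at a missing commit: skip silently
	}
	fmt.Println("unable to get tag commit:", err) // unexpected errors are still logged
}

func main() {
	handleTagCommitErr(nil)
	handleTagCommitErr(fmt.Errorf("object %s: %w", "deadbeef", errNotExist))
	handleTagCommitErr(errors.New("repository storage offline"))
}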

View File

@@ -7,7 +7,6 @@ import (
"context" "context"
"slices" "slices"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo" repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/models/unit"
@@ -29,7 +28,7 @@ func UpdateRepositoryUnits(ctx context.Context, repo *repo_model.Repository, uni
 	}
 
 	if slices.Contains(deleteUnitTypes, unit.TypeActions) {
-		if err := actions_model.CleanRepoScheduleTasks(ctx, repo); err != nil {
+		if err := actions_service.CleanRepoScheduleTasks(ctx, repo); err != nil {
 			log.Error("CleanRepoScheduleTasks: %v", err)
 		}
 	}

View File

@@ -59,11 +59,13 @@
 		</div>
 	</div>
+	{{if or .EnablePasskeyAuth .ShowRegistrationButton}}
 	<div class="ui container fluid">
-		{{template "user/auth/webauthn_error" .}}
 		<div class="ui attached segment header top tw-max-w-2xl tw-m-auto tw-flex tw-flex-col tw-items-center">
-			<a class="signin-passkey">{{ctx.Locale.Tr "auth.signin_passkey"}}</a>
+			{{if .EnablePasskeyAuth}}
+				{{template "user/auth/webauthn_error" .}}
+				<a class="signin-passkey">{{ctx.Locale.Tr "auth.signin_passkey"}}</a>
+			{{end}}
 			{{if .ShowRegistrationButton}}
 				<div class="field">
@@ -73,3 +75,4 @@
 			{{end}}
 		</div>
 	</div>
+	{{end}}

View File

@@ -51,7 +51,6 @@ func TestAPIViewPulls(t *testing.T) {
 	assert.Empty(t, pull.RequestedReviewersTeams)
 	assert.EqualValues(t, 5, pull.RequestedReviewers[0].ID)
 	assert.EqualValues(t, 6, pull.RequestedReviewers[1].ID)
-	assert.EqualValues(t, 1, pull.ChangedFiles)
 
 	if assert.EqualValues(t, 5, pull.ID) {
 		resp = ctx.Session.MakeRequest(t, NewRequest(t, "GET", pull.DiffURL), http.StatusOK)
@@ -59,22 +58,23 @@ func TestAPIViewPulls(t *testing.T) {
 		assert.NoError(t, err)
 		patch, err := gitdiff.ParsePatch(context.Background(), 1000, 5000, 10, bytes.NewReader(bs), "")
 		assert.NoError(t, err)
-		if assert.Len(t, patch.Files, pull.ChangedFiles) {
+		if assert.Len(t, patch.Files, 1) {
 			assert.Equal(t, "File-WoW", patch.Files[0].Name)
 			// FIXME: The old name should be empty if it's a file add type
 			assert.Equal(t, "File-WoW", patch.Files[0].OldName)
-			assert.EqualValues(t, pull.Additions, patch.Files[0].Addition)
-			assert.EqualValues(t, pull.Deletions, patch.Files[0].Deletion)
+			assert.EqualValues(t, 1, patch.Files[0].Addition)
+			assert.EqualValues(t, 0, patch.Files[0].Deletion)
 			assert.Equal(t, gitdiff.DiffFileAdd, patch.Files[0].Type)
 		}
 
 		t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
 			doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
-				if assert.Len(t, files, pull.ChangedFiles) {
+				if assert.Len(t, files, 1) {
 					assert.Equal(t, "File-WoW", files[0].Filename)
 					assert.Empty(t, files[0].PreviousFilename)
-					assert.EqualValues(t, pull.Additions, files[0].Additions)
-					assert.EqualValues(t, pull.Deletions, files[0].Deletions)
+					assert.EqualValues(t, 1, files[0].Additions)
+					assert.EqualValues(t, 1, files[0].Changes)
+					assert.EqualValues(t, 0, files[0].Deletions)
 					assert.Equal(t, "added", files[0].Status)
 				}
 			}))
@@ -88,7 +88,6 @@ func TestAPIViewPulls(t *testing.T) {
 	assert.EqualValues(t, 4, pull.RequestedReviewers[1].ID)
 	assert.EqualValues(t, 2, pull.RequestedReviewers[2].ID)
 	assert.EqualValues(t, 5, pull.RequestedReviewers[3].ID)
-	assert.EqualValues(t, 1, pull.ChangedFiles)
 
 	if assert.EqualValues(t, 2, pull.ID) {
 		resp = ctx.Session.MakeRequest(t, NewRequest(t, "GET", pull.DiffURL), http.StatusOK)
@@ -96,45 +95,44 @@ func TestAPIViewPulls(t *testing.T) {
 		assert.NoError(t, err)
 		patch, err := gitdiff.ParsePatch(context.Background(), 1000, 5000, 10, bytes.NewReader(bs), "")
 		assert.NoError(t, err)
-		if assert.Len(t, patch.Files, pull.ChangedFiles) {
+		if assert.Len(t, patch.Files, 1) {
 			assert.Equal(t, "README.md", patch.Files[0].Name)
 			assert.Equal(t, "README.md", patch.Files[0].OldName)
-			assert.EqualValues(t, pull.Additions, patch.Files[0].Addition)
-			assert.EqualValues(t, pull.Deletions, patch.Files[0].Deletion)
+			assert.EqualValues(t, 4, patch.Files[0].Addition)
+			assert.EqualValues(t, 1, patch.Files[0].Deletion)
 			assert.Equal(t, gitdiff.DiffFileChange, patch.Files[0].Type)
 		}
 
 		t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
 			doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
-				if assert.Len(t, files, pull.ChangedFiles) {
+				if assert.Len(t, files, 1) {
 					assert.Equal(t, "README.md", files[0].Filename)
 					// FIXME: The PreviousFilename name should be the same as Filename if it's a file change
 					assert.Equal(t, "", files[0].PreviousFilename)
-					assert.EqualValues(t, pull.Additions, files[0].Additions)
-					assert.EqualValues(t, pull.Deletions, files[0].Deletions)
+					assert.EqualValues(t, 4, files[0].Additions)
+					assert.EqualValues(t, 1, files[0].Deletions)
 					assert.Equal(t, "changed", files[0].Status)
 				}
 			}))
 	}
 
-	pull = pulls[2]
+	pull = pulls[0]
 	assert.EqualValues(t, 1, pull.Poster.ID)
-	assert.Len(t, pull.RequestedReviewers, 1)
+	assert.Len(t, pull.RequestedReviewers, 2)
 	assert.Empty(t, pull.RequestedReviewersTeams)
-	assert.EqualValues(t, 1, pull.RequestedReviewers[0].ID)
-	assert.EqualValues(t, 0, pull.ChangedFiles)
+	assert.EqualValues(t, 5, pull.RequestedReviewers[0].ID)
 
-	if assert.EqualValues(t, 1, pull.ID) {
+	if assert.EqualValues(t, 5, pull.ID) {
 		resp = ctx.Session.MakeRequest(t, NewRequest(t, "GET", pull.DiffURL), http.StatusOK)
 		bs, err := io.ReadAll(resp.Body)
 		assert.NoError(t, err)
 		patch, err := gitdiff.ParsePatch(context.Background(), 1000, 5000, 10, bytes.NewReader(bs), "")
 		assert.NoError(t, err)
-		assert.EqualValues(t, pull.ChangedFiles, patch.NumFiles)
+		assert.EqualValues(t, 1, patch.NumFiles)
 
 		t.Run(fmt.Sprintf("APIGetPullFiles_%d", pull.ID),
 			doAPIGetPullFiles(ctx, pull, func(t *testing.T, files []*api.ChangedFile) {
-				assert.Len(t, files, pull.ChangedFiles)
+				assert.Len(t, files, 1)
 			}))
 	}
 }

View File

@@ -55,9 +55,14 @@ func TestGitLFSSSH(t *testing.T) {
 		return strings.Contains(s, "POST /api/internal/repo/user2/repo1.git/info/lfs/objects/batch")
 	})
 	countUpload := slices.ContainsFunc(routerCalls, func(s string) bool {
-		return strings.Contains(s, "PUT /user2/repo1.git/info/lfs/objects/")
+		return strings.Contains(s, "PUT /api/internal/repo/user2/repo1.git/info/lfs/objects/")
+	})
+	nonAPIRequests := slices.ContainsFunc(routerCalls, func(s string) bool {
+		fields := strings.Fields(s)
+		return !strings.HasPrefix(fields[1], "/api/")
 	})
 	assert.NotZero(t, countBatch)
 	assert.NotZero(t, countUpload)
+	assert.Zero(t, nonAPIRequests)
 })
}

View File

@@ -25,6 +25,7 @@ import (
"github.com/PuerkitoBio/goquery" "github.com/PuerkitoBio/goquery"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
) )
func TestNewWebHookLink(t *testing.T) { func TestNewWebHookLink(t *testing.T) {
@@ -378,12 +379,14 @@ func Test_WebhookPullRequest(t *testing.T) {
 		// 3. validate the webhook is triggered
 		assert.EqualValues(t, "pull_request", triggeredEvent)
-		assert.Len(t, payloads, 1)
+		require.Len(t, payloads, 1)
 		assert.EqualValues(t, "repo1", payloads[0].PullRequest.Base.Repository.Name)
 		assert.EqualValues(t, "user2/repo1", payloads[0].PullRequest.Base.Repository.FullName)
 		assert.EqualValues(t, "repo1", payloads[0].PullRequest.Head.Repository.Name)
 		assert.EqualValues(t, "user2/repo1", payloads[0].PullRequest.Head.Repository.FullName)
-		assert.EqualValues(t, 0, payloads[0].PullRequest.Additions)
+		assert.EqualValues(t, 0, *payloads[0].PullRequest.Additions)
+		assert.EqualValues(t, 0, *payloads[0].PullRequest.ChangedFiles)
+		assert.EqualValues(t, 0, *payloads[0].PullRequest.Deletions)
 	})
 }

View File

@@ -96,7 +96,7 @@ func TestSigninWithRememberMe(t *testing.T) {
 	session.MakeRequest(t, req, http.StatusOK)
 }
 
-func TestEnablePasswordSignInForm(t *testing.T) {
+func TestEnablePasswordSignInFormAndEnablePasskeyAuth(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
 
 	mockLinkAccount := func(ctx *context.Context) {
@@ -139,4 +139,22 @@ func TestEnablePasswordSignInForm(t *testing.T) {
 		resp = MakeRequest(t, req, http.StatusOK)
 		NewHTMLParser(t, resp.Body).AssertElement(t, "form[action='/user/link_account_signin']", true)
 	})
+
+	t.Run("EnablePasskeyAuth=false", func(t *testing.T) {
+		defer tests.PrintCurrentTest(t)()
+		defer test.MockVariableValue(&setting.Service.EnablePasskeyAuth, false)()
+
+		req := NewRequest(t, "GET", "/user/login")
+		resp := MakeRequest(t, req, http.StatusOK)
+		NewHTMLParser(t, resp.Body).AssertElement(t, ".signin-passkey", false)
+	})
+
+	t.Run("EnablePasskeyAuth=true", func(t *testing.T) {
+		defer tests.PrintCurrentTest(t)()
+		defer test.MockVariableValue(&setting.Service.EnablePasskeyAuth, true)()
+
+		req := NewRequest(t, "GET", "/user/login")
+		resp := MakeRequest(t, req, http.StatusOK)
+		NewHTMLParser(t, resp.Body).AssertElement(t, ".signin-passkey", true)
+	})
 }

View File

@@ -45,6 +45,7 @@ SIGNING_KEY = none
 SSH_DOMAIN = localhost
 HTTP_PORT = 3003
 ROOT_URL = http://localhost:3003/
+LOCAL_ROOT_URL = http://127.0.0.1:3003/
 DISABLE_SSH = false
 SSH_LISTEN_HOST = localhost
 SSH_PORT = 2201

View File

@@ -47,6 +47,7 @@ SIGNING_KEY = none
 SSH_DOMAIN = localhost
 HTTP_PORT = 3001
 ROOT_URL = http://localhost:3001/
+LOCAL_ROOT_URL = http://127.0.0.1:3001/
 DISABLE_SSH = false
 SSH_LISTEN_HOST = localhost
 SSH_PORT = 2201

View File

@@ -46,6 +46,7 @@ SIGNING_KEY = none
 SSH_DOMAIN = localhost
 HTTP_PORT = 3002
 ROOT_URL = http://localhost:3002/
+LOCAL_ROOT_URL = http://127.0.0.1:3002/
 DISABLE_SSH = false
 SSH_LISTEN_HOST = localhost
 SSH_PORT = 2202

View File

@@ -41,6 +41,7 @@ SIGNING_KEY = none
 SSH_DOMAIN = localhost
 HTTP_PORT = 3003
 ROOT_URL = http://localhost:3003/
+LOCAL_ROOT_URL = http://127.0.0.1:3003/
 DISABLE_SSH = false
 SSH_LISTEN_HOST = localhost
 SSH_PORT = 2203

View File

@@ -57,6 +57,10 @@
   margin-right: 0;
 }
 
+#navbar .item.active {
+  background: var(--color-active);
+}
+
 @media (max-width: 767.98px) {
   #navbar {
     align-items: stretch;

View File

@@ -221,7 +221,10 @@ function initRepoDiffShowMore() {
     if (!resp) {
       return;
     }
-    $target.parent().replaceWith($(resp).find('#diff-file-boxes .diff-file-body .file-body').children());
+    const $respFileBody = $(resp).find('#diff-file-boxes .diff-file-body .file-body');
+    const respFileBodyChildren = Array.from($respFileBody.children());
+    $target.parent().replaceWith($respFileBody.children());
+    for (const el of respFileBodyChildren) window.htmx.process(el);
     onShowMoreFiles();
   } catch (error) {
     console.error('Error:', error);

View File

@@ -1,5 +1,5 @@
 import {encodeURLEncodedBase64, decodeURLEncodedBase64} from '../utils.ts';
-import {showElem} from '../utils/dom.ts';
+import {hideElem, showElem} from '../utils/dom.ts';
 import {GET, POST} from '../modules/fetch.ts';
 
 const {appSubUrl} = window.config;
@@ -11,6 +11,15 @@ export async function initUserAuthWebAuthn() {
     return;
   }
 
+  if (window.location.protocol === 'http:') {
+    // webauthn is only supported on secure contexts
+    const isLocalhost = ['localhost', '127.0.0.1'].includes(window.location.hostname);
+    if (!isLocalhost) {
+      hideElem(elSignInPasskeyBtn);
+      return;
+    }
+  }
+
   if (!detectWebAuthnSupport()) {
     return;
   }

View File

@@ -63,6 +63,7 @@ interface Window {
   jQuery: typeof import('@types/jquery'),
   htmx: Omit<typeof import('htmx.org/dist/htmx.esm.js').default, 'config'> & {
     config?: Writable<typeof import('htmx.org').default.config>,
+    process?: (elt: Element | string) => void,
   },
   ui?: any,
   _globalHandlerErrors: Array<ErrorEvent & PromiseRejectionEvent> & {