Mirror of https://github.com/go-gitea/gitea.git (synced 2025-11-10 15:32:55 +09:00)

Compare commits: v1.26.0-de ... v1.25.0 (36 commits)
| SHA1 |
|---|
| cf644d565d |
| 88a8571b93 |
| 45a88e09af |
| 6aa1a1e54d |
| 18cc3160b5 |
| 123c8d2b81 |
| b2f2f8528a |
| 0925089b5e |
| c84d17b1bb |
| cb338a2ba1 |
| 16f4f0d473 |
| 387a4e72f7 |
| ac6d38e4b7 |
| 6df51d4ef5 |
| 46f695ac65 |
| 4af1d58c86 |
| f71df88a6b |
| 18b178e63f |
| 1644b8743c |
| 53a2aaee35 |
| 5ae9bb4df9 |
| ae2e8c1f00 |
| 602af1499e |
| f4512426a1 |
| a3458c669a |
| 609d88f029 |
| 3c78598217 |
| b7bb0fa538 |
| 6de2151607 |
| a99761d466 |
| 8d1c04bda4 |
| aa57531aac |
| 006fe2a907 |
| d94faf6d7e |
| 6c8879b832 |
| 94a6da3bc8 |
CHANGELOG.md (227 lines changed)
@@ -4,6 +4,233 @@ This changelog goes through the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.com).

## [1.25.0](https://github.com/go-gitea/gitea/releases/tag/1.25.0) - 2025-10-30

* BREAKING
  * Return 201 Created for CreateVariable API responses (#34517)
  * Add label 'state' to metric 'gitea_users' (#34326)
* SECURITY
  * Upgrade security public key (#34956)
  * Also include all security fixes in 1.24.x after 1.25.0-rc0
* FEATURES
  * Stream repo zip/tar.gz/bundle archives by default (#35487)
  * Use configurable remote name for git commands (#35172)
  * Send email on Workflow Run Success/Failure (#34982)
  * Refactor OpenIDConnect to support SSH/FullName sync (#34978)
  * Refactor repo contents API and add "contents-ext" API (#34822)
  * Add support for 3D/CAD file formats preview (#34794)
  * Improve instance wide ssh commit signing (#34341)
  * Edit file workflow for creating a fork and proposing changes (#34240)
  * Follow file symlinks in the UI to their target (#28835)
  * Allow renaming/moving binary/LFS files in the UI (#34350)
* PERFORMANCE
  * Improve the performance when detecting the file editable (#34653)
* ENHANCEMENTS
  * Enable more markdown paste features in textarea editor (#35494)
  * Don't store repo archives on `gitea dump` (#35467)
  * Always return the relevant status information, even if no status exists. (#35335)
  * Add start time on perf trace because it seems some steps haven't been recorded. (#35282)
  * Remove deprecated auth sources (#35272)
  * When sorting issues by nearest due date, issues without due date should be sorted ascending (#35267)
  * Disable field count validation of CSV viewer (#35228)
  * Add `has_code` to repository REST API (#35214)
  * Display pull request in merged commit view (#35202)
  * Support Basic Authentication for archive downloads (#35087)
  * Add hover background to table rows in user and repo admin page (#35072)
  * Partially refresh notifications list (#35010)
  * Also display "recently pushed branch" alert on PR view (#35001)
  * Refactor time tracker UI (#34983)
  * Improve CLI commands (#34973)
  * Improve project & label color picker and image scroll (#34971)
  * Improve NuGet API Parity (#21291) (#34940)
  * Support getting last commit message using contents-ext API (#34904)
  * Adds title on branch commit counts (#34869)
  * Add "Cancel workflow run" button to Actions list page (#34817)
  * Improve img lazy loading (#34804)
  * Forks repository list page follow other repositories page (#34784)
  * Add ff_only parameter to POST /repos/{owner}/{repo}/merge-upstream (#34770)
  * Rework delete org and rename org UI (#34762)
  * Improve nuget/rubygems package registries (#34741)
  * Add repo file tree item link behavior (#34730)
  * Add issue delete notifier (#34592)
  * Improve Actions list (#34530)
  * Add a default tab on repo header when migrating (#34503)
  * Add post-installation redirect based on admin account status (#34493)
  * Trigger 'unlabeled' event when label is Deleted from PR (#34316)
  * Support annotated tags when using create release API (#31840)
  * Use lfs label for lfs file rather than a long description (#34363)
  * Add "View workflow file" to Actions list page (#34538)
  * Move organization's visibility change to danger zone. (#34814)
  * Don't block site admin's operation if SECRET_KEY is lost (#35721)
  * Make restricted users can access public repositories (#35693)
  * The status icon of the Action step is consistent with GitHub (#35618) #35621
* BUGFIXES
  * Update tab title when navigating file tree (#35757) #35772
  * Fix "ref-issue" handling in markup (#35739) #35771
  * Fix webhook to prevent tag events from bypassing branch filters targets (#35567) #35577
  * Fix markup init after issue comment editing (#35536) #35537
  * Fix creating pull request failure when the target branch name is the same as some tag (#35552) #35582
  * Fix auto-expand and auto-scroll for actions logs (#35570) (#35583) #35586
  * Use inputs context when parsing workflows (#35590) #35595
  * Fix diffpatch API endpoint (#35610) #35613
  * Creating push comments before invoke pull request checking (#35647) #35668
  * Fix missing Close when error occurs and abused connection pool (#35658) #35670
  * Fix build (#35674)
  * Use LFS object size instead of blob size when viewing a LFS file (#35679)
  * Fix workflow run event status while rerunning a failed job (#35689)
  * Avoid emoji mismatch and allow to only enable chosen emojis (#35692)
  * Refactor legacy code, fix LFS auth bypass, fix symlink bypass (#35708)
  * Fix various trivial problems (#35714)
  * Fix attachment file size limit in server backend (#35519)
  * Honor delete branch on merge repo setting when using merge API (#35488)
  * Fix external render, make iframe render work (#35727, #35730)
  * Upgrade go mail to 0.7.2 (#35748)
  * Revert #18491, fix oauth2 client link account (#35745)
  * Fix different behavior in status check pattern matching with double stars (#35474)
  * Fix overflow in notifications list (#35446)
  * Fix package link setting can only list limited repositories (#35394)
  * Extend comment treepath length (#35389)
  * Fix font-size in inline code comment preview (#35209)
  * Move git config/remote to gitrepo package and add global lock to resolve possible conflict when updating repository git config file (#35151)
  * Change some columns from text to longtext and fix column wrong type caused by xorm (#35141)
  * Redirect to a presigned URL of HEAD for HEAD requests (#35088)
  * Fix git commit committer parsing and add some tests (#35007)
  * Fix OCI manifest parser (#34797)
  * Refactor FindOrgOptions to use enum instead of bool, fix membership visibility (#34629)
  * Fix notification count positioning for variable-width elements (#34597)
  * Keeping consistent between UI and API about combined commit status state and fix some bugs (#34562)
  * Fix possible panic (#34508)
  * Fix autofocus behavior (#34397)
  * Fix Actions API (#35204)
  * Fix ListWorkflowRuns OpenAPI response model. (#35026)
  * Small fix in Pull Requests page (#34612)
  * Fix http auth header parsing (#34936)
  * Fix modal + form abuse (#34921)
  * Fix PR toggle WIP (#34920)
  * Fix log fmt (#34810)
  * Replace stopwatch toggle with explicit start/stop actions (#34818)
  * Fix some package registry problems (#34759)
  * Fix RPM package download routing & missing package version count (#34909)
  * Fix repo search input height (#34330)
  * Fix "The sidebar of the repository file list does not have a fixed height #34298" (#34321)
  * Fix minor typos in two files (#34944)
  * Fix actions skipped commit status indicator (#34507)
  * Fix job status aggregation logic (#35000)
  * Fix broken OneDev migration caused by various REST API changes in OneDev 7.8.0 and later (#35216)
  * Fix typo in oauth2_full_name_claim_name string (#35199)
  * Fix typo in locale_en-US.ini (#35196)
* API
  * Exposing TimeEstimate field in the API (#35475)
  * UpdateBranch API supports renaming a branch (#35374)
  * Add `owner` and `parent` fields clarification to docs (#35023)
  * Improve OAuth2 provider (correct Issuer, respect ENABLED) (#34966)
  * Add a `login`/`login-name`/`username` disambiguation to affected endpoint parameters and response/request models (#34901)
  * Do not mutate incoming options to SearchRepositoryByName (#34553)
  * Do not mutate incoming options to RenderUserSearch and SearchUsers (#34544)
  * Export repo's manual merge settings (#34502)
  * Add date range filtering to commit retrieval endpoints (#34497)
  * Add endpoint deleting workflow run (#34337)
  * Add workflow_run api + webhook (#33964)
* REFACTOR
  * Move updateref and removeref to gitrepo and remove unnecessary open repository (#35511)
  * Remove unused param `doer` (#34545)
  * Split GetLatestCommitStatus as two functions (#34535)
  * Use gitrepo.SetDefaultBranch when set default branch of wiki repository (#33911)
  * Refactor editor (#34780)
  * Refactor packages (#34777)
  * Refactor container package (#34877)
  * Refactor "change file" API (#34855)
  * Rename pull request GetGitRefName to GetGitHeadRefName to prepare introducing GetGitMergeRefName (#35093)
  * Move git command to git/gitcmd (#35483)
  * Use db.WithTx/WithTx2 instead of TxContext when possible (#35428)
  * Support Node.js 22.6 with type stripping (#35427)
  * Migrate tools and configs to typescript, require node.js >= 22.18.0 (#35421)
  * Check user and repo for redirects when using git via SSH transport (#35416)
  * Remove the duplicated function GetTags (#35375)
  * Refactor to use reflect.TypeFor (#35370)
  * Deleting branch could delete broken branch which has database record but git branch is missing (#35360)
  * Exit with success when already up to date (#35312)
  * Split admin config settings templates to make it maintain easier (#35294)
  * A small refactor to use context in the service layer (#35179)
  * Refactor and update mail templates (#35150)
  * Use db.WithTx/WithTx2 instead of TxContext when possible (#35130)
  * Align `issue-title-buttons` with `list-header` (#35018)
  * Add Notifications section in User Settings (#35008)
  * Tweak placement of diff file menu (#34999)
  * Refactor mail template and support preview (#34990)
  * Rerun job only when run is done (#34970)
  * Merge index.js (#34963)
  * Refactor "delete-button" to "link-action" (#34962)
  * Refactor webhook and fix feishu/lark secret (#34961)
  * Exclude devtest.ts from tailwindcss (#34935)
  * Refactor head navbar icons (#34922)
  * Improve html escape (#34911)
  * Improve tags list page (#34898)
  * Improve `labels-list` rendering (#34846)
  * Remove unused variable HUGO_VERSION (#34840)
  * Correct migration tab name (#34826)
  * Refactor template helper (#34819)
  * Use `shallowRef` instead of `ref` in `.vue` files where possible (#34813)
  * Use standalone function to update repository cols (#34811)
  * Refactor wiki (#34805)
  * Remove unnecessary duplicate code (#34733)
  * Refactor embedded assets and drop unnecessary dependencies (#34692)
  * Update x/crypto package and make builtin SSH use default parameters (#34667)
  * Add `--color-logo`, matching the logo's primary color (#34639)
  * Add openssh-keygen to rootless image (#34625)
  * Replace update repository function in some places (#34566)
  * Change "rejected" to "changes requested" in 3rd party PR review notification (#34481)
  * Remove legacy template helper functions (#34426)
  * Use run-name and evaluate workflow variables (#34301)
  * Move HasWiki to repository service package (#33912)
  * Move some functions from package git to gitrepo (#33910)
* TESTING
  * Add webhook test for push event (#34442)
  * Add a webhook push test for dev branch (#34421)
  * Add migrations tests (#34456) (#34498)
* STYLE
  * Enforce explanation for necessary nolints and fix bugs (#34883)
  * Fix remaining issues after `gopls modernize` formatting (#34771)
  * Update gofumpt, add go.mod ignore directive (#35434)
  * Enforce nolint scope (#34851)
  * Enable gocritic `equalFold` and fix issues (#34952)
  * Run `gopls modernize` on codebase (#34751)
  * Upgrade `gopls` to v0.19.0, add `make fix` (#34772)
* BUILD
  * Bump archives & rar dep (#35637) #35638
  * Use github.com/mholt/archives replace github.com/mholt/archiver (#35390)
  * Update JS and PY dependencies (#35444)
  * Upgrade devcontainer go version to 1.24.6 (#35298)
  * Upgrade golang to 1.25.1 and add descriptions for the swagger structs' fields (#35418)
  * Update JS and PY deps (#35191)
  * Update JS and PY dependencies (#34391)
  * Update go tool dependencies (#34845)
  * Update `uint8-to-base64`, remove type stub (#34844)
  * Switch to `@resvg/resvg-wasm` for `generate-images` (#35415)
  * Switch to pnpm (#35274)
  * Update chroma to v2.20.0 (#35220)
  * Migrate to urfave v3 (#34510)
  * Update JS deps, regenerate SVGs (#34640)
  * Upgrade dependencies (#35384)
  * Bump `@github/relative-time-element` to v4.4.8 (#34413)
  * Update JS dependencies (#34951)
  * Upgrade orgmode to v1.8.0 (#34721)
  * Raise minimum Node.js version to 20, test on 24 (#34713)
  * Update JS deps (#34701)
  * Upgrade htmx to 2.0.6 (#34887)
  * Update eslint to v9 (#35485)
  * Update js dependencies (#35429)
  * Clean up npm dependencies (#35508)
  * Clean up npm dependencies (#35484)
  * Bump setup-node to v5 (#35448)
* MISC
  * Add gitignore rules to exclude LLM instruction files (#35076)
  * Gitignore: Visual Studio settings folder (#34375)
  * Improve language in en-US locale strings (#35124)
  * Fixed all grammatical errors in locale_en-US.ini (#35053)
  * Docs/fix typo and grammar in CONTRIBUTING.md (#35024)
  * Improve english grammar and readability in locale_en-US.ini (#35017)

## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/1.24.0) - 2025-05-26

* BREAKING
assets/go-licenses.json (generated, 13 lines changed)

File diff suppressed because one or more lines are too long
cmd/serv.go (27 lines changed)
@@ -13,7 +13,6 @@ import (
"path/filepath"
"strconv"
"strings"
"time"
"unicode"

asymkey_model "code.gitea.io/gitea/models/asymkey"
@@ -32,7 +31,6 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/lfs"

"github.com/golang-jwt/jwt/v5"
"github.com/kballard/go-shellquote"
"github.com/urfave/cli/v3"
)
@@ -133,27 +131,6 @@ func getAccessMode(verb, lfsVerb string) perm.AccessMode {
return perm.AccessModeNone
}

func getLFSAuthToken(ctx context.Context, lfsVerb string, results *private.ServCommandResults) (string, error) {
now := time.Now()
claims := lfs.Claims{
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
NotBefore: jwt.NewNumericDate(now),
},
RepoID: results.RepoID,
Op: lfsVerb,
UserID: results.UserID,
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)

// Sign and get the complete encoded token as a string using the secret
tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
if err != nil {
return "", fail(ctx, "Failed to sign JWT Token", "Failed to sign JWT token: %v", err)
}
return "Bearer " + tokenString, nil
}

func runServ(ctx context.Context, c *cli.Command) error {
// FIXME: This needs to internationalised
setup(ctx, c.Bool("debug"))
@@ -283,7 +260,7 @@ func runServ(ctx context.Context, c *cli.Command) error {

// LFS SSH protocol
if verb == git.CmdVerbLfsTransfer {
token, err := getLFSAuthToken(ctx, lfsVerb, results)
token, err := lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{Op: lfsVerb, UserID: results.UserID, RepoID: results.RepoID})
if err != nil {
return err
}
@@ -294,7 +271,7 @@ func runServ(ctx context.Context, c *cli.Command) error {
if verb == git.CmdVerbLfsAuthenticate {
url := fmt.Sprintf("%s%s/%s.git/info/lfs", setting.AppURL, url.PathEscape(results.OwnerName), url.PathEscape(results.RepoName))

token, err := getLFSAuthToken(ctx, lfsVerb, results)
token, err := lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{Op: lfsVerb, UserID: results.UserID, RepoID: results.RepoID})
if err != nil {
return err
}
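Both LFS branches of runServ now call a shared helper in services/lfs instead of the removed in-file getLFSAuthToken. A minimal sketch of the new call shape, using only identifiers that appear in the diff above (`lfs.GetLFSAuthTokenWithBearer`, `lfs.AuthTokenOptions` with `Op`, `UserID`, `RepoID`); the field types and the "Bearer" return value are assumptions based on the removed implementation:

```go
package servsketch // illustrative package; the real call sites are in cmd/serv.go

import "code.gitea.io/gitea/services/lfs"

// lfsBearerToken mirrors the two changed call sites. Sketch only: the
// AuthTokenOptions field types (string/int64) are assumed from how lfsVerb,
// results.UserID and results.RepoID were used by the old helper.
func lfsBearerToken(lfsVerb string, userID, repoID int64) (string, error) {
	// The shared helper now builds and signs the JWT and returns the full
	// "Bearer <token>" value that the removed getLFSAuthToken produced.
	return lfs.GetLFSAuthTokenWithBearer(lfs.AuthTokenOptions{
		Op:     lfsVerb, // LFS operation, e.g. upload or download
		UserID: userID,
		RepoID: repoID,
	})
}
```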
@@ -1343,6 +1343,10 @@ LEVEL = Info
;; Dont mistake it for Reactions.
;CUSTOM_EMOJIS = gitea, codeberg, gitlab, git, github, gogs
;;
;; Comma separated list of enabled emojis, for example: smile, thumbsup, thumbsdown
;; Leave it empty to enable all emojis.
;ENABLED_EMOJIS =
;;
;; Whether the full name of the users should be shown where possible. If the full name isn't set, the username will be used.
;DEFAULT_SHOW_FULL_NAME = false
;;
@@ -2536,7 +2540,19 @@ LEVEL = Info
;; * sanitized: Sanitize the content and render it inside current page, default to only allow a few HTML tags and attributes. Customized sanitizer rules can be defined in [markup.sanitizer.*] .
;; * no-sanitizer: Disable the sanitizer and render the content inside current page. It's **insecure** and may lead to XSS attack if the content contains malicious code.
;; * iframe: Render the content in a separate standalone page and embed it into current page by iframe. The iframe is in sandbox mode with same-origin disabled, and the JS code are safely isolated from parent page.
;RENDER_CONTENT_MODE=sanitized
;RENDER_CONTENT_MODE = sanitized
;; The sandbox applied to the iframe and Content-Security-Policy header when RENDER_CONTENT_MODE is `iframe`.
;; It defaults to a safe set of "allow-*" restrictions (space separated).
;; You can also set it by your requirements or use "disabled" to disable the sandbox completely.
;; When set it, make sure there is no security risk:
;; * PDF-only content: generally safe to use "disabled", and it needs to be "disabled" because PDF only renders with no sandbox.
;; * HTML content with JS: if the "RENDER_COMMAND" can guarantee there is no XSS, then it is safe, otherwise, you need to fine tune the "allow-*" restrictions.
;RENDER_CONTENT_SANDBOX =
;; Whether post-process the rendered HTML content, including:
;; resolve relative links and image sources, recognizing issue/commit references, escaping invisible characters,
;; mentioning users, rendering permlink code blocks, replacing emoji shorthands, etc.
;; By default, this is true when RENDER_CONTENT_MODE is `sanitized`, otherwise false.
;NEED_POST_PROCESS = false

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
go.mod (38 lines changed)
@@ -35,7 +35,7 @@ require (
github.com/bohde/codel v0.2.0
github.com/buildkite/terminal-to-html/v3 v3.16.8
github.com/caddyserver/certmagic v0.24.0
github.com/charmbracelet/git-lfs-transfer v0.2.0
github.com/charmbracelet/git-lfs-transfer v0.1.1-0.20251013092601-6327009efd21
github.com/chi-middleware/proxy v1.1.1
github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21
github.com/djherbis/buffer v1.2.0
@@ -56,7 +56,7 @@ require (
github.com/go-co-op/gocron v1.37.0
github.com/go-enry/go-enry/v2 v2.9.2
github.com/go-git/go-billy/v5 v5.6.2
github.com/go-git/go-git/v5 v5.16.2
github.com/go-git/go-git/v5 v5.16.3
github.com/go-ldap/ldap/v3 v3.4.11
github.com/go-redsync/redsync/v4 v4.13.0
github.com/go-sql-driver/mysql v1.9.3
@@ -84,7 +84,7 @@ require (
github.com/mattn/go-isatty v0.0.20
github.com/mattn/go-sqlite3 v1.14.32
github.com/meilisearch/meilisearch-go v0.33.2
github.com/mholt/archives v0.1.3
github.com/mholt/archives v0.1.5-0.20251009205813-e30ac6010726
github.com/microcosm-cc/bluemonday v1.0.27
github.com/microsoft/go-mssqldb v1.9.3
github.com/minio/minio-go/v7 v7.0.95
@@ -109,20 +109,20 @@ require (
github.com/ulikunitz/xz v0.5.15
github.com/urfave/cli-docs/v3 v3.0.0-alpha6
github.com/urfave/cli/v3 v3.4.1
github.com/wneessen/go-mail v0.6.2
github.com/wneessen/go-mail v0.7.2
github.com/xeipuuv/gojsonschema v1.2.0
github.com/yohcop/openid-go v1.0.1
github.com/yuin/goldmark v1.7.13
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
github.com/yuin/goldmark-meta v1.1.0
gitlab.com/gitlab-org/api/client-go v0.142.4
golang.org/x/crypto v0.41.0
golang.org/x/crypto v0.42.0
golang.org/x/image v0.30.0
golang.org/x/net v0.43.0
golang.org/x/net v0.44.0
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.16.0
golang.org/x/sys v0.35.0
golang.org/x/text v0.28.0
golang.org/x/sync v0.17.0
golang.org/x/sys v0.37.0
golang.org/x/text v0.30.0
google.golang.org/grpc v1.75.0
google.golang.org/protobuf v1.36.8
gopkg.in/ini.v1 v1.67.0
@@ -142,7 +142,7 @@ require (
github.com/DataDog/zstd v1.5.7 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/RoaringBitmap/roaring/v2 v2.10.0 // indirect
github.com/STARRY-S/zip v0.2.1 // indirect
github.com/STARRY-S/zip v0.2.3 // indirect
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
@@ -172,7 +172,7 @@ require (
github.com/blevesearch/zapx/v16 v16.2.4 // indirect
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/sevenzip v1.6.0 // indirect
github.com/bodgit/sevenzip v1.6.1 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/boombuler/barcode v1.1.0 // indirect
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf // indirect
@@ -233,14 +233,14 @@ require (
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/crc64nvme v1.1.1 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/minlz v1.0.0 // indirect
github.com/minio/minlz v1.0.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect
github.com/mschoch/smat v0.2.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/nwaples/rardecode/v2 v2.1.0 // indirect
github.com/nwaples/rardecode/v2 v2.2.0 // indirect
github.com/olekukonko/cat v0.0.0-20250817074551-3280053e4e00 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.1.0 // indirect
@@ -259,7 +259,8 @@ require (
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/sorairolake/lzip-go v0.3.5 // indirect
github.com/sorairolake/lzip-go v0.3.8 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
github.com/tinylib/msgp v1.4.0 // indirect
github.com/unknwon/com v1.0.1 // indirect
@@ -278,9 +279,9 @@ require (
go.uber.org/zap/exp v0.3.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
golang.org/x/mod v0.27.0 // indirect
golang.org/x/mod v0.28.0 // indirect
golang.org/x/time v0.12.0 // indirect
golang.org/x/tools v0.36.0 // indirect
golang.org/x/tools v0.37.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
@@ -295,10 +296,7 @@ replace github.com/jaytaylor/html2text => github.com/Necoro/html2text v0.0.0-202

replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1

replace github.com/nektos/act => gitea.com/gitea/act v0.261.6

// TODO: the only difference is in `PutObject`: the fork doesn't use `NewVerifyingReader(r, sha256.New(), oid, expectedSize)`, need to figure out why
replace github.com/charmbracelet/git-lfs-transfer => gitea.com/gitea/git-lfs-transfer v0.2.0
replace github.com/nektos/act => gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763

replace git.sr.ht/~mariusor/go-xsd-duration => gitea.com/gitea/go-xsd-duration v0.0.0-20220703122237-02e73435a078
go.sum (80 lines changed)
@@ -31,10 +31,8 @@ dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
gitea.com/gitea/act v0.261.6 h1:CjZwKOyejonNFDmsXOw3wGm5Vet573hHM6VMLsxtvPY=
|
||||
gitea.com/gitea/act v0.261.6/go.mod h1:Pg5C9kQY1CEA3QjthjhlrqOC/QOT5NyWNjOjRHw23Ok=
|
||||
gitea.com/gitea/git-lfs-transfer v0.2.0 h1:baHaNoBSRaeq/xKayEXwiDQtlIjps4Ac/Ll4KqLMB40=
|
||||
gitea.com/gitea/git-lfs-transfer v0.2.0/go.mod h1:UrXUCm3xLQkq15fu7qlXHUMlrhdlXHoi13KH2Dfiits=
|
||||
gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763 h1:ohdxegvslDEllZmRNDqpKun6L4Oq81jNdEDtGgHEV2c=
|
||||
gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763/go.mod h1:Pg5C9kQY1CEA3QjthjhlrqOC/QOT5NyWNjOjRHw23Ok=
|
||||
gitea.com/gitea/go-xsd-duration v0.0.0-20220703122237-02e73435a078 h1:BAFmdZpRW7zMQZQDClaCWobRj9uL1MR3MzpCVJvc5s4=
|
||||
gitea.com/gitea/go-xsd-duration v0.0.0-20220703122237-02e73435a078/go.mod h1:g/V2Hjas6Z1UHUp4yIx6bATpNzJ7DYtD0FG3+xARWxs=
|
||||
gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed h1:EZZBtilMLSZNWtHHcgq2mt6NSGhJSZBuduAlinMEmso=
|
||||
@@ -93,8 +91,8 @@ github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06
|
||||
github.com/RoaringBitmap/roaring v0.7.1/go.mod h1:jdT9ykXwHFNdJbEtxePexlFYH9LXucApeS0/+/g+p1I=
|
||||
github.com/RoaringBitmap/roaring/v2 v2.10.0 h1:HbJ8Cs71lfCJyvmSptxeMX2PtvOC8yonlU0GQcy2Ak0=
|
||||
github.com/RoaringBitmap/roaring/v2 v2.10.0/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0=
|
||||
github.com/STARRY-S/zip v0.2.1 h1:pWBd4tuSGm3wtpoqRZZ2EAwOmcHK6XFf7bU9qcJXyFg=
|
||||
github.com/STARRY-S/zip v0.2.1/go.mod h1:xNvshLODWtC4EJ702g7cTYn13G53o1+X9BWnPFpcWV4=
|
||||
github.com/STARRY-S/zip v0.2.3 h1:luE4dMvRPDOWQdeDdUxUoZkzUIpTccdKdhHHsQJ1fm4=
|
||||
github.com/STARRY-S/zip v0.2.3/go.mod h1:lqJ9JdeRipyOQJrYSOtpNAiaesFO6zVDsE8GIGFaoSk=
|
||||
github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0 h1:tgjwQrDH5m6jIYB7kac5IQZmfUzQNseac/e3H4VoCNE=
|
||||
github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0/go.mod h1:1HmmMEVsr+0R1QWahSeMJkjSkq6CYAZu1aIbYSpfJ4o=
|
||||
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
|
||||
@@ -193,8 +191,8 @@ github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTS
|
||||
github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q=
|
||||
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
|
||||
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
|
||||
github.com/bodgit/sevenzip v1.6.0 h1:a4R0Wu6/P1o1pP/3VV++aEOcyeBxeO/xE2Y9NSTrr6A=
|
||||
github.com/bodgit/sevenzip v1.6.0/go.mod h1:zOBh9nJUof7tcrlqJFv1koWRrhz3LbDbUNngkuZxLMc=
|
||||
github.com/bodgit/sevenzip v1.6.1 h1:kikg2pUMYC9ljU7W9SaqHXhym5HyKm8/M/jd31fYan4=
|
||||
github.com/bodgit/sevenzip v1.6.1/go.mod h1:GVoYQbEVbOGT8n2pfqCIMRUaRjQ8F9oSqoBEqZh5fQ8=
|
||||
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
|
||||
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
|
||||
github.com/bohde/codel v0.2.0 h1:fzF7ibgKmCfQbOzQCblmQcwzDRmV7WO7VMLm/hDvD3E=
|
||||
@@ -219,6 +217,8 @@ github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a h1:MISbI8sU/PSK/
|
||||
github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a/go.mod h1:2GxOXOlEPAMFPfp014mK1SWq8G8BN8o7/dfYqJrVGn8=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/charmbracelet/git-lfs-transfer v0.1.1-0.20251013092601-6327009efd21 h1:2d64+4Jek9vjYwhY93AjbleiVH+AeWvPwPmDi1mfKFQ=
|
||||
github.com/charmbracelet/git-lfs-transfer v0.1.1-0.20251013092601-6327009efd21/go.mod h1:fNlYtCHWTRC8MofQERZkVUNUWaOvZeTBqHn/amSbKZI=
|
||||
github.com/chi-middleware/proxy v1.1.1 h1:4HaXUp8o2+bhHr1OhVy+VjN0+L7/07JDcn6v7YrTjrQ=
|
||||
github.com/chi-middleware/proxy v1.1.1/go.mod h1:jQwMEJct2tz9VmtCELxvnXoMfa+SOdikvbVJVHv/M+0=
|
||||
github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs=
|
||||
@@ -339,8 +339,8 @@ github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UN
|
||||
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
|
||||
github.com/go-git/go-git/v5 v5.16.2 h1:fT6ZIOjE5iEnkzKyxTHK1W4HGAsPhqEqiSAssSO77hM=
|
||||
github.com/go-git/go-git/v5 v5.16.2/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
|
||||
github.com/go-git/go-git/v5 v5.16.3 h1:Z8BtvxZ09bYm/yYNgPKCzgWtaRqDTgIKRgIRHBfU6Z8=
|
||||
github.com/go-git/go-git/v5 v5.16.3/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
||||
@@ -572,8 +572,8 @@ github.com/meilisearch/meilisearch-go v0.33.2 h1:YgsQSLYhAkRN2ias6I1KNRTjdYCN5w2
|
||||
github.com/meilisearch/meilisearch-go v0.33.2/go.mod h1:6eOPcQ+OAuwXvnONlfSgfgvr7TIAWM/6OdhcVHg8cF0=
|
||||
github.com/mholt/acmez/v3 v3.1.2 h1:auob8J/0FhmdClQicvJvuDavgd5ezwLBfKuYmynhYzc=
|
||||
github.com/mholt/acmez/v3 v3.1.2/go.mod h1:L1wOU06KKvq7tswuMDwKdcHeKpFFgkppZy/y0DFxagQ=
|
||||
github.com/mholt/archives v0.1.3 h1:aEAaOtNra78G+TvV5ohmXrJOAzf++dIlYeDW3N9q458=
|
||||
github.com/mholt/archives v0.1.3/go.mod h1:LUCGp++/IbV/I0Xq4SzcIR6uwgeh2yjnQWamjRQfLTU=
|
||||
github.com/mholt/archives v0.1.5-0.20251009205813-e30ac6010726 h1:WVjGWXBLI1Ggm2kHzNraCGgxFhLoK6gdpPSizCdxnx0=
|
||||
github.com/mholt/archives v0.1.5-0.20251009205813-e30ac6010726/go.mod h1:3TPMmBLPsgszL+1As5zECTuKwKvIfj6YcwWPpeTAXF4=
|
||||
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
|
||||
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
|
||||
github.com/microsoft/go-mssqldb v1.9.3 h1:hy4p+LDC8LIGvI3JATnLVmBOLMJbmn5X400mr5j0lPs=
|
||||
@@ -588,8 +588,8 @@ github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
||||
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
|
||||
github.com/minio/minio-go/v7 v7.0.95 h1:ywOUPg+PebTMTzn9VDsoFJy32ZuARN9zhB+K3IYEvYU=
|
||||
github.com/minio/minio-go/v7 v7.0.95/go.mod h1:wOOX3uxS334vImCNRVyIDdXX9OsXDm89ToynKgqUKlo=
|
||||
github.com/minio/minlz v1.0.0 h1:Kj7aJZ1//LlTP1DM8Jm7lNKvvJS2m74gyyXXn3+uJWQ=
|
||||
github.com/minio/minlz v1.0.0/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
|
||||
github.com/minio/minlz v1.0.1 h1:OUZUzXcib8diiX+JYxyRLIdomyZYzHct6EShOKtQY2A=
|
||||
github.com/minio/minlz v1.0.1/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
@@ -610,8 +610,8 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/niklasfasching/go-org v1.9.1 h1:/3s4uTPOF06pImGa2Yvlp24yKXZoTYM+nsIlMzfpg/0=
|
||||
github.com/niklasfasching/go-org v1.9.1/go.mod h1:ZAGFFkWvUQcpazmi/8nHqwvARpr1xpb+Es67oUGX/48=
|
||||
github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U=
|
||||
github.com/nwaples/rardecode/v2 v2.1.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
||||
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
|
||||
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
|
||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
@@ -714,9 +714,11 @@ github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYl
|
||||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/sorairolake/lzip-go v0.3.5 h1:ms5Xri9o1JBIWvOFAorYtUNik6HI3HgBTkISiqu0Cwg=
|
||||
github.com/sorairolake/lzip-go v0.3.5/go.mod h1:N0KYq5iWrMXI0ZEXKXaS9hCyOjZUQdBDEIbXfoUwbdk=
|
||||
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
|
||||
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
|
||||
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
|
||||
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
||||
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
||||
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
|
||||
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||
@@ -729,6 +731,7 @@ github.com/steveyen/gtreap v0.1.0/go.mod h1:kl/5J7XbrOmlIbYIXdRHDDE5QxHqpk0cmkT7
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
@@ -765,8 +768,8 @@ github.com/urfave/cli/v3 v3.4.1/go.mod h1:FJSKtM/9AiiTOJL4fJ6TbMUkxBXn7GO9guZqoZ
|
||||
github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ=
|
||||
github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY=
|
||||
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
|
||||
github.com/wneessen/go-mail v0.6.2 h1:c6V7c8D2mz868z9WJ+8zDKtUyLfZ1++uAZmo2GRFji8=
|
||||
github.com/wneessen/go-mail v0.6.2/go.mod h1:L/PYjPK3/2ZlNb2/FjEBIn9n1rUWjW+Toy531oVmeb4=
|
||||
github.com/wneessen/go-mail v0.7.2 h1:xxPnhZ6IZLSgxShebmZ6DPKh1b6OJcoHfzy7UjOkzS8=
|
||||
github.com/wneessen/go-mail v0.7.2/go.mod h1:+TkW6QP3EVkgTEqHtVmnAE/1MRhmzb8Y9/W3pweuS+k=
|
||||
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
|
||||
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
|
||||
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
|
||||
@@ -837,9 +840,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
|
||||
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
|
||||
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
|
||||
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
|
||||
golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI=
|
||||
golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
@@ -876,8 +878,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ=
|
||||
golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc=
|
||||
golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U=
|
||||
golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -906,8 +908,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
|
||||
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
|
||||
golang.org/x/net v0.44.0 h1:evd8IRDyfNBMBTTY5XRF1vaZlD+EmWx6x8PkhR04H/I=
|
||||
golang.org/x/net v0.44.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@@ -930,9 +932,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -974,9 +975,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
|
||||
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
@@ -987,9 +987,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
|
||||
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
|
||||
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
|
||||
golang.org/x/term v0.35.0 h1:bZBVKBudEyhRcajGcNc3jIfWPqV4y/Kt2XcoigOWtDQ=
|
||||
golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
@@ -1003,9 +1002,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
|
||||
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
|
||||
@@ -1041,8 +1039,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
|
||||
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
|
||||
golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE=
|
||||
golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/migration"
"code.gitea.io/gitea/modules/secret"
"code.gitea.io/gitea/modules/setting"
@@ -123,17 +124,17 @@ func (task *Task) MigrateConfig() (*migration.MigrateOptions, error) {
// decrypt credentials
if opts.CloneAddrEncrypted != "" {
if opts.CloneAddr, err = secret.DecryptSecret(setting.SecretKey, opts.CloneAddrEncrypted); err != nil {
return nil, err
log.Error("Unable to decrypt CloneAddr, maybe SECRET_KEY is wrong: %v", err)
}
}
if opts.AuthPasswordEncrypted != "" {
if opts.AuthPassword, err = secret.DecryptSecret(setting.SecretKey, opts.AuthPasswordEncrypted); err != nil {
return nil, err
log.Error("Unable to decrypt AuthPassword, maybe SECRET_KEY is wrong: %v", err)
}
}
if opts.AuthTokenEncrypted != "" {
if opts.AuthToken, err = secret.DecryptSecret(setting.SecretKey, opts.AuthTokenEncrypted); err != nil {
return nil, err
log.Error("Unable to decrypt AuthToken, maybe SECRET_KEY is wrong: %v", err)
}
}
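The MigrateConfig hunk above switches from failing outright to logging when a stored credential cannot be decrypted, matching the changelog entry "Don't block site admin's operation if SECRET_KEY is lost (#35721)". A hedged sketch of that pattern; `secret.DecryptSecret`, `setting.SecretKey` and the `log.Error` format string come from the diff, while `decryptOrEmpty` is a hypothetical helper introduced only for illustration:

```go
package tasksketch // illustrative; the real code is the MigrateConfig hunk above

import (
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/secret"
	"code.gitea.io/gitea/modules/setting"
)

// decryptOrEmpty is a hypothetical helper that condenses the pattern applied
// to CloneAddr, AuthPassword and AuthToken in the diff above.
func decryptOrEmpty(name, encrypted string) string {
	if encrypted == "" {
		return ""
	}
	val, err := secret.DecryptSecret(setting.SecretKey, encrypted)
	if err != nil {
		// As in the patch: record the failure (likely a changed SECRET_KEY)
		// and continue with an empty value instead of returning an error.
		log.Error("Unable to decrypt %s, maybe SECRET_KEY is wrong: %v", name, err)
		return ""
	}
	return val
}
```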
@@ -67,13 +67,6 @@ func (key *PublicKey) OmitEmail() string {
return strings.Join(strings.Split(key.Content, " ")[:2], " ")
}

// AuthorizedString returns formatted public key string for authorized_keys file.
//
// TODO: Consider dropping this function
func (key *PublicKey) AuthorizedString() string {
return AuthorizedStringForKey(key)
}

func addKey(ctx context.Context, key *PublicKey) (err error) {
if len(key.Fingerprint) == 0 {
key.Fingerprint, err = CalcFingerprint(key.Content)
@@ -17,29 +17,13 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"

"golang.org/x/crypto/ssh"
)

// _____ __ .__ .__ .___
// / _ \ __ ___/ |_| |__ ___________|__|_______ ____ __| _/
// / /_\ \| | \ __\ | \ / _ \_ __ \ \___ // __ \ / __ |
// / | \ | /| | | Y ( <_> ) | \/ |/ /\ ___// /_/ |
// \____|__ /____/ |__| |___| /\____/|__| |__/_____ \\___ >____ |
// \/ \/ \/ \/ \/
// ____ __.
// | |/ _|____ ___.__. ______
// | <_/ __ < | |/ ___/
// | | \ ___/\___ |\___ \
// |____|__ \___ > ____/____ >
// \/ \/\/ \/
//
// This file contains functions for creating authorized_keys files
//
// There is a dependence on the database within RegeneratePublicKeys however most of these functions probably belong in a module

const (
tplCommentPrefix = `# gitea public key`
tplPublicKey = tplCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s` + "\n"
)
// AuthorizedStringCommentPrefix is a magic tag
// some functions like RegeneratePublicKeys needs this tag to skip the keys generated by Gitea, while keep other keys
const AuthorizedStringCommentPrefix = `# gitea public key`

var sshOpLocker sync.Mutex

@@ -50,17 +34,45 @@ func WithSSHOpLocker(f func() error) error {
}

// AuthorizedStringForKey creates the authorized keys string appropriate for the provided key
func AuthorizedStringForKey(key *PublicKey) string {
func AuthorizedStringForKey(key *PublicKey) (string, error) {
sb := &strings.Builder{}
_ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]any{
_, err := writeAuthorizedStringForKey(key, sb)
return sb.String(), err
}

// WriteAuthorizedStringForValidKey writes the authorized key for the provided key. If the key is invalid, it does nothing.
func WriteAuthorizedStringForValidKey(key *PublicKey, w io.Writer) error {
validKey, err := writeAuthorizedStringForKey(key, w)
if !validKey {
log.Debug("WriteAuthorizedStringForValidKey: key %s is not valid: %v", key, err)
return nil
}
return err
}

func writeAuthorizedStringForKey(key *PublicKey, w io.Writer) (keyValid bool, err error) {
const tpl = AuthorizedStringCommentPrefix + "\n" + `command=%s,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,no-user-rc,restrict %s %s` + "\n"
pubKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(key.Content))
if err != nil {
return false, err
}
// now the key is valid, the code below could only return template/IO related errors
sbCmd := &strings.Builder{}
err = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sbCmd, map[string]any{
"AppPath": util.ShellEscape(setting.AppPath),
"AppWorkPath": util.ShellEscape(setting.AppWorkPath),
"CustomConf": util.ShellEscape(setting.CustomConf),
"CustomPath": util.ShellEscape(setting.CustomPath),
"Key": key,
})

return fmt.Sprintf(tplPublicKey, util.ShellEscape(sb.String()), key.Content)
if err != nil {
return true, err
}
sshCommandEscaped := util.ShellEscape(sbCmd.String())
sshKeyMarshalled := strings.TrimSpace(string(ssh.MarshalAuthorizedKey(pubKey)))
sshKeyComment := fmt.Sprintf("user-%d", key.OwnerID)
_, err = fmt.Fprintf(w, tpl, sshCommandEscaped, sshKeyMarshalled, sshKeyComment)
return true, err
}

// appendAuthorizedKeysToFile appends new SSH keys' content to authorized_keys file.
@@ -112,7 +124,7 @@ func appendAuthorizedKeysToFile(keys ...*PublicKey) error {
if key.Type == KeyTypePrincipal {
continue
}
if _, err = f.WriteString(key.AuthorizedString()); err != nil {
if err = WriteAuthorizedStringForValidKey(key, f); err != nil {
return err
}
}
@@ -120,10 +132,9 @@ func appendAuthorizedKeysToFile(keys ...*PublicKey) error {
}

// RegeneratePublicKeys regenerates the authorized_keys file
func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
func RegeneratePublicKeys(ctx context.Context, t io.Writer) error {
if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
_, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
return err
return WriteAuthorizedStringForValidKey(bean.(*PublicKey), t)
}); err != nil {
return err
}
@@ -144,11 +155,11 @@ func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
scanner := bufio.NewScanner(f)
for scanner.Scan() {
line := scanner.Text()
if strings.HasPrefix(line, tplCommentPrefix) {
if strings.HasPrefix(line, AuthorizedStringCommentPrefix) {
scanner.Scan()
continue
}
_, err = t.WriteString(line + "\n")
_, err = io.WriteString(t, line+"\n")
if err != nil {
return err
}
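The changes in this file replace the string-building AuthorizedString/AuthorizedStringForKey path with a writer-based `WriteAuthorizedStringForValidKey(key, w)` that skips keys `ssh.ParseAuthorizedKey` rejects. A small usage sketch based only on the signatures shown above; the wrapper function and package name are illustrative:

```go
package asymkeysketch // illustrative; the real callers live in models/asymkey

import (
	"io"

	asymkey_model "code.gitea.io/gitea/models/asymkey"
)

// writeAuthorizedKeys mirrors how appendAuthorizedKeysToFile and
// RegeneratePublicKeys use the new writer-based API from the diff above.
func writeAuthorizedKeys(w io.Writer, keys []*asymkey_model.PublicKey) error {
	for _, key := range keys {
		// Keys that fail ssh.ParseAuthorizedKey are logged inside the helper
		// and reported as nil, so one broken key no longer aborts the rewrite.
		if err := asymkey_model.WriteAuthorizedStringForValidKey(key, w); err != nil {
			return err
		}
	}
	return nil
}
```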
@@ -111,11 +111,11 @@ func (t *TwoFactor) SetSecret(secretString string) error {
func (t *TwoFactor) ValidateTOTP(passcode string) (bool, error) {
decodedStoredSecret, err := base64.StdEncoding.DecodeString(t.Secret)
if err != nil {
return false, err
return false, fmt.Errorf("ValidateTOTP invalid base64: %w", err)
}
secretBytes, err := secret.AesDecrypt(t.getEncryptionKey(), decodedStoredSecret)
if err != nil {
return false, err
return false, fmt.Errorf("ValidateTOTP unable to decrypt (maybe SECRET_KEY is wrong): %w", err)
}
secretStr := string(secretBytes)
return totp.Validate(passcode, secretStr), nil
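The only change in ValidateTOTP is that both failure paths now wrap the cause with `%w` and add context. A generic, standard-library-only illustration (not code from the patch) of why that form is used: the extra hint reaches the logs while `errors.Is`/`errors.As` still see the original error:

```go
package main

import (
	"encoding/base64"
	"errors"
	"fmt"
)

func main() {
	_, err := base64.StdEncoding.DecodeString("not base64 at all!!!")
	wrapped := fmt.Errorf("ValidateTOTP invalid base64: %w", err)

	var corrupt base64.CorruptInputError
	fmt.Println(errors.As(wrapped, &corrupt)) // true: the cause is preserved
	fmt.Println(wrapped)                      // message now carries the added hint
}
```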
@@ -213,3 +213,15 @@
  is_deleted: false
  deleted_by_id: 0
  deleted_unix: 0

-
  id: 26
  repo_id: 10
  name: 'feature/1'
  commit_id: '65f1bf27bc3bf70f64657658635e66094edbcb4d'
  commit_message: 'Initial commit'
  commit_time: 1489950479
  pusher_id: 2
  is_deleted: false
  deleted_by_id: 0
  deleted_unix: 0
@@ -5,7 +5,6 @@ package git

import (
"context"
"errors"
"fmt"
"slices"
"strings"
@@ -25,7 +24,7 @@ import (
"xorm.io/builder"
)

var ErrBranchIsProtected = errors.New("branch is protected")
var ErrBranchIsProtected = util.ErrorWrap(util.ErrPermissionDenied, "branch is protected")

// ProtectedBranch struct
type ProtectedBranch struct {
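Redefining the sentinel with `util.ErrorWrap(util.ErrPermissionDenied, ...)` lets callers treat a protected-branch rejection as a permission error. A sketch of the effect, assuming `util.ErrorWrap` keeps the wrapped sentinel reachable through `Unwrap` (which appears to be the point of the change); the demo program itself is illustrative:

```go
package main

import (
	"errors"
	"fmt"

	git_model "code.gitea.io/gitea/models/git"
	"code.gitea.io/gitea/modules/util"
)

func main() {
	err := fmt.Errorf("delete branch: %w", git_model.ErrBranchIsProtected)

	// Still matches the concrete sentinel, as before.
	fmt.Println(errors.Is(err, git_model.ErrBranchIsProtected)) // true
	// New: also matches the generic permission-denied error, so API and web
	// layers can map it to a 403-style response without special-casing.
	fmt.Println(errors.Is(err, util.ErrPermissionDenied)) // expected: true
}
```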
@@ -476,7 +476,7 @@ func applySubscribedCondition(sess *xorm.Session, subscriberID int64) {
),
builder.Eq{"issue.poster_id": subscriberID},
builder.In("issue.repo_id", builder.
Select("id").
Select("repo_id").
From("watch").
Where(builder.And(builder.Eq{"user_id": subscriberID},
builder.In("mode", repo_model.WatchModeNormal, repo_model.WatchModeAuto))),
@@ -197,6 +197,12 @@ func TestIssues(t *testing.T) {
|
||||
},
|
||||
[]int64{2},
|
||||
},
|
||||
{
|
||||
issues_model.IssuesOptions{
|
||||
SubscriberID: 11,
|
||||
},
|
||||
[]int64{11, 5, 9, 8, 3, 2, 1},
|
||||
},
|
||||
} {
|
||||
issues, err := issues_model.Issues(t.Context(), &test.Opts)
|
||||
assert.NoError(t, err)
|
||||
|
||||
@@ -173,7 +173,7 @@ func GetReviewsByIssueID(ctx context.Context, issueID int64) (latestReviews, mig
|
||||
reviewersMap := make(map[int64][]*Review) // key is reviewer id
|
||||
originalReviewersMap := make(map[int64][]*Review) // key is original author id
|
||||
reviewTeamsMap := make(map[int64][]*Review) // key is reviewer team id
|
||||
countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest}
|
||||
countedReivewTypes := []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest, ReviewTypeComment}
|
||||
for _, review := range reviews {
|
||||
if review.ReviewerTeamID == 0 && slices.Contains(countedReivewTypes, review.Type) && !review.Dismissed {
|
||||
if review.OriginalAuthorID != 0 {
|
||||
|
||||
@@ -122,6 +122,7 @@ func TestGetReviewersByIssueID(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
|
||||
issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 3})
|
||||
user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
|
||||
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
|
||||
org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
|
||||
user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
|
||||
@@ -129,6 +130,12 @@ func TestGetReviewersByIssueID(t *testing.T) {
|
||||
|
||||
expectedReviews := []*issues_model.Review{}
|
||||
expectedReviews = append(expectedReviews,
|
||||
&issues_model.Review{
|
||||
ID: 5,
|
||||
Reviewer: user1,
|
||||
Type: issues_model.ReviewTypeComment,
|
||||
UpdatedUnix: 946684810,
|
||||
},
|
||||
&issues_model.Review{
|
||||
ID: 7,
|
||||
Reviewer: org3,
|
||||
@@ -167,8 +174,9 @@ func TestGetReviewersByIssueID(t *testing.T) {
|
||||
for _, review := range allReviews {
|
||||
assert.NoError(t, review.LoadReviewer(t.Context()))
|
||||
}
|
||||
if assert.Len(t, allReviews, 5) {
|
||||
if assert.Len(t, allReviews, 6) {
|
||||
for i, review := range allReviews {
|
||||
assert.Equal(t, expectedReviews[i].ID, review.ID)
|
||||
assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
|
||||
assert.Equal(t, expectedReviews[i].Type, review.Type)
|
||||
assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
|
||||
|
||||
@@ -429,6 +429,10 @@ func HasOrgOrUserVisible(ctx context.Context, orgOrUser, user *user_model.User)
|
||||
return true
|
||||
}
|
||||
|
||||
if !setting.Service.RequireSignInViewStrict && orgOrUser.Visibility == structs.VisibleTypePublic {
|
||||
return true
|
||||
}
|
||||
|
||||
if (orgOrUser.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !OrgFromUser(orgOrUser).hasMemberWithUserID(ctx, user.ID) {
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -13,7 +13,9 @@ import (
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/test"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
@@ -382,6 +384,12 @@ func TestHasOrgVisibleTypePublic(t *testing.T) {
|
||||
assert.True(t, test1) // owner of org
|
||||
assert.True(t, test2) // user not a part of org
|
||||
assert.True(t, test3) // logged out user
|
||||
|
||||
restrictedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 29, IsRestricted: true})
|
||||
require.True(t, restrictedUser.IsRestricted)
|
||||
assert.True(t, organization.HasOrgOrUserVisible(t.Context(), org.AsUser(), restrictedUser))
|
||||
defer test.MockVariableValue(&setting.Service.RequireSignInViewStrict, true)()
|
||||
assert.False(t, organization.HasOrgOrUserVisible(t.Context(), org.AsUser(), restrictedUser))
|
||||
}
|
||||
|
||||
func TestHasOrgVisibleTypeLimited(t *testing.T) {
|
||||
|
||||
@@ -13,6 +13,8 @@ import (
|
||||
"code.gitea.io/gitea/models/perm"
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
|
||||
"xorm.io/builder"
|
||||
)
|
||||
@@ -41,7 +43,12 @@ func accessLevel(ctx context.Context, user *user_model.User, repo *repo_model.Re
|
||||
restricted = user.IsRestricted
|
||||
}
|
||||
|
||||
if !restricted && !repo.IsPrivate {
|
||||
if err := repo.LoadOwner(ctx); err != nil {
|
||||
return mode, err
|
||||
}
|
||||
|
||||
repoIsFullyPublic := !setting.Service.RequireSignInViewStrict && repo.Owner.Visibility == structs.VisibleTypePublic && !repo.IsPrivate
|
||||
if (restricted && repoIsFullyPublic) || (!restricted && !repo.IsPrivate) {
|
||||
mode = perm.AccessModeRead
|
||||
}
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
@@ -51,7 +52,14 @@ func TestAccessLevel(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, perm_model.AccessModeNone, level)
|
||||
|
||||
// restricted user has no access to a public repo
|
||||
// restricted user has default access to a public repo if no sign-in is required
|
||||
setting.Service.RequireSignInViewStrict = false
|
||||
level, err = access_model.AccessLevel(t.Context(), user29, repo1)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, perm_model.AccessModeRead, level)
|
||||
|
||||
// restricted user has no access to a public repo if sign-in is required
|
||||
setting.Service.RequireSignInViewStrict = true
|
||||
level, err = access_model.AccessLevel(t.Context(), user29, repo1)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, perm_model.AccessModeNone, level)
|
||||
|
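The new accessLevel condition reads as a small truth table: when RequireSignInViewStrict is disabled, restricted users get implicit read access to repositories that are public and owned by a public user or org; non-restricted users keep read access to any non-private repository. A condensed sketch of just that decision (simplified; the real function also loads collaborations and teams):

```go
package access

// hasDefaultReadAccess mirrors, in simplified form, the condition added to
// accessLevel above. It ignores collaborations and teams and only answers
// whether the user receives the implicit read access.
func hasDefaultReadAccess(restricted, repoPrivate, ownerPublic, requireSignInStrict bool) bool {
	repoIsFullyPublic := !requireSignInStrict && ownerPublic && !repoPrivate
	return (restricted && repoIsFullyPublic) || (!restricted && !repoPrivate)
}
```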
@@ -642,6 +642,17 @@ func SearchRepositoryIDsByCondition(ctx context.Context, cond builder.Cond) ([]i
Find(&repoIDs)
}

func userAllPublicRepoCond(cond builder.Cond, orgVisibilityLimit []structs.VisibleType) builder.Cond {
return cond.Or(builder.And(
builder.Eq{"`repository`.is_private": false},
// Aren't in a private organisation or limited organisation if we're not logged in
builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(
builder.And(
builder.Eq{"type": user_model.UserTypeOrganization},
builder.In("visibility", orgVisibilityLimit)),
))))
}

// AccessibleRepositoryCondition takes a user a returns a condition for checking if a repository is accessible
func AccessibleRepositoryCondition(user *user_model.User, unitType unit.Type) builder.Cond {
cond := builder.NewCond()
@@ -651,15 +662,8 @@ func AccessibleRepositoryCondition(user *user_model.User, unitType unit.Type) bu
if user == nil || user.ID <= 0 {
orgVisibilityLimit = append(orgVisibilityLimit, structs.VisibleTypeLimited)
}
// 1. Be able to see all non-private repositories that either:
cond = cond.Or(builder.And(
builder.Eq{"`repository`.is_private": false},
// 2. Aren't in an private organisation or limited organisation if we're not logged in
builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(
builder.And(
builder.Eq{"type": user_model.UserTypeOrganization},
builder.In("visibility", orgVisibilityLimit)),
))))
// 1. Be able to see all non-private repositories
cond = userAllPublicRepoCond(cond, orgVisibilityLimit)
}

if user != nil {
@@ -683,6 +687,9 @@ func AccessibleRepositoryCondition(user *user_model.User, unitType unit.Type) bu
if !user.IsRestricted {
// 5. Be able to see all public repos in private organizations that we are an org_user of
cond = cond.Or(userOrgPublicRepoCond(user.ID))
} else if !setting.Service.RequireSignInViewStrict {
orgVisibilityLimit := []structs.VisibleType{structs.VisibleTypePrivate, structs.VisibleTypeLimited}
cond = userAllPublicRepoCond(cond, orgVisibilityLimit)
}
}

@@ -10,9 +10,14 @@ import (
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/test"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func getTestCases() []struct {
@@ -182,7 +187,16 @@ func getTestCases() []struct {

func TestSearchRepository(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
t.Run("SearchRepositoryPublic", testSearchRepositoryPublic)
t.Run("SearchRepositoryPublicRestricted", testSearchRepositoryRestricted)
t.Run("SearchRepositoryPrivate", testSearchRepositoryPrivate)
t.Run("SearchRepositoryNonExistingOwner", testSearchRepositoryNonExistingOwner)
t.Run("SearchRepositoryWithInDescription", testSearchRepositoryWithInDescription)
t.Run("SearchRepositoryNotInDescription", testSearchRepositoryNotInDescription)
t.Run("SearchRepositoryCases", testSearchRepositoryCases)
}

func testSearchRepositoryPublic(t *testing.T) {
// test search public repository on explore page
repos, count, err := repo_model.SearchRepositoryByName(t.Context(), repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
@@ -211,9 +225,54 @@ func TestSearchRepository(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, int64(2), count)
assert.Len(t, repos, 2)
}

func testSearchRepositoryRestricted(t *testing.T) {
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
restrictedUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 29, IsRestricted: true})

performSearch := func(t *testing.T, user *user_model.User) (publicRepoIDs []int64) {
repos, count, err := repo_model.SearchRepositoryByName(t.Context(), repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{Page: 1, PageSize: 10000},
Actor: user,
})
require.NoError(t, err)
assert.Len(t, repos, int(count))
for _, repo := range repos {
require.NoError(t, repo.LoadOwner(t.Context()))
if repo.Owner.Visibility == structs.VisibleTypePublic && !repo.IsPrivate {
publicRepoIDs = append(publicRepoIDs, repo.ID)
}
}
return publicRepoIDs
}

normalPublicRepoIDs := performSearch(t, user2)
require.Greater(t, len(normalPublicRepoIDs), 10) // quite a lot

t.Run("RestrictedUser-NoSignInRequirement", func(t *testing.T) {
// restricted user can also see public repositories if no "required sign-in"
repoIDs := performSearch(t, restrictedUser)
assert.ElementsMatch(t, normalPublicRepoIDs, repoIDs)
})

defer test.MockVariableValue(&setting.Service.RequireSignInViewStrict, true)()

t.Run("NormalUser-RequiredSignIn", func(t *testing.T) {
// normal user can still see all public repos, not affected by "required sign-in"
repoIDs := performSearch(t, user2)
assert.ElementsMatch(t, normalPublicRepoIDs, repoIDs)
})
t.Run("RestrictedUser-RequiredSignIn", func(t *testing.T) {
// restricted user can see only their own repo
repoIDs := performSearch(t, restrictedUser)
assert.Equal(t, []int64{4}, repoIDs)
})
}

func testSearchRepositoryPrivate(t *testing.T) {
// test search private repository on explore page
repos, count, err = repo_model.SearchRepositoryByName(t.Context(), repo_model.SearchRepoOptions{
repos, count, err := repo_model.SearchRepositoryByName(t.Context(), repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -242,16 +301,18 @@ func TestSearchRepository(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, int64(3), count)
assert.Len(t, repos, 3)
}

// Test non existing owner
repos, count, err = repo_model.SearchRepositoryByName(t.Context(), repo_model.SearchRepoOptions{OwnerID: unittest.NonexistentID})
func testSearchRepositoryNonExistingOwner(t *testing.T) {
repos, count, err := repo_model.SearchRepositoryByName(t.Context(), repo_model.SearchRepoOptions{OwnerID: unittest.NonexistentID})

assert.NoError(t, err)
assert.Empty(t, repos)
assert.Equal(t, int64(0), count)
}

// Test search within description
repos, count, err = repo_model.SearchRepository(t.Context(), repo_model.SearchRepoOptions{
func testSearchRepositoryWithInDescription(t *testing.T) {
repos, count, err := repo_model.SearchRepository(t.Context(), repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -266,9 +327,10 @@ func TestSearchRepository(t *testing.T) {
assert.Equal(t, "test_repo_14", repos[0].Name)
}
assert.Equal(t, int64(1), count)
}

// Test NOT search within description
repos, count, err = repo_model.SearchRepository(t.Context(), repo_model.SearchRepoOptions{
func testSearchRepositoryNotInDescription(t *testing.T) {
repos, count, err := repo_model.SearchRepository(t.Context(), repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
Page: 1,
PageSize: 10,
@@ -281,7 +343,9 @@ func TestSearchRepository(t *testing.T) {
assert.NoError(t, err)
assert.Empty(t, repos)
assert.Equal(t, int64(0), count)
}

func testSearchRepositoryCases(t *testing.T) {
testCases := getTestCases()

for _, testCase := range testCases {

@@ -127,16 +127,9 @@ func DeleteUploads(ctx context.Context, uploads ...*Upload) (err error) {

for _, upload := range uploads {
localPath := upload.LocalPath()
isFile, err := util.IsFile(localPath)
if err != nil {
log.Error("Unable to check if %s is a file. Error: %v", localPath, err)
}
if !isFile {
continue
}

if err := util.Remove(localPath); err != nil {
return fmt.Errorf("remove upload: %w", err)
// just continue, don't fail the whole operation if a file is missing (removed by others)
log.Error("unable to remove upload file %s: %v", localPath, err)
}
}
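DeleteUploads now logs and continues when a stored file is already gone instead of aborting the whole batch. The same "best effort removal" idea in isolation (a hypothetical helper, not Gitea's util package):

```go
package uploads

import (
	"errors"
	"log"
	"os"
)

// removeBestEffort deletes a file but treats "already gone" as success and
// only logs other failures, so one missing file cannot fail a whole batch.
func removeBestEffort(path string) {
	if err := os.Remove(path); err != nil && !errors.Is(err, os.ErrNotExist) {
		log.Printf("unable to remove %s: %v", path, err)
	}
}
```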
@@ -178,8 +178,8 @@ func GetSecretsOfTask(ctx context.Context, task *actions_model.ActionTask) (map[
for _, secret := range append(ownerSecrets, repoSecrets...) {
v, err := secret_module.DecryptSecret(setting.SecretKey, secret.Data)
if err != nil {
log.Error("decrypt secret %v %q: %v", secret.ID, secret.Name, err)
return nil, err
log.Error("Unable to decrypt Actions secret %v %q, maybe SECRET_KEY is wrong: %v", secret.ID, secret.Name, err)
continue
}
secrets[secret.Name] = v
}

@@ -8,7 +8,9 @@ import (
"io"
"sort"
"strings"
"sync"
"sync/atomic"

"code.gitea.io/gitea/modules/setting"
)

// Gemoji is a set of emoji data.
@@ -23,30 +25,25 @@ type Emoji struct {
SkinTones bool
}

var (
// codeMap provides a map of the emoji unicode code to its emoji data.
codeMap map[string]int
type globalVarsStruct struct {
codeMap map[string]int // emoji unicode code to its emoji data.
aliasMap map[string]int // the alias to its emoji data.
emptyReplacer *strings.Replacer // string replacer for emoji codes, used for finding emoji positions.
codeReplacer *strings.Replacer // string replacer for emoji codes.
aliasReplacer *strings.Replacer // string replacer for emoji aliases.
}

// aliasMap provides a map of the alias to its emoji data.
aliasMap map[string]int
var globalVarsStore atomic.Pointer[globalVarsStruct]

// emptyReplacer is the string replacer for emoji codes.
emptyReplacer *strings.Replacer

// codeReplacer is the string replacer for emoji codes.
codeReplacer *strings.Replacer

// aliasReplacer is the string replacer for emoji aliases.
aliasReplacer *strings.Replacer

once sync.Once
)

func loadMap() {
once.Do(func() {
// initialize
codeMap = make(map[string]int, len(GemojiData))
aliasMap = make(map[string]int, len(GemojiData))
func globalVars() *globalVarsStruct {
vars := globalVarsStore.Load()
if vars != nil {
return vars
}
// although there can be concurrent calls, the result should be the same, and there is no performance problem
vars = &globalVarsStruct{}
vars.codeMap = make(map[string]int, len(GemojiData))
vars.aliasMap = make(map[string]int, len(GemojiData))

// process emoji codes and aliases
codePairs := make([]string, 0)
@@ -58,39 +55,48 @@ func loadMap() {
return len(GemojiData[i].Emoji) > len(GemojiData[j].Emoji)
})

for i, e := range GemojiData {
if e.Emoji == "" || len(e.Aliases) == 0 {
for idx, emoji := range GemojiData {
if emoji.Emoji == "" || len(emoji.Aliases) == 0 {
continue
}

// setup codes
codeMap[e.Emoji] = i
codePairs = append(codePairs, e.Emoji, ":"+e.Aliases[0]+":")
emptyPairs = append(emptyPairs, e.Emoji, e.Emoji)

// setup aliases
for _, a := range e.Aliases {
if a == "" {
// process aliases
firstAlias := ""
for _, alias := range emoji.Aliases {
if alias == "" {
continue
}
enabled := len(setting.UI.EnabledEmojisSet) == 0 || setting.UI.EnabledEmojisSet.Contains(alias)
if !enabled {
continue
}
if firstAlias == "" {
firstAlias = alias
}
vars.aliasMap[alias] = idx
aliasPairs = append(aliasPairs, ":"+alias+":", emoji.Emoji)
}

aliasMap[a] = i
aliasPairs = append(aliasPairs, ":"+a+":", e.Emoji)
// process emoji code
if firstAlias != "" {
vars.codeMap[emoji.Emoji] = idx
codePairs = append(codePairs, emoji.Emoji, ":"+emoji.Aliases[0]+":")
emptyPairs = append(emptyPairs, emoji.Emoji, emoji.Emoji)
}
}

// create replacers
emptyReplacer = strings.NewReplacer(emptyPairs...)
codeReplacer = strings.NewReplacer(codePairs...)
aliasReplacer = strings.NewReplacer(aliasPairs...)
})
vars.emptyReplacer = strings.NewReplacer(emptyPairs...)
vars.codeReplacer = strings.NewReplacer(codePairs...)
vars.aliasReplacer = strings.NewReplacer(aliasPairs...)
globalVarsStore.Store(vars)
return vars
}

// FromCode retrieves the emoji data based on the provided unicode code (ie,
// "\u2618" will return the Gemoji data for "shamrock").
func FromCode(code string) *Emoji {
loadMap()
i, ok := codeMap[code]
i, ok := globalVars().codeMap[code]
if !ok {
return nil
}
@@ -102,12 +108,11 @@ func FromCode(code string) *Emoji {
// "alias" or ":alias:" (ie, "shamrock" or ":shamrock:" will return the Gemoji
// data for "shamrock").
func FromAlias(alias string) *Emoji {
loadMap()
if strings.HasPrefix(alias, ":") && strings.HasSuffix(alias, ":") {
alias = alias[1 : len(alias)-1]
}

i, ok := aliasMap[alias]
i, ok := globalVars().aliasMap[alias]
if !ok {
return nil
}
@@ -119,15 +124,13 @@ func FromAlias(alias string) *Emoji {
// alias (in the form of ":alias:") (ie, "\u2618" will be converted to
// ":shamrock:").
func ReplaceCodes(s string) string {
loadMap()
return codeReplacer.Replace(s)
return globalVars().codeReplacer.Replace(s)
}

// ReplaceAliases replaces all aliases of the form ":alias:" with its
// corresponding unicode value.
func ReplaceAliases(s string) string {
loadMap()
return aliasReplacer.Replace(s)
return globalVars().aliasReplacer.Replace(s)
}
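The emoji package replaces a sync.Once plus package-level globals with an atomic.Pointer that is rebuilt on demand, so tests can reset it by storing nil. The general shape of that pattern, stripped of the emoji specifics:

```go
package cache

import "sync/atomic"

type tables struct {
	byCode map[string]int
}

var store atomic.Pointer[tables]

// get returns the cached tables, building them on first use. Concurrent
// callers may build the value more than once, but the results are identical,
// so the last Store simply wins (this mirrors the comment in globalVars above).
func get() *tables {
	if t := store.Load(); t != nil {
		return t
	}
	t := &tables{byCode: map[string]int{"\u2618": 1}}
	store.Store(t)
	return t
}

// Tests can invalidate the cache with store.Store(nil), as TestLookup does above.
```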
type rememberSecondWriteWriter struct {
@@ -163,7 +166,6 @@ func (n *rememberSecondWriteWriter) WriteString(s string) (int, error) {

// FindEmojiSubmatchIndex returns index pair of longest emoji in a string
func FindEmojiSubmatchIndex(s string) []int {
loadMap()
secondWriteWriter := rememberSecondWriteWriter{}

// A faster and clean implementation would copy the trie tree formation in strings.NewReplacer but
@@ -175,7 +177,7 @@ func FindEmojiSubmatchIndex(s string) []int {
// Therefore we can simply take the index of the second write as our first emoji
//
// FIXME: just copy the trie implementation from strings.NewReplacer
_, _ = emptyReplacer.WriteString(&secondWriteWriter, s)
_, _ = globalVars().emptyReplacer.WriteString(&secondWriteWriter, s)

// if we wrote less than twice then we never "replaced"
if secondWriteWriter.writecount < 2 {

@@ -7,14 +7,13 @@ package emoji
import (
"testing"

"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"

"github.com/stretchr/testify/assert"
)

func TestDumpInfo(t *testing.T) {
t.Logf("codes: %d", len(codeMap))
t.Logf("aliases: %d", len(aliasMap))
}

func TestLookup(t *testing.T) {
a := FromCode("\U0001f37a")
b := FromCode("🍺")
@@ -24,7 +23,6 @@ func TestLookup(t *testing.T) {
assert.Equal(t, a, b)
assert.Equal(t, b, c)
assert.Equal(t, c, d)
assert.Equal(t, a, d)

m := FromCode("\U0001f44d")
n := FromAlias(":thumbsup:")
@@ -32,7 +30,20 @@ func TestLookup(t *testing.T) {

assert.Equal(t, m, n)
assert.Equal(t, m, o)
assert.Equal(t, n, o)

defer test.MockVariableValue(&setting.UI.EnabledEmojisSet, container.SetOf("thumbsup"))()
defer globalVarsStore.Store(nil)
globalVarsStore.Store(nil)
a = FromCode("\U0001f37a")
c = FromAlias(":beer:")
m = FromCode("\U0001f44d")
n = FromAlias(":thumbsup:")
o = FromAlias("+1")
assert.Nil(t, a)
assert.Nil(t, c)
assert.NotNil(t, m)
assert.NotNil(t, n)
assert.Nil(t, o)
}

func TestReplacers(t *testing.T) {
@@ -47,30 +47,16 @@ func GetHook(repoPath, name string) (*Hook, error) {
name: name,
path: filepath.Join(repoPath, "hooks", name+".d", name),
}
isFile, err := util.IsFile(h.path)
if err != nil {
return nil, err
}
if isFile {
data, err := os.ReadFile(h.path)
if err != nil {
return nil, err
}
if data, err := os.ReadFile(h.path); err == nil {
h.IsActive = true
h.Content = string(data)
return h, nil
} else if !os.IsNotExist(err) {
return nil, err
}

samplePath := filepath.Join(repoPath, "hooks", name+".sample")
isFile, err = util.IsFile(samplePath)
if err != nil {
return nil, err
}
if isFile {
data, err := os.ReadFile(samplePath)
if err != nil {
return nil, err
}
if data, err := os.ReadFile(samplePath); err == nil {
h.Sample = string(data)
}
return h, nil
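GetHook now calls os.ReadFile directly and distinguishes "file missing" from real I/O errors, instead of stat-ing first. The same pattern in isolation (an illustrative helper, not part of the diff):

```go
package hooks

import "os"

// readIfExists returns (content, true) when the file exists, (nil, false)
// when it does not, and propagates any other error.
func readIfExists(path string) ([]byte, bool, error) {
	data, err := os.ReadFile(path)
	if err == nil {
		return data, true, nil
	}
	if os.IsNotExist(err) {
		return nil, false, nil
	}
	return nil, false, err
}
```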
@@ -34,12 +34,12 @@ func TestParseGitURLs(t *testing.T) {
},
},
{
kase: "git@[fe80:14fc:cec5:c174:d88%2510]:go-gitea/gitea.git",
kase: "git@[fe80::14fc:cec5:c174:d88%2510]:go-gitea/gitea.git",
expected: &GitURL{
URL: &url.URL{
Scheme: "ssh",
User: url.User("git"),
Host: "[fe80:14fc:cec5:c174:d88%10]",
Host: "[fe80::14fc:cec5:c174:d88%10]",
Path: "go-gitea/gitea.git",
},
extraMark: 1,
@@ -137,11 +137,11 @@ func TestParseGitURLs(t *testing.T) {
},
},
{
kase: "https://[fe80:14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git",
kase: "https://[fe80::14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git",
expected: &GitURL{
URL: &url.URL{
Scheme: "https",
Host: "[fe80:14fc:cec5:c174:d88%10]:20",
Host: "[fe80::14fc:cec5:c174:d88%10]:20",
Path: "/go-gitea/gitea.git",
},
extraMark: 0,
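The corrected test cases use `%25` for the IPv6 zone separator because a literal `%` must be percent-encoded inside a URL; the parser reports the host with the escape decoded back to `%`. A quick standard-library check of the same input used in the test above:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	u, err := url.Parse("https://[fe80::14fc:cec5:c174:d88%2510]:20/go-gitea/gitea.git")
	if err != nil {
		panic(err)
	}
	// The %25 escape in the zone decodes to a literal %, matching the
	// expected Host in the test case above.
	fmt.Println(u.Host) // [fe80::14fc:cec5:c174:d88%10]:20
}
```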
@@ -6,7 +6,6 @@ package git
import (
"crypto/sha1"
"encoding/hex"
"io"
"strconv"
"strings"
"sync"
@@ -68,32 +67,6 @@ func ParseBool(value string) (result, valid bool) {
return intValue != 0, true
}

// LimitedReaderCloser is a limited reader closer
type LimitedReaderCloser struct {
R io.Reader
C io.Closer
N int64
}

// Read implements io.Reader
func (l *LimitedReaderCloser) Read(p []byte) (n int, err error) {
if l.N <= 0 {
_ = l.C.Close()
return 0, io.EOF
}
if int64(len(p)) > l.N {
p = p[0:l.N]
}
n, err = l.R.Read(p)
l.N -= int64(n)
return n, err
}

// Close implements io.Closer
func (l *LimitedReaderCloser) Close() error {
return l.C.Close()
}

func HashFilePathForWebUI(s string) string {
h := sha1.New()
_, _ = h.Write([]byte(s))
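Something close to the removed LimitedReaderCloser can be composed from the standard library when a capped, closeable reader is needed; whether the callers now use this exact composition is not shown in this diff, so treat it as a sketch only:

```go
package gitutil

import "io"

// limitedReadCloser reads at most n bytes from r and closes c on Close.
// Unlike the removed type, it does not close c automatically on EOF.
func limitedReadCloser(r io.Reader, c io.Closer, n int64) io.ReadCloser {
	return struct {
		io.Reader
		io.Closer
	}{io.LimitReader(r, n), c}
}
```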
@@ -4,7 +4,10 @@
package hcaptcha

import (
"errors"
"io"
"net/http"
"net/url"
"os"
"strings"
"testing"
@@ -21,6 +24,33 @@ func TestMain(m *testing.M) {
os.Exit(m.Run())
}

type mockTransport struct{}

func (mockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
if req.URL.String() != verifyURL {
return nil, errors.New("unsupported url")
}

body, err := io.ReadAll(req.Body)
if err != nil {
return nil, err
}

bodyValues, err := url.ParseQuery(string(body))
if err != nil {
return nil, err
}

var responseText string
if bodyValues.Get("response") == dummyToken {
responseText = `{"success":true,"credit":false,"hostname":"dummy-key-pass","challenge_ts":"2025-10-08T16:02:56.136Z"}`
} else {
responseText = `{"success":false,"error-codes":["invalid-input-response"]}`
}

return &http.Response{Request: req, Body: io.NopCloser(strings.NewReader(responseText))}, nil
}

func TestCaptcha(t *testing.T) {
tt := []struct {
Name string
@@ -55,6 +85,7 @@ func TestCaptcha(t *testing.T) {
t.Run(tc.Name, func(t *testing.T) {
client, err := New(tc.Secret, WithHTTP(&http.Client{
Timeout: time.Second * 5,
Transport: mockTransport{},
}))
if err != nil {
// The only error that can be returned from creating a client
@@ -7,54 +7,53 @@ package httplib
import (
"bytes"
"context"
"crypto/tls"
"errors"
"fmt"
"io"
"net"
"net/http"
"net/url"
"strings"
"sync"
"time"
)

var defaultSetting = Settings{"GiteaServer", 60 * time.Second, 60 * time.Second, nil, nil}
var defaultTransport = sync.OnceValue(func() http.RoundTripper {
return &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: DialContextWithTimeout(10 * time.Second), // it is good enough in modern days
}
})

// newRequest returns *Request with specific method
func newRequest(url, method string) *Request {
var resp http.Response
req := http.Request{
func DialContextWithTimeout(timeout time.Duration) func(ctx context.Context, network, address string) (net.Conn, error) {
return func(ctx context.Context, network, address string) (net.Conn, error) {
return (&net.Dialer{Timeout: timeout}).DialContext(ctx, network, address)
}
}

func NewRequest(url, method string) *Request {
return &Request{
url: url,
req: &http.Request{
Method: method,
Header: make(http.Header),
Proto: "HTTP/1.1",
Proto: "HTTP/1.1", // FIXME: from legacy httplib, it shouldn't be hardcoded
ProtoMajor: 1,
ProtoMinor: 1,
},
params: map[string]string{},

// ATTENTION: from legacy httplib, callers must pay more attention to it, it will cause annoying bugs when the response takes a long time
readWriteTimeout: 60 * time.Second,
}
return &Request{url, &req, map[string]string{}, defaultSetting, &resp, nil}
}

// NewRequest returns *Request with specific method
func NewRequest(url, method string) *Request {
return newRequest(url, method)
}

// Settings is the default settings for http client
type Settings struct {
UserAgent string
ConnectTimeout time.Duration
ReadWriteTimeout time.Duration
TLSClientConfig *tls.Config
Transport http.RoundTripper
}

// Request provides more useful methods for requesting one url than http.Request.
type Request struct {
url string
req *http.Request
params map[string]string
setting Settings
resp *http.Response
body []byte

readWriteTimeout time.Duration
transport http.RoundTripper
}

// SetContext sets the request's Context
@@ -63,36 +62,24 @@ func (r *Request) SetContext(ctx context.Context) *Request {
return r
}

// SetTimeout sets connect time out and read-write time out for BeegoRequest.
func (r *Request) SetTimeout(connectTimeout, readWriteTimeout time.Duration) *Request {
r.setting.ConnectTimeout = connectTimeout
r.setting.ReadWriteTimeout = readWriteTimeout
// SetTransport sets the request transport, if not set, will use httplib's default transport with environment proxy support
// ATTENTION: the http.Transport has a connection pool, so it should be reused as much as possible, do not create a lot of transports
func (r *Request) SetTransport(transport http.RoundTripper) *Request {
r.transport = transport
return r
}

func (r *Request) SetReadWriteTimeout(readWriteTimeout time.Duration) *Request {
r.setting.ReadWriteTimeout = readWriteTimeout
r.readWriteTimeout = readWriteTimeout
return r
}

// SetTLSClientConfig sets tls connection configurations if visiting https url.
func (r *Request) SetTLSClientConfig(config *tls.Config) *Request {
r.setting.TLSClientConfig = config
return r
}

// Header add header item string in request.
// Header set header item string in request.
func (r *Request) Header(key, value string) *Request {
r.req.Header.Set(key, value)
return r
}

// SetTransport sets transport to
func (r *Request) SetTransport(transport http.RoundTripper) *Request {
r.setting.Transport = transport
return r
}

// Param adds query param in to request.
// params build query string as ?key1=value1&key2=value2...
func (r *Request) Param(key, value string) *Request {
@@ -125,11 +112,9 @@ func (r *Request) Body(data any) *Request {
return r
}

func (r *Request) getResponse() (*http.Response, error) {
if r.resp.StatusCode != 0 {
return r.resp, nil
}

// Response executes request client and returns the response.
// Caller MUST close the response body if no error occurs.
func (r *Request) Response() (*http.Response, error) {
var paramBody string
if len(r.params) > 0 {
var buf bytes.Buffer
@@ -160,59 +145,19 @@ func (r *Request) getResponse() (*http.Response, error) {
return nil, err
}

trans := r.setting.Transport
if trans == nil {
// create default transport
trans = &http.Transport{
TLSClientConfig: r.setting.TLSClientConfig,
Proxy: http.ProxyFromEnvironment,
DialContext: TimeoutDialer(r.setting.ConnectTimeout),
}
} else if t, ok := trans.(*http.Transport); ok {
if t.TLSClientConfig == nil {
t.TLSClientConfig = r.setting.TLSClientConfig
}
if t.DialContext == nil {
t.DialContext = TimeoutDialer(r.setting.ConnectTimeout)
}
}

client := &http.Client{
Transport: trans,
Timeout: r.setting.ReadWriteTimeout,
Transport: r.transport,
Timeout: r.readWriteTimeout,
}
if client.Transport == nil {
client.Transport = defaultTransport()
}

if len(r.setting.UserAgent) > 0 && len(r.req.Header.Get("User-Agent")) == 0 {
r.req.Header.Set("User-Agent", r.setting.UserAgent)
if r.req.Header.Get("User-Agent") == "" {
r.req.Header.Set("User-Agent", "GiteaHttpLib")
}

resp, err := client.Do(r.req)
if err != nil {
return nil, err
}
r.resp = resp
return resp, nil
}

// Response executes request client gets response manually.
// Caller MUST close the response body if no error occurs
func (r *Request) Response() (*http.Response, error) {
if r == nil {
return nil, errors.New("invalid request")
}
return r.getResponse()
}

// TimeoutDialer returns functions of connection dialer with timeout settings for http.Transport Dial field.
func TimeoutDialer(cTimeout time.Duration) func(ctx context.Context, net, addr string) (c net.Conn, err error) {
return func(ctx context.Context, netw, addr string) (net.Conn, error) {
d := net.Dialer{Timeout: cTimeout}
conn, err := d.DialContext(ctx, netw, addr)
if err != nil {
return nil, err
}
return conn, nil
}
return client.Do(r.req)
}

func (r *Request) GoString() string {
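After the refactor, the httplib flow is: build a Request, optionally override the shared transport or the read/write timeout, then call Response and close the body yourself. A usage sketch that relies only on the methods visible in the diff above (the URL is a placeholder):

```go
package main

import (
	"io"
	"os"
	"time"

	"code.gitea.io/gitea/modules/httplib"
)

func main() {
	resp, err := httplib.NewRequest("https://example.com/api", "GET").
		SetReadWriteTimeout(30 * time.Second). // overrides the 60s default noted above
		Header("Accept", "application/json").
		Response()
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close() // the caller must close, per the comment on Response()
	_, _ = io.Copy(os.Stdout, resp.Body)
}
```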
@@ -126,6 +126,7 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, mineBuf []byt
// no sandbox attribute for pdf as it breaks rendering in at least safari. this
// should generally be safe as scripts inside PDF can not escape the PDF document
// see https://bugs.chromium.org/p/chromium/issues/detail?id=413851 for more discussion
// HINT: PDF-RENDER-SANDBOX: PDF won't render in sandboxed context
w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'")
}

@@ -22,6 +22,7 @@ import (
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/queue"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)

var (
@@ -166,12 +167,12 @@ func Init() {
log.Fatal("PID: %d Unable to initialize the bleve Repository Indexer at path: %s Error: %v", os.Getpid(), setting.Indexer.RepoPath, err)
}
case "elasticsearch":
log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), setting.Indexer.RepoConnStr)
log.Info("PID: %d Initializing Repository Indexer at: %s", os.Getpid(), util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr))
defer func() {
if err := recover(); err != nil {
log.Error("PANIC whilst initializing repository indexer: %v\nStacktrace: %s", err, log.Stack(2))
log.Error("The indexer files are likely corrupted and may need to be deleted")
log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", setting.Indexer.RepoConnStr)
log.Error("You can completely remove the \"%s\" index to make Gitea recreate the indexes", util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr))
}
}()

@@ -181,7 +182,7 @@ func Init() {
cancel()
(*globalIndexer.Load()).Close()
close(waitChannel)
log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), setting.Indexer.RepoConnStr, err)
log.Fatal("PID: %d Unable to initialize the elasticsearch Repository Indexer connstr: %s Error: %v", os.Getpid(), util.SanitizeCredentialURLs(setting.Indexer.RepoConnStr), err)
}

default:

@@ -25,6 +25,7 @@ import (
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/queue"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)

// IndexerMetadata is used to send data to the queue, so it contains only the ids.
@@ -100,7 +101,7 @@ func InitIssueIndexer(syncReindex bool) {
issueIndexer = elasticsearch.NewIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueIndexerName)
existed, err = issueIndexer.Init(ctx)
if err != nil {
log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", util.SanitizeCredentialURLs(setting.Indexer.IssueConnStr), err)
}
case "db":
issueIndexer = db.GetIndexer()
@@ -108,7 +109,7 @@ func InitIssueIndexer(syncReindex bool) {
issueIndexer = meilisearch.NewIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueConnAuth, setting.Indexer.IssueIndexerName)
existed, err = issueIndexer.Init(ctx)
if err != nil {
log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", setting.Indexer.IssueConnStr, err)
log.Fatal("Unable to issueIndexer.Init with connection %s Error: %v", util.SanitizeCredentialURLs(setting.Indexer.IssueConnStr), err)
}
default:
log.Fatal("Unknown issue indexer type: %s", setting.Indexer.IssueType)
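These logging changes run indexer connection strings through util.SanitizeCredentialURLs before printing, so passwords embedded in `scheme://user:pass@host` URLs never reach the logs. The exact masking format is Gitea-internal; a rough standard-library illustration of the idea (this helper is not Gitea's implementation):

```go
package main

import (
	"fmt"
	"net/url"
)

// maskPassword blanks out the password component of a URL before logging.
// Illustrative only; util.SanitizeCredentialURLs is the real helper.
func maskPassword(raw string) string {
	u, err := url.Parse(raw)
	if err != nil || u.User == nil {
		return raw
	}
	if _, has := u.User.Password(); has {
		u.User = url.UserPassword(u.User.Username(), "xxxxx")
	}
	return u.String()
}

func main() {
	fmt.Println(maskPassword("http://elastic:secret@localhost:9200"))
	// http://elastic:xxxxx@localhost:9200
}
```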
@@ -157,7 +157,7 @@ func (g *GiteaBackend) Batch(_ string, pointers []transfer.BatchItem, args trans
}

// Download implements transfer.Backend. The returned reader must be closed by the caller.
func (g *GiteaBackend) Download(oid string, args transfer.Args) (io.ReadCloser, int64, error) {
func (g *GiteaBackend) Download(oid string, args transfer.Args) (_ io.ReadCloser, _ int64, retErr error) {
idMapStr, exists := args[argID]
if !exists {
return nil, 0, ErrMissingID
@@ -188,7 +188,15 @@ func (g *GiteaBackend) Download(oid string, args transfer.Args) (io.ReadCloser,
if err != nil {
return nil, 0, fmt.Errorf("failed to get response: %w", err)
}
// no need to close the body here by "defer resp.Body.Close()", see below
// We must return the ReaderCloser but not "ReadAll", to avoid OOM.
// "transfer.Backend" will check io.Closer interface and close the Body reader.
// So only close the Body when error occurs
defer func() {
if retErr != nil {
_ = resp.Body.Close()
}
}()

if resp.StatusCode != http.StatusOK {
return nil, 0, statusCodeToErr(resp.StatusCode)
}
@@ -197,7 +205,6 @@ func (g *GiteaBackend) Download(oid string, args transfer.Args) (io.ReadCloser,
if err != nil {
return nil, 0, fmt.Errorf("failed to parse content length: %w", err)
}
// transfer.Backend will check io.Closer interface and close this Body reader
return resp.Body, respSize, nil
}
modules/markup/external/external.go (vendored, 20 changed lines)
@@ -15,6 +15,8 @@ import (
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"

"github.com/kballard/go-shellquote"
)

// RegisterRenderers registers all supported third part renderers according settings
@@ -56,14 +58,11 @@ func (p *Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return p.MarkupSanitizerRules
}

// SanitizerDisabled disabled sanitize if return true
func (p *Renderer) SanitizerDisabled() bool {
return p.RenderContentMode == setting.RenderContentModeNoSanitizer || p.RenderContentMode == setting.RenderContentModeIframe
}

// DisplayInIFrame represents whether render the content with an iframe
func (p *Renderer) DisplayInIFrame() bool {
return p.RenderContentMode == setting.RenderContentModeIframe
func (p *Renderer) GetExternalRendererOptions() (ret markup.ExternalRendererOptions) {
ret.SanitizerDisabled = p.RenderContentMode == setting.RenderContentModeNoSanitizer || p.RenderContentMode == setting.RenderContentModeIframe
ret.DisplayInIframe = p.RenderContentMode == setting.RenderContentModeIframe
ret.ContentSandbox = p.RenderContentSandbox
return ret
}

func envMark(envName string) string {
@@ -81,7 +80,10 @@ func (p *Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.
envMark("GITEA_PREFIX_SRC"), baseLinkSrc,
envMark("GITEA_PREFIX_RAW"), baseLinkRaw,
).Replace(p.Command)
commands := strings.Fields(command)
commands, err := shellquote.Split(command)
if err != nil || len(commands) == 0 {
return fmt.Errorf("%s invalid command %q: %w", p.Name(), p.Command, err)
}
args := commands[1:]

if p.IsInputFile {
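The switch from strings.Fields to shellquote.Split matters as soon as a configured external-renderer command contains quoted arguments: Fields would split inside the quotes, Split respects them. A small comparison (the pandoc command line is a hypothetical example; go-shellquote is the library imported above):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/kballard/go-shellquote"
)

func main() {
	cmd := `pandoc --from rst --to html --metadata "title=My Doc"`

	// strings.Fields breaks the quoted argument into two pieces.
	fmt.Println(strings.Fields(cmd))

	// shellquote.Split keeps "title=My Doc" as a single argument.
	words, err := shellquote.Split(cmd)
	fmt.Println(words, err)
}
```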
@@ -5,6 +5,7 @@ package markup

import (
"strings"
"unicode"

"code.gitea.io/gitea/modules/emoji"
"code.gitea.io/gitea/modules/setting"
@@ -66,26 +67,31 @@ func emojiShortCodeProcessor(ctx *RenderContext, node *html.Node) {
}
m[0] += start
m[1] += start

start = m[1]

alias := node.Data[m[0]:m[1]]
alias = strings.ReplaceAll(alias, ":", "")
converted := emoji.FromAlias(alias)
if converted == nil {
// check if this is a custom reaction
if _, exist := setting.UI.CustomEmojisMap[alias]; exist {
replaceContent(node, m[0], m[1], createCustomEmoji(ctx, alias))
node = node.NextSibling.NextSibling
start = 0
continue

var nextChar byte
if m[1] < len(node.Data) {
nextChar = node.Data[m[1]]
}
if nextChar == ':' || unicode.IsLetter(rune(nextChar)) || unicode.IsDigit(rune(nextChar)) {
continue
}

alias = strings.Trim(alias, ":")
converted := emoji.FromAlias(alias)
if converted != nil {
// standard emoji
replaceContent(node, m[0], m[1], createEmoji(ctx, converted.Emoji, converted.Description))
node = node.NextSibling.NextSibling
start = 0
start = 0 // restart searching start since node has changed
} else if _, exist := setting.UI.CustomEmojisMap[alias]; exist {
// custom reaction
replaceContent(node, m[0], m[1], createCustomEmoji(ctx, alias))
node = node.NextSibling.NextSibling
start = 0 // restart searching start since node has changed
}
}
}

@@ -357,12 +357,9 @@ func TestRender_emoji(t *testing.T) {
`<p><span class="emoji" aria-label="smiling face with sunglasses">😎</span><span class="emoji" aria-label="zany face">🤪</span><span class="emoji" aria-label="locked with key">🔐</span><span class="emoji" aria-label="money-mouth face">🤑</span><span class="emoji" aria-label="red question mark">❓</span></p>`)

// should match nothing
test(
"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
`<p>2001:0db8:85a3:0000:0000:8a2e:0370:7334</p>`)
test(
":not exist:",
`<p>:not exist:</p>`)
test(":100:200", `<p>:100:200</p>`)
test("std::thread::something", `<p>std::thread::something</p>`)
test(":not exist:", `<p>:not exist:</p>`)
}

func TestRender_ShortLinks(t *testing.T) {
@@ -5,11 +5,13 @@ package internal

import (
"bytes"
"html/template"
"io"
)

type finalProcessor struct {
renderInternal *RenderInternal
extraHeadHTML template.HTML

output io.Writer
buf bytes.Buffer
@@ -25,6 +27,32 @@ func (p *finalProcessor) Close() error {
// because "postProcess" already does so. In the future we could optimize the code to process data on the fly.
buf := p.buf.Bytes()
buf = bytes.ReplaceAll(buf, []byte(` data-attr-class="`+p.renderInternal.secureIDPrefix), []byte(` class="`))

tmp := bytes.TrimSpace(buf)
isLikelyHTML := len(tmp) != 0 && tmp[0] == '<' && tmp[len(tmp)-1] == '>' && bytes.Index(tmp, []byte(`</`)) > 0
if !isLikelyHTML {
// not HTML, write back directly
_, err := p.output.Write(buf)
return err
}

// add our extra head HTML into output
headBytes := []byte("<head>")
posHead := bytes.Index(buf, headBytes)
var part1, part2 []byte
if posHead >= 0 {
part1, part2 = buf[:posHead+len(headBytes)], buf[posHead+len(headBytes):]
} else {
part1, part2 = nil, buf
}
if len(part1) > 0 {
if _, err := p.output.Write(part1); err != nil {
return err
}
}
if _, err := io.WriteString(p.output, string(p.extraHeadHTML)); err != nil {
return err
}
_, err := p.output.Write(part2)
return err
}
@@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/assert"
)

func TestRenderInternal(t *testing.T) {
func TestRenderInternalAttrs(t *testing.T) {
cases := []struct {
input, protected, recovered string
}{
@@ -30,7 +30,7 @@ func TestRenderInternal(t *testing.T) {
for _, c := range cases {
var r RenderInternal
out := &bytes.Buffer{}
in := r.init("sec", out)
in := r.init("sec", out, "")
protected := r.ProtectSafeAttrs(template.HTML(c.input))
assert.EqualValues(t, c.protected, protected)
_, _ = io.WriteString(in, string(protected))
@@ -41,7 +41,7 @@ func TestRenderInternal(t *testing.T) {
var r1, r2 RenderInternal
protected := r1.ProtectSafeAttrs(`<div class="test"></div>`)
assert.EqualValues(t, `<div class="test"></div>`, protected, "non-initialized RenderInternal should not protect any attributes")
_ = r1.init("sec", nil)
_ = r1.init("sec", nil, "")
protected = r1.ProtectSafeAttrs(`<div class="test"></div>`)
assert.EqualValues(t, `<div data-attr-class="sec:test"></div>`, protected)
assert.Equal(t, "data-attr-class", r1.SafeAttr("class"))
@@ -54,8 +54,37 @@ func TestRenderInternal(t *testing.T) {
assert.Empty(t, recovered)

out2 := &bytes.Buffer{}
in2 := r2.init("sec-other", out2)
in2 := r2.init("sec-other", out2, "")
_, _ = io.WriteString(in2, string(protected))
_ = in2.Close()
assert.Equal(t, `<div data-attr-class="sec:test"></div>`, out2.String(), "different secureID should not recover the value")
}

func TestRenderInternalExtraHead(t *testing.T) {
t.Run("HeadExists", func(t *testing.T) {
out := &bytes.Buffer{}
var r RenderInternal
in := r.init("sec", out, `<MY-TAG>`)
_, _ = io.WriteString(in, `<head>any</head>`)
_ = in.Close()
assert.Equal(t, `<head><MY-TAG>any</head>`, out.String())
})

t.Run("HeadNotExists", func(t *testing.T) {
out := &bytes.Buffer{}
var r RenderInternal
in := r.init("sec", out, `<MY-TAG>`)
_, _ = io.WriteString(in, `<div></div>`)
_ = in.Close()
assert.Equal(t, `<MY-TAG><div></div>`, out.String())
})

t.Run("NotHTML", func(t *testing.T) {
out := &bytes.Buffer{}
var r RenderInternal
in := r.init("sec", out, `<MY-TAG>`)
_, _ = io.WriteString(in, `<any>`)
_ = in.Close()
assert.Equal(t, `<any>`, out.String())
})
}

@@ -29,19 +29,19 @@ type RenderInternal struct {
secureIDPrefix string
}

func (r *RenderInternal) Init(output io.Writer) io.WriteCloser {
func (r *RenderInternal) Init(output io.Writer, extraHeadHTML template.HTML) io.WriteCloser {
buf := make([]byte, 12)
_, err := rand.Read(buf)
if err != nil {
panic("unable to generate secure id")
}
return r.init(base64.URLEncoding.EncodeToString(buf), output)
return r.init(base64.URLEncoding.EncodeToString(buf), output, extraHeadHTML)
}

func (r *RenderInternal) init(secID string, output io.Writer) io.WriteCloser {
func (r *RenderInternal) init(secID string, output io.Writer, extraHeadHTML template.HTML) io.WriteCloser {
r.secureID = secID
r.secureIDPrefix = r.secureID + ":"
return &finalProcessor{renderInternal: r, output: output}
return &finalProcessor{renderInternal: r, output: output, extraHeadHTML: extraHeadHTML}
}

func (r *RenderInternal) RecoverProtectedValue(v string) (string, bool) {
@@ -6,12 +6,14 @@ package markup
import (
"context"
"fmt"
"html/template"
"io"
"net/url"
"strconv"
"strings"
"time"

"code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/markup/internal"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@@ -120,31 +122,38 @@ func (ctx *RenderContext) WithHelper(helper RenderHelper) *RenderContext {
return ctx
}

// Render renders markup file to HTML with all specific handling stuff.
func Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
// FindRendererByContext finds renderer by RenderContext
// TODO: it should be merged with other similar functions like GetRendererByFileName, DetectMarkupTypeByFileName, etc
func FindRendererByContext(ctx *RenderContext) (Renderer, error) {
if ctx.RenderOptions.MarkupType == "" && ctx.RenderOptions.RelativePath != "" {
ctx.RenderOptions.MarkupType = DetectMarkupTypeByFileName(ctx.RenderOptions.RelativePath)
if ctx.RenderOptions.MarkupType == "" {
return util.NewInvalidArgumentErrorf("unsupported file to render: %q", ctx.RenderOptions.RelativePath)
return nil, util.NewInvalidArgumentErrorf("unsupported file to render: %q", ctx.RenderOptions.RelativePath)
}
}

renderer := renderers[ctx.RenderOptions.MarkupType]
if renderer == nil {
return util.NewInvalidArgumentErrorf("unsupported markup type: %q", ctx.RenderOptions.MarkupType)
return nil, util.NewNotExistErrorf("unsupported markup type: %q", ctx.RenderOptions.MarkupType)
}

if ctx.RenderOptions.RelativePath != "" {
if externalRender, ok := renderer.(ExternalRenderer); ok && externalRender.DisplayInIFrame() {
if !ctx.RenderOptions.InStandalonePage {
// for an external "DisplayInIFrame" render, it could only output its content in a standalone page
// otherwise, a <iframe> should be outputted to embed the external rendered page
return renderIFrame(ctx, output)
}
}
}
return renderer, nil
}

return render(ctx, renderer, input, output)
func RendererNeedPostProcess(renderer Renderer) bool {
if r, ok := renderer.(PostProcessRenderer); ok && r.NeedPostProcess() {
return true
}
return false
}

// Render renders markup file to HTML with all specific handling stuff.
func Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
renderer, err := FindRendererByContext(ctx)
if err != nil {
return err
}
return RenderWithRenderer(ctx, renderer, input, output)
}

// RenderString renders Markup string to HTML with all specific handling stuff and return string
@@ -156,24 +165,20 @@ func RenderString(ctx *RenderContext, content string) (string, error) {
return buf.String(), nil
}

func renderIFrame(ctx *RenderContext, output io.Writer) error {
// set height="0" ahead, otherwise the scrollHeight would be max(150, realHeight)
// at the moment, only "allow-scripts" is allowed for sandbox mode.
// "allow-same-origin" should never be used, it leads to XSS attack, and it makes the JS in iframe can access parent window's config and CSRF token
// TODO: when using dark theme, if the rendered content doesn't have proper style, the default text color is black, which is not easy to read
_, err := io.WriteString(output, fmt.Sprintf(`
<iframe src="%s/%s/%s/render/%s/%s"
name="giteaExternalRender"
onload="this.height=giteaExternalRender.document.documentElement.scrollHeight"
width="100%%" height="0" scrolling="no" frameborder="0" style="overflow: hidden"
sandbox="allow-scripts"
></iframe>`,
setting.AppSubURL,
func renderIFrame(ctx *RenderContext, sandbox string, output io.Writer) error {
src := fmt.Sprintf("%s/%s/%s/render/%s/%s", setting.AppSubURL,
url.PathEscape(ctx.RenderOptions.Metas["user"]),
url.PathEscape(ctx.RenderOptions.Metas["repo"]),
ctx.RenderOptions.Metas["RefTypeNameSubURL"],
url.PathEscape(ctx.RenderOptions.RelativePath),
))
util.PathEscapeSegments(ctx.RenderOptions.Metas["RefTypeNameSubURL"]),
util.PathEscapeSegments(ctx.RenderOptions.RelativePath),
)

var sandboxAttrValue template.HTML
if sandbox != "" {
sandboxAttrValue = htmlutil.HTMLFormat(`sandbox="%s"`, sandbox)
}
iframe := htmlutil.HTMLFormat(`<iframe data-src="%s" class="external-render-iframe" %s></iframe>`, src, sandboxAttrValue)
_, err := io.WriteString(output, string(iframe))
return err
}

@@ -185,13 +190,34 @@ func pipes() (io.ReadCloser, io.WriteCloser, func()) {
}
}

func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Writer) error {
func getExternalRendererOptions(renderer Renderer) (ret ExternalRendererOptions, _ bool) {
if externalRender, ok := renderer.(ExternalRenderer); ok {
return externalRender.GetExternalRendererOptions(), true
}
return ret, false
}

func RenderWithRenderer(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Writer) error {
var extraHeadHTML template.HTML
if extOpts, ok := getExternalRendererOptions(renderer); ok && extOpts.DisplayInIframe {
if !ctx.RenderOptions.InStandalonePage {
// for an external "DisplayInIFrame" render, it could only output its content in a standalone page
// otherwise, a <iframe> should be outputted to embed the external rendered page
return renderIFrame(ctx, extOpts.ContentSandbox, output)
}
// else: this is a standalone page, fallthrough to the real rendering, and add extra JS/CSS
extraStyleHref := setting.AppSubURL + "/assets/css/external-render-iframe.css"
extraScriptSrc := setting.AppSubURL + "/assets/js/external-render-iframe.js"
// "<script>" must go before "<link>", to make Golang's http.DetectContentType() can still recognize the content as "text/html"
extraHeadHTML = htmlutil.HTMLFormat(`<script src="%s"></script><link rel="stylesheet" href="%s">`, extraScriptSrc, extraStyleHref)
}

ctx.usedByRender = true
if ctx.RenderHelper != nil {
defer ctx.RenderHelper.CleanUp()
}

finalProcessor := ctx.RenderInternal.Init(output)
finalProcessor := ctx.RenderInternal.Init(output, extraHeadHTML)
defer finalProcessor.Close()

// input -> (pw1=pr1) -> renderer -> (pw2=pr2) -> SanitizeReader -> finalProcessor -> output
@@ -202,7 +228,7 @@ func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Wr
eg, _ := errgroup.WithContext(ctx)
var pw2 io.WriteCloser = util.NopCloser{Writer: finalProcessor}

if r, ok := renderer.(ExternalRenderer); !ok || !r.SanitizerDisabled() {
if r, ok := renderer.(ExternalRenderer); !ok || !r.GetExternalRendererOptions().SanitizerDisabled {
var pr2 io.ReadCloser
var close2 func()
pr2, pw2, close2 = pipes()
@@ -214,7 +240,7 @@ func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Wr
}

eg.Go(func() (err error) {
if r, ok := renderer.(PostProcessRenderer); ok && r.NeedPostProcess() {
if RendererNeedPostProcess(renderer) {
err = PostProcessDefault(ctx, pr1, pw2)
} else {
_, err = io.Copy(pw2, pr1)
||||
@@ -25,13 +25,15 @@ type PostProcessRenderer interface {
NeedPostProcess() bool
}

type ExternalRendererOptions struct {
SanitizerDisabled bool
DisplayInIframe bool
ContentSandbox string
}

// ExternalRenderer defines an interface for external renderers
type ExternalRenderer interface {
// SanitizerDisabled disabled sanitize if return true
SanitizerDisabled() bool

// DisplayInIFrame represents whether render the content with an iframe
DisplayInIFrame() bool
GetExternalRendererOptions() ExternalRendererOptions
}

// RendererContentDetector detects if the content can be rendered

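The hunk above replaces the old `SanitizerDisabled()` / `DisplayInIFrame()` method pair with a single `GetExternalRendererOptions()` accessor. A minimal, self-contained sketch of a renderer satisfying the consolidated interface; the `pdfRenderer` type and its field values are hypothetical, only the struct/interface shapes come from the diff:

```go
package main

import "fmt"

// The option and interface types are repeated here so the sketch is self-contained;
// in Gitea they live in modules/markup. The pdfRenderer type below is hypothetical.
type ExternalRendererOptions struct {
	SanitizerDisabled bool
	DisplayInIframe   bool
	ContentSandbox    string
}

type ExternalRenderer interface {
	GetExternalRendererOptions() ExternalRendererOptions
}

type pdfRenderer struct{}

func (pdfRenderer) GetExternalRendererOptions() ExternalRendererOptions {
	return ExternalRendererOptions{
		SanitizerDisabled: true, // trusted output, skip the sanitizer pipeline
		DisplayInIframe:   true, // embed the standalone render page via an <iframe>
		ContentSandbox:    "",   // e.g. "allow-scripts allow-popups"; empty means no sandbox attribute
	}
}

func main() {
	var r ExternalRenderer = pdfRenderer{}
	fmt.Printf("%+v\n", r.GetExternalRendererOptions())
}
```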
@@ -46,7 +46,7 @@ var (
// https://www.debian.org/doc/debian-policy/ch-controlfields.html#source
namePattern = regexp.MustCompile(`\A[a-z0-9][a-z0-9+-.]+\z`)
// https://www.debian.org/doc/debian-policy/ch-controlfields.html#version
versionPattern = regexp.MustCompile(`\A(?:[0-9]:)?[a-zA-Z0-9.+~]+(?:-[a-zA-Z0-9.+-~]+)?\z`)
versionPattern = regexp.MustCompile(`\A(?:(0|[1-9][0-9]*):)?[a-zA-Z0-9.+~]+(?:-[a-zA-Z0-9.+-~]+)?\z`)
)

type Package struct {

@@ -176,4 +176,12 @@ func TestParseControlFile(t *testing.T) {
assert.Equal(t, []string{"a", "b"}, p.Metadata.Dependencies)
assert.Equal(t, full, p.Control)
})

t.Run("ValidVersions", func(t *testing.T) {
for _, version := range []string{"1.0", "0:1.2", "9:1.0", "10:1.0", "900:1a.2b-x-y_z~1+2"} {
p, err := ParseControlFile(buildContent("testpkg", version, "amd64"))
assert.NoError(t, err, "ParseControlFile with version %q", version)
assert.NotNil(t, p)
}
})
}

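The only change here is the epoch group of `versionPattern`: `(?:[0-9]:)?` becomes `(?:(0|[1-9][0-9]*):)?`. A quick standalone check of what that buys; the two patterns are copied from the hunk, and the behaviour notes are my reading of them, consistent with the new `ValidVersions` test cases:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldPattern := regexp.MustCompile(`\A(?:[0-9]:)?[a-zA-Z0-9.+~]+(?:-[a-zA-Z0-9.+-~]+)?\z`)
	newPattern := regexp.MustCompile(`\A(?:(0|[1-9][0-9]*):)?[a-zA-Z0-9.+~]+(?:-[a-zA-Z0-9.+-~]+)?\z`)

	for _, v := range []string{"9:1.0", "10:1.0", "010:1.0"} {
		fmt.Printf("%-8s old=%v new=%v\n", v, oldPattern.MatchString(v), newPattern.MatchString(v))
	}
	// Output:
	// 9:1.0    old=true new=true
	// 10:1.0   old=false new=true   <- multi-digit epochs are now accepted
	// 010:1.0  old=false new=false  <- epochs with leading zeros stay rejected
}
```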
@@ -10,6 +10,7 @@ import (
"net/http"
"os"
"strings"
"sync"
"time"

"code.gitea.io/gitea/modules/httplib"
@@ -33,6 +34,35 @@ func getClientIP() string {
return strings.Fields(sshConnEnv)[0]
}

func dialContextInternalAPI(ctx context.Context, network, address string) (conn net.Conn, err error) {
d := net.Dialer{Timeout: 10 * time.Second}
if setting.Protocol == setting.HTTPUnix {
conn, err = d.DialContext(ctx, "unix", setting.HTTPAddr)
} else {
conn, err = d.DialContext(ctx, network, address)
}
if err != nil {
return nil, err
}
if setting.LocalUseProxyProtocol {
if err = proxyprotocol.WriteLocalHeader(conn); err != nil {
_ = conn.Close()
return nil, err
}
}
return conn, nil
}

var internalAPITransport = sync.OnceValue(func() http.RoundTripper {
return &http.Transport{
DialContext: dialContextInternalAPI,
TLSClientConfig: &tls.Config{
InsecureSkipVerify: true,
ServerName: setting.Domain,
},
}
})

func NewInternalRequest(ctx context.Context, url, method string) *httplib.Request {
if setting.InternalToken == "" {
log.Fatal(`The INTERNAL_TOKEN setting is missing from the configuration file: %q.
@@ -43,49 +73,11 @@ Ensure you are running in the correct environment or set the correct configurati
log.Fatal("Invalid internal request URL: %q", url)
}

req := httplib.NewRequest(url, method).
return httplib.NewRequest(url, method).
SetContext(ctx).
SetTransport(internalAPITransport()).
Header("X-Real-IP", getClientIP()).
Header("X-Gitea-Internal-Auth", "Bearer "+setting.InternalToken).
SetTLSClientConfig(&tls.Config{
InsecureSkipVerify: true,
ServerName: setting.Domain,
})

if setting.Protocol == setting.HTTPUnix {
req.SetTransport(&http.Transport{
DialContext: func(ctx context.Context, _, _ string) (net.Conn, error) {
var d net.Dialer
conn, err := d.DialContext(ctx, "unix", setting.HTTPAddr)
if err != nil {
return conn, err
}
if setting.LocalUseProxyProtocol {
if err = proxyprotocol.WriteLocalHeader(conn); err != nil {
_ = conn.Close()
return nil, err
}
}
return conn, err
},
})
} else if setting.LocalUseProxyProtocol {
req.SetTransport(&http.Transport{
DialContext: func(ctx context.Context, network, address string) (net.Conn, error) {
var d net.Dialer
conn, err := d.DialContext(ctx, network, address)
if err != nil {
return conn, err
}
if err = proxyprotocol.WriteLocalHeader(conn); err != nil {
_ = conn.Close()
return nil, err
}
return conn, err
},
})
}
return req
Header("X-Gitea-Internal-Auth", "Bearer "+setting.InternalToken)
}

func newInternalRequestAPI(ctx context.Context, url, method string, body ...any) *httplib.Request {
@@ -98,6 +90,6 @@ func newInternalRequestAPI(ctx context.Context, url, method string, body ...any)
log.Fatal("Too many arguments for newInternalRequestAPI")
}

req.SetTimeout(10*time.Second, 60*time.Second)
req.SetReadWriteTimeout(60 * time.Second)
return req
}

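The refactor above replaces the per-request `http.Transport` construction with a single shared transport memoized via `sync.OnceValue` (Go 1.21+). A generic sketch of that memoization pattern, independent of Gitea's own types:

```go
package main

import (
	"fmt"
	"net/http"
	"sync"
)

// The constructor is wrapped so the (potentially expensive) setup runs only on first use;
// every later call returns the same memoized *http.Transport value.
var sharedTransport = sync.OnceValue(func() http.RoundTripper {
	fmt.Println("constructing transport") // printed exactly once
	return &http.Transport{MaxIdleConns: 10}
})

func main() {
	t1 := sharedTransport()
	t2 := sharedTransport()
	fmt.Println(t1 == t2) // true: both calls observe the same value
}
```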
@@ -6,7 +6,6 @@ package private
import (
"context"
"fmt"
"time"

"code.gitea.io/gitea/modules/setting"
)
@@ -31,6 +30,6 @@ func RestoreRepo(ctx context.Context, repoDir, ownerName, repoName string, units
Units: units,
Validation: validation,
})
req.SetTimeout(3*time.Second, 0) // since the request will spend much time, don't timeout
req.SetReadWriteTimeout(0) // since the request will spend much time, don't timeout
return requestJSONClientMsg(req, fmt.Sprintf("Restore repo %s/%s successfully", ownerName, repoName))
}

@@ -16,7 +16,11 @@ var Attachment AttachmentSettingType
func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
Attachment = AttachmentSettingType{
AllowedTypes: ".avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip",

// FIXME: this size is used for both "issue attachment" and "release attachment"
// The design is not right, these two should be different settings
MaxSize: 2048,

MaxFiles: 5,
Enabled: true,
}

@@ -202,11 +202,11 @@ func NewConfigProviderFromFile(file string) (ConfigProvider, error) {
loadedFromEmpty := true

if file != "" {
isFile, err := util.IsFile(file)
isExist, err := util.IsExist(file)
if err != nil {
return nil, fmt.Errorf("unable to check if %q is a file. Error: %v", file, err)
return nil, fmt.Errorf("unable to check if %q exists: %v", file, err)
}
if isFile {
if isExist {
if err = cfg.Append(file); err != nil {
return nil, fmt.Errorf("failed to load config file %q: %v", file, err)
}

@@ -63,6 +63,7 @@ type MarkupRenderer struct {
NeedPostProcess bool
MarkupSanitizerRules []MarkupSanitizerRule
RenderContentMode string
RenderContentSandbox string
}

// MarkupSanitizerRule defines the policy for whitelisting attributes on
@@ -253,13 +254,24 @@ func newMarkupRenderer(name string, sec ConfigSection) {
renderContentMode = RenderContentModeSanitized
}

// ATTENTION! at the moment, only a safe set like "allow-scripts" are allowed for sandbox mode.
// "allow-same-origin" should never be used, it leads to XSS attack, and it makes the JS in iframe can access parent window's config and CSRF token
renderContentSandbox := sec.Key("RENDER_CONTENT_SANDBOX").MustString("allow-scripts allow-popups")
if renderContentSandbox == "disabled" {
renderContentSandbox = ""
}

ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{
Enabled: sec.Key("ENABLED").MustBool(false),
MarkupName: name,
FileExtensions: exts,
Command: command,
IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false),
NeedPostProcess: sec.Key("NEED_POSTPROCESS").MustBool(true),

RenderContentMode: renderContentMode,
RenderContentSandbox: renderContentSandbox,

// if no sanitizer is needed, no post process is needed
NeedPostProcess: sec.Key("NEED_POST_PROCESS").MustBool(renderContentMode == RenderContentModeSanitized),
})
}

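A small sketch restating how the `RENDER_CONTENT_SANDBOX` value read above is interpreted; the wrapping helper is hypothetical, while the default allow-list and the `disabled` sentinel come straight from the hunk:

```go
package main

import "fmt"

// sandboxFromConfig restates newMarkupRenderer's handling of RENDER_CONTENT_SANDBOX:
// an unset key falls back to a safe allow-list, and the literal value "disabled"
// removes the sandbox attribute from the generated <iframe> entirely.
func sandboxFromConfig(configured string) string {
	if configured == "" {
		return "allow-scripts allow-popups" // default from the hunk above
	}
	if configured == "disabled" {
		return "" // renderIFrame then emits no sandbox attribute at all
	}
	return configured
}

func main() {
	for _, v := range []string{"", "allow-scripts", "disabled"} {
		fmt.Printf("RENDER_CONTENT_SANDBOX=%q -> sandbox attribute %q\n", v, sandboxFromConfig(v))
	}
}
```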
@@ -33,6 +33,8 @@ var UI = struct {
ReactionsLookup container.Set[string] `ini:"-"`
CustomEmojis []string
CustomEmojisMap map[string]string `ini:"-"`
EnabledEmojis []string
EnabledEmojisSet container.Set[string] `ini:"-"`
SearchRepoDescription bool
OnlyShowRelevantRepos bool
ExploreDefaultSort string `ini:"EXPLORE_PAGING_DEFAULT_SORT"`
@@ -169,4 +171,5 @@ func loadUIFrom(rootCfg ConfigProvider) {
for _, emoji := range UI.CustomEmojis {
UI.CustomEmojisMap[emoji] = ":" + emoji + ":"
}
UI.EnabledEmojisSet = container.SetOf(UI.EnabledEmojis...)
}

@@ -6,6 +6,7 @@ package util
import (
"errors"
"fmt"
"html/template"
)

// Common Errors forming the base of our error system
@@ -16,6 +17,7 @@ var (
ErrPermissionDenied = errors.New("permission denied") // also implies HTTP 403
ErrNotExist = errors.New("resource does not exist") // also implies HTTP 404
ErrAlreadyExist = errors.New("resource already exists") // also implies HTTP 409
ErrContentTooLarge = errors.New("content exceeds limit") // also implies HTTP 413

// ErrUnprocessableContent implies HTTP 422, the syntax of the request content is correct,
// but the server is unable to process the contained instructions
@@ -39,22 +41,6 @@ func (w errorWrapper) Unwrap() error {
return w.Err
}

type LocaleWrapper struct {
err error
TrKey string
TrArgs []any
}

// Error returns the message
func (w LocaleWrapper) Error() string {
return w.err.Error()
}

// Unwrap returns the underlying error
func (w LocaleWrapper) Unwrap() error {
return w.err
}

// ErrorWrap returns an error that formats as the given text but unwraps as the provided error
func ErrorWrap(unwrap error, message string, args ...any) error {
if len(args) == 0 {
@@ -83,15 +69,39 @@ func NewNotExistErrorf(message string, args ...any) error {
return ErrorWrap(ErrNotExist, message, args...)
}

// ErrorWrapLocale wraps an err with a translation key and arguments
func ErrorWrapLocale(err error, trKey string, trArgs ...any) error {
return LocaleWrapper{err: err, TrKey: trKey, TrArgs: trArgs}
// ErrorTranslatable wraps an error with translation information
type ErrorTranslatable interface {
error
Unwrap() error
Translate(ErrorLocaleTranslator) template.HTML
}

func ErrorAsLocale(err error) *LocaleWrapper {
var e LocaleWrapper
type errorTranslatableWrapper struct {
err error
trKey string
trArgs []any
}

type ErrorLocaleTranslator interface {
Tr(key string, args ...any) template.HTML
}

func (w *errorTranslatableWrapper) Error() string { return w.err.Error() }

func (w *errorTranslatableWrapper) Unwrap() error { return w.err }

func (w *errorTranslatableWrapper) Translate(t ErrorLocaleTranslator) template.HTML {
return t.Tr(w.trKey, w.trArgs...)
}

func ErrorWrapTranslatable(err error, trKey string, trArgs ...any) ErrorTranslatable {
return &errorTranslatableWrapper{err: err, trKey: trKey, trArgs: trArgs}
}

func ErrorAsTranslatable(err error) ErrorTranslatable {
var e *errorTranslatableWrapper
if errors.As(err, &e) {
return &e
return e
}
return nil
}

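The `LocaleWrapper` type is replaced by an `ErrorTranslatable` interface whose rendering is deferred to a locale supplied by the caller. A self-contained sketch of the intended flow, using stand-in types so it compiles on its own (the real wrapper lives in `modules/util`, and the handlers in this diff call it as `util.ErrorWrapTranslatable` / `util.ErrorAsTranslatable`):

```go
package main

import (
	"errors"
	"fmt"
	"html/template"
)

// Stand-ins for the real util/locale types so this sketch is self-contained.
type ErrorLocaleTranslator interface{ Tr(key string, args ...any) template.HTML }

type fakeLocale struct{}

func (fakeLocale) Tr(key string, args ...any) template.HTML {
	return template.HTML(fmt.Sprintf("[%s %v]", key, args))
}

type translatableErr struct {
	err    error
	trKey  string
	trArgs []any
}

func (w *translatableErr) Error() string { return w.err.Error() }
func (w *translatableErr) Unwrap() error { return w.err }
func (w *translatableErr) Translate(t ErrorLocaleTranslator) template.HTML {
	return t.Tr(w.trKey, w.trArgs...)
}

func main() {
	// Service layer: keep the raw cause, attach a translation key instead of a rendered message.
	cause := errors.New("patch does not apply")
	err := error(&translatableErr{err: cause, trKey: "repo.editor.fail_to_apply_patch"})

	// Handler layer: unwrapping still works, and the key is rendered with the request's locale.
	var tr *translatableErr
	if errors.As(err, &tr) {
		fmt.Println(tr.Translate(fakeLocale{})) // [repo.editor.fail_to_apply_patch []]
	}
	fmt.Println(errors.Is(err, cause)) // true
}
```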
modules/util/error_test.go (new file, 29 lines)
@@ -0,0 +1,29 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package util

import (
"io"
"testing"

"github.com/stretchr/testify/assert"
)

func TestErrorTranslatable(t *testing.T) {
var err error

err = ErrorWrapTranslatable(io.EOF, "key", 1)
assert.ErrorIs(t, err, io.EOF)
assert.Equal(t, "EOF", err.Error())
assert.Equal(t, "key", err.(*errorTranslatableWrapper).trKey)
assert.Equal(t, []any{1}, err.(*errorTranslatableWrapper).trArgs)

err = ErrorWrap(err, "new msg %d", 100)
assert.ErrorIs(t, err, io.EOF)
assert.Equal(t, "new msg 100", err.Error())

errTr := ErrorAsTranslatable(err)
assert.Equal(t, "EOF", errTr.Error())
assert.Equal(t, "key", errTr.(*errorTranslatableWrapper).trKey)
}

@@ -115,15 +115,10 @@ func IsDir(dir string) (bool, error) {
return false, err
}

// IsFile returns true if given path is a file,
// or returns false when it's a directory or does not exist.
func IsFile(filePath string) (bool, error) {
f, err := os.Stat(filePath)
func IsRegularFile(filePath string) (bool, error) {
f, err := os.Lstat(filePath)
if err == nil {
return !f.IsDir(), nil
}
if os.IsNotExist(err) {
return false, nil
return f.Mode().IsRegular(), nil
}
return false, err
}

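`IsFile` becomes `IsRegularFile`, switching from `os.Stat` to `os.Lstat` plus `Mode().IsRegular()`. A small sketch of the practical difference (assumes a Unix-like system where the process may create symlinks):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Lstat does not follow symlinks, so a symlink is reported as a link,
// not as the regular file it points to; Stat resolves it first.
func main() {
	dir, _ := os.MkdirTemp("", "demo")
	defer os.RemoveAll(dir)

	file := filepath.Join(dir, "plain.txt")
	link := filepath.Join(dir, "link.txt")
	_ = os.WriteFile(file, []byte("x"), 0o600)
	_ = os.Symlink(file, link)

	for _, p := range []string{file, link, dir} {
		st, _ := os.Stat(p)   // follows symlinks
		lst, _ := os.Lstat(p) // does not follow symlinks
		fmt.Printf("%-10s Stat regular=%v  Lstat regular=%v\n",
			filepath.Base(p), st.Mode().IsRegular(), lst.Mode().IsRegular())
	}
	// plain.txt: true/true, link.txt: true/false, the directory: false/false
}
```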
@@ -22,6 +22,8 @@ func GetContextData(c context.Context) reqctx.ContextData {

func CommonTemplateContextData() reqctx.ContextData {
return reqctx.ContextData{
"PageTitleCommon": setting.AppName,

"IsLandingPageOrganizations": setting.LandingPageURL == setting.LandingPageOrganizations,

"ShowRegistrationButton": setting.Service.ShowRegistrationButton,

@@ -104,7 +104,8 @@ func logPrinter(logger log.Logger) func(trigger Event, record *requestRecord) {
}
logf := logInfo
// lower the log level for some specific requests, in most cases these logs are not useful
if strings.HasPrefix(req.RequestURI, "/assets/") /* static assets */ ||
if status > 0 && status < 400 &&
strings.HasPrefix(req.RequestURI, "/assets/") /* static assets */ ||
req.RequestURI == "/user/events" /* Server-Sent Events (SSE) handler */ ||
req.RequestURI == "/api/actions/runner.v1.RunnerService/FetchTask" /* Actions Runner polling */ {
logf = logTrace

@@ -2433,7 +2433,9 @@ settings.event_workflow_job_desc = Gitea Actions Workflow job queued, waiting, i
settings.event_package = Package
settings.event_package_desc = Package created or deleted in a repository.
settings.branch_filter = Branch filter
settings.branch_filter_desc = Branch whitelist for push, branch creation and branch deletion events, specified as glob pattern. If empty or <code>*</code>, events for all branches are reported. See <a href="%[1]s">%[2]s</a> documentation for syntax. Examples: <code>master</code>, <code>{master,release*}</code>.
settings.branch_filter_desc_1 = Branch (and ref name) allowlist for push, branch creation and branch deletion events, specified as glob pattern. If empty or <code>*</code>, events for all branches and tags are reported.
settings.branch_filter_desc_2 = Use <code>refs/heads/</code> or <code>refs/tags/</code> prefix to match full ref names.
settings.branch_filter_desc_doc = See <a href="%[1]s">%[2]s</a> documentation for syntax.
settings.authorization_header = Authorization Header
settings.authorization_header_desc = Will be included as authorization header for requests when present. Examples: %s.
settings.active = Active

public/assets/img/svg/gitea-running.svg (generated, new file, 429 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" class="svg gitea-running" width="16" height="16" aria-hidden="true"><path fill="none" stroke="currentColor" stroke-width="2" d="M3.05 3.05a7 7 0 1 1 9.9 9.9 7 7 0 0 1-9.9-9.9Z" opacity=".5"/><path fill="currentColor" fill-rule="evenodd" d="M8 4a4 4 0 1 0 0 8 4 4 0 0 0 0-8" clip-rule="evenodd"/><path fill="currentColor" d="M14 8a6 6 0 0 0-6-6V0a8 8 0 0 1 8 8z"/></svg>
@@ -1423,6 +1423,7 @@ func Routes() *web.Router {
m.Get("/tags/{sha}", repo.GetAnnotatedTag)
m.Get("/notes/{sha}", repo.GetNote)
}, context.ReferencesGitRepo(true), reqRepoReader(unit.TypeCode))
m.Post("/diffpatch", mustEnableEditor, reqToken(), bind(api.ApplyDiffPatchFileOptions{}), repo.ReqChangeRepoFileOptionsAndCheck, repo.ApplyDiffPatch)
m.Group("/contents", func() {
m.Get("", repo.GetContentsList)
m.Get("/*", repo.GetContents)
@@ -1434,7 +1435,6 @@ func Routes() *web.Router {
m.Put("", bind(api.UpdateFileOptions{}), repo.ReqChangeRepoFileOptionsAndCheck, repo.UpdateFile)
m.Delete("", bind(api.DeleteFileOptions{}), repo.ReqChangeRepoFileOptionsAndCheck, repo.DeleteFile)
})
m.Post("/diffpatch", bind(api.ApplyDiffPatchFileOptions{}), repo.ReqChangeRepoFileOptionsAndCheck, repo.ApplyDiffPatch)
}, mustEnableEditor, reqToken())
}, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo())
m.Group("/contents-ext", func() {

@@ -4,6 +4,7 @@
package repo

import (
"errors"
"net/http"

issues_model "code.gitea.io/gitea/models/issues"
@@ -11,6 +12,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
attachment_service "code.gitea.io/gitea/services/attachment"
"code.gitea.io/gitea/services/context"
@@ -154,6 +156,8 @@ func CreateIssueAttachment(ctx *context.APIContext) {
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/error"
// "413":
// "$ref": "#/responses/error"
// "422":
// "$ref": "#/responses/validationError"
// "423":
@@ -181,7 +185,8 @@ func CreateIssueAttachment(ctx *context.APIContext) {
filename = query
}

attachment, err := attachment_service.UploadAttachment(ctx, file, setting.Attachment.AllowedTypes, header.Size, &repo_model.Attachment{
uploaderFile := attachment_service.NewLimitedUploaderKnownSize(file, header.Size)
attachment, err := attachment_service.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, setting.Attachment.AllowedTypes, &repo_model.Attachment{
Name: filename,
UploaderID: ctx.Doer.ID,
RepoID: ctx.Repo.Repository.ID,
@@ -190,6 +195,8 @@ func CreateIssueAttachment(ctx *context.APIContext) {
if err != nil {
if upload.IsErrFileTypeForbidden(err) {
ctx.APIError(http.StatusUnprocessableEntity, err)
} else if errors.Is(err, util.ErrContentTooLarge) {
ctx.APIError(http.StatusRequestEntityTooLarge, err)
} else {
ctx.APIErrorInternal(err)
}

@@ -13,6 +13,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
attachment_service "code.gitea.io/gitea/services/attachment"
"code.gitea.io/gitea/services/context"
@@ -161,6 +162,8 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) {
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/error"
// "413":
// "$ref": "#/responses/error"
// "422":
// "$ref": "#/responses/validationError"
// "423":
@@ -189,7 +192,8 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) {
filename = query
}

attachment, err := attachment_service.UploadAttachment(ctx, file, setting.Attachment.AllowedTypes, header.Size, &repo_model.Attachment{
uploaderFile := attachment_service.NewLimitedUploaderKnownSize(file, header.Size)
attachment, err := attachment_service.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, setting.Attachment.AllowedTypes, &repo_model.Attachment{
Name: filename,
UploaderID: ctx.Doer.ID,
RepoID: ctx.Repo.Repository.ID,
@@ -199,6 +203,8 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) {
if err != nil {
if upload.IsErrFileTypeForbidden(err) {
ctx.APIError(http.StatusUnprocessableEntity, err)
} else if errors.Is(err, util.ErrContentTooLarge) {
ctx.APIError(http.StatusRequestEntityTooLarge, err)
} else {
ctx.APIErrorInternal(err)
}

@@ -4,7 +4,7 @@
package repo

import (
"bytes"
gocontext "context"
"errors"
"fmt"
"net/http"
@@ -173,7 +173,7 @@ func Migrate(ctx *context.APIContext) {
opts.AWSSecretAccessKey = form.AWSSecretAccessKey
}

repo, err := repo_service.CreateRepositoryDirectly(ctx, ctx.Doer, repoOwner, repo_service.CreateRepoOptions{
createdRepo, err := repo_service.CreateRepositoryDirectly(ctx, ctx.Doer, repoOwner, repo_service.CreateRepoOptions{
Name: opts.RepoName,
Description: opts.Description,
OriginalURL: form.CloneAddr,
@@ -187,35 +187,37 @@ func Migrate(ctx *context.APIContext) {
return
}

opts.MigrateToRepoID = repo.ID
opts.MigrateToRepoID = createdRepo.ID

doLongTimeMigrate := func(ctx gocontext.Context, doer *user_model.User) (migratedRepo *repo_model.Repository, retErr error) {
defer func() {
if e := recover(); e != nil {
var buf bytes.Buffer
fmt.Fprintf(&buf, "Handler crashed with error: %v", log.Stack(2))

err = errors.New(buf.String())
}

if err == nil {
notify_service.MigrateRepository(ctx, ctx.Doer, repoOwner, repo)
return
}

if repo != nil {
if errDelete := repo_service.DeleteRepositoryDirectly(ctx, repo.ID); errDelete != nil {
log.Error("DeleteRepository: %v", errDelete)
log.Error("MigrateRepository panic: %v\n%s", e, log.Stack(2))
if errDelete := repo_service.DeleteRepositoryDirectly(ctx, createdRepo.ID); errDelete != nil {
log.Error("Unable to delete repo after MigrateRepository panic: %v", errDelete)
}
retErr = errors.New("MigrateRepository panic") // no idea why it would happen, just legacy code
}
}()

if repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.Doer, repoOwner.Name, opts, nil); err != nil {
migratedRepo, err := migrations.MigrateRepository(ctx, doer, repoOwner.Name, opts, nil)
if err != nil {
return nil, err
}
notify_service.MigrateRepository(ctx, doer, repoOwner, migratedRepo)
return migratedRepo, nil
}

// use a background context, don't cancel the migration even if the client goes away
// HammerContext doesn't seem right (from https://github.com/go-gitea/gitea/pull/9335/files)
// There are other abuses, maybe most HammerContext abuses should be fixed together in the future.
migratedRepo, err := doLongTimeMigrate(graceful.GetManager().HammerContext(), ctx.Doer)
if err != nil {
handleMigrateError(ctx, repoOwner, err)
return
}

log.Trace("Repository migrated: %s/%s", repoOwner.Name, form.RepoName)
ctx.JSON(http.StatusCreated, convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeAdmin}))
ctx.JSON(http.StatusCreated, convert.ToRepo(ctx, migratedRepo, access_model.Permission{AccessMode: perm.AccessModeAdmin}))
}

func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, err error) {

@@ -36,7 +36,7 @@ func ApplyDiffPatch(ctx *context.APIContext) {
// in: body
// required: true
// schema:
// "$ref": "#/definitions/UpdateFileOptions"
// "$ref": "#/definitions/ApplyDiffPatchFileOptions"
// responses:
// "200":
// "$ref": "#/responses/FileResponse"

@@ -13,7 +13,6 @@ import (
"time"

activities_model "code.gitea.io/gitea/models/activities"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
pull_model "code.gitea.io/gitea/models/pull"
@@ -938,7 +937,7 @@ func MergePullRequest(ctx *context.APIContext) {
} else if errors.Is(err, pull_service.ErrNoPermissionToMerge) {
ctx.APIError(http.StatusMethodNotAllowed, "User not allowed to merge PR")
} else if errors.Is(err, pull_service.ErrHasMerged) {
ctx.APIError(http.StatusMethodNotAllowed, "")
ctx.APIError(http.StatusMethodNotAllowed, "The PR is already merged")
} else if errors.Is(err, pull_service.ErrIsWorkInProgress) {
ctx.APIError(http.StatusMethodNotAllowed, "Work in progress PRs cannot be merged")
} else if errors.Is(err, pull_service.ErrNotMergeableState) {
@@ -989,8 +988,14 @@ func MergePullRequest(ctx *context.APIContext) {
message += "\n\n" + form.MergeMessageField
}

deleteBranchAfterMerge, err := pull_service.ShouldDeleteBranchAfterMerge(ctx, form.DeleteBranchAfterMerge, ctx.Repo.Repository, pr)
if err != nil {
ctx.APIErrorInternal(err)
return
}

if form.MergeWhenChecksSucceed {
scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message, form.DeleteBranchAfterMerge)
scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message, deleteBranchAfterMerge)
if err != nil {
if pull_model.IsErrAlreadyScheduledToAutoMerge(err) {
ctx.APIError(http.StatusConflict, err)
@@ -1035,47 +1040,10 @@ func MergePullRequest(ctx *context.APIContext) {
}
log.Trace("Pull request merged: %d", pr.ID)

// for agit flow, we should not delete the agit reference after merge
if form.DeleteBranchAfterMerge && pr.Flow == issues_model.PullRequestFlowGithub {
// check permission even it has been checked in repo_service.DeleteBranch so that we don't need to
// do RetargetChildrenOnMerge
if err := repo_service.CanDeleteBranch(ctx, pr.HeadRepo, pr.HeadBranch, ctx.Doer); err == nil {
// Don't cleanup when there are other PR's that use this branch as head branch.
exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
if err != nil {
ctx.APIErrorInternal(err)
return
}
if exist {
ctx.Status(http.StatusOK)
return
}

var headRepo *git.Repository
if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil {
headRepo = ctx.Repo.GitRepo
} else {
headRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
if err != nil {
ctx.APIErrorInternal(err)
return
}
defer headRepo.Close()
}

if err := repo_service.DeleteBranch(ctx, ctx.Doer, pr.HeadRepo, headRepo, pr.HeadBranch, pr); err != nil {
switch {
case git.IsErrBranchNotExist(err):
ctx.APIErrorNotFound(err)
case errors.Is(err, repo_service.ErrBranchIsDefault):
ctx.APIError(http.StatusForbidden, errors.New("can not delete default branch"))
case errors.Is(err, git_model.ErrBranchIsProtected):
ctx.APIError(http.StatusForbidden, errors.New("branch protected"))
default:
ctx.APIErrorInternal(err)
}
return
}
if deleteBranchAfterMerge {
if err = repo_service.DeleteBranchAfterMerge(ctx, ctx.Doer, pr.ID, nil); err != nil {
// no way to tell users that what error happens, and the PR has been merged, so ignore the error
log.Debug("DeleteBranchAfterMerge: pr %d, err: %v", pr.ID, err)
}
}

@@ -4,7 +4,7 @@
package repo

import (
"io"
"errors"
"net/http"
"strings"

@@ -12,6 +12,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
attachment_service "code.gitea.io/gitea/services/attachment"
"code.gitea.io/gitea/services/context"
@@ -191,6 +192,8 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
// "413":
// "$ref": "#/responses/error"

// Check if attachments are enabled
if !setting.Attachment.Enabled {
@@ -205,10 +208,8 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
}

// Get uploaded file from request
var content io.ReadCloser
var filename string
var size int64 = -1

var uploaderFile *attachment_service.UploaderFile
if strings.HasPrefix(strings.ToLower(ctx.Req.Header.Get("Content-Type")), "multipart/form-data") {
file, header, err := ctx.Req.FormFile("attachment")
if err != nil {
@@ -217,15 +218,14 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
}
defer file.Close()

content = file
size = header.Size
filename = header.Filename
if name := ctx.FormString("name"); name != "" {
filename = name
}
uploaderFile = attachment_service.NewLimitedUploaderKnownSize(file, header.Size)
} else {
content = ctx.Req.Body
filename = ctx.FormString("name")
uploaderFile = attachment_service.NewLimitedUploaderMaxBytesReader(ctx.Req.Body, ctx.Resp)
}

if filename == "" {
@@ -234,7 +234,7 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
}

// Create a new attachment and save the file
attach, err := attachment_service.UploadAttachment(ctx, content, setting.Repository.Release.AllowedTypes, size, &repo_model.Attachment{
attach, err := attachment_service.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, setting.Repository.Release.AllowedTypes, &repo_model.Attachment{
Name: filename,
UploaderID: ctx.Doer.ID,
RepoID: ctx.Repo.Repository.ID,
@@ -245,6 +245,12 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
ctx.APIError(http.StatusBadRequest, err)
return
}

if errors.Is(err, util.ErrContentTooLarge) {
ctx.APIError(http.StatusRequestEntityTooLarge, err)
return
}

ctx.APIErrorInternal(err)
return
}

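The unknown-size branch above goes through `NewLimitedUploaderMaxBytesReader(ctx.Req.Body, ctx.Resp)`, which by its name suggests it builds on Go's `http.MaxBytesReader`; that is an assumption on my part, not something the diff states. A generic sketch of that stdlib mechanism and of mapping an over-limit body to HTTP 413, mirroring the `util.ErrContentTooLarge` handling in these handlers:

```go
package main

import (
	"errors"
	"io"
	"net/http"
)

// uploadHandler caps the request body at 1 MiB. When the cap is exceeded,
// reading returns a *http.MaxBytesError, which maps naturally to HTTP 413.
func uploadHandler(w http.ResponseWriter, r *http.Request) {
	const limit = 1 << 20
	body := http.MaxBytesReader(w, r.Body, limit)
	data, err := io.ReadAll(body)
	if err != nil {
		var tooLarge *http.MaxBytesError
		if errors.As(err, &tooLarge) {
			http.Error(w, "content exceeds limit", http.StatusRequestEntityTooLarge)
			return
		}
		http.Error(w, "read error", http.StatusInternalServerError)
		return
	}
	_, _ = w.Write(data) // echo back, for demonstration only
}

func main() {
	http.HandleFunc("/upload", uploadHandler)
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```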
@@ -121,6 +121,9 @@ type swaggerParameterBodies struct {
// in:body
GetFilesOptions api.GetFilesOptions

// in:body
ApplyDiffPatchFileOptions api.ApplyDiffPatchFileOptions

// in:body
ChangeFilesOptions api.ChangeFilesOptions

@@ -45,7 +45,7 @@ func UpdatePublicKeyInRepo(ctx *context.PrivateContext) {
ctx.PlainText(http.StatusOK, "success")
}

// AuthorizedPublicKeyByContent searches content as prefix (leak e-mail part)
// AuthorizedPublicKeyByContent searches content as prefix (without comment part)
// and returns public key found.
func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) {
content := ctx.FormString("content")
@@ -57,5 +57,14 @@ func AuthorizedPublicKeyByContent(ctx *context.PrivateContext) {
})
return
}
ctx.PlainText(http.StatusOK, publicKey.AuthorizedString())

authorizedString, err := asymkey_model.AuthorizedStringForKey(publicKey)
if err != nil {
ctx.JSON(http.StatusInternalServerError, private.Response{
Err: err.Error(),
UserMsg: "invalid public key",
})
return
}
ctx.PlainText(http.StatusOK, authorizedString)
}

@@ -636,6 +636,7 @@ func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm, s
ErrorCode: oauth2_provider.AccessTokenErrorCodeInvalidRequest,
ErrorDescription: "cannot proceed your request",
})
return
}
resp, tokenErr := oauth2_provider.NewAccessTokenResponse(ctx, authorizationCode.Grant, serverKey, clientKey)
if tokenErr != nil {

@@ -6,6 +6,7 @@ package devtest
import (
mathRand "math/rand/v2"
"net/http"
"slices"
"strconv"
"strings"
"time"
@@ -17,25 +18,29 @@ import (
"code.gitea.io/gitea/services/context"
)

func generateMockStepsLog(logCur actions.LogCursor) (stepsLog []*actions.ViewStepLog) {
mockedLogs := []string{
"::group::test group for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"::endgroup::",
type generateMockStepsLogOptions struct {
mockCountFirst int
mockCountGeneral int
groupRepeat int
}

func generateMockStepsLog(logCur actions.LogCursor, opts generateMockStepsLogOptions) (stepsLog []*actions.ViewStepLog) {
var mockedLogs []string
mockedLogs = append(mockedLogs, "::group::test group for: step={step}, cursor={cursor}")
mockedLogs = append(mockedLogs, slices.Repeat([]string{"in group msg for: step={step}, cursor={cursor}"}, opts.groupRepeat)...)
mockedLogs = append(mockedLogs, "::endgroup::")
mockedLogs = append(mockedLogs,
"message for: step={step}, cursor={cursor}",
"message for: step={step}, cursor={cursor}",
"##[group]test group for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"##[endgroup]",
}
cur := logCur.Cursor // usually the cursor is the "file offset", but here we abuse it as "line number" to make the mock easier, intentionally
mockCount := util.Iif(logCur.Step == 0, 3, 1)
if logCur.Step == 1 && logCur.Cursor == 0 {
mockCount = 30 // for the first batch, return as many as possible to test the auto-expand and auto-scroll
}
for i := 0; i < mockCount; i++ {
)
// usually the cursor is the "file offset", but here we abuse it as "line number" to make the mock easier, intentionally
cur := logCur.Cursor
// for the first batch, return as many as possible to test the auto-expand and auto-scroll
mockCount := util.Iif(logCur.Cursor == 0, opts.mockCountFirst, opts.mockCountGeneral)
for range mockCount {
logStr := mockedLogs[int(cur)%len(mockedLogs)]
cur++
logStr = strings.ReplaceAll(logStr, "{step}", strconv.Itoa(logCur.Step))
@@ -127,21 +132,28 @@ func MockActionsRunsJobs(ctx *context.Context) {
Duration: "3h",
})

var mockLogOptions []generateMockStepsLogOptions
resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &actions.ViewJobStep{
Summary: "step 0 (mock slow)",
Duration: time.Hour.String(),
Status: actions_model.StatusRunning.String(),
})
mockLogOptions = append(mockLogOptions, generateMockStepsLogOptions{mockCountFirst: 30, mockCountGeneral: 1, groupRepeat: 3})

resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &actions.ViewJobStep{
Summary: "step 1 (mock fast)",
Duration: time.Hour.String(),
Status: actions_model.StatusRunning.String(),
})
mockLogOptions = append(mockLogOptions, generateMockStepsLogOptions{mockCountFirst: 30, mockCountGeneral: 3, groupRepeat: 20})

resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &actions.ViewJobStep{
Summary: "step 2 (mock error)",
Duration: time.Hour.String(),
Status: actions_model.StatusRunning.String(),
})
mockLogOptions = append(mockLogOptions, generateMockStepsLogOptions{mockCountFirst: 30, mockCountGeneral: 3, groupRepeat: 3})

if len(req.LogCursors) == 0 {
ctx.JSON(http.StatusOK, resp)
return
@@ -156,7 +168,7 @@ func MockActionsRunsJobs(ctx *context.Context) {
}
doSlowResponse = doSlowResponse || logCur.Step == 0
doErrorResponse = doErrorResponse || logCur.Step == 2
resp.Logs.StepsLog = append(resp.Logs.StepsLog, generateMockStepsLog(logCur)...)
resp.Logs.StepsLog = append(resp.Logs.StepsLog, generateMockStepsLog(logCur, mockLogOptions[logCur.Step])...)
}
if doErrorResponse {
if mathRand.Float64() > 0.5 {

@@ -425,7 +425,8 @@ func Rerun(ctx *context_module.Context) {
run.PreviousDuration = run.Duration()
run.Started = 0
run.Stopped = 0
if err := actions_model.UpdateRun(ctx, run, "started", "stopped", "previous_duration"); err != nil {
run.Status = actions_model.StatusWaiting
if err := actions_model.UpdateRun(ctx, run, "started", "stopped", "status", "previous_duration"); err != nil {
ctx.ServerError("UpdateRun", err)
return
}
@@ -849,8 +850,8 @@ func Run(ctx *context_module.Context) {
return nil
})
if err != nil {
if errLocale := util.ErrorAsLocale(err); errLocale != nil {
ctx.Flash.Error(ctx.Tr(errLocale.TrKey, errLocale.TrArgs...))
if errTr := util.ErrorAsTranslatable(err); errTr != nil {
ctx.Flash.Error(errTr.Translate(ctx.Locale))
ctx.Redirect(redirectURL)
} else {
ctx.ServerError("DispatchActionWorkflow", err)

@@ -45,7 +45,8 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
}
defer file.Close()

attach, err := attachment.UploadAttachment(ctx, file, allowedTypes, header.Size, &repo_model.Attachment{
uploaderFile := attachment.NewLimitedUploaderKnownSize(file, header.Size)
attach, err := attachment.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, allowedTypes, &repo_model.Attachment{
Name: header.Filename,
UploaderID: ctx.Doer.ID,
RepoID: repoID,

@@ -295,14 +295,14 @@ func EditFile(ctx *context.Context) {
}
defer dataRc.Close()

ctx.Data["FileSize"] = fInfo.fileSize
ctx.Data["FileSize"] = fInfo.blobOrLfsSize

// Only some file types are editable online as text.
if fInfo.isLFSFile() {
ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
} else if !fInfo.st.IsRepresentableAsText() {
ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
} else if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
} else if fInfo.blobOrLfsSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_too_large_file")
}

@@ -41,7 +41,7 @@ func NewDiffPatchPost(ctx *context.Context) {
Committer: parsed.GitCommitter,
})
if err != nil {
err = util.ErrorWrapLocale(err, "repo.editor.fail_to_apply_patch")
err = util.ErrorWrapTranslatable(err, "repo.editor.fail_to_apply_patch")
}
if err != nil {
editorHandleFileOperationError(ctx, parsed.NewBranchName, err)

@@ -74,7 +74,7 @@ func CherryPickPost(ctx *context.Context) {
opts.Content = buf.String()
_, err = files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, opts)
if err != nil {
err = util.ErrorWrapLocale(err, "repo.editor.fail_to_apply_patch")
err = util.ErrorWrapTranslatable(err, "repo.editor.fail_to_apply_patch")
}
}
if err != nil {

@@ -38,8 +38,8 @@ func editorHandleFileOperationErrorRender(ctx *context_service.Context, message,
}

func editorHandleFileOperationError(ctx *context_service.Context, targetBranchName string, err error) {
if errAs := util.ErrorAsLocale(err); errAs != nil {
ctx.JSONError(ctx.Tr(errAs.TrKey, errAs.TrArgs...))
if errAs := util.ErrorAsTranslatable(err); errAs != nil {
ctx.JSONError(errAs.Translate(ctx.Locale))
} else if errAs, ok := errorAs[git.ErrNotExist](err); ok {
ctx.JSONError(ctx.Tr("repo.editor.file_modifying_no_longer_exists", errAs.RelPath))
} else if errAs, ok := errorAs[git_model.ErrLFSFileLocked](err); ok {

@@ -41,6 +41,8 @@ func UploadFileToServer(ctx *context.Context) {
return
}

// FIXME: need to check the file size according to setting.Repository.Upload.FileMaxSize

uploaded, err := repo_model.NewUpload(ctx, name, buf, file)
if err != nil {
ctx.ServerError("NewUpload", err)

@@ -9,12 +9,14 @@ import (
"html/template"
"net/http"
"strconv"
"strings"

issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/renderhelper"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup/markdown"
repo_module "code.gitea.io/gitea/modules/repository"
@@ -287,9 +289,10 @@ func UpdateCommentContent(ctx *context.Context) {
ctx.ServerError("RenderString", err)
return
}
} else {
contentEmpty := fmt.Sprintf(`<span class="no-content">%s</span>`, ctx.Tr("repo.issues.no_content"))
renderedContent = template.HTML(contentEmpty)
}

if strings.TrimSpace(string(renderedContent)) == "" {
renderedContent = htmlutil.HTMLFormat(`<span class="no-content">%s</span>`, ctx.Tr("repo.issues.no_content"))
}

ctx.JSON(http.StatusOK, map[string]any{

@@ -131,7 +131,7 @@ func getPullInfo(ctx *context.Context) (issue *issues_model.Issue, ok bool) {
ctx.Data["Issue"] = issue

if !issue.IsPull {
ctx.NotFound(nil)
ctx.Redirect(issue.Link())
return nil, false
}

@@ -1095,11 +1095,17 @@ func MergePullRequest(ctx *context.Context) {
message += "\n\n" + form.MergeMessageField
}

deleteBranchAfterMerge, err := pull_service.ShouldDeleteBranchAfterMerge(ctx, form.DeleteBranchAfterMerge, ctx.Repo.Repository, pr)
if err != nil {
ctx.ServerError("ShouldDeleteBranchAfterMerge", err)
return
}

if form.MergeWhenChecksSucceed {
// delete all scheduled auto merges
_ = pull_model.DeleteScheduledAutoMerge(ctx, pr.ID)
// schedule auto merge
scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message, form.DeleteBranchAfterMerge)
scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message, deleteBranchAfterMerge)
if err != nil {
ctx.ServerError("ScheduleAutoMerge", err)
return
@@ -1185,35 +1191,27 @@ func MergePullRequest(ctx *context.Context) {

log.Trace("Pull request merged: %d", pr.ID)

if !form.DeleteBranchAfterMerge {
ctx.JSONRedirect(issue.Link())
if deleteBranchAfterMerge {
deleteBranchAfterMergeAndFlashMessage(ctx, pr.ID)
if ctx.Written() {
return
}
}
ctx.JSONRedirect(issue.Link())
}

// Don't cleanup when other pr use this branch as head branch
exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
if err != nil {
ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
func deleteBranchAfterMergeAndFlashMessage(ctx *context.Context, prID int64) {
var fullBranchName string
err := repo_service.DeleteBranchAfterMerge(ctx, ctx.Doer, prID, &fullBranchName)
if errTr := util.ErrorAsTranslatable(err); errTr != nil {
ctx.Flash.Error(errTr.Translate(ctx.Locale))
return
} else if err == nil {
ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", fullBranchName))
return
}
if exist {
ctx.JSONRedirect(issue.Link())
return
}

var headRepo *git.Repository
if ctx.Repo != nil && ctx.Repo.Repository != nil && pr.HeadRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
headRepo = ctx.Repo.GitRepo
} else {
headRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
if err != nil {
ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
return
}
defer headRepo.Close()
}
deleteBranch(ctx, pr, headRepo)
ctx.JSONRedirect(issue.Link())
// catch unknown errors
ctx.ServerError("DeleteBranchAfterMerge", err)
}

// CancelAutoMergePullRequest cancels a scheduled pr
@@ -1402,131 +1400,17 @@ func CompareAndPullRequestPost(ctx *context.Context) {
}

// CleanUpPullRequest responses for delete merged branch when PR has been merged
// Used by "DeleteBranchLink" for "delete branch" button
func CleanUpPullRequest(ctx *context.Context) {
issue, ok := getPullInfo(ctx)
if !ok {
return
}

pr := issue.PullRequest

// Don't cleanup unmerged and unclosed PRs and agit PRs
if !pr.HasMerged && !issue.IsClosed && pr.Flow != issues_model.PullRequestFlowGithub {
ctx.NotFound(nil)
deleteBranchAfterMergeAndFlashMessage(ctx, issue.PullRequest.ID)
if ctx.Written() {
return
}

// Don't cleanup when there are other PR's that use this branch as head branch.
exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
if err != nil {
ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
return
}
if exist {
ctx.NotFound(nil)
return
}

if err := pr.LoadHeadRepo(ctx); err != nil {
ctx.ServerError("LoadHeadRepo", err)
return
} else if pr.HeadRepo == nil {
// Forked repository has already been deleted
ctx.NotFound(nil)
return
} else if err = pr.LoadBaseRepo(ctx); err != nil {
ctx.ServerError("LoadBaseRepo", err)
return
} else if err = pr.HeadRepo.LoadOwner(ctx); err != nil {
ctx.ServerError("HeadRepo.LoadOwner", err)
return
}

if err := repo_service.CanDeleteBranch(ctx, pr.HeadRepo, pr.HeadBranch, ctx.Doer); err != nil {
if errors.Is(err, util.ErrPermissionDenied) {
ctx.NotFound(nil)
} else {
ctx.ServerError("CanDeleteBranch", err)
}
return
}

fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch

var gitBaseRepo *git.Repository

// Assume that the base repo is the current context (almost certainly)
if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.BaseRepoID && ctx.Repo.GitRepo != nil {
gitBaseRepo = ctx.Repo.GitRepo
} else {
// If not just open it
gitBaseRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.FullName()), err)
return
}
defer gitBaseRepo.Close()
}

// Now assume that the head repo is the same as the base repo (reasonable chance)
gitRepo := gitBaseRepo
// But if not: is it the same as the context?
if pr.BaseRepoID != pr.HeadRepoID && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil {
gitRepo = ctx.Repo.GitRepo
} else if pr.BaseRepoID != pr.HeadRepoID {
// Otherwise just load it up
gitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
if err != nil {
ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
return
}
defer gitRepo.Close()
}

defer func() {
ctx.JSONRedirect(issue.Link())
}()

// Check if branch has no new commits
headCommitID, err := gitBaseRepo.GetRefCommitID(pr.GetGitHeadRefName())
if err != nil {
log.Error("GetRefCommitID: %v", err)
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
return
}
branchCommitID, err := gitRepo.GetBranchCommitID(pr.HeadBranch)
if err != nil {
log.Error("GetBranchCommitID: %v", err)
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
return
}
if headCommitID != branchCommitID {
ctx.Flash.Error(ctx.Tr("repo.branch.delete_branch_has_new_commits", fullBranchName))
return
}

deleteBranch(ctx, pr, gitRepo)
}

func deleteBranch(ctx *context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) {
fullBranchName := pr.HeadRepo.FullName() + ":" + pr.HeadBranch

if err := repo_service.DeleteBranch(ctx, ctx.Doer, pr.HeadRepo, gitRepo, pr.HeadBranch, pr); err != nil {
switch {
case git.IsErrBranchNotExist(err):
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
case errors.Is(err, repo_service.ErrBranchIsDefault):
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
case errors.Is(err, git_model.ErrBranchIsProtected):
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
default:
log.Error("DeleteBranch: %v", err)
ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
}
return
}

ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", fullBranchName))
}

// DownloadPullDiff render a pull's raw diff
@@ -1583,7 +1467,16 @@ func UpdatePullRequestTarget(ctx *context.Context) {
}

if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, targetBranch); err != nil {
if issues_model.IsErrPullRequestAlreadyExists(err) {
switch {
case git_model.IsErrBranchNotExist(err):
errorMessage := ctx.Tr("form.target_branch_not_exist")

ctx.Flash.Error(errorMessage)
ctx.JSON(http.StatusBadRequest, map[string]any{
"error": err.Error(),
"user_error": errorMessage,
})
case issues_model.IsErrPullRequestAlreadyExists(err):
err := err.(issues_model.ErrPullRequestAlreadyExists)

RepoRelPath := ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
@@ -1594,7 +1487,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else if issues_model.IsErrIssueIsClosed(err) {
case issues_model.IsErrIssueIsClosed(err):
errorMessage := ctx.Tr("repo.pulls.is_closed")

ctx.Flash.Error(errorMessage)
@@ -1602,7 +1495,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else if pull_service.IsErrPullRequestHasMerged(err) {
case pull_service.IsErrPullRequestHasMerged(err):
errorMessage := ctx.Tr("repo.pulls.has_merged")

ctx.Flash.Error(errorMessage)
@@ -1610,7 +1503,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else if git_model.IsErrBranchesEqual(err) {
case git_model.IsErrBranchesEqual(err):
errorMessage := ctx.Tr("repo.pulls.nothing_to_compare")

ctx.Flash.Error(errorMessage)
@@ -1618,7 +1511,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else {
default:
ctx.ServerError("UpdatePullRequestTarget", err)
}
return

@@ -4,18 +4,13 @@
package repo

import (
    "bytes"
    "io"
    "net/http"
    "path"

    "code.gitea.io/gitea/models/renderhelper"
    "code.gitea.io/gitea/modules/charset"
    "code.gitea.io/gitea/modules/git"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/markup"
    "code.gitea.io/gitea/modules/typesniffer"
    "code.gitea.io/gitea/modules/util"
    "code.gitea.io/gitea/services/context"
)

@@ -44,22 +39,8 @@ func RenderFile(ctx *context.Context) {
    }
    defer dataRc.Close()

    buf := make([]byte, 1024)
    n, _ := util.ReadAtMost(dataRc, buf)
    buf = buf[:n]

    st := typesniffer.DetectContentType(buf)
    isTextFile := st.IsText()

    rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
    ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'; sandbox allow-scripts")

    if markupType := markup.DetectMarkupTypeByFileName(blob.Name()); markupType == "" {
        if isTextFile {
            _, _ = io.Copy(ctx.Resp, rd)
        } else {
            http.Error(ctx.Resp, "Unsupported file type render", http.StatusInternalServerError)
        }
        http.Error(ctx.Resp, "Unsupported file type render", http.StatusBadRequest)
        return
    }

@@ -68,7 +49,29 @@ func RenderFile(ctx *context.Context) {
        CurrentTreePath: path.Dir(ctx.Repo.TreePath),
    }).WithRelativePath(ctx.Repo.TreePath).WithInStandalonePage(true)

    err = markup.Render(rctx, rd, ctx.Resp)
    renderer, err := markup.FindRendererByContext(rctx)
    if err != nil {
        http.Error(ctx.Resp, "Unable to find renderer", http.StatusBadRequest)
        return
    }

    extRenderer, ok := renderer.(markup.ExternalRenderer)
    if !ok {
        http.Error(ctx.Resp, "Unable to get external renderer", http.StatusBadRequest)
        return
    }

    // To render PDF in iframe, the sandbox must NOT be used (iframe & CSP header).
    // Chrome blocks the PDF rendering when sandboxed, even if all "allow-*" are set.
    // HINT: PDF-RENDER-SANDBOX: PDF won't render in sandboxed context
    extRendererOpts := extRenderer.GetExternalRendererOptions()
    if extRendererOpts.ContentSandbox != "" {
        ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'; sandbox "+extRendererOpts.ContentSandbox)
    } else {
        ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
    }

    err = markup.RenderWithRenderer(rctx, renderer, dataRc, ctx.Resp)
    if err != nil {
        log.Error("Failed to render file %q: %v", ctx.Repo.TreePath, err)
        http.Error(ctx.Resp, "Failed to render file", http.StatusInternalServerError)
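The new comments explain the design choice: Chrome refuses to display PDFs inside a sandboxed frame, so the `sandbox` directive is only emitted when the renderer asks for one. A small standalone net/http sketch of that conditional Content-Security-Policy header (the `contentSandbox` parameter stands in for the renderer option; it is not Gitea's API):

```go
package main

import "net/http"

// serveRendered sets the Content-Security-Policy header before writing the
// rendered body. The sandbox directive is only attached when the renderer
// explicitly requests one, because a sandboxed frame breaks Chrome's built-in
// PDF viewer (see the PDF-RENDER-SANDBOX hint above).
func serveRendered(w http.ResponseWriter, contentSandbox string, body []byte) {
	if contentSandbox != "" {
		w.Header().Add("Content-Security-Policy", "frame-src 'self'; sandbox "+contentSandbox)
	} else {
		w.Header().Add("Content-Security-Policy", "frame-src 'self'")
	}
	_, _ = w.Write(body)
}

func main() {
	http.HandleFunc("/render", func(w http.ResponseWriter, r *http.Request) {
		serveRendered(w, "allow-scripts", []byte("<p>rendered</p>"))
	})
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```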
@@ -270,8 +270,7 @@ func LFSFileGet(ctx *context.Context) {
    // FIXME: there is no IsPlainText set, but template uses it
    ctx.Data["IsTextFile"] = st.IsText()
    ctx.Data["FileSize"] = meta.Size
    // FIXME: the last field is the URL-base64-encoded filename, it should not be "direct"
    ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s/%s.git/info/lfs/objects/%s/%s", setting.AppURL, url.PathEscape(ctx.Repo.Repository.OwnerName), url.PathEscape(ctx.Repo.Repository.Name), url.PathEscape(meta.Oid), "direct")
    ctx.Data["RawFileLink"] = fmt.Sprintf("%s/%s/%s.git/info/lfs/objects/%s", setting.AppSubURL, url.PathEscape(ctx.Repo.Repository.OwnerName), url.PathEscape(ctx.Repo.Repository.Name), url.PathEscape(meta.Oid))
    switch {
    case st.IsRepresentableAsText():
        if meta.Size >= setting.UI.MaxDisplayFileSize {
@@ -60,7 +60,7 @@ const (
)

type fileInfo struct {
    fileSize      int64
    blobOrLfsSize int64
    lfsMeta       *lfs.Pointer
    st            typesniffer.SniffedType
}
@@ -81,7 +81,7 @@ func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) (buf []b
    n, _ := util.ReadAtMost(dataRc, buf)
    buf = buf[:n]

    fi = &fileInfo{fileSize: blob.Size(), st: typesniffer.DetectContentType(buf)}
    fi = &fileInfo{blobOrLfsSize: blob.Size(), st: typesniffer.DetectContentType(buf)}

    // FIXME: what happens when README file is an image?
    if !fi.st.IsText() || !setting.LFS.StartServer {
@@ -114,7 +114,7 @@ func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) (buf []b
    }
    buf = buf[:n]
    fi.st = typesniffer.DetectContentType(buf)
    fi.fileSize = blob.Size()
    fi.blobOrLfsSize = meta.Pointer.Size
    fi.lfsMeta = &meta.Pointer
    return buf, dataRc, fi, nil
}
@@ -151,17 +151,28 @@ func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
}

func markupRender(ctx *context.Context, renderCtx *markup.RenderContext, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) {
    renderer, err := markup.FindRendererByContext(renderCtx)
    if err != nil {
        return nil, "", err
    }

    markupRd, markupWr := io.Pipe()
    defer markupWr.Close()

    done := make(chan struct{})
    go func() {
        sb := &strings.Builder{}
        // We allow NBSP here this is rendered
        escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, charset.RuneNBSP)
        if markup.RendererNeedPostProcess(renderer) {
            escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, charset.RuneNBSP) // We allow NBSP here this is rendered
        } else {
            escaped = &charset.EscapeStatus{}
            _, _ = io.Copy(sb, markupRd)
        }
        output = template.HTML(sb.String())
        close(done)
    }()
    err = markup.Render(renderCtx, input, markupWr)

    err = markup.RenderWithRenderer(renderCtx, renderer, input, markupWr)
    _ = markupWr.CloseWithError(err)
    <-done
    return escaped, output, err
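`markupRender` streams renderer output through an `io.Pipe` so a goroutine can post-process it while it is still being produced. A stripped-down sketch of that pipe-and-collect pattern using only the standard library (the uppercasing stands in for the charset escaping above):

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// renderThroughPipe writes produced output into one end of an io.Pipe while a
// goroutine collects and post-processes the other end, mirroring the shape of
// markupRender above.
func renderThroughPipe(produce func(w io.Writer) error) (string, error) {
	rd, wr := io.Pipe()
	done := make(chan struct{})

	var out string
	go func() {
		sb := &strings.Builder{}
		_, _ = io.Copy(sb, rd)             // consume the stream as it is produced
		out = strings.ToUpper(sb.String()) // stand-in for escaping / post-processing
		close(done)
	}()

	err := produce(wr)
	_ = wr.CloseWithError(err) // unblocks the reader even when rendering failed
	<-done
	return out, err
}

func main() {
	s, err := renderThroughPipe(func(w io.Writer) error {
		_, e := io.WriteString(w, "hello pipe")
		return e
	})
	fmt.Println(s, err) // HELLO PIPE <nil>
}
```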
@@ -172,7 +172,7 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {

    blob := entry.Blob()

    ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefFullName.ShortName())
    ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+ctx.Repo.TreePath, ctx.Repo.RefFullName.ShortName())
    ctx.Data["FileIsSymlink"] = entry.IsLink()
    ctx.Data["FileTreePath"] = ctx.Repo.TreePath
    ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/raw/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
@@ -226,7 +226,7 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {
    }

    ctx.Data["IsLFSFile"] = fInfo.isLFSFile()
    ctx.Data["FileSize"] = fInfo.fileSize
    ctx.Data["FileSize"] = fInfo.blobOrLfsSize
    ctx.Data["IsRepresentableAsText"] = fInfo.st.IsRepresentableAsText()
    ctx.Data["IsExecutable"] = entry.IsExecutable()
    ctx.Data["CanCopyContent"] = fInfo.st.IsRepresentableAsText() || fInfo.st.IsImage()
@@ -243,7 +243,7 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {

    utf8Reader := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
    switch {
    case fInfo.fileSize >= setting.UI.MaxDisplayFileSize:
    case fInfo.blobOrLfsSize >= setting.UI.MaxDisplayFileSize:
        ctx.Data["IsFileTooLarge"] = true
    case handleFileViewRenderMarkup(ctx, entry.Name(), fInfo.st, buf, utf8Reader):
        // it also sets ctx.Data["FileContent"] and more
@@ -8,7 +8,6 @@ import (
    "fmt"
    "html/template"
    "net/http"
    "path"
    "strconv"
    "strings"
    "time"
@@ -139,7 +138,7 @@ func prepareToRenderDirectory(ctx *context.Context) {

    if ctx.Repo.TreePath != "" {
        ctx.Data["HideRepoInfo"] = true
        ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefFullName.ShortName())
        ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+ctx.Repo.TreePath, ctx.Repo.RefFullName.ShortName())
    }

    subfolder, readmeFile, err := findReadmeFileInEntries(ctx, ctx.Repo.TreePath, entries, true)

@@ -170,7 +170,7 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil

    ctx.Data["FileIsText"] = fInfo.st.IsText()
    ctx.Data["FileTreePath"] = readmeFullPath
    ctx.Data["FileSize"] = fInfo.fileSize
    ctx.Data["FileSize"] = fInfo.blobOrLfsSize
    ctx.Data["IsLFSFile"] = fInfo.isLFSFile()

    if fInfo.isLFSFile() {
@@ -182,7 +182,7 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil
        return
    }

    if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
    if fInfo.blobOrLfsSize >= setting.UI.MaxDisplayFileSize {
        // Pretend that this is a normal text file to display 'This file is too large to be shown'
        ctx.Data["IsFileTooLarge"] = true
        return
@@ -303,13 +303,6 @@ func registerWebRoutes(m *web.Router) {

    validation.AddBindingRules()

    linkAccountEnabled := func(ctx *context.Context) {
        if !setting.Service.EnableOpenIDSignIn && !setting.Service.EnableOpenIDSignUp && !setting.OAuth2.Enabled {
            ctx.HTTPError(http.StatusForbidden)
            return
        }
    }

    openIDSignInEnabled := func(ctx *context.Context) {
        if !setting.Service.EnableOpenIDSignIn {
            ctx.HTTPError(http.StatusForbidden)
@@ -541,9 +534,9 @@ func registerWebRoutes(m *web.Router) {
    }, openIDSignInEnabled)
    m.Get("/sign_up", auth.SignUp)
    m.Post("/sign_up", web.Bind(forms.RegisterForm{}), auth.SignUpPost)
    m.Get("/link_account", linkAccountEnabled, auth.LinkAccount)
    m.Post("/link_account_signin", linkAccountEnabled, web.Bind(forms.SignInForm{}), auth.LinkAccountPostSignIn)
    m.Post("/link_account_signup", linkAccountEnabled, web.Bind(forms.RegisterForm{}), auth.LinkAccountPostRegister)
    m.Get("/link_account", auth.LinkAccount)
    m.Post("/link_account_signin", web.Bind(forms.SignInForm{}), auth.LinkAccountPostSignIn)
    m.Post("/link_account_signup", web.Bind(forms.RegisterForm{}), auth.LinkAccountPostRegister)
    m.Group("/two_factor", func() {
        m.Get("", auth.TwoFactor)
        m.Post("", web.Bind(forms.TwoFactorAuthForm{}), auth.TwoFactorPost)
@@ -618,7 +611,7 @@ func registerWebRoutes(m *web.Router) {
        m.Post("/delete", security.DeleteOpenID)
        m.Post("/toggle_visibility", security.ToggleOpenIDVisibility)
    }, openIDSignInEnabled)
    m.Post("/account_link", linkAccountEnabled, security.DeleteAccountLink)
    m.Post("/account_link", security.DeleteAccountLink)
})

m.Group("/applications", func() {
@@ -5,10 +5,8 @@ package actions

import (
    "context"
    "regexp"

    actions_model "code.gitea.io/gitea/models/actions"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/util"
    secret_service "code.gitea.io/gitea/services/secrets"
)
@@ -18,10 +16,6 @@ func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data, desc
    return nil, err
}

if err := envNameCIRegexMatch(name); err != nil {
    return nil, err
}

v, err := actions_model.InsertVariable(ctx, ownerID, repoID, name, util.ReserveLineBreakForTextarea(data), description)
if err != nil {
    return nil, err
@@ -35,10 +29,6 @@ func UpdateVariableNameData(ctx context.Context, variable *actions_model.ActionV
    return false, err
}

if err := envNameCIRegexMatch(variable.Name); err != nil {
    return false, err
}

variable.Data = util.ReserveLineBreakForTextarea(variable.Data)

return actions_model.UpdateVariableCols(ctx, variable, "name", "data", "description")
@@ -49,14 +39,6 @@ func DeleteVariableByID(ctx context.Context, variableID int64) error {
}

func DeleteVariableByName(ctx context.Context, ownerID, repoID int64, name string) error {
    if err := secret_service.ValidateName(name); err != nil {
        return err
    }

    if err := envNameCIRegexMatch(name); err != nil {
        return err
    }

    v, err := GetVariable(ctx, actions_model.FindVariablesOpts{
        OwnerID: ownerID,
        RepoID:  repoID,
@@ -79,19 +61,3 @@ func GetVariable(ctx context.Context, opts actions_model.FindVariablesOpts) (*ac
    }
    return vars[0], nil
}

// some regular expression of `variables` and `secrets`
// reference to:
// https://docs.github.com/en/actions/learn-github-actions/variables#naming-conventions-for-configuration-variables
// https://docs.github.com/en/actions/security-guides/encrypted-secrets#naming-your-secrets
var (
    forbiddenEnvNameCIRx = regexp.MustCompile("(?i)^CI")
)

func envNameCIRegexMatch(name string) error {
    if forbiddenEnvNameCIRx.MatchString(name) {
        log.Error("Env Name cannot be ci")
        return util.NewInvalidArgumentErrorf("env name cannot be ci")
    }
    return nil
}
@@ -26,6 +26,7 @@ import (

    "github.com/nektos/act/pkg/jobparser"
    "github.com/nektos/act/pkg/model"
    "gopkg.in/yaml.v3"
)

func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnable bool) error {
@@ -48,14 +49,14 @@ func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnabl

func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, workflowID, ref string, processInputs func(model *model.WorkflowDispatch, inputs map[string]any) error) error {
    if workflowID == "" {
        return util.ErrorWrapLocale(
        return util.ErrorWrapTranslatable(
            util.NewNotExistErrorf("workflowID is empty"),
            "actions.workflow.not_found", workflowID,
        )
    }

    if ref == "" {
        return util.ErrorWrapLocale(
        return util.ErrorWrapTranslatable(
            util.NewNotExistErrorf("ref is empty"),
            "form.target_ref_not_exist", ref,
        )
@@ -65,7 +66,7 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
    cfgUnit := repo.MustGetUnit(ctx, unit.TypeActions)
    cfg := cfgUnit.ActionsConfig()
    if cfg.IsWorkflowDisabled(workflowID) {
        return util.ErrorWrapLocale(
        return util.ErrorWrapTranslatable(
            util.NewPermissionDeniedErrorf("workflow is disabled"),
            "actions.workflow.disabled",
        )
@@ -84,7 +85,7 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
        runTargetCommit, err = gitRepo.GetBranchCommit(ref)
    }
    if err != nil {
        return util.ErrorWrapLocale(
        return util.ErrorWrapTranslatable(
            util.NewNotExistErrorf("ref %q doesn't exist", ref),
            "form.target_ref_not_exist", ref,
        )
@@ -125,7 +126,7 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
    }

    if entry == nil {
        return util.ErrorWrapLocale(
        return util.ErrorWrapTranslatable(
            util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
            "actions.workflow.not_found", workflowID,
        )
@@ -136,9 +137,24 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
        return err
    }

    singleWorkflow := &jobparser.SingleWorkflow{}
    if err := yaml.Unmarshal(content, singleWorkflow); err != nil {
        return fmt.Errorf("failed to unmarshal workflow content: %w", err)
    }
    // get inputs from post
    workflow := &model.Workflow{
        RawOn: singleWorkflow.RawOn,
    }
    inputsWithDefaults := make(map[string]any)
    if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
        if err = processInputs(workflowDispatch, inputsWithDefaults); err != nil {
            return err
        }
    }

    giteaCtx := GenerateGiteaContext(run, nil)

    workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
    workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext()), jobparser.WithInputs(inputsWithDefaults))
    if err != nil {
        return err
    }
@@ -148,23 +164,12 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
    }

    if len(workflows) == 0 {
        return util.ErrorWrapLocale(
        return util.ErrorWrapTranslatable(
            util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
            "actions.workflow.not_found", workflowID,
        )
    }

    // get inputs from post
    workflow := &model.Workflow{
        RawOn: workflows[0].RawOn,
    }
    inputsWithDefaults := make(map[string]any)
    if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
        if err = processInputs(workflowDispatch, inputsWithDefaults); err != nil {
            return err
        }
    }

    // ctx.Req.PostForm -> WorkflowDispatchPayload.Inputs -> ActionRun.EventPayload -> runner: ghc.Event
    // https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
    // https://docs.github.com/en/webhooks/webhook-events-and-payloads#workflow_dispatch
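The dispatch change parses the workflow file once up front so the `workflow_dispatch` inputs (and their defaults) are known before `jobparser.Parse` runs with `WithInputs`. A rough standalone sketch of reading input defaults from workflow YAML with `gopkg.in/yaml.v3`; the struct shape is illustrative only and is not the `act`/`jobparser` model used above:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// minimal shape for reading workflow_dispatch input defaults; illustration only.
type workflow struct {
	On struct {
		WorkflowDispatch struct {
			Inputs map[string]struct {
				Default string `yaml:"default"`
			} `yaml:"inputs"`
		} `yaml:"workflow_dispatch"`
	} `yaml:"on"`
}

func main() {
	content := []byte(`
"on":
  workflow_dispatch:
    inputs:
      environment:
        default: staging
`)
	var wf workflow
	if err := yaml.Unmarshal(content, &wf); err != nil {
		panic(err)
	}
	// Seed the inputs map with defaults, roughly as DispatchActionWorkflow does
	// before applying the values posted by the user.
	inputs := make(map[string]any)
	for name, in := range wf.On.WorkflowDispatch.Inputs {
		inputs[name] = in.Default
	}
	fmt.Println(inputs) // map[environment:staging]
}
```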
@@ -25,10 +25,7 @@ import (
// There is a dependence on the database within RewriteAllPrincipalKeys & RegeneratePrincipalKeys
// The sshOpLocker is used from ssh_key_authorized_keys.go

const (
    authorizedPrincipalsFile = "authorized_principals"
    tplCommentPrefix         = `# gitea public key`
)
const authorizedPrincipalsFile = "authorized_principals"

// RewriteAllPrincipalKeys removes any authorized principal and rewrite all keys from database again.
// Note: db.GetEngine(ctx).Iterate does not get latest data after insert/delete, so we have to call this function
@@ -90,10 +87,9 @@ func rewriteAllPrincipalKeys(ctx context.Context) error {
    return util.Rename(tmpPath, fPath)
}

func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
func regeneratePrincipalKeys(ctx context.Context, t io.Writer) error {
    if err := db.GetEngine(ctx).Where("type = ?", asymkey_model.KeyTypePrincipal).Iterate(new(asymkey_model.PublicKey), func(idx int, bean any) (err error) {
        _, err = t.WriteString((bean.(*asymkey_model.PublicKey)).AuthorizedString())
        return err
        return asymkey_model.WriteAuthorizedStringForValidKey(bean.(*asymkey_model.PublicKey), t)
    }); err != nil {
        return err
    }
@@ -114,11 +110,11 @@ func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
    scanner := bufio.NewScanner(f)
    for scanner.Scan() {
        line := scanner.Text()
        if strings.HasPrefix(line, tplCommentPrefix) {
        if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
            scanner.Scan()
            continue
        }
        _, err = t.WriteString(line + "\n")
        _, err = io.WriteString(t, line+"\n")
        if err != nil {
            return err
        }
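`regeneratePrincipalKeys` now accepts any `io.Writer` and uses `io.WriteString`, which still takes the `WriteString` fast path when the destination happens to implement `io.StringWriter`. A tiny illustration of that behaviour with standard-library writers:

```go
package main

import (
	"bytes"
	"io"
	"os"
)

// writeLine accepts any io.Writer. io.WriteString uses the WriteString method
// when the destination implements io.StringWriter (as *bytes.Buffer does) and
// falls back to Write([]byte) otherwise.
func writeLine(w io.Writer, line string) error {
	_, err := io.WriteString(w, line+"\n")
	return err
}

func main() {
	var buf bytes.Buffer
	_ = writeLine(&buf, "# example comment line") // buffered, fast path
	_ = writeLine(os.Stdout, buf.String())        // *os.File works the same way
}
```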
@@ -6,11 +6,14 @@ package attachment
import (
    "bytes"
    "context"
    "errors"
    "fmt"
    "io"
    "net/http"

    "code.gitea.io/gitea/models/db"
    repo_model "code.gitea.io/gitea/models/repo"
    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/storage"
    "code.gitea.io/gitea/modules/util"
    "code.gitea.io/gitea/services/context/upload"
@@ -28,27 +31,56 @@ func NewAttachment(ctx context.Context, attach *repo_model.Attachment, file io.R
    attach.UUID = uuid.New().String()
    size, err := storage.Attachments.Save(attach.RelativePath(), file, size)
    if err != nil {
        return fmt.Errorf("Create: %w", err)
        return fmt.Errorf("Attachments.Save: %w", err)
    }
    attach.Size = size

    return db.Insert(ctx, attach)
})

return attach, err
}

// UploadAttachment upload new attachment into storage and update database
func UploadAttachment(ctx context.Context, file io.Reader, allowedTypes string, fileSize int64, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
type UploaderFile struct {
    rd         io.ReadCloser
    size       int64
    respWriter http.ResponseWriter
}

func NewLimitedUploaderKnownSize(r io.Reader, size int64) *UploaderFile {
    return &UploaderFile{rd: io.NopCloser(r), size: size}
}

func NewLimitedUploaderMaxBytesReader(r io.ReadCloser, w http.ResponseWriter) *UploaderFile {
    return &UploaderFile{rd: r, size: -1, respWriter: w}
}

func UploadAttachmentGeneralSizeLimit(ctx context.Context, file *UploaderFile, allowedTypes string, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
    return uploadAttachment(ctx, file, allowedTypes, setting.Attachment.MaxSize<<20, attach)
}

func uploadAttachment(ctx context.Context, file *UploaderFile, allowedTypes string, maxFileSize int64, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
    src := file.rd
    if file.size < 0 {
        src = http.MaxBytesReader(file.respWriter, src, maxFileSize)
    }
    buf := make([]byte, 1024)
    n, _ := util.ReadAtMost(file, buf)
    n, _ := util.ReadAtMost(src, buf)
    buf = buf[:n]

    if err := upload.Verify(buf, attach.Name, allowedTypes); err != nil {
        return nil, err
    }

    return NewAttachment(ctx, attach, io.MultiReader(bytes.NewReader(buf), file), fileSize)
    if maxFileSize >= 0 && file.size > maxFileSize {
        return nil, util.ErrorWrap(util.ErrContentTooLarge, "attachment exceeds limit %d", maxFileSize)
    }

    attach, err := NewAttachment(ctx, attach, io.MultiReader(bytes.NewReader(buf), src), file.size)
    var maxBytesError *http.MaxBytesError
    if errors.As(err, &maxBytesError) {
        return nil, util.ErrorWrap(util.ErrContentTooLarge, "attachment exceeds limit %d", maxFileSize)
    }
    return attach, err
}

// UpdateAttachment updates an attachment, verifying that its name is among the allowed types.
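When the upload size is unknown, the new uploader wraps the body in `http.MaxBytesReader` and maps the resulting `*http.MaxBytesError` to a content-too-large error. A self-contained sketch of that pattern (handler, route, and limit are illustrative):

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"net/http"
)

const maxUpload = 1 << 20 // 1 MiB, illustrative limit

func uploadHandler(w http.ResponseWriter, r *http.Request) {
	// MaxBytesReader stops reading after maxUpload bytes; the error it then
	// returns is a *http.MaxBytesError carrying the configured limit.
	body := http.MaxBytesReader(w, r.Body, maxUpload)
	n, err := io.Copy(io.Discard, body)

	var maxErr *http.MaxBytesError
	if errors.As(err, &maxErr) {
		http.Error(w, fmt.Sprintf("upload exceeds %d bytes", maxErr.Limit), http.StatusRequestEntityTooLarge)
		return
	}
	if err != nil {
		http.Error(w, "read error", http.StatusInternalServerError)
		return
	}
	fmt.Fprintf(w, "stored %d bytes\n", n)
}

func main() {
	http.HandleFunc("/attachments", uploadHandler)
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```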
@@ -8,6 +8,7 @@ import (

    "code.gitea.io/gitea/models/auth"
    "code.gitea.io/gitea/modules/json"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/secret"
    "code.gitea.io/gitea/modules/setting"
)
@@ -66,9 +67,12 @@ func (source *Source) FromDB(bs []byte) error {
    }
    if source.BindPasswordEncrypt != "" {
        source.BindPassword, err = secret.DecryptSecret(setting.SecretKey, source.BindPasswordEncrypt)
        if err != nil {
            log.Error("Unable to decrypt bind password for LDAP source, maybe SECRET_KEY is wrong: %v", err)
        }
        source.BindPasswordEncrypt = ""
    }
    return err
    return nil
}

// ToDB exports a LDAPConfig to a serialized format.
@@ -205,18 +205,6 @@ func handlePullRequestAutoMerge(pullID int64, sha string) {
    return
}

var headGitRepo *git.Repository
if pr.BaseRepoID == pr.HeadRepoID {
    headGitRepo = baseGitRepo
} else {
    headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
    if err != nil {
        log.Error("OpenRepository %-v: %v", pr.HeadRepo, err)
        return
    }
    defer headGitRepo.Close()
}

switch pr.Flow {
case issues_model.PullRequestFlowGithub:
    headBranchExist := pr.HeadRepo != nil && gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch)
@@ -276,9 +264,12 @@ func handlePullRequestAutoMerge(pullID int64, sha string) {
    return
}

if pr.Flow == issues_model.PullRequestFlowGithub && scheduledPRM.DeleteBranchAfterMerge {
    if err := repo_service.DeleteBranch(ctx, doer, pr.HeadRepo, headGitRepo, pr.HeadBranch, pr); err != nil {
        log.Error("DeletePullRequestHeadBranch: %v", err)
    deleteBranchAfterMerge, err := pull_service.ShouldDeleteBranchAfterMerge(ctx, &scheduledPRM.DeleteBranchAfterMerge, pr.BaseRepo, pr)
    if err != nil {
        log.Error("ShouldDeleteBranchAfterMerge: %v", err)
    } else if deleteBranchAfterMerge {
        if err = repo_service.DeleteBranchAfterMerge(ctx, doer, pr.ID, nil); err != nil {
            log.Error("DeleteBranchAfterMerge: %v", err)
        }
    }
}
@@ -5,6 +5,7 @@ package automergequeue

import (
    "context"
    "errors"
    "fmt"

    issues_model "code.gitea.io/gitea/models/issues"
@@ -17,7 +18,7 @@ var AutoMergeQueue *queue.WorkerPoolQueue[string]

var AddToQueue = func(pr *issues_model.PullRequest, sha string) {
    log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha)
    if err := AutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil {
    if err := AutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil && !errors.Is(err, queue.ErrAlreadyInQueue) {
        log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err)
    }
}
@@ -229,8 +229,7 @@ func APIContexter() func(http.Handler) http.Handler {

    // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid.
    if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
        if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size
            ctx.APIErrorInternal(err)
        if !ctx.ParseMultipartForm() {
            return
        }
    }

@@ -4,6 +4,7 @@
package context

import (
    "errors"
    "fmt"
    "html/template"
    "io"
@@ -42,6 +43,20 @@ type Base struct {
    Locale translation.Locale
}

func (b *Base) ParseMultipartForm() bool {
    err := b.Req.ParseMultipartForm(32 << 20)
    if err != nil {
        // TODO: all errors caused by client side should be ignored (connection closed).
        if !errors.Is(err, io.EOF) && !errors.Is(err, io.ErrUnexpectedEOF) {
            // Errors caused by server side (disk full) should be logged.
            log.Error("Failed to parse request multipart form for %s: %v", b.Req.RequestURI, err)
        }
        b.HTTPError(http.StatusInternalServerError, "failed to parse request multipart form")
        return false
    }
    return true
}

// AppendAccessControlExposeHeaders append headers by name to "Access-Control-Expose-Headers" header
func (b *Base) AppendAccessControlExposeHeaders(names ...string) {
    val := b.RespHeader().Get("Access-Control-Expose-Headers")

@@ -186,8 +186,7 @@ func Contexter() func(next http.Handler) http.Handler {

    // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid.
    if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
        if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // 32MB max size
            ctx.ServerError("ParseMultipartForm", err)
        if !ctx.ParseMultipartForm() {
            return
        }
    }
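Both contexters now delegate to one `ParseMultipartForm` helper with a fixed 32 MiB in-memory threshold, treating client-side EOF errors as non-fatal noise. For reference, a minimal plain-handler sketch of `(*http.Request).ParseMultipartForm` with the same limit and error handling (mirrored from the helper above; the route is illustrative, not Gitea's API):

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
)

func formHandler(w http.ResponseWriter, r *http.Request) {
	// 32 MiB is the in-memory threshold; larger parts spill to temporary files.
	if err := r.ParseMultipartForm(32 << 20); err != nil {
		// EOF-style errors usually mean the client aborted the upload; only
		// log the rest (e.g. disk full), as the helper above does.
		if !errors.Is(err, io.EOF) && !errors.Is(err, io.ErrUnexpectedEOF) {
			log.Printf("parse multipart form: %v", err)
		}
		http.Error(w, "failed to parse request multipart form", http.StatusInternalServerError)
		return
	}
	fmt.Fprintf(w, "fields: %v\n", r.MultipartForm.Value)
}

func main() {
	http.HandleFunc("/submit", formHandler)
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```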
@@ -537,6 +537,7 @@ func RepoAssignment(ctx *Context) {
    }

    ctx.Data["Title"] = repo.Owner.Name + "/" + repo.Name
    ctx.Data["PageTitleCommon"] = repo.Name + " - " + setting.AppName
    ctx.Data["Repository"] = repo
    ctx.Data["Owner"] = ctx.Repo.Repository.Owner
    ctx.Data["CanWriteCode"] = ctx.Repo.CanWrite(unit_model.TypeCode)
@@ -20,8 +20,6 @@ import (
    asymkey_service "code.gitea.io/gitea/services/asymkey"
)

const tplCommentPrefix = `# gitea public key`

func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) error {
    if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
        return nil
@@ -47,7 +45,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
    scanner := bufio.NewScanner(f)
    for scanner.Scan() {
        line := scanner.Text()
        if strings.HasPrefix(line, tplCommentPrefix) {
        if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
            continue
        }
        linesInAuthorizedKeys.Add(line)
@@ -67,7 +65,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
    scanner = bufio.NewScanner(regenerated)
    for scanner.Scan() {
        line := scanner.Text()
        if strings.HasPrefix(line, tplCommentPrefix) {
        if strings.HasPrefix(line, asymkey_model.AuthorizedStringCommentPrefix) {
            continue
        }
        if linesInAuthorizedKeys.Contains(line) {
@@ -540,7 +540,7 @@ type MergePullRequestForm struct {
    HeadCommitID           string `json:"head_commit_id,omitempty"`
    ForceMerge             bool   `json:"force_merge,omitempty"`
    MergeWhenChecksSucceed bool   `json:"merge_when_checks_succeed,omitempty"`
    DeleteBranchAfterMerge bool   `json:"delete_branch_after_merge,omitempty"`
    DeleteBranchAfterMerge *bool  `json:"delete_branch_after_merge,omitempty"`
}

// Validate validates the fields
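Switching `DeleteBranchAfterMerge` to `*bool` lets the server distinguish a field that was omitted (fall back to the repository setting) from an explicit `false`. A small encoding/json illustration of that tri-state behaviour:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type mergeForm struct {
	DeleteBranchAfterMerge *bool `json:"delete_branch_after_merge,omitempty"`
}

// describe shows the three states a *bool field can take after unmarshaling.
func describe(raw string) string {
	var f mergeForm
	_ = json.Unmarshal([]byte(raw), &f)
	switch {
	case f.DeleteBranchAfterMerge == nil:
		return "not sent: use the repository default"
	case *f.DeleteBranchAfterMerge:
		return "explicitly true: delete the branch"
	default:
		return "explicitly false: keep the branch"
	}
}

func main() {
	fmt.Println(describe(`{}`))
	fmt.Println(describe(`{"delete_branch_after_merge": true}`))
	fmt.Println(describe(`{"delete_branch_after_merge": false}`))
}
```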
@@ -15,6 +15,7 @@ import (
    user_model "code.gitea.io/gitea/models/user"
    "code.gitea.io/gitea/modules/gitrepo"
    "code.gitea.io/gitea/modules/json"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/timeutil"
    git_service "code.gitea.io/gitea/services/git"
    notify_service "code.gitea.io/gitea/services/notify"
@@ -151,15 +152,15 @@ func DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_m
}

// LoadCommentPushCommits Load push commits
func LoadCommentPushCommits(ctx context.Context, c *issues_model.Comment) (err error) {
func LoadCommentPushCommits(ctx context.Context, c *issues_model.Comment) error {
    if c.Content == "" || c.Commits != nil || c.Type != issues_model.CommentTypePullRequestPush {
        return nil
    }

    var data issues_model.PushActionContent
    err = json.Unmarshal([]byte(c.Content), &data)
    if err != nil {
        return err
    if err := json.Unmarshal([]byte(c.Content), &data); err != nil {
        log.Debug("Unmarshal: %v", err) // no need to show 500 error to end user when the JSON is broken
        return nil
    }

    c.IsForcePush = data.IsForcePush
@@ -168,9 +169,15 @@ func LoadCommentPushCommits(ctx context.Context, c *issues_model.Comment) (err e
        if len(data.CommitIDs) != 2 {
            return nil
        }
        c.OldCommit = data.CommitIDs[0]
        c.NewCommit = data.CommitIDs[1]
        c.OldCommit, c.NewCommit = data.CommitIDs[0], data.CommitIDs[1]
    } else {
        if err := c.LoadIssue(ctx); err != nil {
            return err
        }
        if err := c.Issue.LoadRepo(ctx); err != nil {
            return err
        }

        gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, c.Issue.Repo)
        if err != nil {
            return err
@@ -179,10 +186,11 @@ func LoadCommentPushCommits(ctx context.Context, c *issues_model.Comment) (err e

        c.Commits, err = git_service.ConvertFromGitCommit(ctx, gitRepo.GetCommitsFromIDs(data.CommitIDs), c.Issue.Repo)
        if err != nil {
            return err
        }
            log.Debug("ConvertFromGitCommit: %v", err) // no need to show 500 error to end user when the commit does not exist
        } else {
            c.CommitsNum = int64(len(c.Commits))
        }
    }

    return err
    return nil
}
@@ -18,6 +18,7 @@ import (
    "regexp"
    "strconv"
    "strings"
    "time"

    actions_model "code.gitea.io/gitea/models/actions"
    auth_model "code.gitea.io/gitea/models/auth"
@@ -54,6 +55,33 @@ type Claims struct {
    jwt.RegisteredClaims
}

type AuthTokenOptions struct {
    Op     string
    UserID int64
    RepoID int64
}

func GetLFSAuthTokenWithBearer(opts AuthTokenOptions) (string, error) {
    now := time.Now()
    claims := Claims{
        RegisteredClaims: jwt.RegisteredClaims{
            ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
            NotBefore: jwt.NewNumericDate(now),
        },
        RepoID: opts.RepoID,
        Op:     opts.Op,
        UserID: opts.UserID,
    }
    token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)

    // Sign and get the complete encoded token as a string using the secret
    tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
    if err != nil {
        return "", fmt.Errorf("failed to sign LFS JWT token: %w", err)
    }
    return "Bearer " + tokenString, nil
}

// DownloadLink builds a URL to download the object.
func (rc *requestContext) DownloadLink(p lfs_module.Pointer) string {
    return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid))
@@ -559,9 +587,6 @@ func authenticate(ctx *context.Context, repository *repo_model.Repository, autho
}

func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repository, mode perm_model.AccessMode) (*user_model.User, error) {
    if !strings.Contains(tokenSHA, ".") {
        return nil, nil
    }
    token, err := jwt.ParseWithClaims(tokenSHA, &Claims{}, func(t *jwt.Token) (any, error) {
        if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
            return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
@@ -569,7 +594,7 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo
        return setting.LFS.JWTSecretBytes, nil
    })
    if err != nil {
        return nil, nil
        return nil, errors.New("invalid token")
    }

    claims, claimsOk := token.Claims.(*Claims)
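`GetLFSAuthTokenWithBearer` issues a short-lived HS256 token and `handleLFSToken` now rejects unparsable tokens instead of silently ignoring them. A compact sign-and-parse sketch, assuming the `github.com/golang-jwt/jwt/v5` module that provides the `RegisteredClaims`/`NewWithClaims`/`ParseWithClaims` API used above; the claim fields and secret are illustrative:

```go
package main

import (
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v5"
)

// lfsClaims mirrors the shape of the Claims type above; values are examples.
type lfsClaims struct {
	RepoID int64  `json:"repo_id"`
	Op     string `json:"op"`
	UserID int64  `json:"user_id"`
	jwt.RegisteredClaims
}

func main() {
	secret := []byte("example-secret") // stand-in for setting.LFS.JWTSecretBytes

	now := time.Now()
	claims := lfsClaims{
		RepoID: 1, Op: "download", UserID: 2,
		RegisteredClaims: jwt.RegisteredClaims{
			ExpiresAt: jwt.NewNumericDate(now.Add(time.Hour)),
			NotBefore: jwt.NewNumericDate(now),
		},
	}
	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(secret)
	if err != nil {
		panic(err)
	}
	bearer := "Bearer " + signed

	// Parsing side: reject anything not signed with HMAC and our secret.
	parsed, err := jwt.ParseWithClaims(signed, &lfsClaims{}, func(t *jwt.Token) (any, error) {
		if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
		}
		return secret, nil
	})
	if err != nil {
		panic(err)
	}
	got := parsed.Claims.(*lfsClaims)
	fmt.Println(len(bearer) > 7, got.Op, got.RepoID) // true download 1
}
```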
Some files were not shown because too many files have changed in this diff.