Mirror of https://github.com/go-gitea/gitea.git (synced 2025-11-05 18:32:41 +09:00)

Compare commits (53 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 48eb5ac685 |  |
|  | 8f5b2f1ddf |  |
|  | bbfd34575a |  |
|  | 760cf419ba |  |
|  | 90982bffa5 |  |
|  | 8fa62be905 |  |
|  | 7b3ffe5745 |  |
|  | c50d4202ef |  |
|  | 660a83bd2e |  |
|  | 4c7786b3b6 |  |
|  | c702e7995d |  |
|  | b2e58edd74 |  |
|  | 98b7714c3b |  |
|  | 9da4642c8c |  |
|  | 1d191f9b5a |  |
|  | 2e1afd54b2 |  |
|  | 9e68261ca7 |  |
|  | e4238583db |  |
|  | 656d5a144f |  |
|  | 43d1183f67 |  |
|  | 8fa419c4c1 |  |
|  | 77c89572e9 |  |
|  | 68b908d92a |  |
|  | 638fbd0b78 |  |
|  | 3647e62ef9 |  |
|  | 37bbf2c902 |  |
|  | a239d6c4a9 |  |
|  | ff2014690d |  |
|  | 03c644c48c |  |
|  | 965376d476 |  |
|  | 2e12161620 |  |
|  | 9cde526f87 |  |
|  | 4c20be7c00 |  |
|  | 263d06f616 |  |
|  | 6dc16c1154 |  |
|  | fd2c250b52 |  |
|  | e6d6bce1f6 |  |
|  | a9ba7379fe |  |
|  | 6be1d71e2b |  |
|  | 9f5e44bf50 |  |
|  | f204ff4ef7 |  |
|  | f6cb7860a2 |  |
|  | 6068978c42 |  |
|  | c320caed97 |  |
|  | f1c826ed29 |  |
|  | 3c531d3957 |  |
|  | 1ae2525922 |  |
|  | fd7ebaaa9c |  |
|  | fa33271157 |  |
|  | 4b3e456afa |  |
|  | 63e5db5d7a |  |
|  | e6e2c2f4a4 |  |
|  | e902b98cc2 |  |

.drone.yml (42 changed lines)

@@ -769,10 +769,16 @@ steps:
   image: woodpeckerci/plugin-s3:latest
   pull: always
   settings:
-    acl: public-read
-    bucket: gitea-artifacts
-    endpoint: https://ams3.digitaloceanspaces.com
-    path_style: true
+    acl:
+      from_secret: aws_s3_acl
+    region:
+      from_secret: aws_s3_region
+    bucket:
+      from_secret: aws_s3_bucket
+    endpoint:
+      from_secret: aws_s3_endpoint
+    path_style:
+      from_secret: aws_s3_path_style
     source: "dist/release/*"
     strip_prefix: dist/release/
     target: "/gitea/${DRONE_BRANCH##release/v}"
@@ -790,10 +796,16 @@ steps:
 - name: release-main
   image: woodpeckerci/plugin-s3:latest
   settings:
-    acl: public-read
-    bucket: gitea-artifacts
-    endpoint: https://ams3.digitaloceanspaces.com
-    path_style: true
+    acl:
+      from_secret: aws_s3_acl
+    region:
+      from_secret: aws_s3_region
+    bucket:
+      from_secret: aws_s3_bucket
+    endpoint:
+      from_secret: aws_s3_endpoint
+    path_style:
+      from_secret: aws_s3_path_style
     source: "dist/release/*"
     strip_prefix: dist/release/
     target: /gitea/main
@@ -892,10 +904,16 @@ steps:
   image: woodpeckerci/plugin-s3:latest
   pull: always
   settings:
-    acl: public-read
-    bucket: gitea-artifacts
-    endpoint: https://ams3.digitaloceanspaces.com
-    path_style: true
+    acl:
+      from_secret: aws_s3_acl
+    region:
+      from_secret: aws_s3_region
+    bucket:
+      from_secret: aws_s3_bucket
+    endpoint:
+      from_secret: aws_s3_endpoint
+    path_style:
+      from_secret: aws_s3_path_style
     source: "dist/release/*"
     strip_prefix: dist/release/
     target: "/gitea/${DRONE_TAG##v}"

@@ -173,3 +173,6 @@ issues:
       linters:
         - revive
       text: "exported: type name will be used as user.UserBadge by other packages, and that stutters; consider calling this Badge"
+    - path: models/db/sql_postgres_with_schema.go
+      linters:
+        - nolintlint

CHANGELOG.md (70 changed lines)

@@ -4,6 +4,76 @@ This changelog goes through all the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.io).
 
+## [1.18.5](https://github.com/go-gitea/gitea/releases/tag/v1.18.5) - 2023-02-21
+
+* ENHANCEMENTS
+  * Hide 2FA status from other members in organization members list (#22999) (#23023)
+* BUGFIXES
+  * Add force_merge to merge request and fix checking mergable (#23010) (#23032)
+  * Use `--message=%s` for git commit message (#23028) (#23029)
+  * Render access log template as text instead of HTML (#23013) (#23025)
+  * Fix the Manually Merged form (#23015) (#23017)
+  * Use beforeCommit instead of baseCommit (#22949) (#22996)
+  * Display attachments of review comment when comment content is blank (#23035) (#23046)
+  * Return empty url for submodule tree entries (#23043) (#23048)
+
+## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/1.18.4) - 2023-02-20
+
+* SECURITY
+  * Provide the ability to set password hash algorithm parameters (#22942) (#22943)
+  * Add command to bulk set must-change-password (#22823) (#22928)
+* ENHANCEMENTS
+  * Use import of OCI structs (#22765) (#22805)
+  * Fix color of tertiary button on dark theme (#22739) (#22744)
+  * Link issue and pull requests status change in UI notifications directly to their event in the timelined view. (#22627) (#22642)
+* BUGFIXES
+  * Notify on container image create (#22806) (#22965)
+  * Fix blame view missing lines (#22826) (#22929)
+  * Fix incorrect role labels for migrated issues and comments (#22914) (#22923)
+  * Fix PR file tree folders no longer collapsing (#22864) (#22872)
+  * Escape filename when assemble URL (#22850) (#22871)
+  * Fix isAllowed of escapeStreamer (#22814) (#22837)
+  * Load issue before accessing index in merge message (#22822) (#22830)
+  * Improve trace logging for pulls and processes (#22633) (#22812)
+  * Fix restore repo bug, clarify the problem of ForeignIndex (#22776) (#22794)
+  * Add default user visibility to cli command "admin user create" (#22750) (#22760)
+  * Escape path for the file list (#22741) (#22757)
+  * Fix bugs with WebAuthn preventing sign in and registration. (#22651) (#22721)
+  * Add missing close bracket in imagediff (#22710) (#22712)
+  * Move code comments to a standalone file and fix the bug when adding a reply to an outdated review appears to not post(#20821) (#22707)
+  * Fix line spacing for plaintext previews (#22699) (#22701)
+  * Fix wrong hint when deleting a branch successfully from pull request UI (#22673) (#22698)
+  * Fix README TOC links (#22577) (#22677)
+  * Fix missing message in git hook when pull requests disabled on fork (#22625) (#22658)
+  * Improve checkIfPRContentChanged (#22611) (#22644)
+  * Prevent duplicate labels when importing more than 99 (#22591) (#22598)
+  * Don't return duplicated users who can create org repo (#22560) (#22562)
+* BUILD
+  * Upgrade golangcilint to v1.51.0 (#22764)
+* MISC
+  * Use proxy for pull mirror (#22771) (#22772)
+  * Use `--index-url` in PyPi description (#22620) (#22636)
+
+## [1.18.3](https://github.com/go-gitea/gitea/releases/tag/v1.18.3) - 2023-01-23
+
+* SECURITY
+  * Prevent multiple `To` recipients (#22566) (#22569)
+* BUGFIXES
+  * Truncate commit summary on repo files table. (#22551) (#22552)
+  * Mute all links in issue timeline (#22534)
+
+## [1.18.2](https://github.com/go-gitea/gitea/releases/tag/v1.18.2) - 2023-01-19
+
+* BUGFIXES
+  * When updating by rebase we need to set the environment for head repo (#22535) (#22536)
+  * Fix issue not auto-closing when it includes a reference to a branch (#22514) (#22521)
+  * Fix invalid issue branch reference if not specified in template (#22513) (#22520)
+  * Fix 500 error viewing pull request when fork has pull requests disabled (#22512) (#22515)
+  * Reliable selection of admin user (#22509) (#22511)
+  * Set disable_gravatar/enable_federated_avatar when offline mode is true (#22479) (#22496)
+* BUILD
+  * cgo cross-compile for freebsd (#22397) (#22519)
+
 ## [1.18.1](https://github.com/go-gitea/gitea/releases/tag/v1.18.1) - 2023-01-17
 
 * API

Makefile (13 changed lines)

@@ -28,8 +28,8 @@ XGO_VERSION := go-1.19.x
 AIR_PACKAGE ?= github.com/cosmtrek/air@v1.40.4
 EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.5.0
 ERRCHECK_PACKAGE ?= github.com/kisielk/errcheck@v1.6.1
-GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.3.1
-GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.47.0
+GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.4.0
+GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.0
 GXZ_PAGAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.10
 MISSPELL_PACKAGE ?= github.com/client9/misspell/cmd/misspell@v0.3.4
 SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.0
@@ -733,7 +733,7 @@ $(EXECUTABLE): $(GO_SOURCES) $(TAGS_PREREQ)
 	CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@
 
 .PHONY: release
-release: frontend generate release-windows release-linux release-darwin release-copy release-compress vendor release-sources release-docs release-check
+release: frontend generate release-windows release-linux release-darwin release-freebsd release-copy release-compress vendor release-sources release-docs release-check
 
 $(DIST_DIRS):
 	mkdir -p $(DIST_DIRS)
@@ -762,6 +762,13 @@ ifeq ($(CI),true)
 	cp /build/* $(DIST)/binaries
 endif
 
+.PHONY: release-freebsd
+release-freebsd: | $(DIST_DIRS)
+	CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'freebsd/amd64' -out gitea-$(VERSION) .
+ifeq ($(CI),true)
+	cp /build/* $(DIST)/binaries
+endif
+
 .PHONY: release-copy
 release-copy: | $(DIST_DIRS)
 	cd $(DIST); for file in `find . -type f -name "*"`; do cp $${file} ./release/; done;

assets/go-licenses.json (generated, 10 changed lines)

File diff suppressed because one or more lines are too long

cmd/admin.go (392 changed lines)

@@ -6,7 +6,6 @@
 package cmd
 
 import (
-	"context"
 	"errors"
 	"fmt"
 	"os"
@@ -17,20 +16,14 @@ import (
 	auth_model "code.gitea.io/gitea/models/auth"
 	"code.gitea.io/gitea/models/db"
 	repo_model "code.gitea.io/gitea/models/repo"
-	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/graceful"
 	"code.gitea.io/gitea/modules/log"
-	pwd "code.gitea.io/gitea/modules/password"
 	repo_module "code.gitea.io/gitea/modules/repository"
-	"code.gitea.io/gitea/modules/setting"
-	"code.gitea.io/gitea/modules/storage"
-	"code.gitea.io/gitea/modules/util"
 	auth_service "code.gitea.io/gitea/services/auth"
 	"code.gitea.io/gitea/services/auth/source/oauth2"
 	"code.gitea.io/gitea/services/auth/source/smtp"
 	repo_service "code.gitea.io/gitea/services/repository"
-	user_service "code.gitea.io/gitea/services/user"
 
 	"github.com/urfave/cli"
 )
@@ -49,142 +42,6 @@ var (
 		},
 	}
 
-	subcmdUser = cli.Command{
-		Name:  "user",
-		Usage: "Modify users",
-		Subcommands: []cli.Command{
-			microcmdUserCreate,
-			microcmdUserList,
-			microcmdUserChangePassword,
-			microcmdUserDelete,
-			microcmdUserGenerateAccessToken,
-		},
-	}
-
-	microcmdUserList = cli.Command{
-		Name:   "list",
-		Usage:  "List users",
-		Action: runListUsers,
-		Flags: []cli.Flag{
-			cli.BoolFlag{
-				Name:  "admin",
-				Usage: "List only admin users",
-			},
-		},
-	}
-
-	microcmdUserCreate = cli.Command{
-		Name:   "create",
-		Usage:  "Create a new user in database",
-		Action: runCreateUser,
-		Flags: []cli.Flag{
-			cli.StringFlag{
-				Name:  "name",
-				Usage: "Username. DEPRECATED: use username instead",
-			},
-			cli.StringFlag{
-				Name:  "username",
-				Usage: "Username",
-			},
-			cli.StringFlag{
-				Name:  "password",
-				Usage: "User password",
-			},
-			cli.StringFlag{
-				Name:  "email",
-				Usage: "User email address",
-			},
-			cli.BoolFlag{
-				Name:  "admin",
-				Usage: "User is an admin",
-			},
-			cli.BoolFlag{
-				Name:  "random-password",
-				Usage: "Generate a random password for the user",
-			},
-			cli.BoolFlag{
-				Name:  "must-change-password",
-				Usage: "Set this option to false to prevent forcing the user to change their password after initial login, (Default: true)",
-			},
-			cli.IntFlag{
-				Name:  "random-password-length",
-				Usage: "Length of the random password to be generated",
-				Value: 12,
-			},
-			cli.BoolFlag{
-				Name:  "access-token",
-				Usage: "Generate access token for the user",
-			},
-			cli.BoolFlag{
-				Name:  "restricted",
-				Usage: "Make a restricted user account",
-			},
-		},
-	}
-
-	microcmdUserChangePassword = cli.Command{
-		Name:   "change-password",
-		Usage:  "Change a user's password",
-		Action: runChangePassword,
-		Flags: []cli.Flag{
-			cli.StringFlag{
-				Name:  "username,u",
-				Value: "",
-				Usage: "The user to change password for",
-			},
-			cli.StringFlag{
-				Name:  "password,p",
-				Value: "",
-				Usage: "New password to set for user",
-			},
-		},
-	}
-
-	microcmdUserDelete = cli.Command{
-		Name:  "delete",
-		Usage: "Delete specific user by id, name or email",
-		Flags: []cli.Flag{
-			cli.Int64Flag{
-				Name:  "id",
-				Usage: "ID of user of the user to delete",
-			},
-			cli.StringFlag{
-				Name:  "username,u",
-				Usage: "Username of the user to delete",
-			},
-			cli.StringFlag{
-				Name:  "email,e",
-				Usage: "Email of the user to delete",
-			},
-			cli.BoolFlag{
-				Name:  "purge",
-				Usage: "Purge user, all their repositories, organizations and comments",
-			},
-		},
-		Action: runDeleteUser,
-	}
-
-	microcmdUserGenerateAccessToken = cli.Command{
-		Name:  "generate-access-token",
-		Usage: "Generate a access token for a specific user",
-		Flags: []cli.Flag{
-			cli.StringFlag{
-				Name:  "username,u",
-				Usage: "Username",
-			},
-			cli.StringFlag{
-				Name:  "token-name,t",
-				Usage: "Token name",
-				Value: "gitea-admin",
-			},
-			cli.BoolFlag{
-				Name:  "raw",
-				Usage: "Display only the token value",
-			},
-		},
-		Action: runGenerateAccessToken,
-	}
-
 	subcmdRepoSyncReleases = cli.Command{
 		Name:  "repo-sync-releases",
 		Usage: "Synchronize repository releases with tags",
@@ -468,255 +325,6 @@ var (
 	}
 )
 
-func runChangePassword(c *cli.Context) error {
-	if err := argsSet(c, "username", "password"); err != nil {
-		return err
-	}
-
-	ctx, cancel := installSignals()
-	defer cancel()
-
-	if err := initDB(ctx); err != nil {
-		return err
-	}
-	if len(c.String("password")) < setting.MinPasswordLength {
-		return fmt.Errorf("Password is not long enough. Needs to be at least %d", setting.MinPasswordLength)
-	}
-
-	if !pwd.IsComplexEnough(c.String("password")) {
-		return errors.New("Password does not meet complexity requirements")
-	}
-	pwned, err := pwd.IsPwned(context.Background(), c.String("password"))
-	if err != nil {
-		return err
-	}
-	if pwned {
-		return errors.New("The password you chose is on a list of stolen passwords previously exposed in public data breaches. Please try again with a different password.\nFor more details, see https://haveibeenpwned.com/Passwords")
-	}
-	uname := c.String("username")
-	user, err := user_model.GetUserByName(ctx, uname)
-	if err != nil {
-		return err
-	}
-	if err = user.SetPassword(c.String("password")); err != nil {
-		return err
-	}
-
-	if err = user_model.UpdateUserCols(ctx, user, "passwd", "passwd_hash_algo", "salt"); err != nil {
-		return err
-	}
-
-	fmt.Printf("%s's password has been successfully updated!\n", user.Name)
-	return nil
-}
-
-func runCreateUser(c *cli.Context) error {
-	if err := argsSet(c, "email"); err != nil {
-		return err
-	}
-
-	if c.IsSet("name") && c.IsSet("username") {
-		return errors.New("Cannot set both --name and --username flags")
-	}
-	if !c.IsSet("name") && !c.IsSet("username") {
-		return errors.New("One of --name or --username flags must be set")
-	}
-
-	if c.IsSet("password") && c.IsSet("random-password") {
-		return errors.New("cannot set both -random-password and -password flags")
-	}
-
-	var username string
-	if c.IsSet("username") {
-		username = c.String("username")
-	} else {
-		username = c.String("name")
-		fmt.Fprintf(os.Stderr, "--name flag is deprecated. Use --username instead.\n")
-	}
-
-	ctx, cancel := installSignals()
-	defer cancel()
-
-	if err := initDB(ctx); err != nil {
-		return err
-	}
-
-	var password string
-	if c.IsSet("password") {
-		password = c.String("password")
-	} else if c.IsSet("random-password") {
-		var err error
-		password, err = pwd.Generate(c.Int("random-password-length"))
-		if err != nil {
-			return err
-		}
-		fmt.Printf("generated random password is '%s'\n", password)
-	} else {
-		return errors.New("must set either password or random-password flag")
-	}
-
-	// always default to true
-	changePassword := true
-
-	// If this is the first user being created.
-	// Take it as the admin and don't force a password update.
-	if n := user_model.CountUsers(nil); n == 0 {
-		changePassword = false
-	}
-
-	if c.IsSet("must-change-password") {
-		changePassword = c.Bool("must-change-password")
-	}
-
-	restricted := util.OptionalBoolNone
-
-	if c.IsSet("restricted") {
-		restricted = util.OptionalBoolOf(c.Bool("restricted"))
-	}
-
-	u := &user_model.User{
-		Name:               username,
-		Email:              c.String("email"),
-		Passwd:             password,
-		IsAdmin:            c.Bool("admin"),
-		MustChangePassword: changePassword,
-	}
-
-	overwriteDefault := &user_model.CreateUserOverwriteOptions{
-		IsActive:     util.OptionalBoolTrue,
-		IsRestricted: restricted,
-	}
-
-	if err := user_model.CreateUser(u, overwriteDefault); err != nil {
-		return fmt.Errorf("CreateUser: %w", err)
-	}
-
-	if c.Bool("access-token") {
-		t := &auth_model.AccessToken{
-			Name: "gitea-admin",
-			UID:  u.ID,
-		}
-
-		if err := auth_model.NewAccessToken(t); err != nil {
-			return err
-		}
-
-		fmt.Printf("Access token was successfully created... %s\n", t.Token)
-	}
-
-	fmt.Printf("New user '%s' has been successfully created!\n", username)
-	return nil
-}
-
-func runListUsers(c *cli.Context) error {
-	ctx, cancel := installSignals()
-	defer cancel()
-
-	if err := initDB(ctx); err != nil {
-		return err
-	}
-
-	users, err := user_model.GetAllUsers()
-	if err != nil {
-		return err
-	}
-
-	w := tabwriter.NewWriter(os.Stdout, 5, 0, 1, ' ', 0)
-
-	if c.IsSet("admin") {
-		fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\n")
-		for _, u := range users {
-			if u.IsAdmin {
-				fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", u.ID, u.Name, u.Email, u.IsActive)
-			}
-		}
-	} else {
-		twofa := user_model.UserList(users).GetTwoFaStatus()
-		fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\tIsAdmin\t2FA\n")
-		for _, u := range users {
-			fmt.Fprintf(w, "%d\t%s\t%s\t%t\t%t\t%t\n", u.ID, u.Name, u.Email, u.IsActive, u.IsAdmin, twofa[u.ID])
-		}
-
-	}
-
-	w.Flush()
-	return nil
-}
-
-func runDeleteUser(c *cli.Context) error {
-	if !c.IsSet("id") && !c.IsSet("username") && !c.IsSet("email") {
-		return fmt.Errorf("You must provide the id, username or email of a user to delete")
-	}
-
-	ctx, cancel := installSignals()
-	defer cancel()
-
-	if err := initDB(ctx); err != nil {
-		return err
-	}
-
-	if err := storage.Init(); err != nil {
-		return err
-	}
-
-	var err error
-	var user *user_model.User
-	if c.IsSet("email") {
-		user, err = user_model.GetUserByEmail(c.String("email"))
-	} else if c.IsSet("username") {
-		user, err = user_model.GetUserByName(ctx, c.String("username"))
-	} else {
-		user, err = user_model.GetUserByID(c.Int64("id"))
-	}
-	if err != nil {
-		return err
-	}
-	if c.IsSet("username") && user.LowerName != strings.ToLower(strings.TrimSpace(c.String("username"))) {
-		return fmt.Errorf("The user %s who has email %s does not match the provided username %s", user.Name, c.String("email"), c.String("username"))
-	}
-
-	if c.IsSet("id") && user.ID != c.Int64("id") {
-		return fmt.Errorf("The user %s does not match the provided id %d", user.Name, c.Int64("id"))
-	}
-
-	return user_service.DeleteUser(ctx, user, c.Bool("purge"))
-}
-
-func runGenerateAccessToken(c *cli.Context) error {
-	if !c.IsSet("username") {
-		return fmt.Errorf("You must provide the username to generate a token for them")
-	}
-
-	ctx, cancel := installSignals()
-	defer cancel()
-
-	if err := initDB(ctx); err != nil {
-		return err
-	}
-
-	user, err := user_model.GetUserByName(ctx, c.String("username"))
-	if err != nil {
-		return err
-	}
-
-	t := &auth_model.AccessToken{
-		Name: c.String("token-name"),
-		UID:  user.ID,
-	}
-
-	if err := auth_model.NewAccessToken(t); err != nil {
-		return err
-	}
-
-	if c.Bool("raw") {
-		fmt.Printf("%s\n", t.Token)
-	} else {
-		fmt.Printf("Access token was successfully created: %s\n", t.Token)
-	}
-
-	return nil
-}
-
 func runRepoSyncReleases(_ *cli.Context) error {
 	ctx, cancel := installSignals()
 	defer cancel()

cmd/admin_user.go (new file, 21 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"github.com/urfave/cli"
)

var subcmdUser = cli.Command{
	Name:  "user",
	Usage: "Modify users",
	Subcommands: []cli.Command{
		microcmdUserCreate,
		microcmdUserList,
		microcmdUserChangePassword,
		microcmdUserDelete,
		microcmdUserGenerateAccessToken,
		microcmdUserMustChangePassword,
	},
}

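The new `cmd/admin_user.go` only declares the `user` command group; the leaf commands it references now live in sibling files of the same package, and the group itself stays wired into the existing `gitea admin` command. For readers unfamiliar with how such a urfave/cli v1 command tree resolves, here is a minimal, self-contained sketch of the same pattern; the program, command, and flag names are invented for illustration and are not Gitea code.

```go
package main

import (
	"fmt"
	"os"

	"github.com/urfave/cli"
)

// microcmdGreet is a leaf command, analogous to microcmdUserList above.
var microcmdGreet = cli.Command{
	Name:  "greet",
	Usage: "Print a greeting",
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:  "username,u",
			Usage: "Who to greet",
		},
	},
	Action: func(c *cli.Context) error {
		fmt.Printf("hello, %s\n", c.String("username"))
		return nil
	},
}

// subcmdDemo groups leaf commands, analogous to subcmdUser.
var subcmdDemo = cli.Command{
	Name:        "demo",
	Usage:       "A demo command group",
	Subcommands: []cli.Command{microcmdGreet},
}

func main() {
	app := cli.NewApp()
	app.Name = "tool"
	app.Commands = []cli.Command{subcmdDemo}
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```

Invoked as `tool demo greet --username alice`, the app resolves the group and then the leaf command, which is the same path `gitea admin user list` takes through `subcmdUser` to `microcmdUserList`.
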
cmd/admin_user_change_password.go (new file, 76 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"context"
	"errors"
	"fmt"

	user_model "code.gitea.io/gitea/models/user"
	pwd "code.gitea.io/gitea/modules/auth/password"
	"code.gitea.io/gitea/modules/setting"

	"github.com/urfave/cli"
)

var microcmdUserChangePassword = cli.Command{
	Name:   "change-password",
	Usage:  "Change a user's password",
	Action: runChangePassword,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:  "username,u",
			Value: "",
			Usage: "The user to change password for",
		},
		cli.StringFlag{
			Name:  "password,p",
			Value: "",
			Usage: "New password to set for user",
		},
	},
}

func runChangePassword(c *cli.Context) error {
	if err := argsSet(c, "username", "password"); err != nil {
		return err
	}

	ctx, cancel := installSignals()
	defer cancel()

	if err := initDB(ctx); err != nil {
		return err
	}
	if len(c.String("password")) < setting.MinPasswordLength {
		return fmt.Errorf("Password is not long enough. Needs to be at least %d", setting.MinPasswordLength)
	}

	if !pwd.IsComplexEnough(c.String("password")) {
		return errors.New("Password does not meet complexity requirements")
	}
	pwned, err := pwd.IsPwned(context.Background(), c.String("password"))
	if err != nil {
		return err
	}
	if pwned {
		return errors.New("The password you chose is on a list of stolen passwords previously exposed in public data breaches. Please try again with a different password.\nFor more details, see https://haveibeenpwned.com/Passwords")
	}
	uname := c.String("username")
	user, err := user_model.GetUserByName(ctx, uname)
	if err != nil {
		return err
	}
	if err = user.SetPassword(c.String("password")); err != nil {
		return err
	}

	if err = user_model.UpdateUserCols(ctx, user, "passwd", "passwd_hash_algo", "salt"); err != nil {
		return err
	}

	fmt.Printf("%s's password has been successfully updated!\n", user.Name)
	return nil
}

cmd/admin_user_create.go (new file, 169 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"errors"
	"fmt"
	"os"

	auth_model "code.gitea.io/gitea/models/auth"
	user_model "code.gitea.io/gitea/models/user"
	pwd "code.gitea.io/gitea/modules/auth/password"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"

	"github.com/urfave/cli"
)

var microcmdUserCreate = cli.Command{
	Name:   "create",
	Usage:  "Create a new user in database",
	Action: runCreateUser,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:  "name",
			Usage: "Username. DEPRECATED: use username instead",
		},
		cli.StringFlag{
			Name:  "username",
			Usage: "Username",
		},
		cli.StringFlag{
			Name:  "password",
			Usage: "User password",
		},
		cli.StringFlag{
			Name:  "email",
			Usage: "User email address",
		},
		cli.BoolFlag{
			Name:  "admin",
			Usage: "User is an admin",
		},
		cli.BoolFlag{
			Name:  "random-password",
			Usage: "Generate a random password for the user",
		},
		cli.BoolFlag{
			Name:  "must-change-password",
			Usage: "Set this option to false to prevent forcing the user to change their password after initial login, (Default: true)",
		},
		cli.IntFlag{
			Name:  "random-password-length",
			Usage: "Length of the random password to be generated",
			Value: 12,
		},
		cli.BoolFlag{
			Name:  "access-token",
			Usage: "Generate access token for the user",
		},
		cli.BoolFlag{
			Name:  "restricted",
			Usage: "Make a restricted user account",
		},
	},
}

func runCreateUser(c *cli.Context) error {
	if err := argsSet(c, "email"); err != nil {
		return err
	}

	if c.IsSet("name") && c.IsSet("username") {
		return errors.New("Cannot set both --name and --username flags")
	}
	if !c.IsSet("name") && !c.IsSet("username") {
		return errors.New("One of --name or --username flags must be set")
	}

	if c.IsSet("password") && c.IsSet("random-password") {
		return errors.New("cannot set both -random-password and -password flags")
	}

	var username string
	if c.IsSet("username") {
		username = c.String("username")
	} else {
		username = c.String("name")
		fmt.Fprintf(os.Stderr, "--name flag is deprecated. Use --username instead.\n")
	}

	ctx, cancel := installSignals()
	defer cancel()

	if err := initDB(ctx); err != nil {
		return err
	}

	var password string
	if c.IsSet("password") {
		password = c.String("password")
	} else if c.IsSet("random-password") {
		var err error
		password, err = pwd.Generate(c.Int("random-password-length"))
		if err != nil {
			return err
		}
		fmt.Printf("generated random password is '%s'\n", password)
	} else {
		return errors.New("must set either password or random-password flag")
	}

	// always default to true
	changePassword := true

	// If this is the first user being created.
	// Take it as the admin and don't force a password update.
	if n := user_model.CountUsers(nil); n == 0 {
		changePassword = false
	}

	if c.IsSet("must-change-password") {
		changePassword = c.Bool("must-change-password")
	}

	restricted := util.OptionalBoolNone

	if c.IsSet("restricted") {
		restricted = util.OptionalBoolOf(c.Bool("restricted"))
	}

	// default user visibility in app.ini
	visibility := setting.Service.DefaultUserVisibilityMode

	u := &user_model.User{
		Name:               username,
		Email:              c.String("email"),
		Passwd:             password,
		IsAdmin:            c.Bool("admin"),
		MustChangePassword: changePassword,
		Visibility:         visibility,
	}

	overwriteDefault := &user_model.CreateUserOverwriteOptions{
		IsActive:     util.OptionalBoolTrue,
		IsRestricted: restricted,
	}

	if err := user_model.CreateUser(u, overwriteDefault); err != nil {
		return fmt.Errorf("CreateUser: %w", err)
	}

	if c.Bool("access-token") {
		t := &auth_model.AccessToken{
			Name: "gitea-admin",
			UID:  u.ID,
		}

		if err := auth_model.NewAccessToken(t); err != nil {
			return err
		}

		fmt.Printf("Access token was successfully created... %s\n", t.Token)
	}

	fmt.Printf("New user '%s' has been successfully created!\n", username)
	return nil
}

cmd/admin_user_delete.go (new file, 78 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"fmt"
	"strings"

	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/storage"
	user_service "code.gitea.io/gitea/services/user"

	"github.com/urfave/cli"
)

var microcmdUserDelete = cli.Command{
	Name:  "delete",
	Usage: "Delete specific user by id, name or email",
	Flags: []cli.Flag{
		cli.Int64Flag{
			Name:  "id",
			Usage: "ID of user of the user to delete",
		},
		cli.StringFlag{
			Name:  "username,u",
			Usage: "Username of the user to delete",
		},
		cli.StringFlag{
			Name:  "email,e",
			Usage: "Email of the user to delete",
		},
		cli.BoolFlag{
			Name:  "purge",
			Usage: "Purge user, all their repositories, organizations and comments",
		},
	},
	Action: runDeleteUser,
}

func runDeleteUser(c *cli.Context) error {
	if !c.IsSet("id") && !c.IsSet("username") && !c.IsSet("email") {
		return fmt.Errorf("You must provide the id, username or email of a user to delete")
	}

	ctx, cancel := installSignals()
	defer cancel()

	if err := initDB(ctx); err != nil {
		return err
	}

	if err := storage.Init(); err != nil {
		return err
	}

	var err error
	var user *user_model.User
	if c.IsSet("email") {
		user, err = user_model.GetUserByEmail(c.String("email"))
	} else if c.IsSet("username") {
		user, err = user_model.GetUserByName(ctx, c.String("username"))
	} else {
		user, err = user_model.GetUserByID(c.Int64("id"))
	}
	if err != nil {
		return err
	}
	if c.IsSet("username") && user.LowerName != strings.ToLower(strings.TrimSpace(c.String("username"))) {
		return fmt.Errorf("The user %s who has email %s does not match the provided username %s", user.Name, c.String("email"), c.String("username"))
	}

	if c.IsSet("id") && user.ID != c.Int64("id") {
		return fmt.Errorf("The user %s does not match the provided id %d", user.Name, c.Int64("id"))
	}

	return user_service.DeleteUser(ctx, user, c.Bool("purge"))
}

cmd/admin_user_generate_access_token.go (new file, 69 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"fmt"

	auth_model "code.gitea.io/gitea/models/auth"
	user_model "code.gitea.io/gitea/models/user"

	"github.com/urfave/cli"
)

var microcmdUserGenerateAccessToken = cli.Command{
	Name:  "generate-access-token",
	Usage: "Generate an access token for a specific user",
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:  "username,u",
			Usage: "Username",
		},
		cli.StringFlag{
			Name:  "token-name,t",
			Usage: "Token name",
			Value: "gitea-admin",
		},
		cli.BoolFlag{
			Name:  "raw",
			Usage: "Display only the token value",
		},
	},
	Action: runGenerateAccessToken,
}

func runGenerateAccessToken(c *cli.Context) error {
	if !c.IsSet("username") {
		return fmt.Errorf("You must provide a username to generate a token for")
	}

	ctx, cancel := installSignals()
	defer cancel()

	if err := initDB(ctx); err != nil {
		return err
	}

	user, err := user_model.GetUserByName(ctx, c.String("username"))
	if err != nil {
		return err
	}

	t := &auth_model.AccessToken{
		Name: c.String("token-name"),
		UID:  user.ID,
	}

	if err := auth_model.NewAccessToken(t); err != nil {
		return err
	}

	if c.Bool("raw") {
		fmt.Printf("%s\n", t.Token)
	} else {
		fmt.Printf("Access token was successfully created: %s\n", t.Token)
	}

	return nil
}

cmd/admin_user_list.go (new file, 60 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"fmt"
	"os"
	"text/tabwriter"

	user_model "code.gitea.io/gitea/models/user"

	"github.com/urfave/cli"
)

var microcmdUserList = cli.Command{
	Name:   "list",
	Usage:  "List users",
	Action: runListUsers,
	Flags: []cli.Flag{
		cli.BoolFlag{
			Name:  "admin",
			Usage: "List only admin users",
		},
	},
}

func runListUsers(c *cli.Context) error {
	ctx, cancel := installSignals()
	defer cancel()

	if err := initDB(ctx); err != nil {
		return err
	}

	users, err := user_model.GetAllUsers()
	if err != nil {
		return err
	}

	w := tabwriter.NewWriter(os.Stdout, 5, 0, 1, ' ', 0)

	if c.IsSet("admin") {
		fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\n")
		for _, u := range users {
			if u.IsAdmin {
				fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", u.ID, u.Name, u.Email, u.IsActive)
			}
		}
	} else {
		twofa := user_model.UserList(users).GetTwoFaStatus()
		fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\tIsAdmin\t2FA\n")
		for _, u := range users {
			fmt.Fprintf(w, "%d\t%s\t%s\t%t\t%t\t%t\n", u.ID, u.Name, u.Email, u.IsActive, u.IsAdmin, twofa[u.ID])
		}
	}

	w.Flush()
	return nil
}

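The listing above leans on the standard library's `text/tabwriter` to line up the tab-separated columns; in `tabwriter.NewWriter(os.Stdout, 5, 0, 1, ' ', 0)` the arguments are minwidth, tabwidth, padding, the padding character, and flags, and nothing is printed until `Flush` is called. A tiny standalone sketch of that pattern, with sample rows invented for illustration:

```go
package main

import (
	"fmt"
	"os"
	"text/tabwriter"
)

func main() {
	// Same parameters as runListUsers: minwidth 5, tabwidth 0, padding 1,
	// pad with spaces, no extra flags.
	w := tabwriter.NewWriter(os.Stdout, 5, 0, 1, ' ', 0)
	fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\n")
	fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", 1, "admin", "admin@example.com", true)
	fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", 2, "reporter", "reporter@example.com", false)
	// Columns are measured and written out only on Flush.
	w.Flush()
}
```
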
cmd/admin_user_must_change_password.go (new file, 58 lines)

// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package cmd

import (
	"errors"
	"fmt"

	user_model "code.gitea.io/gitea/models/user"

	"github.com/urfave/cli"
)

var microcmdUserMustChangePassword = cli.Command{
	Name:   "must-change-password",
	Usage:  "Set the must change password flag for the provided users or all users",
	Action: runMustChangePassword,
	Flags: []cli.Flag{
		cli.BoolFlag{
			Name:  "all,A",
			Usage: "All users must change password, except those explicitly excluded with --exclude",
		},
		cli.StringSliceFlag{
			Name:  "exclude,e",
			Usage: "Do not change the must-change-password flag for these users",
		},
		cli.BoolFlag{
			Name:  "unset",
			Usage: "Instead of setting the must-change-password flag, unset it",
		},
	},
}

func runMustChangePassword(c *cli.Context) error {
	ctx, cancel := installSignals()
	defer cancel()

	if c.NArg() == 0 && !c.IsSet("all") {
		return errors.New("either usernames or --all must be provided")
	}

	mustChangePassword := !c.Bool("unset")
	all := c.Bool("all")
	exclude := c.StringSlice("exclude")

	if err := initDB(ctx); err != nil {
		return err
	}

	n, err := user_model.SetMustChangePassword(ctx, all, mustChangePassword, c.Args(), exclude)
	if err != nil {
		return err
	}

	fmt.Printf("Updated %d users setting MustChangePassword to %t\n", n, mustChangePassword)
	return nil
}

@@ -523,7 +523,21 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o
 - `IMPORT_LOCAL_PATHS`: **false**: Set to `false` to prevent all users (including admin) from importing local path on server.
 - `INTERNAL_TOKEN`: **\<random at every install if no uri set\>**: Secret used to validate communication within Gitea binary.
 - `INTERNAL_TOKEN_URI`: **<empty>**: Instead of defining INTERNAL_TOKEN in the configuration, this configuration option can be used to give Gitea a path to a file that contains the internal token (example value: `file:/etc/gitea/internal_token`)
-- `PASSWORD_HASH_ALGO`: **pbkdf2**: The hash algorithm to use \[argon2, pbkdf2, scrypt, bcrypt\], argon2 will spend more memory than others.
+- `PASSWORD_HASH_ALGO`: **pbkdf2**: The hash algorithm to use \[argon2, pbkdf2, pbkdf2_v1, scrypt, bcrypt\], argon2 and scrypt will spend significant amounts of memory.
+  - Note: The default parameters for `pbkdf2` hashing have changed - the previous settings are available as `pbkdf2_v1` but are not recommended.
+  - The hash functions may be tuned by using `$` after the algorithm:
+    - `argon2$<time>$<memory>$<threads>$<key-length>`
+    - `bcrypt$<cost>`
+    - `pbkdf2$<iterations>$<key-length>`
+    - `scrypt$<n>$<r>$<p>$<key-length>`
+  - The defaults are:
+    - `argon2`: `argon2$2$65536$8$50`
+    - `bcrypt`: `bcrypt$10`
+    - `pbkdf2`: `pbkdf2$320000$50`
+    - `pbkdf2_v1`: `pbkdf2$10000$50`
+    - `pbkdf2_v2`: `pbkdf2$320000$50`
+    - `scrypt`: `scrypt$65536$16$2$50`
+  - Adjusting the algorithm parameters using this functionality is done at your own risk.
 - `CSRF_COOKIE_HTTP_ONLY`: **true**: Set false to allow JavaScript to read CSRF cookie.
 - `MIN_PASSWORD_LENGTH`: **6**: Minimum password length for new users.
 - `PASSWORD_COMPLEXITY`: **off**: Comma separated list of character classes required to pass minimum complexity. If left empty or no valid values are specified, checking is disabled (off):

@@ -77,6 +77,8 @@ For example:
 pip install --index-url https://testuser:password123@gitea.example.com/api/packages/testuser/pypi/simple --no-deps test_package
 ```
 
+You can use `--extra-index-url` instead of `--index-url` but that makes you vulnerable to dependency confusion attacks because `pip` checks the official PyPi repository for the package before it checks the specified custom repository. Read the `pip` docs for more information.
+
 ## Supported commands
 
 ```

@@ -99,6 +99,13 @@ Admin operations:
     - `--password value`, `-p value`: New password. Required.
   - Examples:
     - `gitea admin user change-password --username myname --password asecurepassword`
+- `must-change-password`:
+  - Args:
+    - `[username...]`: Users that must change their passwords
+  - Options:
+    - `--all`, `-A`: Force a password change for all users
+    - `--exclude username`, `-e username`: Exclude the given user. Can be set multiple times.
+    - `--unset`: Revoke forced password change for the given users
 - `regenerate`
   - Options:
     - `hooks`: Regenerate Git Hooks for all repositories

go.mod (3 changed lines)

@@ -75,6 +75,8 @@ require (
 	github.com/niklasfasching/go-org v1.6.5
 	github.com/oliamb/cutter v0.2.2
 	github.com/olivere/elastic/v7 v7.0.32
+	github.com/opencontainers/go-digest v1.0.0
+	github.com/opencontainers/image-spec v1.1.0-rc2
 	github.com/pkg/errors v0.9.1
 	github.com/pquerna/otp v1.3.0
 	github.com/prometheus/client_golang v1.13.0
@@ -285,6 +287,7 @@ require (
 	go.uber.org/multierr v1.8.0 // indirect
 	go.uber.org/zap v1.23.0 // indirect
 	golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
+	golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde // indirect
 	golang.org/x/time v0.0.0-20220922220347-f3bd1da661af // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90 // indirect

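The two `github.com/opencontainers` modules promoted to direct dependencies here supply the standard OCI digest and image-spec types (this is what the 1.18.4 changelog entry "Use import of OCI structs" refers to). As a rough sketch of the kind of values those packages expose, not code taken from Gitea:

```go
package main

import (
	"fmt"

	"github.com/opencontainers/go-digest"
	v1 "github.com/opencontainers/image-spec/specs-go/v1"
)

func main() {
	blob := []byte(`{"example":"manifest"}`)

	// go-digest computes and formats content digests such as "sha256:...".
	dgst := digest.FromBytes(blob)

	// image-spec provides the OCI structs, e.g. a Descriptor that points at the blob.
	desc := v1.Descriptor{
		MediaType: v1.MediaTypeImageManifest,
		Digest:    dgst,
		Size:      int64(len(blob)),
	}
	fmt.Println(desc.MediaType, desc.Digest, desc.Size)
}
```
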
go.sum (7 changed lines)

@@ -1174,6 +1174,10 @@ github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1y
 github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc=
 github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
 github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
+github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
+github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
+github.com/opencontainers/image-spec v1.1.0-rc2 h1:2zx/Stx4Wc5pIPDvIxHXvXtQFW/7XWJGmnM7r3wg034=
+github.com/opencontainers/image-spec v1.1.0-rc2/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ=
 github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis=
 github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74=
 github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
@@ -1759,7 +1763,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=
+golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde h1:ejfdSekXMDxDLbRrJMwUk6KnSLZ2McaUCVcIKM+N6jc=
+golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=

@@ -157,7 +157,7 @@ func CreateRepoTransferNotification(doer, newOwner *user_model.User, repo *repo_
 	}
 	for i := range users {
 		notify = append(notify, &Notification{
-			UserID:    users[i].ID,
+			UserID:    i,
 			RepoID:    repo.ID,
 			Status:    NotificationStatusUnread,
 			UpdatedBy: doer.ID,

@@ -24,8 +24,10 @@ type contextKey struct {
 }
 
 // enginedContextKey is a context key. It is used with context.Value() to get the current Engined for the context
-var enginedContextKey = &contextKey{"engined"}
-var _ Engined = &Context{}
+var (
+	enginedContextKey = &contextKey{"engined"}
+	_ Engined = &Context{}
+)
 
 // Context represents a db context
 type Context struct {

@@ -5,8 +5,11 @@
 package db

 import (
+    "context"
+
     "code.gitea.io/gitea/modules/setting"

+    "xorm.io/builder"
     "xorm.io/xorm"
 )

@@ -19,6 +22,7 @@ const (
 type Paginator interface {
     GetSkipTake() (skip, take int)
     GetStartEnd() (start, end int)
+    IsListAll() bool
 }

 // GetPaginatedSession creates a paginated database session
@@ -46,8 +50,11 @@ func SetEnginePagination(e Engine, p Paginator) Engine {
 type ListOptions struct {
     PageSize int
     Page     int // start from 1
+    ListAll  bool // if true, then PageSize and Page will not be taken
 }

+var _ Paginator = &ListOptions{}
+
 // GetSkipTake returns the skip and take values
 func (opts *ListOptions) GetSkipTake() (skip, take int) {
     opts.SetDefaultValues()
@@ -61,6 +68,11 @@ func (opts *ListOptions) GetStartEnd() (start, end int) {
     return start, end
 }

+// IsListAll indicates PageSize and Page will be ignored
+func (opts *ListOptions) IsListAll() bool {
+    return opts.ListAll
+}
+
 // SetDefaultValues sets default values
 func (opts *ListOptions) SetDefaultValues() {
     if opts.PageSize <= 0 {
@@ -80,6 +92,8 @@ type AbsoluteListOptions struct {
     take int
 }

+var _ Paginator = &AbsoluteListOptions{}
+
 // NewAbsoluteListOptions creates a list option with applied limits
 func NewAbsoluteListOptions(skip, take int) *AbsoluteListOptions {
     if skip < 0 {
@@ -94,6 +108,11 @@ func NewAbsoluteListOptions(skip, take int) *AbsoluteListOptions {
     return &AbsoluteListOptions{skip, take}
 }

+// IsListAll will always return false
+func (opts *AbsoluteListOptions) IsListAll() bool {
+    return false
+}
+
 // GetSkipTake returns the skip and take values
 func (opts *AbsoluteListOptions) GetSkipTake() (skip, take int) {
     return opts.skip, opts.take
@@ -103,3 +122,32 @@ func (opts *AbsoluteListOptions) GetSkipTake() (skip, take int) {
 func (opts *AbsoluteListOptions) GetStartEnd() (start, end int) {
     return opts.skip, opts.skip + opts.take
 }
+
+// FindOptions represents a find options
+type FindOptions interface {
+    Paginator
+    ToConds() builder.Cond
+}
+
+// Find represents a common find function which accept an options interface
+func Find[T any](ctx context.Context, opts FindOptions, objects *[]T) error {
+    sess := GetEngine(ctx).Where(opts.ToConds())
+    if !opts.IsListAll() {
+        sess.Limit(opts.GetSkipTake())
+    }
+    return sess.Find(&objects)
+}
+
+// Count represents a common count function which accept an options interface
+func Count[T any](ctx context.Context, opts FindOptions, object T) (int64, error) {
+    return GetEngine(ctx).Where(opts.ToConds()).Count(object)
+}
+
+// FindAndCount represents a common findandcount function which accept an options interface
+func FindAndCount[T any](ctx context.Context, opts FindOptions, objects *[]T) (int64, error) {
+    sess := GetEngine(ctx).Where(opts.ToConds())
+    if !opts.IsListAll() {
+        sess.Limit(opts.GetSkipTake())
+    }
+    return sess.FindAndCount(&objects)
+}
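The new generic `Find`/`Count`/`FindAndCount` helpers take anything that satisfies `FindOptions`, i.e. a `Paginator` plus `ToConds()`. A hedged sketch of how a caller could wire an options struct into them; `findLabelOptions`, the `Label` bean and its table are illustrative, not taken from this diff, and the imports assumed are `context`, `code.gitea.io/gitea/models/db` and `xorm.io/builder`:

```go
// Sketch only: assumes a Label struct mapped by xorm to a "label" table.
type findLabelOptions struct {
	db.ListOptions // provides GetSkipTake/GetStartEnd/IsListAll
	RepoID int64
}

// ToConds completes the db.FindOptions interface.
func (opts *findLabelOptions) ToConds() builder.Cond {
	cond := builder.NewCond()
	if opts.RepoID > 0 {
		cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
	}
	return cond
}

func findLabels(ctx context.Context, repoID int64) ([]*Label, error) {
	labels := make([]*Label, 0, 10)
	opts := &findLabelOptions{ListOptions: db.ListOptions{ListAll: true}, RepoID: repoID}
	// ListAll skips the Limit call inside db.Find, mirroring IsListAll above.
	if err := db.Find(ctx, opts, &labels); err != nil {
		return nil, err
	}
	return labels, nil
}
```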
@@ -544,3 +544,16 @@
   repo_id: 51
   type: 2
   created_unix: 946684810
+
+-
+  id: 80
+  repo_id: 31
+  type: 1
+  created_unix: 946684810
+
+-
+  id: 81
+  repo_id: 31
+  type: 3
+  config: "{\"IgnoreWhitespaceConflicts\":false,\"AllowMerge\":true,\"AllowRebase\":true,\"AllowRebaseMerge\":true,\"AllowSquash\":true}"
+  created_unix: 946684810
@@ -140,3 +140,14 @@
   num_members: 1
   includes_all_repositories: false
   can_create_org_repo: false
+
+-
+  id: 14
+  org_id: 3
+  lower_name: teamcreaterepo
+  name: teamCreateRepo
+  authorize: 2 # write
+  num_repos: 0
+  num_members: 1
+  includes_all_repositories: false
+  can_create_org_repo: true
@@ -93,3 +93,9 @@
   org_id: 19
   team_id: 6
   uid: 31
+
+-
+  id: 17
+  org_id: 3
+  team_id: 14
+  uid: 2
@@ -8,8 +8,8 @@
   email: user1@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user1
@@ -45,8 +45,8 @@
   email: user2@example.com
   keep_email_private: true
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user2
@@ -82,8 +82,8 @@
   email: user3@example.com
   keep_email_private: false
   email_notifications_preference: onmention
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user3
@@ -104,7 +104,7 @@
   num_following: 0
   num_stars: 0
   num_repos: 3
-  num_teams: 4
+  num_teams: 5
   num_members: 3
   visibility: 0
   repo_admin_change_team_access: false
@@ -119,8 +119,8 @@
   email: user4@example.com
   keep_email_private: false
   email_notifications_preference: onmention
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user4
@@ -156,8 +156,8 @@
   email: user5@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user5
@@ -193,8 +193,8 @@
   email: user6@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user6
@@ -230,8 +230,8 @@
   email: user7@example.com
   keep_email_private: false
   email_notifications_preference: disabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user7
@@ -267,8 +267,8 @@
   email: user8@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user8
@@ -304,8 +304,8 @@
   email: user9@example.com
   keep_email_private: false
   email_notifications_preference: onmention
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user9
@@ -341,8 +341,8 @@
   email: user10@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user10
@@ -378,8 +378,8 @@
   email: user11@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user11
@@ -415,8 +415,8 @@
   email: user12@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user12
@@ -452,8 +452,8 @@
   email: user13@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user13
@@ -489,8 +489,8 @@
   email: user14@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user14
@@ -526,8 +526,8 @@
   email: user15@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user15
@@ -563,8 +563,8 @@
   email: user16@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user16
@@ -600,8 +600,8 @@
   email: user17@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user17
@@ -637,8 +637,8 @@
   email: user18@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user18
@@ -674,8 +674,8 @@
   email: user19@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user19
@@ -711,8 +711,8 @@
   email: user20@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user20
@@ -748,8 +748,8 @@
   email: user21@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user21
@@ -785,8 +785,8 @@
   email: limited_org@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: limited_org
@@ -822,8 +822,8 @@
   email: privated_org@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: privated_org
@@ -859,8 +859,8 @@
   email: user24@example.com
   keep_email_private: true
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user24
@@ -896,8 +896,8 @@
   email: org25@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: org25
@@ -933,8 +933,8 @@
   email: org26@example.com
   keep_email_private: false
   email_notifications_preference: onmention
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: org26
@@ -970,8 +970,8 @@
   email: user27@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user27
@@ -1007,8 +1007,8 @@
   email: user28@example.com
   keep_email_private: true
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user28
@@ -1044,8 +1044,8 @@
   email: user29@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user29
@@ -1081,8 +1081,8 @@
   email: user30@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user30
@@ -1118,8 +1118,8 @@
   email: user31@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user31
@@ -1155,7 +1155,7 @@
   email: user32@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: 7d93daa0d1e6f2305cc8fa496847d61dc7320bb16262f9c55dd753480207234cdd96a93194e408341971742f4701772a025a
+  passwd: 7d93daa0d1e6f2305cc8fa496847d61dc7320bb16262f9c55dd753480207234cdd96a93194e408341971742f47017
   passwd_hash_algo: argon2
   must_change_password: false
   login_source: 0
@@ -1192,8 +1192,8 @@
   email: user33@example.com
   keep_email_private: false
   email_notifications_preference: enabled
-  passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b
-  passwd_hash_algo: argon2
+  passwd: e82bc8ae42a53b98c3bd0f941aacc4aa2a264407534b0a11bf270137f67af912f694b67951f92148c45f91717e1478ca7889
+  passwd_hash_algo: pbkdf2$50000$50
   must_change_password: false
   login_source: 0
   login_name: user33
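The fixture users above move from argon2 to a parameterised pbkdf2 spec. Reading `pbkdf2$50000$50` as 50,000 iterations and a 50-byte derived key is an inference from the string, not something this diff states; under that assumption the derivation with `golang.org/x/crypto/pbkdf2` looks roughly like this (the salt and password below are made up, fixtures keep their salts in a separate column):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"

	"golang.org/x/crypto/pbkdf2"
)

func main() {
	// Assumed interpretation of "pbkdf2$50000$50": iteration count and key length.
	const iterations = 50000
	const keyLen = 50 // 50 bytes -> 100 hex characters, matching the fixture hashes

	salt := []byte("0123456789abcdef") // placeholder salt
	key := pbkdf2.Key([]byte("password"), salt, iterations, keyLen, sha256.New)
	fmt.Println(hex.EncodeToString(key))
}
```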
@@ -9,9 +9,7 @@ package issues
 import (
     "context"
     "fmt"
-    "regexp"
     "strconv"
-    "strings"
     "unicode/utf8"

     "code.gitea.io/gitea/models/db"
@@ -23,8 +21,6 @@ import (
     "code.gitea.io/gitea/modules/git"
     "code.gitea.io/gitea/modules/json"
     "code.gitea.io/gitea/modules/log"
-    "code.gitea.io/gitea/modules/markup"
-    "code.gitea.io/gitea/modules/markup/markdown"
     "code.gitea.io/gitea/modules/references"
     "code.gitea.io/gitea/modules/structs"
     "code.gitea.io/gitea/modules/timeutil"
@@ -697,31 +693,6 @@ func (c *Comment) LoadReview() error {
     return c.loadReview(db.DefaultContext)
 }

-var notEnoughLines = regexp.MustCompile(`fatal: file .* has only \d+ lines?`)
-
-func (c *Comment) checkInvalidation(doer *user_model.User, repo *git.Repository, branch string) error {
-    // FIXME differentiate between previous and proposed line
-    commit, err := repo.LineBlame(branch, repo.Path, c.TreePath, uint(c.UnsignedLine()))
-    if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
-        c.Invalidated = true
-        return UpdateComment(c, doer)
-    }
-    if err != nil {
-        return err
-    }
-    if c.CommitSHA != "" && c.CommitSHA != commit.ID.String() {
-        c.Invalidated = true
-        return UpdateComment(c, doer)
-    }
-    return nil
-}
-
-// CheckInvalidation checks if the line of code comment got changed by another commit.
-// If the line got changed the comment is going to be invalidated.
-func (c *Comment) CheckInvalidation(repo *git.Repository, doer *user_model.User, branch string) error {
-    return c.checkInvalidation(doer, repo, branch)
-}
-
 // DiffSide returns "previous" if Comment.Line is a LOC of the previous changes and "proposed" if it is a LOC of the proposed changes.
 func (c *Comment) DiffSide() string {
     if c.Line < 0 {
@@ -1073,15 +1044,20 @@ type FindCommentsOptions struct {
     Line        int64
     TreePath    string
     Type        CommentType
+    IssueIDs    []int64
+    Invalidated util.OptionalBool
 }

-func (opts *FindCommentsOptions) toConds() builder.Cond {
+// ToConds implements FindOptions interface
+func (opts *FindCommentsOptions) ToConds() builder.Cond {
     cond := builder.NewCond()
     if opts.RepoID > 0 {
         cond = cond.And(builder.Eq{"issue.repo_id": opts.RepoID})
     }
     if opts.IssueID > 0 {
         cond = cond.And(builder.Eq{"comment.issue_id": opts.IssueID})
+    } else if len(opts.IssueIDs) > 0 {
+        cond = cond.And(builder.In("comment.issue_id", opts.IssueIDs))
     }
     if opts.ReviewID > 0 {
         cond = cond.And(builder.Eq{"comment.review_id": opts.ReviewID})
@@ -1101,13 +1077,16 @@ func (opts *FindCommentsOptions) toConds() builder.Cond {
     if len(opts.TreePath) > 0 {
         cond = cond.And(builder.Eq{"comment.tree_path": opts.TreePath})
     }
+    if !opts.Invalidated.IsNone() {
+        cond = cond.And(builder.Eq{"comment.invalidated": opts.Invalidated.IsTrue()})
+    }
     return cond
 }

 // FindComments returns all comments according options
 func FindComments(ctx context.Context, opts *FindCommentsOptions) ([]*Comment, error) {
     comments := make([]*Comment, 0, 10)
-    sess := db.GetEngine(ctx).Where(opts.toConds())
+    sess := db.GetEngine(ctx).Where(opts.ToConds())
     if opts.RepoID > 0 {
         sess.Join("INNER", "issue", "issue.id = comment.issue_id")
     }
@@ -1126,13 +1105,19 @@ func FindComments(ctx context.Context, opts *FindCommentsOptions) ([]*Comment, e

 // CountComments count all comments according options by ignoring pagination
 func CountComments(opts *FindCommentsOptions) (int64, error) {
-    sess := db.GetEngine(db.DefaultContext).Where(opts.toConds())
+    sess := db.GetEngine(db.DefaultContext).Where(opts.ToConds())
     if opts.RepoID > 0 {
         sess.Join("INNER", "issue", "issue.id = comment.issue_id")
     }
     return sess.Count(&Comment{})
 }

+// UpdateCommentInvalidate updates comment invalidated column
+func UpdateCommentInvalidate(ctx context.Context, c *Comment) error {
+    _, err := db.GetEngine(ctx).ID(c.ID).Cols("invalidated").Update(c)
+    return err
+}
+
 // UpdateComment updates information of comment.
 func UpdateComment(c *Comment, doer *user_model.User) error {
     ctx, committer, err := db.TxContext()
@@ -1191,120 +1176,6 @@ func DeleteComment(ctx context.Context, comment *Comment) error {
     return DeleteReaction(ctx, &ReactionOptions{CommentID: comment.ID})
 }

-// CodeComments represents comments on code by using this structure: FILENAME -> LINE (+ == proposed; - == previous) -> COMMENTS
-type CodeComments map[string]map[int64][]*Comment
-
-// FetchCodeComments will return a 2d-map: ["Path"]["Line"] = Comments at line
-func FetchCodeComments(ctx context.Context, issue *Issue, currentUser *user_model.User) (CodeComments, error) {
-    return fetchCodeCommentsByReview(ctx, issue, currentUser, nil)
-}
-
-func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, currentUser *user_model.User, review *Review) (CodeComments, error) {
-    pathToLineToComment := make(CodeComments)
-    if review == nil {
-        review = &Review{ID: 0}
-    }
-    opts := FindCommentsOptions{
-        Type:     CommentTypeCode,
-        IssueID:  issue.ID,
-        ReviewID: review.ID,
-    }
-
-    comments, err := findCodeComments(ctx, opts, issue, currentUser, review)
-    if err != nil {
-        return nil, err
-    }
-
-    for _, comment := range comments {
-        if pathToLineToComment[comment.TreePath] == nil {
-            pathToLineToComment[comment.TreePath] = make(map[int64][]*Comment)
-        }
-        pathToLineToComment[comment.TreePath][comment.Line] = append(pathToLineToComment[comment.TreePath][comment.Line], comment)
-    }
-    return pathToLineToComment, nil
-}
-
-func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issue, currentUser *user_model.User, review *Review) ([]*Comment, error) {
-    var comments []*Comment
-    if review == nil {
-        review = &Review{ID: 0}
-    }
-    conds := opts.toConds()
-    if review.ID == 0 {
-        conds = conds.And(builder.Eq{"invalidated": false})
-    }
-    e := db.GetEngine(ctx)
-    if err := e.Where(conds).
-        Asc("comment.created_unix").
-        Asc("comment.id").
-        Find(&comments); err != nil {
-        return nil, err
-    }
-
-    if err := issue.LoadRepo(ctx); err != nil {
-        return nil, err
-    }
-
-    if err := CommentList(comments).loadPosters(ctx); err != nil {
-        return nil, err
-    }
-
-    // Find all reviews by ReviewID
-    reviews := make(map[int64]*Review)
-    ids := make([]int64, 0, len(comments))
-    for _, comment := range comments {
-        if comment.ReviewID != 0 {
-            ids = append(ids, comment.ReviewID)
-        }
-    }
-    if err := e.In("id", ids).Find(&reviews); err != nil {
-        return nil, err
-    }
-
-    n := 0
-    for _, comment := range comments {
-        if re, ok := reviews[comment.ReviewID]; ok && re != nil {
-            // If the review is pending only the author can see the comments (except if the review is set)
-            if review.ID == 0 && re.Type == ReviewTypePending &&
-                (currentUser == nil || currentUser.ID != re.ReviewerID) {
-                continue
-            }
-            comment.Review = re
-        }
-        comments[n] = comment
-        n++
-
-        if err := comment.LoadResolveDoer(); err != nil {
-            return nil, err
-        }
-
-        if err := comment.LoadReactions(issue.Repo); err != nil {
-            return nil, err
-        }
-
-        var err error
-        if comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
-            Ctx:       ctx,
-            URLPrefix: issue.Repo.Link(),
-            Metas:     issue.Repo.ComposeMetas(),
-        }, comment.Content); err != nil {
-            return nil, err
-        }
-    }
-    return comments[:n], nil
-}
-
-// FetchCodeCommentsByLine fetches the code comments for a given treePath and line number
-func FetchCodeCommentsByLine(ctx context.Context, issue *Issue, currentUser *user_model.User, treePath string, line int64) ([]*Comment, error) {
-    opts := FindCommentsOptions{
-        Type:     CommentTypeCode,
-        IssueID:  issue.ID,
-        TreePath: treePath,
-        Line:     line,
-    }
-    return findCodeComments(ctx, opts, issue, currentUser, nil)
-}
-
 // UpdateCommentsMigrationsByType updates comments' migrations information via given git service type and original id and poster id
 func UpdateCommentsMigrationsByType(tp structs.GitServiceType, originalAuthorID string, posterID int64) error {
     _, err := db.GetEngine(db.DefaultContext).Table("comment").
@@ -1549,3 +1420,8 @@ func FixCommentTypeLabelWithOutsideLabels() (int64, error) {

     return res.RowsAffected()
 }
+
+// HasOriginalAuthor returns if a comment was migrated and has an original author.
+func (c *Comment) HasOriginalAuthor() bool {
+    return c.OriginalAuthor != "" && c.OriginalAuthorID != 0
+}
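With `toConds` exported as `ToConds` and the new `IssueIDs`/`Invalidated` filters, comment queries can now be assembled outside this file. A hedged sketch of such a caller; the function name and issue IDs are placeholders, and the imports assumed are `context`, `code.gitea.io/gitea/models/issues` and `code.gitea.io/gitea/modules/util`:

```go
// findValidCodeComments fetches code comments that have not been invalidated
// across a set of issues. Sketch only; not code from this diff.
func findValidCodeComments(ctx context.Context, issueIDs []int64) ([]*issues.Comment, error) {
	opts := &issues.FindCommentsOptions{
		Type:        issues.CommentTypeCode,
		IssueIDs:    issueIDs, // e.g. []int64{101, 102}
		Invalidated: util.OptionalBoolFalse,
	}
	return issues.FindComments(ctx, opts)
}
```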
129
models/issues/comment_code.go
Normal file
@@ -0,0 +1,129 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+    "context"
+
+    "code.gitea.io/gitea/models/db"
+    user_model "code.gitea.io/gitea/models/user"
+    "code.gitea.io/gitea/modules/markup"
+    "code.gitea.io/gitea/modules/markup/markdown"
+
+    "xorm.io/builder"
+)
+
+// CodeComments represents comments on code by using this structure: FILENAME -> LINE (+ == proposed; - == previous) -> COMMENTS
+type CodeComments map[string]map[int64][]*Comment
+
+// FetchCodeComments will return a 2d-map: ["Path"]["Line"] = Comments at line
+func FetchCodeComments(ctx context.Context, issue *Issue, currentUser *user_model.User) (CodeComments, error) {
+    return fetchCodeCommentsByReview(ctx, issue, currentUser, nil)
+}
+
+func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, currentUser *user_model.User, review *Review) (CodeComments, error) {
+    pathToLineToComment := make(CodeComments)
+    if review == nil {
+        review = &Review{ID: 0}
+    }
+    opts := FindCommentsOptions{
+        Type:     CommentTypeCode,
+        IssueID:  issue.ID,
+        ReviewID: review.ID,
+    }
+
+    comments, err := findCodeComments(ctx, opts, issue, currentUser, review)
+    if err != nil {
+        return nil, err
+    }
+
+    for _, comment := range comments {
+        if pathToLineToComment[comment.TreePath] == nil {
+            pathToLineToComment[comment.TreePath] = make(map[int64][]*Comment)
+        }
+        pathToLineToComment[comment.TreePath][comment.Line] = append(pathToLineToComment[comment.TreePath][comment.Line], comment)
+    }
+    return pathToLineToComment, nil
+}
+
+func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issue, currentUser *user_model.User, review *Review) ([]*Comment, error) {
+    var comments []*Comment
+    if review == nil {
+        review = &Review{ID: 0}
+    }
+    conds := opts.ToConds()
+    if review.ID == 0 {
+        conds = conds.And(builder.Eq{"invalidated": false})
+    }
+    e := db.GetEngine(ctx)
+    if err := e.Where(conds).
+        Asc("comment.created_unix").
+        Asc("comment.id").
+        Find(&comments); err != nil {
+        return nil, err
+    }
+
+    if err := issue.LoadRepo(ctx); err != nil {
+        return nil, err
+    }
+
+    if err := CommentList(comments).loadPosters(ctx); err != nil {
+        return nil, err
+    }
+
+    // Find all reviews by ReviewID
+    reviews := make(map[int64]*Review)
+    ids := make([]int64, 0, len(comments))
+    for _, comment := range comments {
+        if comment.ReviewID != 0 {
+            ids = append(ids, comment.ReviewID)
+        }
+    }
+    if err := e.In("id", ids).Find(&reviews); err != nil {
+        return nil, err
+    }
+
+    n := 0
+    for _, comment := range comments {
+        if re, ok := reviews[comment.ReviewID]; ok && re != nil {
+            // If the review is pending only the author can see the comments (except if the review is set)
+            if review.ID == 0 && re.Type == ReviewTypePending &&
+                (currentUser == nil || currentUser.ID != re.ReviewerID) {
+                continue
+            }
+            comment.Review = re
+        }
+        comments[n] = comment
+        n++
+
+        if err := comment.LoadResolveDoer(); err != nil {
+            return nil, err
+        }
+
+        if err := comment.LoadReactions(issue.Repo); err != nil {
+            return nil, err
+        }
+
+        var err error
+        if comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+            Ctx:       ctx,
+            URLPrefix: issue.Repo.Link(),
+            Metas:     issue.Repo.ComposeMetas(),
+        }, comment.Content); err != nil {
+            return nil, err
+        }
+    }
+    return comments[:n], nil
+}
+
+// FetchCodeCommentsByLine fetches the code comments for a given treePath and line number
+func FetchCodeCommentsByLine(ctx context.Context, issue *Issue, currentUser *user_model.User, treePath string, line int64) ([]*Comment, error) {
+    opts := FindCommentsOptions{
+        Type:     CommentTypeCode,
+        IssueID:  issue.ID,
+        TreePath: treePath,
+        Line:     line,
+    }
+    return findCodeComments(ctx, opts, issue, currentUser, nil)
+}
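The relocated helpers keep their exported signatures, so existing callers only gain a new source file, not a new API. A short usage sketch; `ctx`, `issue`, `doer` and the logging call are placeholders for whatever the caller already has in scope:

```go
// Group a pull request's code comments by file path and line for rendering.
codeComments, err := issues.FetchCodeComments(ctx, issue, doer)
if err != nil {
	return err
}
for path, lines := range codeComments {
	for line, comments := range lines {
		log.Debug("%s:%d carries %d comments", path, line, len(comments))
	}
}
```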
@@ -2466,3 +2466,8 @@ func DeleteOrphanedIssues() error {
     }
     return nil
 }
+
+// HasOriginalAuthor returns if an issue was migrated and has an original author.
+func (issue *Issue) HasOriginalAuthor() bool {
+    return issue.OriginalAuthor != "" && issue.OriginalAuthorID != 0
+}
@@ -9,6 +9,7 @@ import (
     "context"
     "fmt"
     "io"
+    "strconv"
     "strings"

     "code.gitea.io/gitea/models/db"
@@ -133,6 +134,27 @@ const (
     PullRequestStatusAncestor
 )

+func (status PullRequestStatus) String() string {
+    switch status {
+    case PullRequestStatusConflict:
+        return "CONFLICT"
+    case PullRequestStatusChecking:
+        return "CHECKING"
+    case PullRequestStatusMergeable:
+        return "MERGEABLE"
+    case PullRequestStatusManuallyMerged:
+        return "MANUALLY_MERGED"
+    case PullRequestStatusError:
+        return "ERROR"
+    case PullRequestStatusEmpty:
+        return "EMPTY"
+    case PullRequestStatusAncestor:
+        return "ANCESTOR"
+    default:
+        return strconv.Itoa(int(status))
+    }
+}
+
 // PullRequestFlow the flow of pull request
 type PullRequestFlow int

@@ -204,6 +226,42 @@ func DeletePullsByBaseRepoID(ctx context.Context, repoID int64) error {
     return err
 }

+// ColorFormat writes a colored string to identify this struct
+func (pr *PullRequest) ColorFormat(s fmt.State) {
+    if pr == nil {
+        log.ColorFprintf(s, "PR[%d]%s#%d[%s...%s:%s]",
+            log.NewColoredIDValue(0),
+            log.NewColoredValue("<nil>/<nil>"),
+            log.NewColoredIDValue(0),
+            log.NewColoredValue("<nil>"),
+            log.NewColoredValue("<nil>/<nil>"),
+            log.NewColoredValue("<nil>"),
+        )
+        return
+    }
+
+    log.ColorFprintf(s, "PR[%d]", log.NewColoredIDValue(pr.ID))
+    if pr.BaseRepo != nil {
+        log.ColorFprintf(s, "%s#%d[%s...", log.NewColoredValue(pr.BaseRepo.FullName()),
+            log.NewColoredIDValue(pr.Index), log.NewColoredValue(pr.BaseBranch))
+    } else {
+        log.ColorFprintf(s, "Repo[%d]#%d[%s...", log.NewColoredIDValue(pr.BaseRepoID),
+            log.NewColoredIDValue(pr.Index), log.NewColoredValue(pr.BaseBranch))
+    }
+    if pr.HeadRepoID == pr.BaseRepoID {
+        log.ColorFprintf(s, "%s]", log.NewColoredValue(pr.HeadBranch))
+    } else if pr.HeadRepo != nil {
+        log.ColorFprintf(s, "%s:%s]", log.NewColoredValue(pr.HeadRepo.FullName()), log.NewColoredValue(pr.HeadBranch))
+    } else {
+        log.ColorFprintf(s, "Repo[%d]:%s]", log.NewColoredIDValue(pr.HeadRepoID), log.NewColoredValue(pr.HeadBranch))
+    }
+}
+
+// String represents the pr as a simple string
+func (pr *PullRequest) String() string {
+    return log.ColorFormatAsString(pr)
+}
+
 // MustHeadUserName returns the HeadRepo's username if failed return blank
 func (pr *PullRequest) MustHeadUserName() string {
     if err := pr.LoadHeadRepo(); err != nil {
@@ -255,7 +313,7 @@ func (pr *PullRequest) LoadHeadRepoCtx(ctx context.Context) (err error) {

     pr.HeadRepo, err = repo_model.GetRepositoryByIDCtx(ctx, pr.HeadRepoID)
     if err != nil && !repo_model.IsErrRepoNotExist(err) { // Head repo maybe deleted, but it should still work
-        return fmt.Errorf("getRepositoryByID(head): %w", err)
+        return fmt.Errorf("pr[%d].LoadHeadRepo[%d]: %w", pr.ID, pr.HeadRepoID, err)
     }
     pr.isHeadRepoLoaded = true
 }
@@ -290,7 +348,7 @@ func (pr *PullRequest) LoadBaseRepoCtx(ctx context.Context) (err error) {

     pr.BaseRepo, err = repo_model.GetRepositoryByIDCtx(ctx, pr.BaseRepoID)
     if err != nil {
-        return fmt.Errorf("repo_model.GetRepositoryByID(base): %w", err)
+        return fmt.Errorf("pr[%d].LoadBaseRepo[%d]: %w", pr.ID, pr.BaseRepoID, err)
     }
     return nil
 }
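`PullRequestStatus` now implements `fmt.Stringer`, and `ColorFormat`/`String` give a pull request a compact identity, so log and error messages no longer need manual formatting. A sketch; the output shown is approximate, not taken from the diff:

```go
// Both the PR and its status format as readable strings after this change.
log.Trace("merge check: %s status is %s", pr.String(), pr.Status.String())
// roughly: "merge check: PR[12]user2/repo1#3[main...feature] status is MERGEABLE"
```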
@@ -13,7 +13,6 @@ import (
     "code.gitea.io/gitea/models/unit"
     user_model "code.gitea.io/gitea/models/user"
     "code.gitea.io/gitea/modules/base"
-    "code.gitea.io/gitea/modules/git"
     "code.gitea.io/gitea/modules/log"

     "xorm.io/xorm"
@@ -162,7 +161,7 @@ func (prs PullRequestList) loadAttributes(ctx context.Context) error {
     }

     // Load issues.
-    issueIDs := prs.getIssueIDs()
+    issueIDs := prs.GetIssueIDs()
     issues := make([]*Issue, 0, len(issueIDs))
     if err := db.GetEngine(ctx).
         Where("id > 0").
@@ -181,7 +180,8 @@ func (prs PullRequestList) loadAttributes(ctx context.Context) error {
     return nil
 }

-func (prs PullRequestList) getIssueIDs() []int64 {
+// GetIssueIDs returns all issue ids
+func (prs PullRequestList) GetIssueIDs() []int64 {
     issueIDs := make([]int64, 0, len(prs))
     for i := range prs {
         issueIDs = append(issueIDs, prs[i].IssueID)
@@ -193,24 +193,3 @@ func (prs PullRequestList) getIssueIDs() []int64 {
 func (prs PullRequestList) LoadAttributes() error {
     return prs.loadAttributes(db.DefaultContext)
 }
-
-// InvalidateCodeComments will lookup the prs for code comments which got invalidated by change
-func (prs PullRequestList) InvalidateCodeComments(ctx context.Context, doer *user_model.User, repo *git.Repository, branch string) error {
-    if len(prs) == 0 {
-        return nil
-    }
-    issueIDs := prs.getIssueIDs()
-    var codeComments []*Comment
-    if err := db.GetEngine(ctx).
-        Where("type = ? and invalidated = ?", CommentTypeCode, false).
-        In("issue_id", issueIDs).
-        Find(&codeComments); err != nil {
-        return fmt.Errorf("find code comments: %w", err)
-    }
-    for _, comment := range codeComments {
-        if err := comment.CheckInvalidation(repo, doer, branch); err != nil {
-            return err
-        }
-    }
-    return nil
-}
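`getIssueIDs` is exported as `GetIssueIDs`, and `InvalidateCodeComments` leaves the model layer, presumably so a git-aware caller outside `models` can perform the invalidation itself. A sketch of what such a caller might look like; `invalidateCodeComments` and `checkCommentInvalidation` are hypothetical names, not functions introduced by this diff:

```go
// Hypothetical service-side replacement for the removed model method.
func invalidateCodeComments(ctx context.Context, prs issues.PullRequestList, doer *user_model.User, gitRepo *git.Repository, branch string) error {
	if len(prs) == 0 {
		return nil
	}
	opts := &issues.FindCommentsOptions{
		Type:        issues.CommentTypeCode,
		Invalidated: util.OptionalBoolFalse,
		IssueIDs:    prs.GetIssueIDs(),
	}
	codeComments, err := issues.FindComments(ctx, opts)
	if err != nil {
		return fmt.Errorf("find code comments: %w", err)
	}
	for _, comment := range codeComments {
		// checkCommentInvalidation would blame the line via gitRepo and call
		// issues.UpdateCommentInvalidate when the commented line moved.
		if err := checkCommentInvalidation(ctx, comment, doer, gitRepo, branch); err != nil {
			return err
		}
	}
	return nil
}
```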
@@ -978,7 +978,7 @@ func DeleteReview(r *Review) error {
         ReviewID: r.ID,
     }

-    if _, err := sess.Where(opts.toConds()).Delete(new(Comment)); err != nil {
+    if _, err := sess.Where(opts.ToConds()).Delete(new(Comment)); err != nil {
         return err
     }

@@ -988,7 +988,7 @@ func DeleteReview(r *Review) error {
         ReviewID: r.ID,
     }

-    if _, err := sess.Where(opts.toConds()).Delete(new(Comment)); err != nil {
+    if _, err := sess.Where(opts.ToConds()).Delete(new(Comment)); err != nil {
         return err
     }

@@ -1012,7 +1012,7 @@ func (r *Review) GetCodeCommentsCount() int {
         IssueID:  r.IssueID,
         ReviewID: r.ID,
     }
-    conds := opts.toConds()
+    conds := opts.ToConds()
     if r.ID == 0 {
         conds = conds.And(builder.Eq{"invalidated": false})
     }
@@ -1032,7 +1032,7 @@ func (r *Review) HTMLURL() string {
         ReviewID: r.ID,
     }
     comment := new(Comment)
-    has, err := db.GetEngine(db.DefaultContext).Where(opts.toConds()).Get(comment)
+    has, err := db.GetEngine(db.DefaultContext).Where(opts.ToConds()).Get(comment)
     if err != nil || !has {
         return ""
     }
@@ -396,13 +396,14 @@ func (org *Organization) GetOrgUserMaxAuthorizeLevel(uid int64) (perm.AccessMode
 }
 
 // GetUsersWhoCanCreateOrgRepo returns users which are able to create repo in organization
-func GetUsersWhoCanCreateOrgRepo(ctx context.Context, orgID int64) ([]*user_model.User, error) {
-    users := make([]*user_model.User, 0, 10)
+func GetUsersWhoCanCreateOrgRepo(ctx context.Context, orgID int64) (map[int64]*user_model.User, error) {
+    // Use a map, in order to de-duplicate users.
+    users := make(map[int64]*user_model.User)
     return users, db.GetEngine(ctx).
         Join("INNER", "`team_user`", "`team_user`.uid=`user`.id").
         Join("INNER", "`team`", "`team`.id=`team_user`.team_id").
         Where(builder.Eq{"team.can_create_org_repo": true}.Or(builder.Eq{"team.authorize": perm.AccessModeOwner})).
-        And("team_user.org_id = ?", orgID).Asc("`user`.name").Find(&users)
+        And("team_user.org_id = ?", orgID).Find(&users)
 }
 
 // SearchOrganizationsOptions options to filter organizations
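Since GetUsersWhoCanCreateOrgRepo now returns a map keyed by user ID instead of a name-sorted slice, callers can no longer rely on ordering. A minimal call-site sketch against the signature shown above (the surrounding caller and the notifyUser helper are assumptions for illustration, not part of this changeset):

    users, err := organization.GetUsersWhoCanCreateOrgRepo(ctx, orgID)
    if err != nil {
        return err
    }
    for userID, user := range users {
        // Map iteration order is unspecified; sort the IDs first if a stable order is needed.
        notifyUser(userID, user)
    }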
@@ -92,11 +92,12 @@ func TestUser_GetTeams(t *testing.T) {
     org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
     teams, err := org.LoadTeams()
     assert.NoError(t, err)
-    if assert.Len(t, teams, 4) {
+    if assert.Len(t, teams, 5) {
         assert.Equal(t, int64(1), teams[0].ID)
         assert.Equal(t, int64(2), teams[1].ID)
         assert.Equal(t, int64(12), teams[2].ID)
-        assert.Equal(t, int64(7), teams[3].ID)
+        assert.Equal(t, int64(14), teams[3].ID)
+        assert.Equal(t, int64(7), teams[4].ID)
     }
 }
 
@@ -293,7 +294,7 @@ func TestUser_GetUserTeamIDs(t *testing.T) {
         assert.NoError(t, err)
         assert.Equal(t, expected, teamIDs)
     }
-    testSuccess(2, []int64{1, 2})
+    testSuccess(2, []int64{1, 2, 14})
     testSuccess(4, []int64{2})
     testSuccess(unittest.NonexistentID, []int64{})
 }
@@ -448,7 +449,7 @@ func TestGetUsersWhoCanCreateOrgRepo(t *testing.T) {
     users, err = organization.GetUsersWhoCanCreateOrgRepo(db.DefaultContext, 7)
     assert.NoError(t, err)
     assert.Len(t, users, 1)
-    assert.EqualValues(t, 5, users[0].ID)
+    assert.NotNil(t, users[5])
 }
 
 func TestUser_RemoveOrgRepo(t *testing.T) {
@@ -269,6 +269,16 @@ func Init() error {
     if setting_module.OfflineMode {
         disableGravatar = true
         enableFederatedAvatar = false
+        if !GetSettingBool(KeyPictureDisableGravatar) {
+            if err := SetSettingNoVersion(KeyPictureDisableGravatar, "true"); err != nil {
+                return fmt.Errorf("Failed to set setting %q: %w", KeyPictureDisableGravatar, err)
+            }
+        }
+        if GetSettingBool(KeyPictureEnableFederatedAvatar) {
+            if err := SetSettingNoVersion(KeyPictureEnableFederatedAvatar, "false"); err != nil {
+                return fmt.Errorf("Failed to set setting %q: %w", KeyPictureEnableFederatedAvatar, err)
+            }
+        }
     }
 
     if enableFederatedAvatar || !disableGravatar {
49
models/user/must_change_password.go
Normal file
@@ -0,0 +1,49 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+    "context"
+    "fmt"
+    "strings"
+
+    "code.gitea.io/gitea/models/db"
+
+    "xorm.io/builder"
+)
+
+func SetMustChangePassword(ctx context.Context, all, mustChangePassword bool, include, exclude []string) (int64, error) {
+    sliceTrimSpaceDropEmpty := func(input []string) []string {
+        output := make([]string, 0, len(input))
+        for _, in := range input {
+            in = strings.ToLower(strings.TrimSpace(in))
+            if in == "" {
+                continue
+            }
+            output = append(output, in)
+        }
+        return output
+    }
+
+    var cond builder.Cond
+
+    // Only include the users where something changes to get an accurate count
+    cond = builder.Neq{"must_change_password": mustChangePassword}
+
+    if !all {
+        include = sliceTrimSpaceDropEmpty(include)
+        if len(include) == 0 {
+            return 0, fmt.Errorf("no users to include provided")
+        }
+
+        cond = cond.And(builder.In("lower_name", include))
+    }
+
+    exclude = sliceTrimSpaceDropEmpty(exclude)
+    if len(exclude) > 0 {
+        cond = cond.And(builder.NotIn("lower_name", exclude))
+    }
+
+    return db.GetEngine(ctx).Where(cond).MustCols("must_change_password").Update(&User{MustChangePassword: mustChangePassword})
+}
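A minimal call-site sketch for the new helper (the caller, the user names, and the logger are illustrative assumptions, not part of this diff): force a password change for two named accounts while leaving a service account alone.

    count, err := user_model.SetMustChangePassword(ctx,
        false,                     // all=false: only act on the named users
        true,                      // mustChangePassword=true
        []string{"alice", "Bob "}, // include; the helper trims and lower-cases names
        []string{"ci-bot"},        // exclude
    )
    if err != nil {
        return err
    }
    log.Info("flagged %d users", count) // count only covers rows whose flag actually changed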
@@ -7,8 +7,6 @@ package user
 
 import (
     "context"
-    "crypto/sha256"
-    "crypto/subtle"
     "encoding/hex"
     "fmt"
     "net/url"
@@ -22,6 +20,7 @@ import (
     "code.gitea.io/gitea/models/auth"
     "code.gitea.io/gitea/models/db"
     "code.gitea.io/gitea/modules/auth/openid"
+    "code.gitea.io/gitea/modules/auth/password/hash"
     "code.gitea.io/gitea/modules/base"
     "code.gitea.io/gitea/modules/git"
     "code.gitea.io/gitea/modules/log"
@@ -30,10 +29,6 @@ import (
     "code.gitea.io/gitea/modules/timeutil"
     "code.gitea.io/gitea/modules/util"
 
-    "golang.org/x/crypto/argon2"
-    "golang.org/x/crypto/bcrypt"
-    "golang.org/x/crypto/pbkdf2"
-    "golang.org/x/crypto/scrypt"
     "xorm.io/builder"
 )
 
@@ -48,21 +43,6 @@ const (
     UserTypeOrganization
 )
 
-const (
-    algoBcrypt = "bcrypt"
-    algoScrypt = "scrypt"
-    algoArgon2 = "argon2"
-    algoPbkdf2 = "pbkdf2"
-)
-
-// AvailableHashAlgorithms represents the available password hashing algorithms
-var AvailableHashAlgorithms = []string{
-    algoPbkdf2,
-    algoArgon2,
-    algoScrypt,
-    algoBcrypt,
-}
-
 const (
     // EmailNotificationsEnabled indicates that the user would like to receive all email notifications except your own
     EmailNotificationsEnabled = "enabled"
@@ -368,42 +348,6 @@ func (u *User) NewGitSig() *git.Signature {
     }
 }
 
-func hashPassword(passwd, salt, algo string) (string, error) {
-    var tempPasswd []byte
-    var saltBytes []byte
-
-    // There are two formats for the Salt value:
-    // * The new format is a (32+)-byte hex-encoded string
-    // * The old format was a 10-byte binary format
-    // We have to tolerate both here but Authenticate should
-    // regenerate the Salt following a successful validation.
-    if len(salt) == 10 {
-        saltBytes = []byte(salt)
-    } else {
-        var err error
-        saltBytes, err = hex.DecodeString(salt)
-        if err != nil {
-            return "", err
-        }
-    }
-
-    switch algo {
-    case algoBcrypt:
-        tempPasswd, _ = bcrypt.GenerateFromPassword([]byte(passwd), bcrypt.DefaultCost)
-        return string(tempPasswd), nil
-    case algoScrypt:
-        tempPasswd, _ = scrypt.Key([]byte(passwd), saltBytes, 65536, 16, 2, 50)
-    case algoArgon2:
-        tempPasswd = argon2.IDKey([]byte(passwd), saltBytes, 2, 65536, 8, 50)
-    case algoPbkdf2:
-        fallthrough
-    default:
-        tempPasswd = pbkdf2.Key([]byte(passwd), saltBytes, 10000, 50, sha256.New)
-    }
-
-    return fmt.Sprintf("%x", tempPasswd), nil
-}
-
 // SetPassword hashes a password using the algorithm defined in the config value of PASSWORD_HASH_ALGO
 // change passwd, salt and passwd_hash_algo fields
 func (u *User) SetPassword(passwd string) (err error) {
@@ -417,7 +361,7 @@ func (u *User) SetPassword(passwd string) (err error) {
     if u.Salt, err = GetUserSalt(); err != nil {
         return err
     }
-    if u.Passwd, err = hashPassword(passwd, u.Salt, setting.PasswordHashAlgo); err != nil {
+    if u.Passwd, err = hash.Parse(setting.PasswordHashAlgo).Hash(passwd, u.Salt); err != nil {
        return err
     }
     u.PasswdHashAlgo = setting.PasswordHashAlgo
@@ -425,20 +369,9 @@ func (u *User) SetPassword(passwd string) (err error) {
     return nil
 }
 
-// ValidatePassword checks if given password matches the one belongs to the user.
+// ValidatePassword checks if the given password matches the one belonging to the user.
 func (u *User) ValidatePassword(passwd string) bool {
-    tempHash, err := hashPassword(passwd, u.Salt, u.PasswdHashAlgo)
-    if err != nil {
-        return false
-    }
-
-    if u.PasswdHashAlgo != algoBcrypt && subtle.ConstantTimeCompare([]byte(u.Passwd), []byte(tempHash)) == 1 {
-        return true
-    }
-    if u.PasswdHashAlgo == algoBcrypt && bcrypt.CompareHashAndPassword([]byte(u.Passwd), []byte(passwd)) == nil {
-        return true
-    }
-    return false
+    return hash.Parse(u.PasswdHashAlgo).VerifyPassword(passwd, u.Passwd, u.Salt)
 }
 
 // IsPasswordSet checks if the password is set or left empty
@@ -1227,7 +1160,10 @@ func GetUserByOpenID(uri string) (*User, error) {
 // GetAdminUser returns the first administrator
 func GetAdminUser() (*User, error) {
     var admin User
-    has, err := db.GetEngine(db.DefaultContext).Where("is_admin=?", true).Get(&admin)
+    has, err := db.GetEngine(db.DefaultContext).
+        Where("is_admin=?", true).
+        Asc("id"). // Reliably get the admin with the lowest ID.
+        Get(&admin)
     if err != nil {
         return nil, err
     } else if !has {
@@ -13,6 +13,7 @@ import (
     "code.gitea.io/gitea/models/db"
     "code.gitea.io/gitea/models/unittest"
     user_model "code.gitea.io/gitea/models/user"
+    "code.gitea.io/gitea/modules/auth/password/hash"
     "code.gitea.io/gitea/modules/setting"
     "code.gitea.io/gitea/modules/structs"
     "code.gitea.io/gitea/modules/util"
@@ -162,7 +163,7 @@ func TestEmailNotificationPreferences(t *testing.T) {
 func TestHashPasswordDeterministic(t *testing.T) {
     b := make([]byte, 16)
     u := &user_model.User{}
-    algos := []string{"argon2", "pbkdf2", "scrypt", "bcrypt"}
+    algos := hash.RecommendedHashAlgorithms
     for j := 0; j < len(algos); j++ {
         u.PasswdHashAlgo = algos[j]
         for i := 0; i < 50; i++ {
77
modules/auth/password/hash/argon2.go
Normal file
@@ -0,0 +1,77 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "encoding/hex"
+    "strings"
+
+    "code.gitea.io/gitea/modules/log"
+
+    "golang.org/x/crypto/argon2"
+)
+
+func init() {
+    Register("argon2", NewArgon2Hasher)
+}
+
+// Argon2Hasher implements PasswordHasher
+// and uses the Argon2 key derivation function, hybrant variant
+type Argon2Hasher struct {
+    time    uint32
+    memory  uint32
+    threads uint8
+    keyLen  uint32
+}
+
+// HashWithSaltBytes a provided password and salt
+func (hasher *Argon2Hasher) HashWithSaltBytes(password string, salt []byte) string {
+    if hasher == nil {
+        return ""
+    }
+    return hex.EncodeToString(argon2.IDKey([]byte(password), salt, hasher.time, hasher.memory, hasher.threads, hasher.keyLen))
+}
+
+// NewArgon2Hasher is a factory method to create an Argon2Hasher
+// The provided config should be either empty or of the form:
+// "<time>$<memory>$<threads>$<keyLen>", where <x> is the string representation
+// of an integer
+func NewArgon2Hasher(config string) *Argon2Hasher {
+    // This default configuration uses the following parameters:
+    // time=2, memory=64*1024, threads=8, keyLen=50.
+    // It will make two passes through the memory, using 64MiB in total.
+    hasher := &Argon2Hasher{
+        time:    2,
+        memory:  1 << 16,
+        threads: 8,
+        keyLen:  50,
+    }
+
+    if config == "" {
+        return hasher
+    }
+
+    vals := strings.SplitN(config, "$", 4)
+    if len(vals) != 4 {
+        log.Error("invalid argon2 hash spec %s", config)
+        return nil
+    }
+
+    parsed, err := parseUIntParam(vals[0], "time", "argon2", config, nil)
+    hasher.time = uint32(parsed)
+
+    parsed, err = parseUIntParam(vals[1], "memory", "argon2", config, err)
+    hasher.memory = uint32(parsed)
+
+    parsed, err = parseUIntParam(vals[2], "threads", "argon2", config, err)
+    hasher.threads = uint8(parsed)
+
+    parsed, err = parseUIntParam(vals[3], "keyLen", "argon2", config, err)
+    hasher.keyLen = uint32(parsed)
+    if err != nil {
+        return nil
+    }
+
+    return hasher
+}
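The "<time>$<memory>$<threads>$<keyLen>" spec format documented above is easy to exercise in a package-local test; a short illustrative sketch (this test and the tuned spec are assumptions, not part of the changeset):

    func TestArgon2ConfigSketch(t *testing.T) {
        assert.NotNil(t, NewArgon2Hasher(""))              // defaults: time=2, memory=64MiB, threads=8, keyLen=50
        assert.NotNil(t, NewArgon2Hasher("3$131072$4$50")) // hypothetical tuning: 3 passes, 128MiB, 4 threads
        assert.Nil(t, NewArgon2Hasher("3$131072$4"))       // wrong field count -> nil
        assert.Nil(t, NewArgon2Hasher("a$b$c$d"))          // non-integer fields -> nil
    }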
51
modules/auth/password/hash/bcrypt.go
Normal file
@@ -0,0 +1,51 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "golang.org/x/crypto/bcrypt"
+)
+
+func init() {
+    Register("bcrypt", NewBcryptHasher)
+}
+
+// BcryptHasher implements PasswordHasher
+// and uses the bcrypt password hash function.
+type BcryptHasher struct {
+    cost int
+}
+
+// HashWithSaltBytes a provided password and salt
+func (hasher *BcryptHasher) HashWithSaltBytes(password string, salt []byte) string {
+    if hasher == nil {
+        return ""
+    }
+    hashedPassword, _ := bcrypt.GenerateFromPassword([]byte(password), hasher.cost)
+    return string(hashedPassword)
+}
+
+func (hasher *BcryptHasher) VerifyPassword(password, hashedPassword, salt string) bool {
+    return bcrypt.CompareHashAndPassword([]byte(hashedPassword), []byte(password)) == nil
+}
+
+// NewBcryptHasher is a factory method to create an BcryptHasher
+// The provided config should be either empty or the string representation of the "<cost>"
+// as an integer
+func NewBcryptHasher(config string) *BcryptHasher {
+    hasher := &BcryptHasher{
+        cost: 10, // cost=10. i.e. 2^10 rounds of key expansion.
+    }
+
+    if config == "" {
+        return hasher
+    }
+    var err error
+    hasher.cost, err = parseIntParam(config, "cost", "bcrypt", config, nil)
+    if err != nil {
+        return nil
+    }
+
+    return hasher
+}
28
modules/auth/password/hash/common.go
Normal file
@@ -0,0 +1,28 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "strconv"
+
+    "code.gitea.io/gitea/modules/log"
+)
+
+func parseIntParam(value, param, algorithmName, config string, previousErr error) (int, error) {
+    parsed, err := strconv.Atoi(value)
+    if err != nil {
+        log.Error("invalid integer for %s representation in %s hash spec %s", param, algorithmName, config)
+        return 0, err
+    }
+    return parsed, previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed
+}
+
+func parseUIntParam(value, param, algorithmName, config string, previousErr error) (uint64, error) {
+    parsed, err := strconv.ParseUint(value, 10, 64)
+    if err != nil {
+        log.Error("invalid integer for %s representation in %s hash spec %s", param, algorithmName, config)
+        return 0, err
+    }
+    return parsed, previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed
+}
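The previousErr threading above lets a caller parse several spec fields and only check the error once at the end, as the hasher factories later in this changeset do. A small illustrative test sketch of that behaviour (hypothetical test, the "demo" spec name is made up):

    func TestParseIntParamChaining(t *testing.T) {
        spec := "10$x$50" // "x" is deliberately not an integer
        a, err := parseIntParam("10", "a", "demo", spec, nil)
        b, err := parseIntParam("x", "b", "demo", spec, err)  // fails; err now records the failure
        c, err := parseIntParam("50", "c", "demo", spec, err) // parses fine, but the earlier error is kept
        assert.Error(t, err)
        assert.Equal(t, 10, a)
        assert.Equal(t, 0, b)
        assert.Equal(t, 50, c)
    }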
147
modules/auth/password/hash/hash.go
Normal file
@@ -0,0 +1,147 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "crypto/subtle"
+    "encoding/hex"
+    "fmt"
+    "strings"
+    "sync/atomic"
+
+    "code.gitea.io/gitea/modules/log"
+)
+
+// This package takes care of hashing passwords, verifying passwords, defining
+// available password algorithms, defining recommended password algorithms and
+// choosing the default password algorithm.
+
+// PasswordSaltHasher will hash a provided password with the provided saltBytes
+type PasswordSaltHasher interface {
+    HashWithSaltBytes(password string, saltBytes []byte) string
+}
+
+// PasswordHasher will hash a provided password with the salt
+type PasswordHasher interface {
+    Hash(password, salt string) (string, error)
+}
+
+// PasswordVerifier will ensure that a providedPassword matches the hashPassword when hashed with the salt
+type PasswordVerifier interface {
+    VerifyPassword(providedPassword, hashedPassword, salt string) bool
+}
+
+// PasswordHashAlgorithms are named PasswordSaltHashers with a default verifier and hash function
+type PasswordHashAlgorithm struct {
+    PasswordSaltHasher
+    Name string
+}
+
+// Hash the provided password with the salt and return the hash
+func (algorithm *PasswordHashAlgorithm) Hash(password, salt string) (string, error) {
+    var saltBytes []byte
+
+    // There are two formats for the salt value:
+    // * The new format is a (32+)-byte hex-encoded string
+    // * The old format was a 10-byte binary format
+    // We have to tolerate both here.
+    if len(salt) == 10 {
+        saltBytes = []byte(salt)
+    } else {
+        var err error
+        saltBytes, err = hex.DecodeString(salt)
+        if err != nil {
+            return "", err
+        }
+    }
+
+    return algorithm.HashWithSaltBytes(password, saltBytes), nil
+}
+
+// Verify the provided password matches the hashPassword when hashed with the salt
+func (algorithm *PasswordHashAlgorithm) VerifyPassword(providedPassword, hashedPassword, salt string) bool {
+    // The bcrypt package has its own specialized compare function that takes into
+    // account the stored password's bcrypt parameters.
+    if verifier, ok := algorithm.PasswordSaltHasher.(PasswordVerifier); ok {
+        return verifier.VerifyPassword(providedPassword, hashedPassword, salt)
+    }
+
+    // Compute the hash of the password.
+    providedPasswordHash, err := algorithm.Hash(providedPassword, salt)
+    if err != nil {
+        log.Error("passwordhash: %v.Hash(): %v", algorithm.Name, err)
+        return false
+    }
+
+    // Compare it against the hashed password in constant-time.
+    return subtle.ConstantTimeCompare([]byte(hashedPassword), []byte(providedPasswordHash)) == 1
+}
+
+var (
+    lastNonDefaultAlgorithm  atomic.Value
+    availableHasherFactories = map[string]func(string) PasswordSaltHasher{}
+)
+
+// Register registers a PasswordSaltHasher with the availableHasherFactories
+// This is not thread safe.
+func Register[T PasswordSaltHasher](name string, newFn func(config string) T) {
+    if _, has := availableHasherFactories[name]; has {
+        panic(fmt.Errorf("duplicate registration of password salt hasher: %s", name))
+    }
+
+    availableHasherFactories[name] = func(config string) PasswordSaltHasher {
+        n := newFn(config)
+        return n
+    }
+}
+
+// In early versions of gitea the password hash algorithm field could be empty
+// At that point the default was `pbkdf2` without configuration values
+// Please note this is not the same as the DefaultAlgorithm
+const defaultEmptyHashAlgorithmName = "pbkdf2"
+
+func Parse(algorithm string) *PasswordHashAlgorithm {
+    if algorithm == "" {
+        algorithm = defaultEmptyHashAlgorithmName
+    }
+
+    if DefaultHashAlgorithm != nil && algorithm == DefaultHashAlgorithm.Name {
+        return DefaultHashAlgorithm
+    }
+
+    ptr := lastNonDefaultAlgorithm.Load()
+    if ptr != nil {
+        hashAlgorithm, ok := ptr.(*PasswordHashAlgorithm)
+        if ok && hashAlgorithm.Name == algorithm {
+            return hashAlgorithm
+        }
+    }
+
+    vals := strings.SplitN(algorithm, "$", 2)
+    var name string
+    var config string
+    if len(vals) == 0 {
+        return nil
+    }
+    name = vals[0]
+    if len(vals) > 1 {
+        config = vals[1]
+    }
+    newFn, has := availableHasherFactories[name]
+    if !has {
+        return nil
+    }
+    ph := newFn(config)
+    if ph == nil {
+        return nil
+    }
+    hashAlgorithm := &PasswordHashAlgorithm{
+        PasswordSaltHasher: ph,
+        Name:               algorithm,
+    }
+
+    lastNonDefaultAlgorithm.Store(hashAlgorithm)
+
+    return hashAlgorithm
+}
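Taken together, Register, Parse, Hash and VerifyPassword give the flow that the user.SetPassword/ValidatePassword hunks above rely on. A minimal end-to-end sketch, assuming Gitea's module path for the new package; the password and hex salt are made-up values, and real call sites take the algorithm from setting.PasswordHashAlgo and the salt from GetUserSalt():

    package main

    import (
        "fmt"

        "code.gitea.io/gitea/modules/auth/password/hash"
    )

    func main() {
        algo := hash.Parse("pbkdf2$50000$50")
        if algo == nil {
            panic("unknown password hash algorithm")
        }
        salt := "deadbeefcafe" // made-up hex-encoded salt
        hashed, err := algo.Hash("correct horse battery staple", salt)
        if err != nil {
            panic(err)
        }
        fmt.Println(algo.VerifyPassword("correct horse battery staple", hashed, salt)) // true
        fmt.Println(algo.VerifyPassword("wrong password", hashed, salt))               // false
    }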
186
modules/auth/password/hash/hash_test.go
Normal file
@@ -0,0 +1,186 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "encoding/hex"
+    "strconv"
+    "strings"
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+)
+
+type testSaltHasher string
+
+func (t testSaltHasher) HashWithSaltBytes(password string, salt []byte) string {
+    return password + "$" + string(salt) + "$" + string(t)
+}
+
+func Test_registerHasher(t *testing.T) {
+    Register("Test_registerHasher", func(config string) testSaltHasher {
+        return testSaltHasher(config)
+    })
+
+    assert.Panics(t, func() {
+        Register("Test_registerHasher", func(config string) testSaltHasher {
+            return testSaltHasher(config)
+        })
+    })
+
+    assert.Equal(t, "password$salt$",
+        Parse("Test_registerHasher").PasswordSaltHasher.HashWithSaltBytes("password", []byte("salt")))
+
+    assert.Equal(t, "password$salt$config",
+        Parse("Test_registerHasher$config").PasswordSaltHasher.HashWithSaltBytes("password", []byte("salt")))
+
+    delete(availableHasherFactories, "Test_registerHasher")
+}
+
+func TestParse(t *testing.T) {
+    hashAlgorithmsToTest := []string{}
+    for plainHashAlgorithmNames := range availableHasherFactories {
+        hashAlgorithmsToTest = append(hashAlgorithmsToTest, plainHashAlgorithmNames)
+    }
+    for _, aliased := range aliasAlgorithmNames {
+        if strings.Contains(aliased, "$") {
+            hashAlgorithmsToTest = append(hashAlgorithmsToTest, aliased)
+        }
+    }
+    for _, algorithmName := range hashAlgorithmsToTest {
+        t.Run(algorithmName, func(t *testing.T) {
+            algo := Parse(algorithmName)
+            assert.NotNil(t, algo, "Algorithm %s resulted in an empty algorithm", algorithmName)
+        })
+    }
+}
+
+func TestHashing(t *testing.T) {
+    hashAlgorithmsToTest := []string{}
+    for plainHashAlgorithmNames := range availableHasherFactories {
+        hashAlgorithmsToTest = append(hashAlgorithmsToTest, plainHashAlgorithmNames)
+    }
+    for _, aliased := range aliasAlgorithmNames {
+        if strings.Contains(aliased, "$") {
+            hashAlgorithmsToTest = append(hashAlgorithmsToTest, aliased)
+        }
+    }
+
+    runTests := func(password, salt string, shouldPass bool) {
+        for _, algorithmName := range hashAlgorithmsToTest {
+            t.Run(algorithmName, func(t *testing.T) {
+                output, err := Parse(algorithmName).Hash(password, salt)
+                if shouldPass {
+                    assert.NoError(t, err)
+                    assert.NotEmpty(t, output, "output for %s was empty", algorithmName)
+                } else {
+                    assert.Error(t, err)
+                }
+
+                assert.Equal(t, Parse(algorithmName).VerifyPassword(password, output, salt), shouldPass)
+            })
+        }
+    }
+
+    // Test with new salt format.
+    runTests(strings.Repeat("a", 16), hex.EncodeToString([]byte{0x01, 0x02, 0x03}), true)
+
+    // Test with legacy salt format.
+    runTests(strings.Repeat("a", 16), strings.Repeat("b", 10), true)
+
+    // Test with invalid salt.
+    runTests(strings.Repeat("a", 16), "a", false)
+}
+
+// vectors were generated using the current codebase.
+var vectors = []struct {
+    algorithms []string
+    password   string
+    salt       string
+    output     string
+    shouldfail bool
+}{
+    {
+        algorithms: []string{"bcrypt", "bcrypt$10"},
+        password:   "abcdef",
+        salt:       strings.Repeat("a", 10),
+        output:     "$2a$10$fjtm8BsQ2crym01/piJroenO3oSVUBhSLKaGdTYJ4tG0ePVCrU0G2",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"scrypt", "scrypt$65536$16$2$50"},
+        password:   "abcdef",
+        salt:       strings.Repeat("a", 10),
+        output:     "3b571d0c07c62d42b7bad3dbf18fb0cd67d4d8cd4ad4c6928e1090e5b2a4a84437c6fd2627d897c0e7e65025ca62b67a0002",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"argon2", "argon2$2$65536$8$50"},
+        password:   "abcdef",
+        salt:       strings.Repeat("a", 10),
+        output:     "551f089f570f989975b6f7c6a8ff3cf89bc486dd7bbe87ed4d80ad4362f8ee599ec8dda78dac196301b98456402bcda775dc",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"pbkdf2", "pbkdf2$10000$50"},
+        password:   "abcdef",
+        salt:       strings.Repeat("a", 10),
+        output:     "ab48d5471b7e6ed42d10001db88c852ff7303c788e49da5c3c7b63d5adf96360303724b74b679223a3dea8a242d10abb1913",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"bcrypt", "bcrypt$10"},
+        password:   "abcdef",
+        salt:       hex.EncodeToString([]byte{0x01, 0x02, 0x03, 0x04}),
+        output:     "$2a$10$qhgm32w9ZpqLygugWJsLjey8xRGcaq9iXAfmCeNBXxddgyoaOC3Gq",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"scrypt", "scrypt$65536$16$2$50"},
+        password:   "abcdef",
+        salt:       hex.EncodeToString([]byte{0x01, 0x02, 0x03, 0x04}),
+        output:     "25fe5f66b43fa4eb7b6717905317cd2223cf841092dc8e0a1e8c75720ad4846cb5d9387303e14bc3c69faa3b1c51ef4b7de1",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"argon2", "argon2$2$65536$8$50"},
+        password:   "abcdef",
+        salt:       hex.EncodeToString([]byte{0x01, 0x02, 0x03, 0x04}),
+        output:     "9c287db63a91d18bb1414b703216da4fc431387c1ae7c8acdb280222f11f0929831055dbfd5126a3b48566692e83ec750d2a",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"pbkdf2", "pbkdf2$10000$50"},
+        password:   "abcdef",
+        salt:       hex.EncodeToString([]byte{0x01, 0x02, 0x03, 0x04}),
+        output:     "45d6cdc843d65cf0eda7b90ab41435762a282f7df013477a1c5b212ba81dbdca2edf1ecc4b5cb05956bb9e0c37ab29315d78",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"pbkdf2$320000$50"},
+        password:   "abcdef",
+        salt:       hex.EncodeToString([]byte{0x01, 0x02, 0x03, 0x04}),
+        output:     "84e233114499e8721da80e85568e5b7b5900b3e49a30845fcda9d1e1756da4547d70f8740ac2b4a5d82f88cebcd27f21bfe2",
+        shouldfail: false,
+    },
+    {
+        algorithms: []string{"pbkdf2", "pbkdf2$10000$50"},
+        password:   "abcdef",
+        salt:       "",
+        output:     "",
+        shouldfail: true,
+    },
+}
+
+// Ensure that the current code will correctly verify against the test vectors.
+func TestVectors(t *testing.T) {
+    for i, vector := range vectors {
+        for _, algorithm := range vector.algorithms {
+            t.Run(strconv.Itoa(i)+": "+algorithm, func(t *testing.T) {
+                pa := Parse(algorithm)
+                assert.Equal(t, !vector.shouldfail, pa.VerifyPassword(vector.password, vector.output, vector.salt))
+            })
+        }
+    }
+}
62
modules/auth/password/hash/pbkdf2.go
Normal file
@@ -0,0 +1,62 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "crypto/sha256"
+    "encoding/hex"
+    "strings"
+
+    "code.gitea.io/gitea/modules/log"
+
+    "golang.org/x/crypto/pbkdf2"
+)
+
+func init() {
+    Register("pbkdf2", NewPBKDF2Hasher)
+}
+
+// PBKDF2Hasher implements PasswordHasher
+// and uses the PBKDF2 key derivation function.
+type PBKDF2Hasher struct {
+    iter, keyLen int
+}
+
+// HashWithSaltBytes a provided password and salt
+func (hasher *PBKDF2Hasher) HashWithSaltBytes(password string, salt []byte) string {
+    if hasher == nil {
+        return ""
+    }
+    return hex.EncodeToString(pbkdf2.Key([]byte(password), salt, hasher.iter, hasher.keyLen, sha256.New))
+}
+
+// NewPBKDF2Hasher is a factory method to create an PBKDF2Hasher
+// config should be either empty or of the form:
+// "<iter>$<keyLen>", where <x> is the string representation
+// of an integer
+func NewPBKDF2Hasher(config string) *PBKDF2Hasher {
+    hasher := &PBKDF2Hasher{
+        iter:   10_000,
+        keyLen: 50,
+    }
+
+    if config == "" {
+        return hasher
+    }
+
+    vals := strings.SplitN(config, "$", 2)
+    if len(vals) != 2 {
+        log.Error("invalid pbkdf2 hash spec %s", config)
+        return nil
+    }
+
+    var err error
+    hasher.iter, err = parseIntParam(vals[0], "iter", "pbkdf2", config, nil)
+    hasher.keyLen, err = parseIntParam(vals[1], "keyLen", "pbkdf2", config, err)
+    if err != nil {
+        return nil
+    }
+
+    return hasher
+}
64
modules/auth/password/hash/scrypt.go
Normal file
@@ -0,0 +1,64 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "encoding/hex"
+    "strings"
+
+    "code.gitea.io/gitea/modules/log"
+
+    "golang.org/x/crypto/scrypt"
+)
+
+func init() {
+    Register("scrypt", NewScryptHasher)
+}
+
+// ScryptHasher implements PasswordHasher
+// and uses the scrypt key derivation function.
+type ScryptHasher struct {
+    n, r, p, keyLen int
+}
+
+// HashWithSaltBytes a provided password and salt
+func (hasher *ScryptHasher) HashWithSaltBytes(password string, salt []byte) string {
+    if hasher == nil {
+        return ""
+    }
+    hashedPassword, _ := scrypt.Key([]byte(password), salt, hasher.n, hasher.r, hasher.p, hasher.keyLen)
+    return hex.EncodeToString(hashedPassword)
+}
+
+// NewScryptHasher is a factory method to create an ScryptHasher
+// The provided config should be either empty or of the form:
+// "<n>$<r>$<p>$<keyLen>", where <x> is the string representation
+// of an integer
+func NewScryptHasher(config string) *ScryptHasher {
+    hasher := &ScryptHasher{
+        n:      1 << 16,
+        r:      16,
+        p:      2, // 2 passes through memory - this default config will use 128MiB in total.
+        keyLen: 50,
+    }
+
+    if config == "" {
+        return hasher
+    }
+
+    vals := strings.SplitN(config, "$", 4)
+    if len(vals) != 4 {
+        log.Error("invalid scrypt hash spec %s", config)
+        return nil
+    }
+    var err error
+    hasher.n, err = parseIntParam(vals[0], "n", "scrypt", config, nil)
+    hasher.r, err = parseIntParam(vals[1], "r", "scrypt", config, err)
+    hasher.p, err = parseIntParam(vals[2], "p", "scrypt", config, err)
+    hasher.keyLen, err = parseIntParam(vals[3], "keyLen", "scrypt", config, err)
+    if err != nil {
+        return nil
+    }
+    return hasher
+}
44
modules/auth/password/hash/setting.go
Normal file
@@ -0,0 +1,44 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+const DefaultHashAlgorithmName = "pbkdf2"
+
+var DefaultHashAlgorithm *PasswordHashAlgorithm
+
+var aliasAlgorithmNames = map[string]string{
+    "argon2":    "argon2$2$65536$8$50",
+    "bcrypt":    "bcrypt$10",
+    "scrypt":    "scrypt$65536$16$2$50",
+    "pbkdf2":    "pbkdf2_v2", // pbkdf2 should default to pbkdf2_v2
+    "pbkdf2_v1": "pbkdf2$10000$50",
+    // The latest PBKDF2 password algorithm is used as the default since it doesn't
+    // use a lot of memory and is safer to use on less powerful devices.
+    "pbkdf2_v2": "pbkdf2$50000$50",
+    // The pbkdf2_hi password algorithm is offered as a stronger alternative to the
+    // slightly improved pbkdf2_v2 algorithm
+    "pbkdf2_hi": "pbkdf2$320000$50",
+}
+
+var RecommendedHashAlgorithms = []string{
+    "pbkdf2",
+    "argon2",
+    "bcrypt",
+    "scrypt",
+    "pbkdf2_hi",
+}
+
+func SetDefaultPasswordHashAlgorithm(algorithmName string) (string, *PasswordHashAlgorithm) {
+    if algorithmName == "" {
+        algorithmName = DefaultHashAlgorithmName
+    }
+    alias, has := aliasAlgorithmNames[algorithmName]
+    for has {
+        algorithmName = alias
+        alias, has = aliasAlgorithmNames[algorithmName]
+    }
+    DefaultHashAlgorithm = Parse(algorithmName)
+
+    return algorithmName, DefaultHashAlgorithm
+}
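The alias table above resolves transitively, so "pbkdf2" expands to "pbkdf2_v2" and then to the concrete "pbkdf2$50000$50" spec. A short sketch of what SetDefaultPasswordHashAlgorithm reports for two inputs (values read straight off the map in this file; the module import path is assumed to be Gitea's):

    package main

    import (
        "fmt"

        "code.gitea.io/gitea/modules/auth/password/hash"
    )

    func main() {
        name, algo := hash.SetDefaultPasswordHashAlgorithm("pbkdf2")
        fmt.Println(name, algo.Name) // "pbkdf2$50000$50": pbkdf2 -> pbkdf2_v2 -> pbkdf2$50000$50

        name, algo = hash.SetDefaultPasswordHashAlgorithm("")
        fmt.Println(name, algo.Name) // the empty setting falls back to "pbkdf2" and resolves the same way
    }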
38
modules/auth/password/hash/setting_test.go
Normal file
@@ -0,0 +1,38 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package hash
+
+import (
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+)
+
+func TestCheckSettingPasswordHashAlgorithm(t *testing.T) {
+    t.Run("pbkdf2 is pbkdf2_v2", func(t *testing.T) {
+        pbkdf2v2Config, pbkdf2v2Algo := SetDefaultPasswordHashAlgorithm("pbkdf2_v2")
+        pbkdf2Config, pbkdf2Algo := SetDefaultPasswordHashAlgorithm("pbkdf2")
+
+        assert.Equal(t, pbkdf2v2Config, pbkdf2Config)
+        assert.Equal(t, pbkdf2v2Algo.Name, pbkdf2Algo.Name)
+    })
+
+    for a, b := range aliasAlgorithmNames {
+        t.Run(a+"="+b, func(t *testing.T) {
+            aConfig, aAlgo := SetDefaultPasswordHashAlgorithm(a)
+            bConfig, bAlgo := SetDefaultPasswordHashAlgorithm(b)
+
+            assert.Equal(t, bConfig, aConfig)
+            assert.Equal(t, aAlgo.Name, bAlgo.Name)
+        })
+    }
+
+    t.Run("pbkdf2_v2 is the default when default password hash algorithm is empty", func(t *testing.T) {
+        emptyConfig, emptyAlgo := SetDefaultPasswordHashAlgorithm("")
+        pbkdf2v2Config, pbkdf2v2Algo := SetDefaultPasswordHashAlgorithm("pbkdf2_v2")
+
+        assert.Equal(t, pbkdf2v2Config, emptyConfig)
+        assert.Equal(t, pbkdf2v2Algo.Name, emptyAlgo.Name)
+    })
+}
@@ -12,8 +12,8 @@ import (
     "strings"
     "sync"
 
-    "code.gitea.io/gitea/modules/context"
     "code.gitea.io/gitea/modules/setting"
+    "code.gitea.io/gitea/modules/translation"
 )
 
 // complexity contains information about a particular kind of password complexity
@@ -113,13 +113,13 @@ func Generate(n int) (string, error) {
 }
 
 // BuildComplexityError builds the error message when password complexity checks fail
-func BuildComplexityError(ctx *context.Context) string {
+func BuildComplexityError(locale translation.Locale) string {
     var buffer bytes.Buffer
-    buffer.WriteString(ctx.Tr("form.password_complexity"))
+    buffer.WriteString(locale.Tr("form.password_complexity"))
     buffer.WriteString("<ul>")
     for _, c := range requiredList {
         buffer.WriteString("<li>")
-        buffer.WriteString(ctx.Tr(c.TrNameOne))
+        buffer.WriteString(locale.Tr(c.TrNameOne))
         buffer.WriteString("</li>")
     }
     buffer.WriteString("</ul>")
@@ -45,7 +45,7 @@ func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation.
     return streamer.escaped, err
 }
 
-// EscapeControlStringReader escapes the unicode control sequences in a provided reader of string content and writer in a locale and returns the findings as an EscapeStatus and the escaped []byte
+// EscapeControlStringReader escapes the unicode control sequences in a provided reader of string content and writer in a locale and returns the findings as an EscapeStatus and the escaped []byte. HTML line breaks are not inserted after every newline by this method.
 func EscapeControlStringReader(reader io.Reader, writer io.Writer, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, err error) {
     bufRd := bufio.NewReader(reader)
     outputStream := &HTMLStreamerWriter{Writer: writer}
@@ -66,10 +66,6 @@ func EscapeControlStringReader(reader io.Reader, writer io.Writer, locale transl
             }
             break
         }
-        if err := streamer.SelfClosingTag("br"); err != nil {
-            streamer.escaped.HasError = true
-            return streamer.escaped, err
-        }
     }
     return streamer.escaped, err
 }
@@ -7,7 +7,6 @@ package charset
 import (
     "fmt"
     "regexp"
-    "sort"
     "strings"
     "unicode"
     "unicode/utf8"
@@ -21,12 +20,16 @@ import (
 var defaultWordRegexp = regexp.MustCompile(`(-?\d*\.\d\w*)|([^\` + "`" + `\~\!\@\#\$\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s\x00-\x1f]+)`)
 
 func NewEscapeStreamer(locale translation.Locale, next HTMLStreamer, allowed ...rune) HTMLStreamer {
+    allowedM := make(map[rune]bool, len(allowed))
+    for _, v := range allowed {
+        allowedM[v] = true
+    }
     return &escapeStreamer{
         escaped:                 &EscapeStatus{},
         PassthroughHTMLStreamer: *NewPassthroughStreamer(next),
         locale:                  locale,
         ambiguousTables:         AmbiguousTablesForLocale(locale),
-        allowed:                 allowed,
+        allowed:                 allowedM,
     }
 }
 
@@ -35,7 +38,7 @@ type escapeStreamer struct {
     escaped         *EscapeStatus
     locale          translation.Locale
     ambiguousTables []*AmbiguousTable
-    allowed         []rune
+    allowed         map[rune]bool
 }
 
 func (e *escapeStreamer) EscapeStatus() *EscapeStatus {
@@ -257,7 +260,7 @@ func (e *escapeStreamer) runeTypes(runes ...rune) (types []runeType, confusables
         runeCounts.numBrokenRunes++
     case r == ' ' || r == '\t' || r == '\n':
         runeCounts.numBasicRunes++
-    case e.isAllowed(r):
+    case e.allowed[r]:
         if r > 0x7e || r < 0x20 {
             types[i] = nonBasicASCIIRuneType
             runeCounts.numNonConfusingNonBasicRunes++
@@ -283,16 +286,3 @@ func (e *escapeStreamer) runeTypes(runes ...rune) (types []runeType, confusables
     }
     return types, confusables, runeCounts
 }
-
-func (e *escapeStreamer) isAllowed(r rune) bool {
-    if len(e.allowed) == 0 {
-        return false
-    }
-    if len(e.allowed) == 1 {
-        return e.allowed[0] == r
-    }
-
-    return sort.Search(len(e.allowed), func(i int) bool {
-        return e.allowed[i] >= r
-    }) >= 0
-}
@@ -7,8 +7,8 @@ package context
 import (
     "bytes"
     "context"
-    "html/template"
     "net/http"
+    "text/template"
     "time"
 
     "code.gitea.io/gitea/modules/log"
@@ -25,11 +25,11 @@ type BlamePart struct {
 // BlameReader returns part of file blame one by one
 type BlameReader struct {
     cmd            *exec.Cmd
-    output         io.ReadCloser
-    reader         *bufio.Reader
+    reader         io.ReadCloser
     lastSha        *string
     cancel         context.CancelFunc   // Cancels the context that this reader runs in
     finished       process.FinishedFunc // Tells the process manager we're finished and it can remove the associated process from the process table
+    bufferedReader *bufio.Reader
 }
 
 var shaLineRegex = regexp.MustCompile("^([a-z0-9]{40})")
@@ -38,8 +38,6 @@ var shaLineRegex = regexp.MustCompile("^([a-z0-9]{40})")
 func (r *BlameReader) NextPart() (*BlamePart, error) {
     var blamePart *BlamePart
 
-    reader := r.reader
-
     if r.lastSha != nil {
         blamePart = &BlamePart{*r.lastSha, make([]string, 0)}
     }
@@ -49,7 +47,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
     var err error
 
     for err != io.EOF {
-        line, isPrefix, err = reader.ReadLine()
+        line, isPrefix, err = r.bufferedReader.ReadLine()
         if err != nil && err != io.EOF {
             return blamePart, err
         }
@@ -71,7 +69,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
             r.lastSha = &sha1
             // need to munch to end of line...
             for isPrefix {
-                _, isPrefix, err = reader.ReadLine()
+                _, isPrefix, err = r.bufferedReader.ReadLine()
                 if err != nil && err != io.EOF {
                     return blamePart, err
                 }
@@ -86,7 +84,7 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
 
             // need to munch to end of line...
             for isPrefix {
-                _, isPrefix, err = reader.ReadLine()
+                _, isPrefix, err = r.bufferedReader.ReadLine()
                 if err != nil && err != io.EOF {
                     return blamePart, err
                 }
@@ -102,9 +100,9 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
 func (r *BlameReader) Close() error {
     defer r.finished() // Only remove the process from the process table when the underlying command is closed
     r.cancel()         // However, first cancel our own context early
+    r.bufferedReader = nil
 
-    _ = r.output.Close()
+    _ = r.reader.Close()
 
     if err := r.cmd.Wait(); err != nil {
         return fmt.Errorf("Wait: %w", err)
     }
@@ -126,25 +124,27 @@ func createBlameReader(ctx context.Context, dir string, command ...string) (*Bla
     cmd.Stderr = os.Stderr
     process.SetSysProcAttribute(cmd)
 
-    stdout, err := cmd.StdoutPipe()
+    reader, stdout, err := os.Pipe()
     if err != nil {
         defer finished()
         return nil, fmt.Errorf("StdoutPipe: %w", err)
     }
+    cmd.Stdout = stdout
 
     if err = cmd.Start(); err != nil {
         defer finished()
         _ = stdout.Close()
         return nil, fmt.Errorf("Start: %w", err)
     }
+    _ = stdout.Close()
 
-    reader := bufio.NewReader(stdout)
+    bufferedReader := bufio.NewReader(reader)
 
     return &BlameReader{
         cmd:            cmd,
-        output:         stdout,
         reader:         reader,
         cancel:         cancel,
         finished:       finished,
+        bufferedReader: bufferedReader,
     }, nil
 }
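The switch from cmd.StdoutPipe() to an explicit os.Pipe() lets the parent close its copy of the write end right after Start, so the read end reaches EOF once the child exits even though the parent keeps holding the reader. A stripped-down, standalone sketch of the same pattern (the command and file path are placeholders, not taken from this changeset):

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "os/exec"
    )

    func main() {
        r, w, err := os.Pipe()
        if err != nil {
            panic(err)
        }
        cmd := exec.Command("git", "blame", "--porcelain", "README.md") // placeholder command
        cmd.Stdout = w
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        _ = w.Close() // drop the parent's write end; only the child keeps it open now
        br := bufio.NewReader(r)
        for {
            line, err := br.ReadString('\n')
            fmt.Print(line)
            if err != nil { // io.EOF once the child closes its stdout and exits
                break
            }
        }
        _ = r.Close()
        _ = cmd.Wait()
    }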
@@ -65,7 +65,7 @@ summary Add code of delete user
 previous be0ba9ea88aff8a658d0495d36accf944b74888d gogs.go
 filename gogs.go
 // license that can be found in the LICENSE file.
-
+` + `
 e2aa991e10ffd924a828ec149951f2f20eecead2 6 6 2
 author Lunny Xiao
 author-mail <xiaolunwen@gmail.com>
@@ -112,9 +112,7 @@ func TestReadingBlameOutput(t *testing.T) {
         },
         {
             "ce21ed6c3490cdfad797319cbb1145e2330a8fef",
-            []string{
-                "// Copyright 2016 The Gitea Authors. All rights reserved.",
-            },
+            []string{"// Copyright 2016 The Gitea Authors. All rights reserved."},
         },
         {
             "4b92a6c2df28054ad766bc262f308db9f6066596",
@@ -132,7 +132,7 @@ func CommitChangesWithArgs(repoPath string, args []CmdArg, opts CommitChangesOpt
     if opts.Author != nil {
         cmd.AddArguments(CmdArg(fmt.Sprintf("--author='%s <%s>'", opts.Author.Name, opts.Author.Email)))
     }
-    cmd.AddArguments("-m").AddDynamicArguments(opts.Message)
+    cmd.AddArguments(CmdArg("--message=" + opts.Message))
 
     _, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath})
     // No stderr but exit status 1 means nothing to commit.
@@ -164,10 +164,8 @@ func CloneWithArgs(ctx context.Context, args []CmdArg, from, to string, opts Clo
 
     envs := os.Environ()
     u, err := url.Parse(from)
-    if err == nil && (strings.EqualFold(u.Scheme, "http") || strings.EqualFold(u.Scheme, "https")) {
-        if proxy.Match(u.Host) {
-            envs = append(envs, fmt.Sprintf("https_proxy=%s", proxy.GetProxyURL()))
-        }
+    if err == nil {
+        envs = proxy.EnvWithProxy(u)
     }
 
     stderr := new(bytes.Buffer)
@@ -5,7 +5,6 @@
 package lfs
 
 import (
-    "fmt"
     "net/url"
     "os"
     "path"
@@ -13,6 +12,7 @@ import (
     "strings"
 
     "code.gitea.io/gitea/modules/log"
+    "code.gitea.io/gitea/modules/util"
 )
 
 // DetermineEndpoint determines an endpoint from the clone url or uses the specified LFS url.
@@ -96,7 +96,7 @@ func endpointFromLocalPath(path string) *url.URL {
         return nil
     }
 
-    path = fmt.Sprintf("file://%s%s", slash, filepath.ToSlash(path))
+    path = "file://" + slash + util.PathEscapeSegments(filepath.ToSlash(path))
 
     u, _ := url.Parse(path)
 
@@ -384,6 +384,13 @@ func (cv *ColoredValue) Format(s fmt.State, c rune) {
         s.Write(*cv.resetBytes)
     }
 
+// ColorFormatAsString returns the result of the ColorFormat without the color
+func ColorFormatAsString(colorVal ColorFormatted) string {
+    s := new(strings.Builder)
+    _, _ = ColorFprintf(&protectedANSIWriter{w: s, mode: removeColor}, "%-v", colorVal)
+    return s.String()
+}
+
 // SetColorBytes will allow a user to set the colorBytes of a colored value
 func (cv *ColoredValue) SetColorBytes(colorBytes []byte) {
     cv.colorBytes = &colorBytes
@@ -10,6 +10,8 @@ import (
|
|||||||
"runtime"
|
"runtime"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
|
"code.gitea.io/gitea/modules/process"
|
||||||
)
|
)
|
||||||
|
|
||||||
type loggerMap struct {
|
type loggerMap struct {
|
||||||
@@ -286,6 +288,15 @@ func (l *LoggerAsWriter) Log(msg string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
process.Trace = func(start bool, pid process.IDType, description string, parentPID process.IDType, typ string) {
|
||||||
|
if start && parentPID != "" {
|
||||||
|
Log(1, TRACE, "Start %s: %s (from %s) (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(parentPID, FgYellow), NewColoredValue(typ, Reset))
|
||||||
|
} else if start {
|
||||||
|
Log(1, TRACE, "Start %s: %s (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(typ, Reset))
|
||||||
|
} else {
|
||||||
|
Log(1, TRACE, "Done %s: %s", NewColoredValue(pid, FgHiYellow), NewColoredValue(description, Reset))
|
||||||
|
}
|
||||||
|
}
|
||||||
_, filename, _, _ := runtime.Caller(0)
|
_, filename, _, _ := runtime.Caller(0)
|
||||||
prefix = strings.TrimSuffix(filename, "modules/log/log.go")
|
prefix = strings.TrimSuffix(filename, "modules/log/log.go")
|
||||||
if prefix == filename {
|
if prefix == filename {
|
||||||
|
|||||||
@@ -358,12 +358,19 @@ func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output
 }

 func visitNode(ctx *RenderContext, procs, textProcs []processor, node *html.Node) {
-	// Add user-content- to IDs if they don't already have them
+	// Add user-content- to IDs and "#" links if they don't already have them
 	for idx, attr := range node.Attr {
-		if attr.Key == "id" && !(strings.HasPrefix(attr.Val, "user-content-") || blackfridayExtRegex.MatchString(attr.Val)) {
+		val := strings.TrimPrefix(attr.Val, "#")
+		notHasPrefix := !(strings.HasPrefix(val, "user-content-") || blackfridayExtRegex.MatchString(val))
+
+		if attr.Key == "id" && notHasPrefix {
 			node.Attr[idx].Val = "user-content-" + attr.Val
 		}
+
+		if attr.Key == "href" && strings.HasPrefix(attr.Val, "#") && notHasPrefix {
+			node.Attr[idx].Val = "#user-content-" + val
+		}

 		if attr.Key == "class" && attr.Val == "emoji" {
 			textProcs = nil
 		}
@@ -9,8 +9,7 @@ import "time"

 // Commentable can be commented upon
 type Commentable interface {
-	GetLocalIndex() int64
-	GetForeignIndex() int64
+	Reviewable
 	GetContext() DownloaderContext
 }

@@ -36,5 +36,14 @@ func (issue *Issue) GetExternalName() string { return issue.PosterName }
 func (issue *Issue) GetExternalID() int64 { return issue.PosterID }

 func (issue *Issue) GetLocalIndex() int64 { return issue.Number }
-func (issue *Issue) GetForeignIndex() int64 { return issue.ForeignIndex }
+
+func (issue *Issue) GetForeignIndex() int64 {
+	// see the comment of Reviewable.GetForeignIndex
+	// if there is no ForeignIndex, then use LocalIndex
+	if issue.ForeignIndex == 0 {
+		return issue.Number
+	}
+	return issue.ForeignIndex
+}
+
 func (issue *Issue) GetContext() DownloaderContext { return issue.Context }
@@ -9,6 +9,16 @@ import "time"
 // Reviewable can be reviewed
 type Reviewable interface {
 	GetLocalIndex() int64
+
+	// GetForeignIndex presents the foreign index, which could be misused:
+	// For example, if there are 2 Gitea sites: site-A exports a dataset, then site-B imports it:
+	// * if site-A exports files by using its LocalIndex
+	// * from site-A's view, LocalIndex is site-A's IssueIndex while ForeignIndex is site-B's IssueIndex
+	// * but from site-B's view, LocalIndex is site-B's IssueIndex while ForeignIndex is site-A's IssueIndex
+	//
+	// So the exporting/importing must be paired, but the meaning of them looks confusing then:
+	// * either site-A and site-B both use LocalIndex during dumping/restoring
+	// * or site-A and site-B both use ForeignIndex
 	GetForeignIndex() int64
 }

@@ -38,7 +48,7 @@ type Review struct {
 // GetExternalName ExternalUserMigrated interface
 func (r *Review) GetExternalName() string { return r.ReviewerName }

-// ExternalID ExternalUserMigrated interface
+// GetExternalID ExternalUserMigrated interface
 func (r *Review) GetExternalID() int64 { return r.ReviewerID }

 // ReviewComment represents a review comment
@@ -96,6 +96,7 @@ func (ns *notificationService) NotifyIssueChangeStatus(doer *user_model.User, is
 	_ = ns.issueQueue.Push(issueNotificationOpts{
 		IssueID:              issue.ID,
 		NotificationAuthorID: doer.ID,
+		CommentID:            actionComment.ID,
 	})
 }

@@ -11,8 +11,9 @@ import (

 	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/packages/container/helm"
-	"code.gitea.io/gitea/modules/packages/container/oci"
 	"code.gitea.io/gitea/modules/validation"
+
+	oci "github.com/opencontainers/image-spec/specs-go/v1"
 )

 const (
@@ -66,8 +67,8 @@ type Metadata struct {
 }

 // ParseImageConfig parses the metadata of an image config
-func ParseImageConfig(mediaType oci.MediaType, r io.Reader) (*Metadata, error) {
-	if strings.EqualFold(string(mediaType), helm.ConfigMediaType) {
+func ParseImageConfig(mt string, r io.Reader) (*Metadata, error) {
+	if strings.EqualFold(mt, helm.ConfigMediaType) {
 		return parseHelmConfig(r)
 	}

@@ -9,8 +9,8 @@ import (
 	"testing"

 	"code.gitea.io/gitea/modules/packages/container/helm"
-	"code.gitea.io/gitea/modules/packages/container/oci"

+	oci "github.com/opencontainers/image-spec/specs-go/v1"
 	"github.com/stretchr/testify/assert"
 )

@@ -24,7 +24,7 @@ func TestParseImageConfig(t *testing.T) {

 	configOCI := `{"config": {"labels": {"` + labelAuthors + `": "` + author + `", "` + labelLicenses + `": "` + license + `", "` + labelURL + `": "` + projectURL + `", "` + labelSource + `": "` + repositoryURL + `", "` + labelDocumentation + `": "` + documentationURL + `", "` + labelDescription + `": "` + description + `"}}, "history": [{"created_by": "do it 1"}, {"created_by": "dummy #(nop) do it 2"}]}`

-	metadata, err := ParseImageConfig(oci.MediaType(oci.MediaTypeImageManifest), strings.NewReader(configOCI))
+	metadata, err := ParseImageConfig(oci.MediaTypeImageManifest, strings.NewReader(configOCI))
 	assert.NoError(t, err)

 	assert.Equal(t, TypeOCI, metadata.Type)
@@ -51,7 +51,7 @@ func TestParseImageConfig(t *testing.T) {

 	configHelm := `{"description":"` + description + `", "home": "` + projectURL + `", "sources": ["` + repositoryURL + `"], "maintainers":[{"name":"` + author + `"}]}`

-	metadata, err = ParseImageConfig(oci.MediaType(helm.ConfigMediaType), strings.NewReader(configHelm))
+	metadata, err = ParseImageConfig(helm.ConfigMediaType, strings.NewReader(configHelm))
 	assert.NoError(t, err)

 	assert.Equal(t, TypeHelm, metadata.Type)
@@ -1,27 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-
-package oci
-
-import (
-	"regexp"
-	"strings"
-)
-
-var digestPattern = regexp.MustCompile(`\Asha256:[a-f0-9]{64}\z`)
-
-type Digest string
-
-// Validate checks if the digest has a valid SHA256 signature
-func (d Digest) Validate() bool {
-	return digestPattern.MatchString(string(d))
-}
-
-func (d Digest) Hash() string {
-	p := strings.SplitN(string(d), ":", 2)
-	if len(p) != 2 {
-		return ""
-	}
-	return p[1]
-}
@@ -1,36 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-
-package oci
-
-import (
-	"strings"
-)
-
-const (
-	MediaTypeImageManifest      = "application/vnd.oci.image.manifest.v1+json"
-	MediaTypeImageIndex         = "application/vnd.oci.image.index.v1+json"
-	MediaTypeDockerManifest     = "application/vnd.docker.distribution.manifest.v2+json"
-	MediaTypeDockerManifestList = "application/vnd.docker.distribution.manifest.list.v2+json"
-)
-
-type MediaType string
-
-// IsValid tests if the media type is in the OCI or Docker namespace
-func (m MediaType) IsValid() bool {
-	s := string(m)
-	return strings.HasPrefix(s, "application/vnd.docker.") || strings.HasPrefix(s, "application/vnd.oci.")
-}
-
-// IsImageManifest tests if the media type is an image manifest
-func (m MediaType) IsImageManifest() bool {
-	s := string(m)
-	return strings.EqualFold(s, MediaTypeDockerManifest) || strings.EqualFold(s, MediaTypeImageManifest)
-}
-
-// IsImageIndex tests if the media type is an image index
-func (m MediaType) IsImageIndex() bool {
-	s := string(m)
-	return strings.EqualFold(s, MediaTypeDockerManifestList) || strings.EqualFold(s, MediaTypeImageIndex)
-}
@@ -1,191 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-
-package oci
-
-import (
-	"time"
-)
-
-// https://github.com/opencontainers/image-spec/tree/main/specs-go/v1
-
-// ImageConfig defines the execution parameters which should be used as a base when running a container using an image.
-type ImageConfig struct {
-	// User defines the username or UID which the process in the container should run as.
-	User string `json:"User,omitempty"`
-
-	// ExposedPorts a set of ports to expose from a container running this image.
-	ExposedPorts map[string]struct{} `json:"ExposedPorts,omitempty"`
-
-	// Env is a list of environment variables to be used in a container.
-	Env []string `json:"Env,omitempty"`
-
-	// Entrypoint defines a list of arguments to use as the command to execute when the container starts.
-	Entrypoint []string `json:"Entrypoint,omitempty"`
-
-	// Cmd defines the default arguments to the entrypoint of the container.
-	Cmd []string `json:"Cmd,omitempty"`
-
-	// Volumes is a set of directories describing where the process is likely write data specific to a container instance.
-	Volumes map[string]struct{} `json:"Volumes,omitempty"`
-
-	// WorkingDir sets the current working directory of the entrypoint process in the container.
-	WorkingDir string `json:"WorkingDir,omitempty"`
-
-	// Labels contains arbitrary metadata for the container.
-	Labels map[string]string `json:"Labels,omitempty"`
-
-	// StopSignal contains the system call signal that will be sent to the container to exit.
-	StopSignal string `json:"StopSignal,omitempty"`
-}
-
-// RootFS describes a layer content addresses
-type RootFS struct {
-	// Type is the type of the rootfs.
-	Type string `json:"type"`
-
-	// DiffIDs is an array of layer content hashes, in order from bottom-most to top-most.
-	DiffIDs []string `json:"diff_ids"`
-}
-
-// History describes the history of a layer.
-type History struct {
-	// Created is the combined date and time at which the layer was created, formatted as defined by RFC 3339, section 5.6.
-	Created *time.Time `json:"created,omitempty"`
-
-	// CreatedBy is the command which created the layer.
-	CreatedBy string `json:"created_by,omitempty"`
-
-	// Author is the author of the build point.
-	Author string `json:"author,omitempty"`
-
-	// Comment is a custom message set when creating the layer.
-	Comment string `json:"comment,omitempty"`
-
-	// EmptyLayer is used to mark if the history item created a filesystem diff.
-	EmptyLayer bool `json:"empty_layer,omitempty"`
-}
-
-// Image is the JSON structure which describes some basic information about the image.
-// This provides the `application/vnd.oci.image.config.v1+json` mediatype when marshalled to JSON.
-type Image struct {
-	// Created is the combined date and time at which the image was created, formatted as defined by RFC 3339, section 5.6.
-	Created *time.Time `json:"created,omitempty"`
-
-	// Author defines the name and/or email address of the person or entity which created and is responsible for maintaining the image.
-	Author string `json:"author,omitempty"`
-
-	// Architecture is the CPU architecture which the binaries in this image are built to run on.
-	Architecture string `json:"architecture"`
-
-	// Variant is the variant of the specified CPU architecture which image binaries are intended to run on.
-	Variant string `json:"variant,omitempty"`
-
-	// OS is the name of the operating system which the image is built to run on.
-	OS string `json:"os"`
-
-	// OSVersion is an optional field specifying the operating system
-	// version, for example on Windows `10.0.14393.1066`.
-	OSVersion string `json:"os.version,omitempty"`
-
-	// OSFeatures is an optional field specifying an array of strings,
-	// each listing a required OS feature (for example on Windows `win32k`).
-	OSFeatures []string `json:"os.features,omitempty"`
-
-	// Config defines the execution parameters which should be used as a base when running a container using the image.
-	Config ImageConfig `json:"config,omitempty"`
-
-	// RootFS references the layer content addresses used by the image.
-	RootFS RootFS `json:"rootfs"`
-
-	// History describes the history of each layer.
-	History []History `json:"history,omitempty"`
-}
-
-// Descriptor describes the disposition of targeted content.
-// This structure provides `application/vnd.oci.descriptor.v1+json` mediatype
-// when marshalled to JSON.
-type Descriptor struct {
-	// MediaType is the media type of the object this schema refers to.
-	MediaType MediaType `json:"mediaType,omitempty"`
-
-	// Digest is the digest of the targeted content.
-	Digest Digest `json:"digest"`
-
-	// Size specifies the size in bytes of the blob.
-	Size int64 `json:"size"`
-
-	// URLs specifies a list of URLs from which this object MAY be downloaded
-	URLs []string `json:"urls,omitempty"`
-
-	// Annotations contains arbitrary metadata relating to the targeted content.
-	Annotations map[string]string `json:"annotations,omitempty"`
-
-	// Data is an embedding of the targeted content. This is encoded as a base64
-	// string when marshalled to JSON (automatically, by encoding/json). If
-	// present, Data can be used directly to avoid fetching the targeted content.
-	Data []byte `json:"data,omitempty"`
-
-	// Platform describes the platform which the image in the manifest runs on.
-	//
-	// This should only be used when referring to a manifest.
-	Platform *Platform `json:"platform,omitempty"`
-}
-
-// Platform describes the platform which the image in the manifest runs on.
-type Platform struct {
-	// Architecture field specifies the CPU architecture, for example
-	// `amd64` or `ppc64`.
-	Architecture string `json:"architecture"`
-
-	// OS specifies the operating system, for example `linux` or `windows`.
-	OS string `json:"os"`
-
-	// OSVersion is an optional field specifying the operating system
-	// version, for example on Windows `10.0.14393.1066`.
-	OSVersion string `json:"os.version,omitempty"`
-
-	// OSFeatures is an optional field specifying an array of strings,
-	// each listing a required OS feature (for example on Windows `win32k`).
-	OSFeatures []string `json:"os.features,omitempty"`
-
-	// Variant is an optional field specifying a variant of the CPU, for
-	// example `v7` to specify ARMv7 when architecture is `arm`.
-	Variant string `json:"variant,omitempty"`
-}
-
-type SchemaMediaBase struct {
-	// SchemaVersion is the image manifest schema that this image follows
-	SchemaVersion int `json:"schemaVersion"`
-
-	// MediaType specifies the type of this document data structure e.g. `application/vnd.oci.image.manifest.v1+json`
-	MediaType MediaType `json:"mediaType,omitempty"`
-}
-
-// Manifest provides `application/vnd.oci.image.manifest.v1+json` mediatype structure when marshalled to JSON.
-type Manifest struct {
-	SchemaMediaBase
-
-	// Config references a configuration object for a container, by digest.
-	// The referenced configuration object is a JSON blob that the runtime uses to set up the container.
-	Config Descriptor `json:"config"`
-
-	// Layers is an indexed list of layers referenced by the manifest.
-	Layers []Descriptor `json:"layers"`
-
-	// Annotations contains arbitrary metadata for the image manifest.
-	Annotations map[string]string `json:"annotations,omitempty"`
-}
-
-// Index references manifests for various platforms.
-// This structure provides `application/vnd.oci.image.index.v1+json` mediatype when marshalled to JSON.
-type Index struct {
-	SchemaMediaBase
-
-	// Manifests references platform specific manifests.
-	Manifests []Descriptor `json:"manifests"`
-
-	// Annotations contains arbitrary metadata for the image index.
-	Annotations map[string]string `json:"annotations,omitempty"`
-}
@@ -1,17 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-
-package oci
-
-import (
-	"regexp"
-)
-
-var referencePattern = regexp.MustCompile(`\A[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}\z`)
-
-type Reference string
-
-func (r Reference) Validate() bool {
-	return referencePattern.MatchString(string(r))
-}
@@ -7,6 +7,7 @@ package process

 import (
 	"context"
+	"log"
 	"runtime/pprof"
 	"strconv"
 	"sync"
@@ -44,6 +45,18 @@ type IDType string
 // - it is simply an alias for context.CancelFunc and is only for documentary purposes
 type FinishedFunc = context.CancelFunc

+var Trace = defaultTrace // this global can be overridden by particular logging packages - thus avoiding import cycles
+
+func defaultTrace(start bool, pid IDType, description string, parentPID IDType, typ string) {
+	if start && parentPID != "" {
+		log.Printf("start process %s: %s (from %s) (%s)", pid, description, parentPID, typ)
+	} else if start {
+		log.Printf("start process %s: %s (%s)", pid, description, typ)
+	} else {
+		log.Printf("end process %s: %s", pid, description)
+	}
+}
+
 // Manager manages all processes and counts PIDs.
 type Manager struct {
 	mutex sync.Mutex
@@ -155,6 +168,7 @@ func (pm *Manager) Add(ctx context.Context, description string, cancel context.C

 	pm.processMap[pid] = process
 	pm.mutex.Unlock()
+	Trace(true, pid, description, parentPID, processType)

 	pprofCtx := pprof.WithLabels(ctx, pprof.Labels(DescriptionPProfLabel, description, PPIDPProfLabel, string(parentPID), PIDPProfLabel, string(pid), ProcessTypePProfLabel, processType))
 	if currentlyRunning {
@@ -186,18 +200,12 @@ func (pm *Manager) nextPID() (start time.Time, pid IDType) {
 	return start, pid
 }

-// Remove a process from the ProcessManager.
-func (pm *Manager) Remove(pid IDType) {
-	pm.mutex.Lock()
-	delete(pm.processMap, pid)
-	pm.mutex.Unlock()
-}
-
 func (pm *Manager) remove(process *process) {
 	pm.mutex.Lock()
 	defer pm.mutex.Unlock()
 	if p := pm.processMap[process.PID]; p == process {
 		delete(pm.processMap, process.PID)
+		Trace(false, process.PID, process.Description, process.ParentPID, process.Type)
 	}
 }

@@ -83,7 +83,7 @@ func TestManager_Remove(t *testing.T) {

 	assert.NotEqual(t, GetContext(p1Ctx).GetPID(), GetContext(p2Ctx).GetPID(), "expected to get different pids got %s == %s", GetContext(p2Ctx).GetPID(), GetContext(p1Ctx).GetPID())

-	pm.Remove(GetPID(p2Ctx))
+	finished()

 	_, exists := pm.processMap[GetPID(p2Ctx)]
 	assert.False(t, exists, "PID %d is in the list but shouldn't", GetPID(p2Ctx))
@@ -8,6 +8,7 @@ import (
 	"net/http"
 	"net/url"
 	"os"
+	"strings"
 	"sync"

 	"code.gitea.io/gitea/modules/log"
@@ -83,3 +84,16 @@ func Proxy() func(req *http.Request) (*url.URL, error) {
 		return http.ProxyFromEnvironment(req)
 	}
 }
+
+// EnvWithProxy returns os.Environ(), with a https_proxy env, if the given url
+// needs to be proxied.
+func EnvWithProxy(u *url.URL) []string {
+	envs := os.Environ()
+	if strings.EqualFold(u.Scheme, "http") || strings.EqualFold(u.Scheme, "https") {
+		if Match(u.Host) {
+			envs = append(envs, "https_proxy="+GetProxyURL())
+		}
+	}
+
+	return envs
+}
@@ -319,7 +319,7 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi

 	cmd := git.NewCommand(ctx,
 		"commit", git.CmdArg(fmt.Sprintf("--author='%s <%s>'", sig.Name, sig.Email)),
-		"-m", "Initial commit",
+		"--message=Initial commit",
 	)

 	sign, keyID, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u)
@@ -21,6 +21,7 @@ import (
 	"text/template"
 	"time"

+	"code.gitea.io/gitea/modules/auth/password/hash"
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/generate"
 	"code.gitea.io/gitea/modules/json"
@@ -964,7 +965,14 @@ func loadFromConf(allowEmpty bool, extraConfig string) {
 	DisableGitHooks = sec.Key("DISABLE_GIT_HOOKS").MustBool(true)
 	DisableWebhooks = sec.Key("DISABLE_WEBHOOKS").MustBool(false)
 	OnlyAllowPushIfGiteaEnvironmentSet = sec.Key("ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET").MustBool(true)
-	PasswordHashAlgo = sec.Key("PASSWORD_HASH_ALGO").MustString("pbkdf2")
+
+	// Ensure that the provided default hash algorithm is a valid hash algorithm
+	var algorithm *hash.PasswordHashAlgorithm
+	PasswordHashAlgo, algorithm = hash.SetDefaultPasswordHashAlgorithm(sec.Key("PASSWORD_HASH_ALGO").MustString(""))
+	if algorithm == nil {
+		log.Fatal("The provided password hash algorithm was invalid: %s", sec.Key("PASSWORD_HASH_ALGO").MustString(""))
+	}
+
 	CSRFCookieHTTPOnly = sec.Key("CSRF_COOKIE_HTTP_ONLY").MustBool(true)
 	PasswordCheckPwn = sec.Key("PASSWORD_CHECK_PWN").MustBool(false)
 	SuccessfulTokensCacheSize = sec.Key("SUCCESSFUL_TOKENS_CACHE_SIZE").MustInt(20)
@@ -5,6 +5,7 @@
|
|||||||
package util
|
package util
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
"io"
|
"io"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -18,3 +19,24 @@ func ReadAtMost(r io.Reader, buf []byte) (n int, err error) {
|
|||||||
}
|
}
|
||||||
return n, err
|
return n, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ErrNotEmpty is an error reported when there is a non-empty reader
|
||||||
|
var ErrNotEmpty = errors.New("not-empty")
|
||||||
|
|
||||||
|
// IsEmptyReader reads a reader and ensures it is empty
|
||||||
|
func IsEmptyReader(r io.Reader) (err error) {
|
||||||
|
var buf [1]byte
|
||||||
|
|
||||||
|
for {
|
||||||
|
n, err := r.Read(buf[:])
|
||||||
|
if err != nil {
|
||||||
|
if err == io.EOF {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if n > 0 {
|
||||||
|
return ErrNotEmpty
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -23,19 +23,23 @@ import (
 	"code.gitea.io/gitea/modules/log"
 	packages_module "code.gitea.io/gitea/modules/packages"
 	container_module "code.gitea.io/gitea/modules/packages/container"
-	"code.gitea.io/gitea/modules/packages/container/oci"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/util"
 	"code.gitea.io/gitea/routers/api/packages/helper"
 	packages_service "code.gitea.io/gitea/services/packages"
 	container_service "code.gitea.io/gitea/services/packages/container"
+
+	digest "github.com/opencontainers/go-digest"
 )

 // maximum size of a container manifest
 // https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-manifests
 const maxManifestSize = 10 * 1024 * 1024

-var imageNamePattern = regexp.MustCompile(`\A[a-z0-9]+([._-][a-z0-9]+)*(/[a-z0-9]+([._-][a-z0-9]+)*)*\z`)
+var (
+	imageNamePattern = regexp.MustCompile(`\A[a-z0-9]+([._-][a-z0-9]+)*(/[a-z0-9]+([._-][a-z0-9]+)*)*\z`)
+	referencePattern = regexp.MustCompile(`\A[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}\z`)
+)

 type containerHeaders struct {
 	Status int
@@ -407,16 +411,16 @@ func CancelUploadBlob(ctx *context.Context) {
 }

 func getBlobFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) {
-	digest := ctx.Params("digest")
+	d := ctx.Params("digest")

-	if !oci.Digest(digest).Validate() {
+	if digest.Digest(d).Validate() != nil {
 		return nil, container_model.ErrContainerBlobNotExist
 	}

 	return workaroundGetContainerBlob(ctx, &container_model.BlobSearchOptions{
 		OwnerID: ctx.Package.Owner.ID,
 		Image:   ctx.Params("image"),
-		Digest:  digest,
+		Digest:  d,
 	})
 }

@@ -471,14 +475,14 @@ func GetBlob(ctx *context.Context) {

 // https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-blobs
 func DeleteBlob(ctx *context.Context) {
-	digest := ctx.Params("digest")
+	d := ctx.Params("digest")

-	if !oci.Digest(digest).Validate() {
+	if digest.Digest(d).Validate() != nil {
 		apiErrorDefined(ctx, errBlobUnknown)
 		return
 	}

-	if err := deleteBlob(ctx.Package.Owner.ID, ctx.Params("image"), digest); err != nil {
+	if err := deleteBlob(ctx.Package.Owner.ID, ctx.Params("image"), d); err != nil {
 		apiError(ctx, http.StatusInternalServerError, err)
 		return
 	}
@@ -493,15 +497,15 @@ func UploadManifest(ctx *context.Context) {
 	reference := ctx.Params("reference")

 	mci := &manifestCreationInfo{
-		MediaType: oci.MediaType(ctx.Req.Header.Get("Content-Type")),
+		MediaType: ctx.Req.Header.Get("Content-Type"),
 		Owner:     ctx.Package.Owner,
 		Creator:   ctx.Doer,
 		Image:     ctx.Params("image"),
 		Reference: reference,
-		IsTagged:  !oci.Digest(reference).Validate(),
+		IsTagged:  digest.Digest(reference).Validate() != nil,
 	}

-	if mci.IsTagged && !oci.Reference(reference).Validate() {
+	if mci.IsTagged && !referencePattern.MatchString(reference) {
 		apiErrorDefined(ctx, errManifestInvalid.WithMessage("Tag is invalid"))
 		return
 	}
@@ -539,7 +543,7 @@ func UploadManifest(ctx *context.Context) {
 	})
 }

-func getManifestFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) {
+func getBlobSearchOptionsFromContext(ctx *context.Context) (*container_model.BlobSearchOptions, error) {
 	reference := ctx.Params("reference")

 	opts := &container_model.BlobSearchOptions{
@@ -547,14 +551,24 @@ func getManifestFromContext(ctx *context.Context) (*packages_model.PackageFileDe
 		Image:      ctx.Params("image"),
 		IsManifest: true,
 	}
-	if oci.Digest(reference).Validate() {
+
+	if digest.Digest(reference).Validate() == nil {
 		opts.Digest = reference
-	} else if oci.Reference(reference).Validate() {
+	} else if referencePattern.MatchString(reference) {
 		opts.Tag = reference
 	} else {
 		return nil, container_model.ErrContainerBlobNotExist
 	}

+	return opts, nil
+}
+
+func getManifestFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) {
+	opts, err := getBlobSearchOptionsFromContext(ctx)
+	if err != nil {
+		return nil, err
+	}
+
 	return workaroundGetContainerBlob(ctx, opts)
 }

@@ -611,18 +625,8 @@ func GetManifest(ctx *context.Context) {
 // https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-tags
 // https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-manifests
 func DeleteManifest(ctx *context.Context) {
-	reference := ctx.Params("reference")
-
-	opts := &container_model.BlobSearchOptions{
-		OwnerID:    ctx.Package.Owner.ID,
-		Image:      ctx.Params("image"),
-		IsManifest: true,
-	}
-	if oci.Digest(reference).Validate() {
-		opts.Digest = reference
-	} else if oci.Reference(reference).Validate() {
-		opts.Tag = reference
-	} else {
+	opts, err := getBlobSearchOptionsFromContext(ctx)
+	if err != nil {
 		apiErrorDefined(ctx, errManifestUnknown)
 		return
 	}
@@ -18,16 +18,31 @@ import (
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/notification"
 	packages_module "code.gitea.io/gitea/modules/packages"
 	container_module "code.gitea.io/gitea/modules/packages/container"
-	"code.gitea.io/gitea/modules/packages/container/oci"
 	"code.gitea.io/gitea/modules/util"
 	packages_service "code.gitea.io/gitea/services/packages"
+
+	digest "github.com/opencontainers/go-digest"
+	oci "github.com/opencontainers/image-spec/specs-go/v1"
 )

+func isValidMediaType(mt string) bool {
+	return strings.HasPrefix(mt, "application/vnd.docker.") || strings.HasPrefix(mt, "application/vnd.oci.")
+}
+
+func isImageManifestMediaType(mt string) bool {
+	return strings.EqualFold(mt, oci.MediaTypeImageManifest) || strings.EqualFold(mt, "application/vnd.docker.distribution.manifest.v2+json")
+}
+
+func isImageIndexMediaType(mt string) bool {
+	return strings.EqualFold(mt, oci.MediaTypeImageIndex) || strings.EqualFold(mt, "application/vnd.docker.distribution.manifest.list.v2+json")
+}
+
 // manifestCreationInfo describes a manifest to create
 type manifestCreationInfo struct {
-	MediaType oci.MediaType
+	MediaType string
 	Owner     *user_model.User
 	Creator   *user_model.User
 	Image     string
@@ -37,12 +52,12 @@ type manifestCreationInfo struct {
 }

 func processManifest(mci *manifestCreationInfo, buf *packages_module.HashedBuffer) (string, error) {
-	var schema oci.SchemaMediaBase
-	if err := json.NewDecoder(buf).Decode(&schema); err != nil {
+	var index oci.Index
+	if err := json.NewDecoder(buf).Decode(&index); err != nil {
 		return "", err
 	}

-	if schema.SchemaVersion != 2 {
+	if index.SchemaVersion != 2 {
 		return "", errUnsupported.WithMessage("Schema version is not supported")
 	}

@@ -50,19 +65,17 @@ func processManifest(mci *manifestCreationInfo, buf *packages_module.HashedBuffe
 		return "", err
 	}

-	if !mci.MediaType.IsValid() {
-		mci.MediaType = schema.MediaType
-		if !mci.MediaType.IsValid() {
+	if !isValidMediaType(mci.MediaType) {
+		mci.MediaType = index.MediaType
+		if !isValidMediaType(mci.MediaType) {
 			return "", errManifestInvalid.WithMessage("MediaType not recognized")
 		}
 	}

-	if mci.MediaType.IsImageManifest() {
-		d, err := processImageManifest(mci, buf)
-		return d, err
-	} else if mci.MediaType.IsImageIndex() {
-		d, err := processImageManifestIndex(mci, buf)
-		return d, err
+	if isImageManifestMediaType(mci.MediaType) {
+		return processImageManifest(mci, buf)
+	} else if isImageIndexMediaType(mci.MediaType) {
+		return processImageManifestIndex(mci, buf)
 	}
 	return "", errManifestInvalid
 }
@@ -169,6 +182,10 @@ func processImageManifest(mci *manifestCreationInfo, buf *packages_module.Hashed
 			return err
 		}

+		if err := notifyPackageCreate(mci.Creator, pv); err != nil {
+			return err
+		}
+
 		manifestDigest = digest

 		return nil
@@ -205,7 +222,7 @@ func processImageManifestIndex(mci *manifestCreationInfo, buf *packages_module.H
 		}

 		for _, manifest := range index.Manifests {
-			if !manifest.MediaType.IsImageManifest() {
+			if !isImageManifestMediaType(manifest.MediaType) {
 				return errManifestInvalid
 			}

@@ -258,6 +275,10 @@ func processImageManifestIndex(mci *manifestCreationInfo, buf *packages_module.H
 			return err
 		}

+		if err := notifyPackageCreate(mci.Creator, pv); err != nil {
+			return err
+		}
+
 		manifestDigest = digest

 		return nil
@@ -269,6 +290,17 @@ func processImageManifestIndex(mci *manifestCreationInfo, buf *packages_module.H
 	return manifestDigest, nil
 }

+func notifyPackageCreate(doer *user_model.User, pv *packages_model.PackageVersion) error {
+	pd, err := packages_model.GetPackageDescriptor(db.DefaultContext, pv)
+	if err != nil {
+		return err
+	}
+
+	notification.NotifyPackageCreate(doer, pd)
+
+	return nil
+}
+
 func createPackageAndVersion(ctx context.Context, mci *manifestCreationInfo, metadata *container_module.Metadata) (*packages_model.PackageVersion, error) {
 	created := true
 	p := &packages_model.Package{
@@ -345,8 +377,8 @@ func createPackageAndVersion(ctx context.Context, mci *manifestCreationInfo, met
 }

 type blobReference struct {
-	Digest       oci.Digest
-	MediaType    oci.MediaType
+	Digest       digest.Digest
+	MediaType    string
 	Name         string
 	File         *packages_model.PackageFileDescriptor
 	ExpectedSize int64
@@ -380,7 +412,7 @@ func createFileFromBlobReference(ctx context.Context, pv, uploadVersion *package
 	}

 	props := map[string]string{
-		container_module.PropertyMediaType: string(ref.MediaType),
+		container_module.PropertyMediaType: ref.MediaType,
 		container_module.PropertyDigest:    string(ref.Digest),
 	}
 	for name, value := range props {
@@ -425,7 +457,7 @@ func createManifestBlob(ctx context.Context, mci *manifestCreationInfo, pv *pack

 	manifestDigest := digestFromHashSummer(buf)
 	err = createFileFromBlobReference(ctx, pv, nil, &blobReference{
-		Digest:    oci.Digest(manifestDigest),
+		Digest:    digest.Digest(manifestDigest),
 		MediaType: mci.MediaType,
 		Name:      container_model.ManifestFilename,
 		File:      &packages_model.PackageFileDescriptor{Blob: pb},
@@ -22,8 +22,10 @@ import (
 )

 // https://peps.python.org/pep-0426/#name
-var normalizer = strings.NewReplacer(".", "-", "_", "-")
-var nameMatcher = regexp.MustCompile(`\A(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\.\-_]*[a-zA-Z0-9])\z`)
+var (
+	normalizer  = strings.NewReplacer(".", "-", "_", "-")
+	nameMatcher = regexp.MustCompile(`\A(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\.\-_]*[a-zA-Z0-9])\z`)
+)

 // https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
 var versionMatcher = regexp.MustCompile(`\Av?` +
@@ -16,10 +16,10 @@ import (
 	"code.gitea.io/gitea/models/auth"
 	"code.gitea.io/gitea/models/db"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/auth/password"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/convert"
 	"code.gitea.io/gitea/modules/log"
-	"code.gitea.io/gitea/modules/password"
 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/util"
@@ -767,11 +767,18 @@ func MergePullRequest(ctx *context.APIContext) {
 		}
 	}

-	manuallMerge := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
-	force := form.ForceMerge != nil && *form.ForceMerge
+	manuallyMerged := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
+
+	mergeCheckType := pull_service.MergeCheckTypeGeneral
+	if form.MergeWhenChecksSucceed {
+		mergeCheckType = pull_service.MergeCheckTypeAuto
+	}
+	if manuallyMerged {
+		mergeCheckType = pull_service.MergeCheckTypeManually
+	}

 	// start with merging by checking
-	if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, manuallMerge, force); err != nil {
+	if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, mergeCheckType, form.ForceMerge); err != nil {
 		if errors.Is(err, pull_service.ErrIsClosed) {
 			ctx.NotFound()
 		} else if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) {
@@ -793,7 +800,7 @@ func MergePullRequest(ctx *context.APIContext) {
 	}

 	// handle manually-merged mark
-	if manuallMerge {
+	if manuallyMerged {
 		if err := pull_service.MergedManually(pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil {
 			if models.IsErrInvalidMergeStyle(err) {
 				ctx.Error(http.StatusMethodNotAllowed, "Invalid merge style", fmt.Errorf("%s is not allowed an allowed merge style for this repository", repo_model.MergeStyle(form.Do)))
@@ -21,6 +21,7 @@ import (
 	"code.gitea.io/gitea/models/migrations"
 	system_model "code.gitea.io/gitea/models/system"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/auth/password/hash"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/generate"
@@ -80,7 +81,7 @@ func Init(ctx goctx.Context) func(next http.Handler) http.Handler {
 			"AllLangs":      translation.AllLangs(),
 			"PageStartTime": startTime,

-			"PasswordHashAlgorithms": user_model.AvailableHashAlgorithms,
+			"PasswordHashAlgorithms": hash.RecommendedHashAlgorithms,
 		},
 	}
 	defer ctx.Close()
@@ -174,13 +174,6 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
 		return
 	}

-	if !repo.AllowsPulls() {
-		// We can stop there's no need to go any further
-		ctx.JSON(http.StatusOK, private.HookPostReceiveResult{
-			RepoWasEmpty: wasEmpty,
-		})
-		return
-	}
 	baseRepo = repo

 	if repo.IsFork {
@@ -192,10 +185,20 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
 			})
 			return
 		}
+		if repo.BaseRepo.AllowsPulls() {
 			baseRepo = repo.BaseRepo
 		}
 	}
+
+	if !baseRepo.AllowsPulls() {
+		// We can stop there's no need to go any further
+		ctx.JSON(http.StatusOK, private.HookPostReceiveResult{
+			RepoWasEmpty: wasEmpty,
+		})
+		return
+	}

 	// If our branch is the default branch of an unforked repo - there's no PR to create or refer to
 	if !repo.IsFork && branch == baseRepo.DefaultBranch {
 		results = append(results, private.HookPostReceiveBranchResult{})
@@ -218,14 +221,14 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
 			branch = fmt.Sprintf("%s:%s", repo.OwnerName, branch)
 		}
 		results = append(results, private.HookPostReceiveBranchResult{
-			Message: setting.Git.PullRequestPushMessage && repo.AllowsPulls(),
+			Message: setting.Git.PullRequestPushMessage && baseRepo.AllowsPulls(),
 			Create:  true,
 			Branch:  branch,
 			URL:     fmt.Sprintf("%s/compare/%s...%s", baseRepo.HTMLURL(), util.PathEscapeSegments(baseRepo.DefaultBranch), util.PathEscapeSegments(branch)),
 		})
 	} else {
 		results = append(results, private.HookPostReceiveBranchResult{
-			Message: setting.Git.PullRequestPushMessage && repo.AllowsPulls(),
+			Message: setting.Git.PullRequestPushMessage && baseRepo.AllowsPulls(),
 			Create:  false,
 			Branch:  branch,
 			URL:     fmt.Sprintf("%s/pulls/%d", baseRepo.HTMLURL(), pr.Index),
@@ -81,7 +81,7 @@ func SendEmail(ctx *context.PrivateContext) {

 func sendEmail(ctx *context.PrivateContext, subject, message string, to []string) {
 	for _, email := range to {
-		msg := mailer.NewMessage([]string{email}, subject, message)
+		msg := mailer.NewMessage(email, subject, message)
 		mailer.SendAsync(msg)
 	}

@@ -6,9 +6,11 @@
|
|||||||
package admin
|
package admin
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
"os"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
system_model "code.gitea.io/gitea/models/system"
|
system_model "code.gitea.io/gitea/models/system"
|
||||||
@@ -202,6 +204,16 @@ func ChangeConfig(ctx *context.Context) {
|
|||||||
value := ctx.FormString("value")
|
value := ctx.FormString("value")
|
||||||
version := ctx.FormInt("version")
|
version := ctx.FormInt("version")
|
||||||
|
|
||||||
|
if check, ok := changeConfigChecks[key]; ok {
|
||||||
|
if err := check(ctx, value); err != nil {
|
||||||
|
log.Warn("refused to set setting: %v", err)
|
||||||
|
ctx.JSON(http.StatusOK, map[string]string{
|
||||||
|
"err": ctx.Tr("admin.config.set_setting_failed", key),
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if err := system_model.SetSetting(&system_model.Setting{
|
if err := system_model.SetSetting(&system_model.Setting{
|
||||||
SettingKey: key,
|
SettingKey: key,
|
||||||
SettingValue: value,
|
SettingValue: value,
|
||||||
@@ -218,3 +230,18 @@ func ChangeConfig(ctx *context.Context) {
 		"version": version + 1,
 	})
 }
+
+var changeConfigChecks = map[string]func(ctx *context.Context, newValue string) error{
+	system_model.KeyPictureDisableGravatar: func(_ *context.Context, newValue string) error {
+		if v, _ := strconv.ParseBool(newValue); setting.OfflineMode && !v {
+			return fmt.Errorf("%q should be true when OFFLINE_MODE is true", system_model.KeyPictureDisableGravatar)
+		}
+		return nil
+	},
+	system_model.KeyPictureEnableFederatedAvatar: func(_ *context.Context, newValue string) error {
+		if v, _ := strconv.ParseBool(newValue); setting.OfflineMode && v {
+			return fmt.Errorf("%q cannot be true when OFFLINE_MODE is true", system_model.KeyPictureEnableFederatedAvatar)
+		}
+		return nil
+	},
+}

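The hunk above registers per-key validation that runs before an admin setting is persisted. A minimal sketch of the same pattern under simplified names: the key string, the offlineMode flag and setSetting below are stand-ins for system_model.KeyPictureDisableGravatar, setting.OfflineMode and system_model.SetSetting, not the real API:

package main

import (
    "fmt"
    "strconv"
)

// offlineMode stands in for setting.OfflineMode.
var offlineMode = true

// checks maps a setting key to a validator consulted before the value is saved.
var checks = map[string]func(newValue string) error{
    "picture.disable_gravatar": func(newValue string) error {
        if v, _ := strconv.ParseBool(newValue); offlineMode && !v {
            return fmt.Errorf("%q should be true when OFFLINE_MODE is true", "picture.disable_gravatar")
        }
        return nil
    },
}

func setSetting(key, value string) error {
    if check, ok := checks[key]; ok {
        if err := check(value); err != nil {
            return err // refuse to persist an inconsistent value
        }
    }
    // ... persist the setting here ...
    return nil
}

func main() {
    fmt.Println(setSetting("picture.disable_gravatar", "false")) // rejected while offline
    fmt.Println(setSetting("picture.disable_gravatar", "true"))  // accepted
}
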
@@ -15,10 +15,10 @@ import (
 	"code.gitea.io/gitea/models/auth"
 	"code.gitea.io/gitea/models/db"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/auth/password"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/log"
-	"code.gitea.io/gitea/modules/password"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/util"
 	"code.gitea.io/gitea/modules/web"

@@ -14,13 +14,13 @@ import (
 	"code.gitea.io/gitea/models/auth"
 	"code.gitea.io/gitea/models/db"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/auth/password"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/eventsource"
 	"code.gitea.io/gitea/modules/hcaptcha"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/mcaptcha"
-	"code.gitea.io/gitea/modules/password"
 	"code.gitea.io/gitea/modules/recaptcha"
 	"code.gitea.io/gitea/modules/session"
 	"code.gitea.io/gitea/modules/setting"

@@ -10,10 +10,10 @@ import (
 
 	"code.gitea.io/gitea/models/auth"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/auth/password"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/log"
-	"code.gitea.io/gitea/modules/password"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/timeutil"
 	"code.gitea.io/gitea/modules/web"

@@ -43,8 +43,8 @@ const (
 )
 
 // setCompareContext sets context data.
-func setCompareContext(ctx *context.Context, base, head *git.Commit, headOwner, headName string) {
-	ctx.Data["BaseCommit"] = base
+func setCompareContext(ctx *context.Context, before, head *git.Commit, headOwner, headName string) {
+	ctx.Data["BeforeCommit"] = before
 	ctx.Data["HeadCommit"] = head
 
 	ctx.Data["GetBlobByPathForCommit"] = func(commit *git.Commit, path string) *git.Blob {

@@ -59,7 +59,7 @@ func setCompareContext(ctx *context.Context, base, head *git.Commit, headOwner,
 		return blob
 	}
 
-	setPathsCompareContext(ctx, base, head, headOwner, headName)
+	setPathsCompareContext(ctx, before, head, headOwner, headName)
 	setImageCompareContext(ctx)
 	setCsvCompareContext(ctx)
 }

@@ -629,9 +629,8 @@ func PrepareCompareDiff(
 	}
 
 	baseGitRepo := ctx.Repo.GitRepo
-	baseCommitID := ci.CompareInfo.BaseCommitID
 
-	baseCommit, err := baseGitRepo.GetCommit(baseCommitID)
+	beforeCommit, err := baseGitRepo.GetCommit(beforeCommitID)
 	if err != nil {
 		ctx.ServerError("GetCommit", err)
 		return false

@@ -668,7 +667,7 @@ func PrepareCompareDiff(
 	ctx.Data["Username"] = ci.HeadUser.Name
 	ctx.Data["Reponame"] = ci.HeadRepo.Name
 
-	setCompareContext(ctx, baseCommit, headCommit, ci.HeadUser.Name, repo.Name)
+	setCompareContext(ctx, beforeCommit, headCommit, ci.HeadUser.Name, repo.Name)
 
 	return false
 }

@@ -786,7 +786,8 @@ func setTemplateIfExists(ctx *context.Context, ctxDataKey string, possibleFiles
 		}
 
 	}
-	if !strings.HasPrefix(template.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref>
+
+	if template.Ref != "" && !strings.HasPrefix(template.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref>
 		template.Ref = git.BranchPrefix + template.Ref
 	}
 	ctx.Data["HasSelectedLabel"] = len(labelIDs) > 0

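The hunk above stops an empty issue-template Ref from being turned into a bare branch prefix. A small sketch of the guard in isolation, assuming git.BranchPrefix is "refs/heads/"; normalizeTemplateRef is an illustrative helper, not a function from the patch:

package main

import (
    "fmt"
    "strings"
)

// branchPrefix mirrors git.BranchPrefix used in the hunk above.
const branchPrefix = "refs/heads/"

// normalizeTemplateRef keeps an empty ref empty and expands bare branch names,
// while leaving fully qualified refs (refs/...) untouched.
func normalizeTemplateRef(ref string) string {
    if ref != "" && !strings.HasPrefix(ref, "refs/") {
        ref = branchPrefix + ref
    }
    return ref
}

func main() {
    fmt.Printf("%q\n", normalizeTemplateRef(""))              // ""
    fmt.Printf("%q\n", normalizeTemplateRef("main"))          // "refs/heads/main"
    fmt.Printf("%q\n", normalizeTemplateRef("refs/tags/v1"))  // unchanged
}
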
@@ -1117,7 +1118,11 @@ func NewIssuePost(ctx *context.Context) {
 }
 
 // roleDescriptor returns the Role Descriptor for a comment in/with the given repo, poster and issue
-func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *user_model.User, issue *issues_model.Issue) (issues_model.RoleDescriptor, error) {
+func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *user_model.User, issue *issues_model.Issue, hasOriginalAuthor bool) (issues_model.RoleDescriptor, error) {
+	if hasOriginalAuthor {
+		return issues_model.RoleDescriptorNone, nil
+	}
+
 	perm, err := access_model.GetUserRepoPermission(ctx, repo, poster)
 	if err != nil {
 		return issues_model.RoleDescriptorNone, err

@@ -1419,7 +1424,7 @@ func ViewIssue(ctx *context.Context) {
 	// check if dependencies can be created across repositories
 	ctx.Data["AllowCrossRepositoryDependencies"] = setting.Service.AllowCrossRepositoryDependencies
 
-	if issue.ShowRole, err = roleDescriptor(ctx, repo, issue.Poster, issue); err != nil {
+	if issue.ShowRole, err = roleDescriptor(ctx, repo, issue.Poster, issue, issue.HasOriginalAuthor()); err != nil {
 		ctx.ServerError("roleDescriptor", err)
 		return
 	}

@@ -1458,7 +1463,7 @@ func ViewIssue(ctx *context.Context) {
 			continue
 		}
 
-		comment.ShowRole, err = roleDescriptor(ctx, repo, comment.Poster, issue)
+		comment.ShowRole, err = roleDescriptor(ctx, repo, comment.Poster, issue, comment.HasOriginalAuthor())
 		if err != nil {
 			ctx.ServerError("roleDescriptor", err)
 			return

@@ -1557,7 +1562,7 @@ func ViewIssue(ctx *context.Context) {
 			continue
 		}
 
-		c.ShowRole, err = roleDescriptor(ctx, repo, c.Poster, issue)
+		c.ShowRole, err = roleDescriptor(ctx, repo, c.Poster, issue, c.HasOriginalAuthor())
 		if err != nil {
 			ctx.ServerError("roleDescriptor", err)
 			return

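The hunks above thread a hasOriginalAuthor flag into roleDescriptor so that comments imported from another forge skip the repository-permission lookup and show no role badge. A simplified, self-contained sketch of that short-circuit; Comment, RoleDescriptor and HasOriginalAuthor here are cut-down stand-ins for the issues_model types:

package main

import "fmt"

// RoleDescriptor is a simplified stand-in for issues_model.RoleDescriptor.
type RoleDescriptor int

const (
    RoleDescriptorNone RoleDescriptor = iota
    RoleDescriptorPoster
)

// Comment keeps only the fields needed for the sketch; OriginalAuthor is assumed to be
// non-empty for comments imported from another forge.
type Comment struct {
    OriginalAuthor string
    PosterIsOP     bool
}

func (c *Comment) HasOriginalAuthor() bool { return c.OriginalAuthor != "" }

// roleDescriptor sketches the added short-circuit: imported comments get no role badge,
// so no permission lookup is performed for their mapped poster.
func roleDescriptor(c *Comment) RoleDescriptor {
    if c.HasOriginalAuthor() {
        return RoleDescriptorNone
    }
    if c.PosterIsOP {
        return RoleDescriptorPoster
    }
    return RoleDescriptorNone
}

func main() {
    fmt.Println(roleDescriptor(&Comment{OriginalAuthor: "someone@other-forge"})) // 0 (none)
    fmt.Println(roleDescriptor(&Comment{PosterIsOP: true}))                      // 1 (poster)
}
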
@@ -912,60 +912,63 @@ func MergePullRequest(ctx *context.Context) {
 	pr := issue.PullRequest
 	pr.Issue = issue
 	pr.Issue.Repo = ctx.Repo.Repository
-	manuallMerge := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
-	forceMerge := form.ForceMerge != nil && *form.ForceMerge
+	manuallyMerged := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
+
+	mergeCheckType := pull_service.MergeCheckTypeGeneral
+	if form.MergeWhenChecksSucceed {
+		mergeCheckType = pull_service.MergeCheckTypeAuto
+	}
+	if manuallyMerged {
+		mergeCheckType = pull_service.MergeCheckTypeManually
+	}
 
 	// start with merging by checking
-	if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, manuallMerge, forceMerge); err != nil {
-		if errors.Is(err, pull_service.ErrIsClosed) {
+	if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, mergeCheckType, form.ForceMerge); err != nil {
+		switch {
+		case errors.Is(err, pull_service.ErrIsClosed):
 			if issue.IsPull {
 				ctx.Flash.Error(ctx.Tr("repo.pulls.is_closed"))
-				ctx.Redirect(issue.Link())
 			} else {
 				ctx.Flash.Error(ctx.Tr("repo.issues.closed_title"))
-				ctx.Redirect(issue.Link())
 			}
-		} else if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) {
+		case errors.Is(err, pull_service.ErrUserNotAllowedToMerge):
 			ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed"))
-			ctx.Redirect(issue.Link())
-		} else if errors.Is(err, pull_service.ErrHasMerged) {
+		case errors.Is(err, pull_service.ErrHasMerged):
 			ctx.Flash.Error(ctx.Tr("repo.pulls.has_merged"))
-			ctx.Redirect(issue.Link())
-		} else if errors.Is(err, pull_service.ErrIsWorkInProgress) {
+		case errors.Is(err, pull_service.ErrIsWorkInProgress):
 			ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_wip"))
-			ctx.Redirect(issue.Link())
-		} else if errors.Is(err, pull_service.ErrNotMergableState) {
+		case errors.Is(err, pull_service.ErrNotMergableState):
 			ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready"))
-			ctx.Redirect(issue.Link())
-		} else if models.IsErrDisallowedToMerge(err) {
+		case models.IsErrDisallowedToMerge(err):
 			ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready"))
-			ctx.Redirect(issue.Link())
-		} else if asymkey_service.IsErrWontSign(err) {
-			ctx.Flash.Error(err.Error()) // has not translation ...
-			ctx.Redirect(issue.Link())
-		} else if errors.Is(err, pull_service.ErrDependenciesLeft) {
+		case asymkey_service.IsErrWontSign(err):
+			ctx.Flash.Error(err.Error()) // has no translation ...
+		case errors.Is(err, pull_service.ErrDependenciesLeft):
 			ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
-			ctx.Redirect(issue.Link())
-		} else {
+		default:
 			ctx.ServerError("WebCheck", err)
+			return
 		}
 
+		ctx.Redirect(issue.Link())
 		return
 	}
 
 	// handle manually-merged mark
-	if manuallMerge {
+	if manuallyMerged {
 		if err := pull_service.MergedManually(pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil {
-			if models.IsErrInvalidMergeStyle(err) {
+			switch {
+			case models.IsErrInvalidMergeStyle(err):
 				ctx.Flash.Error(ctx.Tr("repo.pulls.invalid_merge_option"))
-				ctx.Redirect(issue.Link())
-			} else if strings.Contains(err.Error(), "Wrong commit ID") {
+			case strings.Contains(err.Error(), "Wrong commit ID"):
 				ctx.Flash.Error(ctx.Tr("repo.pulls.wrong_commit_id"))
-				ctx.Redirect(issue.Link())
-			} else {
+			default:
 				ctx.ServerError("MergedManually", err)
-			}
 				return
 			}
+		}
 
 		ctx.Redirect(issue.Link())
 		return

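The hunk above replaces the pair of booleans passed to CheckPullMergable with a single merge-check type derived from the submitted form. A sketch of that selection logic with illustrative constants; MergeCheckType, its values and the "manually-merged" string are assumptions standing in for pull_service's constants and repo_model.MergeStyleManuallyMerged:

package main

import "fmt"

// MergeCheckType mirrors the names used in the hunk above; the numeric values are illustrative.
type MergeCheckType int

const (
    MergeCheckTypeGeneral  MergeCheckType = iota // regular merge from the web UI
    MergeCheckTypeManually                       // mark as manually merged
    MergeCheckTypeAuto                           // merge when all checks succeed
)

// mergeCheckTypeFor derives the check type from the form, instead of threading two
// loose booleans through the call.
func mergeCheckTypeFor(doStyle string, mergeWhenChecksSucceed bool) MergeCheckType {
    manuallyMerged := doStyle == "manually-merged"
    mergeCheckType := MergeCheckTypeGeneral
    if mergeWhenChecksSucceed {
        mergeCheckType = MergeCheckTypeAuto
    }
    if manuallyMerged {
        mergeCheckType = MergeCheckTypeManually
    }
    return mergeCheckType
}

func main() {
    fmt.Println(mergeCheckTypeFor("merge", false))           // 0: general
    fmt.Println(mergeCheckTypeFor("merge", true))            // 2: auto
    fmt.Println(mergeCheckTypeFor("manually-merged", false)) // 1: manual
}
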
@@ -1381,7 +1384,7 @@ func CleanUpPullRequest(ctx *context.Context) {
 }
 
 func deleteBranch(ctx *context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) {
-	fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch
+	fullBranchName := pr.HeadRepo.FullName() + ":" + pr.HeadBranch
 	if err := repo_service.DeleteBranch(ctx.Doer, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil {
 		switch {
 		case git.IsErrBranchNotExist(err):

@@ -12,10 +12,10 @@ import (
 
 	"code.gitea.io/gitea/models"
 	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/auth/password"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/log"
-	"code.gitea.io/gitea/modules/password"
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/timeutil"
 	"code.gitea.io/gitea/modules/web"

@@ -165,7 +165,7 @@ func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.
 
 func handlePull(pullID int64, sha string) {
 	ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(),
-		fmt.Sprintf("Handle AutoMerge of pull[%d] with sha[%s]", pullID, sha))
+		fmt.Sprintf("Handle AutoMerge of PR[%d] with sha[%s]", pullID, sha))
 	defer finished()
 
 	pr, err := issues_model.GetPullRequestByID(ctx, pullID)

@@ -177,7 +177,7 @@ func handlePull(pullID int64, sha string) {
 	// Check if there is a scheduled pr in the db
 	exists, scheduledPRM, err := pull_model.GetScheduledMergeByPullID(ctx, pr.ID)
 	if err != nil {
-		log.Error("pull[%d] GetScheduledMergeByPullID: %v", pr.ID, err)
+		log.Error("%-v GetScheduledMergeByPullID: %v", pr, err)
 		return
 	}
 	if !exists {

@@ -189,13 +189,13 @@ func handlePull(pullID int64, sha string) {
 	// did not succeed or was not finished yet.
 
 	if err = pr.LoadHeadRepoCtx(ctx); err != nil {
-		log.Error("pull[%d] LoadHeadRepoCtx: %v", pr.ID, err)
+		log.Error("%-v LoadHeadRepo: %v", pr, err)
 		return
 	}
 
 	headGitRepo, err := git.OpenRepository(ctx, pr.HeadRepo.RepoPath())
 	if err != nil {
-		log.Error("OpenRepository: %v", err)
+		log.Error("OpenRepository %-v: %v", pr.HeadRepo, err)
 		return
 	}
 	defer headGitRepo.Close()

@@ -203,40 +203,40 @@ func handlePull(pullID int64, sha string) {
 	headBranchExist := headGitRepo.IsBranchExist(pr.HeadBranch)
 
 	if pr.HeadRepo == nil || !headBranchExist {
-		log.Warn("Head branch of auto merge pr does not exist [HeadRepoID: %d, Branch: %s, PR ID: %d]", pr.HeadRepoID, pr.HeadBranch, pr.ID)
+		log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch: %s]", pr, pr.HeadRepoID, pr.HeadBranch)
 		return
 	}
 
 	// Check if all checks succeeded
 	pass, err := pull_service.IsPullCommitStatusPass(ctx, pr)
 	if err != nil {
-		log.Error("IsPullCommitStatusPass: %v", err)
+		log.Error("%-v IsPullCommitStatusPass: %v", pr, err)
 		return
 	}
 	if !pass {
-		log.Info("Scheduled auto merge pr has unsuccessful status checks [PullID: %d]", pr.ID)
+		log.Info("Scheduled auto merge %-v has unsuccessful status checks", pr)
 		return
 	}
 
 	// Merge if all checks succeeded
 	doer, err := user_model.GetUserByIDCtx(ctx, scheduledPRM.DoerID)
 	if err != nil {
-		log.Error("GetUserByIDCtx: %v", err)
+		log.Error("Unable to get scheduled User[%d]: %v", scheduledPRM.DoerID, err)
 		return
 	}
 
 	perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
 	if err != nil {
-		log.Error("GetUserRepoPermission: %v", err)
+		log.Error("GetUserRepoPermission %-v: %v", pr.HeadRepo, err)
 		return
 	}
 
-	if err := pull_service.CheckPullMergable(ctx, doer, &perm, pr, false, false); err != nil {
+	if err := pull_service.CheckPullMergable(ctx, doer, &perm, pr, pull_service.MergeCheckTypeGeneral, false); err != nil {
 		if errors.Is(pull_service.ErrUserNotAllowedToMerge, err) {
-			log.Info("PR %d was scheduled to automerge by an unauthorized user", pr.ID)
+			log.Info("%-v was scheduled to automerge by an unauthorized user", pr)
 			return
 		}
-		log.Error("pull[%d] CheckPullMergable: %v", pr.ID, err)
+		log.Error("%-v CheckPullMergable: %v", pr, err)
 		return
 	}
 

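The logging hunks above pass the pull request (or repository) value itself to the logger instead of a bare ID, so the message carries a readable identifier. A sketch of the idea using only the standard library; the PullRequest type, its String method and the log.Printf call are stand-ins for Gitea's models and log module:

package main

import (
    "fmt"
    "log"
)

// PullRequest is a stand-in for the model whose value is now logged directly.
type PullRequest struct {
    ID       int64
    BaseRepo string
    Index    int64
}

// String gives the value a compact, human-readable form, so "%v" (or "%-v") in a log
// call prints something like PR[12]org/base-repo#3 instead of a bare numeric ID.
func (pr *PullRequest) String() string {
    return fmt.Sprintf("PR[%d]%s#%d", pr.ID, pr.BaseRepo, pr.Index)
}

func main() {
    pr := &PullRequest{ID: 12, BaseRepo: "org/base-repo", Index: 3}
    log.Printf("%-v CheckPullMergable: %v", pr, fmt.Errorf("user not allowed"))
}
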
@@ -245,13 +245,13 @@ func handlePull(pullID int64, sha string) {
 		baseGitRepo = headGitRepo
 	} else {
 		if err = pr.LoadBaseRepoCtx(ctx); err != nil {
-			log.Error("LoadBaseRepoCtx: %v", err)
+			log.Error("%-v LoadBaseRepo: %v", pr, err)
 			return
 		}
 
 		baseGitRepo, err = git.OpenRepository(ctx, pr.BaseRepo.RepoPath())
 		if err != nil {
-			log.Error("OpenRepository: %v", err)
+			log.Error("OpenRepository %-v: %v", pr.BaseRepo, err)
 			return
 		}
 		defer baseGitRepo.Close()

@@ -597,7 +597,7 @@ type MergePullRequestForm struct {
 	MergeMessageField      string
 	MergeCommitID          string // only used for manually-merged
 	HeadCommitID           string `json:"head_commit_id,omitempty"`
-	ForceMerge             *bool  `json:"force_merge,omitempty"`
+	ForceMerge             bool   `json:"force_merge,omitempty"`
 	MergeWhenChecksSucceed bool   `json:"merge_when_checks_succeed,omitempty"`
 	DeleteBranchAfterMerge bool   `json:"delete_branch_after_merge,omitempty"`
 }

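The hunk above changes ForceMerge from *bool to bool, matching how the handler now reads it directly. A small sketch of the JSON binding this relies on; mergeForm is a cut-down stand-in for MergePullRequestForm:

package main

import (
    "encoding/json"
    "fmt"
)

// mergeForm keeps only the fields relevant to the sketch. With a plain bool, an absent
// "force_merge" field simply decodes to false, so no nil check
// (form.ForceMerge != nil && *form.ForceMerge) is needed any more.
type mergeForm struct {
    ForceMerge             bool `json:"force_merge,omitempty"`
    MergeWhenChecksSucceed bool `json:"merge_when_checks_succeed,omitempty"`
}

func main() {
    var a, b mergeForm
    _ = json.Unmarshal([]byte(`{"force_merge": true}`), &a)
    _ = json.Unmarshal([]byte(`{}`), &b)
    fmt.Println(a.ForceMerge, b.ForceMerge) // true false
}
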
@@ -19,6 +19,7 @@ import (
 	repo_model "code.gitea.io/gitea/models/repo"
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/container"
+	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/references"
 	"code.gitea.io/gitea/modules/repository"

@@ -176,7 +177,8 @@ func UpdateIssuesCommit(doer *user_model.User, repo *repo_model.Repository, comm
 		if !repo.CloseIssuesViaCommitInAnyBranch {
 			// If the issue was specified to be in a particular branch, don't allow commits in other branches to close it
 			if refIssue.Ref != "" {
-				if branchName != refIssue.Ref {
+				issueBranchName := strings.TrimPrefix(refIssue.Ref, git.BranchPrefix)
+				if branchName != issueBranchName {
 					continue
 				}
 				// Otherwise, only process commits to the default branch

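The hunk above trims git.BranchPrefix from the issue's stored ref before comparing it with the pushed branch, so refs saved as full ref names can still close issues via commits. A minimal sketch of that comparison, assuming the prefix is "refs/heads/" and with commitClosesIssue as an illustrative helper:

package main

import (
    "fmt"
    "strings"
)

// branchPrefix mirrors git.BranchPrefix from the hunk above.
const branchPrefix = "refs/heads/"

// commitClosesIssue compares the pushed branch with the issue's ref after trimming the
// full-ref prefix, so "refs/heads/main" and "main" are treated as the same branch.
func commitClosesIssue(pushedBranch, issueRef string) bool {
    issueBranchName := strings.TrimPrefix(issueRef, branchPrefix)
    return pushedBranch == issueBranchName
}

func main() {
    fmt.Println(commitClosesIssue("main", "refs/heads/main")) // true (previously false)
    fmt.Println(commitClosesIssue("main", "main"))            // true
    fmt.Println(commitClosesIssue("dev", "refs/heads/main"))  // false
}
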
@@ -61,7 +61,7 @@ func SendTestMail(email string) error {
 		// No mail service configured
 		return nil
 	}
-	return gomail.Send(Sender, NewMessage([]string{email}, "Gitea Test Email!", "Gitea Test Email!").ToMessage())
+	return gomail.Send(Sender, NewMessage(email, "Gitea Test Email!", "Gitea Test Email!").ToMessage())
 }
 
 // sendUserMail sends a mail to the user

@@ -86,7 +86,7 @@ func sendUserMail(language string, u *user_model.User, tpl base.TplName, code, s
 		return
 	}
 
-	msg := NewMessage([]string{u.Email}, subject, content.String())
+	msg := NewMessage(u.Email, subject, content.String())
 	msg.Info = fmt.Sprintf("UID: %d, %s", u.ID, info)
 
 	SendAsync(msg)

@@ -137,7 +137,7 @@ func SendActivateEmailMail(u *user_model.User, email *user_model.EmailAddress) {
 		return
 	}
 
-	msg := NewMessage([]string{email.Email}, locale.Tr("mail.activate_email"), content.String())
+	msg := NewMessage(email.Email, locale.Tr("mail.activate_email"), content.String())
 	msg.Info = fmt.Sprintf("UID: %d, activate email", u.ID)
 
 	SendAsync(msg)

@@ -168,7 +168,7 @@ func SendRegisterNotifyMail(u *user_model.User) {
 		return
 	}
 
-	msg := NewMessage([]string{u.Email}, locale.Tr("mail.register_notify"), content.String())
+	msg := NewMessage(u.Email, locale.Tr("mail.register_notify"), content.String())
 	msg.Info = fmt.Sprintf("UID: %d, registration notify", u.ID)
 
 	SendAsync(msg)

@@ -202,7 +202,7 @@ func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository)
 		return
 	}
 
-	msg := NewMessage([]string{u.Email}, subject, content.String())
+	msg := NewMessage(u.Email, subject, content.String())
 	msg.Info = fmt.Sprintf("UID: %d, add collaborator", u.ID)
 
 	SendAsync(msg)

@@ -306,7 +306,7 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipient
 
 	msgs := make([]*Message, 0, len(recipients))
 	for _, recipient := range recipients {
-		msg := NewMessageFrom([]string{recipient.Email}, ctx.Doer.DisplayName(), setting.MailService.FromEmail, subject, mailBody.String())
+		msg := NewMessageFrom(recipient.Email, ctx.Doer.DisplayName(), setting.MailService.FromEmail, subject, mailBody.String())
 		msg.Info = fmt.Sprintf("Subject: %s, %s", subject, info)
 
 		msg.SetHeader("Message-ID", "<"+msgID+">")

@@ -90,7 +90,7 @@ func mailNewRelease(ctx context.Context, lang string, tos []string, rel *repo_mo
 	publisherName := rel.Publisher.DisplayName()
 	relURL := "<" + rel.HTMLURL() + ">"
 	for _, to := range tos {
-		msg := NewMessageFrom([]string{to}, publisherName, setting.MailService.FromEmail, subject, mailBody.String())
+		msg := NewMessageFrom(to, publisherName, setting.MailService.FromEmail, subject, mailBody.String())
 		msg.Info = subject
 		msg.SetHeader("Message-ID", relURL)
 		msgs = append(msgs, msg)

@@ -83,9 +83,12 @@ func sendRepoTransferNotifyMailPerLang(lang string, newOwner, doer *user_model.U
 		return err
 	}
 
-	msg := NewMessage(emails, subject, content.String())
+	for _, to := range emails {
+		msg := NewMessage(to, subject, content.String())
 		msg.Info = fmt.Sprintf("UID: %d, repository pending transfer notification", newOwner.ID)
 
 		SendAsync(msg)
+	}
 
 	return nil
 }

@@ -53,7 +53,7 @@ func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_mod
 		return err
 	}
 
-	msg := NewMessage([]string{invite.Email}, subject, mailBody.String())
+	msg := NewMessage(invite.Email, subject, mailBody.String())
 	msg.Info = subject
 
 	SendAsync(msg)

@@ -36,7 +36,7 @@ type Message struct {
 	Info            string // Message information for log purpose.
 	FromAddress     string
 	FromDisplayName string
-	To              []string
+	To              string // Use only one recipient to prevent leaking of addresses
 	Subject         string
 	Date            time.Time
 	Body            string

@@ -47,7 +47,7 @@ type Message struct {
 func (m *Message) ToMessage() *gomail.Message {
 	msg := gomail.NewMessage()
 	msg.SetAddressHeader("From", m.FromAddress, m.FromDisplayName)
-	msg.SetHeader("To", m.To...)
+	msg.SetHeader("To", m.To)
 	for header := range m.Headers {
 		msg.SetHeader(header, m.Headers[header]...)
 	}

@@ -86,7 +86,7 @@ func (m *Message) generateAutoMessageID() string {
 	dateMs := m.Date.UnixNano() / 1e6
 	h := fnv.New64()
 	if len(m.To) > 0 {
-		_, _ = h.Write([]byte(m.To[0]))
+		_, _ = h.Write([]byte(m.To))
 	}
 	_, _ = h.Write([]byte(m.Subject))
 	_, _ = h.Write([]byte(m.Body))

@@ -94,7 +94,7 @@ func (m *Message) generateAutoMessageID() string {
 }
 
 // NewMessageFrom creates new mail message object with custom From header.
-func NewMessageFrom(to []string, fromDisplayName, fromAddress, subject, body string) *Message {
+func NewMessageFrom(to, fromDisplayName, fromAddress, subject, body string) *Message {
 	log.Trace("NewMessageFrom (body):\n%s", body)
 
 	return &Message{

@@ -109,7 +109,7 @@ func NewMessageFrom(to []string, fromDisplayName, fromAddress, subject, body str
 }
 
 // NewMessage creates new mail message object with default From header.
-func NewMessage(to []string, subject, body string) *Message {
+func NewMessage(to, subject, body string) *Message {
 	return NewMessageFrom(to, setting.MailService.FromName, setting.MailService.FromEmail, subject, body)
 }
 

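The mailer hunks above converge on a single-recipient Message: NewMessage and NewMessageFrom now take one address, and callers that used to pass a slice loop and queue one message per recipient, so addresses cannot leak to each other through the To header. A self-contained sketch of that shape; Message, NewMessage and sendAsync below are simplified stand-ins for the mailer package:

package main

import "fmt"

// Message carries exactly one recipient, mirroring the struct change above.
type Message struct {
    To      string
    Subject string
    Body    string
}

func NewMessage(to, subject, body string) *Message {
    return &Message{To: to, Subject: subject, Body: body}
}

// sendAsync stands in for mailer.SendAsync; here it just prints the envelope.
func sendAsync(msg *Message) {
    fmt.Printf("queue mail to=%s subject=%q\n", msg.To, msg.Subject)
}

func main() {
    // Callers that used to pass a []string now queue one message per address,
    // as sendRepoTransferNotifyMailPerLang and sendEmail do above.
    recipients := []string{"a@example.com", "b@example.com"}
    for _, to := range recipients {
        sendAsync(NewMessage(to, "Repository transfer pending", "..."))
    }
}
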
Some files were not shown because too many files have changed in this diff.