Mirror of https://github.com/go-gitea/gitea.git (synced 2025-11-03 08:02:36 +09:00)

Compare commits: 110 commits, v1.11.1...release/v1
Commits (SHA1): 34fe3d390b, ce51c2bdf6, 7937f1463a, dbe9c11238, 313ace93d0, f79a2e193f, 5d4251eb78, 88008b681d, 5462fdcbbd, 161e550200, 95af6096fb, 801f4b9e7a, c0c3a533a0, ed646078e1, dc0ea133e1, a854846f06, b52e8de7de, 1b62916393, 1d57c309ef, cf97e65b66, 42a46cff35, 2cb3db2d20, 04e480d477, de9a96c4de, 878434146f, d78be7ddf9, 83f8414e1e, 0b216f40fd, dd6e604f8f, 86863ae939, f3a90057a5, 03fdd82d63, cd7fa15d1d, 79868d7096, 19626b93f8, 91e6a7f7ea, ff7eaa1eb4, 5131206aad, bfc25fcf40, 4a6765fba2, dca8ef9407, cebef5c871, 245d6ebda5, d9875ff2e1, cc2a6c1d30, b5fd55de73, e11b3a1076, 0c4be64345, c34ad62eea, f7d7cf4e2d, 99a364a9dc, 3afbbfe921, bfce841b04, 139fc7cfee, 596eebb2b6, 1d5d745851, 3dabfd4933, 6ee6731290, 602fe45936, e2da9cd21f, c0b917b7eb, 54ea58ddf0, 0158725387, f3cacf1332, a15dc93011, 66b31786d3, 931ddfec6d, 7e0a5b17db, 07688231c2, 21eaeb8418, 9a929ad17f, c19ac41b34, fd85d31cb4, c9e4d7a564, 9990430e32, 6f5656ab0e, e4a876cee1, abb534ba7a, 65dceb6a40, db26f0aca9, 76878fd69b, 3444fa2dd7, caa2aeaa52, 11300ee582, c6b78c3d31, 4c40aa5be9, 50f2e90b76, 5d11ccc9e1, 93860af542, 7bf5834f2c, 1fbdd9335f, e9061a537c, ed664a9e1d, 4cb18601ff, 3abb25166c, 9e6ad64d48, b51d7c459e, d3b6f001fe, e938f1d945, 7284327a00, 919f3f11e2, 3cee15e6f9, 34e3644ada, 14bd120cdc, 3e40f8bebc, df5f1d9dca, 457ee1ab5a, 4f64688902, 117dcf1c02, 4529a262c0
@@ -387,7 +387,7 @@ steps:
 - name: static
   pull: always
-  image: techknowlogick/xgo:latest
+  image: techknowlogick/xgo:go-1.13.x
   commands:
   - apt update && apt -y install curl
   - curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs

@@ -485,7 +485,7 @@ steps:
 - name: static
   pull: always
-  image: techknowlogick/xgo:latest
+  image: techknowlogick/xgo:go-1.13.x
   commands:
   - apt update && apt -y install curl
   - curl -sL https://deb.nodesource.com/setup_12.x | bash - && apt -y install nodejs
.gitignore (vendored): 1 line changed

@@ -69,6 +69,7 @@ coverage.all
 /yarn.lock
 /public/js
 /public/css
+/VERSION

 # Snapcraft
 snap/.snapcraft/
CHANGELOG.md: 140 lines changed

@@ -4,6 +4,135 @@ This changelog goes through all the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.io).

+## [1.11.8](https://github.com/go-gitea/gitea/releases/tag/v1.11.8) - 2020-06-21
+
+* BUGFIXES
+  * Really fix __webpack_public_path__ for 1.11 (#11961)
+
+## [1.11.7](https://github.com/go-gitea/gitea/releases/tag/v1.11.7) - 2020-06-18
+
+* BUGFIXES
+  * Use ID or Where to instead directly use Get when load object from database (#11925) (#11935)
+  * Fix __webpack_public_path__ for 1.11 (#11907)
+  * Fix verification of subkeys of default gpg key (#11713) (#11902)
+  * Remove unnecessary parentheses in wiki/view template (#11781)
+  * Doctor fix xorm.Count nil on sqlite error (#11741)
+
+## [1.11.6](https://github.com/go-gitea/gitea/releases/tag/v1.11.6) - 2020-05-30
+
+* SECURITY
+  * Fix missing authorization check on pull for public repos of private/limited org (#11656) (#11683)
+  * Use session for retrieving org teams (#11438) (#11439)
+* BUGFIXES
+  * Return json on 500 error from API (#11574) (#11660)
+  * Fix wrong milestone in webhook message (#11596) (#11612)
+  * Prevent (caught) panic on login (#11590) (#11598)
+  * Fix commit page js error (#11527)
+  * Use media links for img in post-process (#10515) (#11504)
+  * Ensure public repositories in private organizations are visible and fix admin organizations list (#11465) (#11475)
+  * Set correct Content-Type value for Gogs/Gitea webhooks (#9504) (#10456) (#11461)
+  * Allow all members of private orgs to see public repos (#11442) (#11459)
+  * Whenever the ctx.Session is updated, release it to save it before sending the redirect (#11456) (#11457)
+  * Forcibly clean and destroy the session on logout (#11447) (#11451)
+  * Fix /api/v1/orgs/* endpoints by changing parameter to :org from :orgname (#11381)
+  * Add tracked time fix to doctor (part of #11111) (#11138)
+  * Fix webpack chunk loading with STATIC_URL_PREFIX (#11526) (#11544)
+  * Remove unnecessary parentheses in wiki/revision.tmpl to allow 1.11 to build on go1.14 (#11481)
+
+## [1.11.5](https://github.com/go-gitea/gitea/releases/tag/v1.11.5) - 2020-05-09
+
+* BUGFIXES
+  * Prevent timer leaks in Workerpool and others (#11333) (#11340)
+  * Fix tracked time issues (#11349) (#11354)
+  * Add NotifySyncPushCommits to indexer notifier (#11309) (#11338)
+  * Allow X in addition to x in tasks (#10979) (#11335)
+  * When delete tracked time through the API return 404 not 500 (#11319) (#11326)
+  * Prevent duplicate records in organizations list when creating a repository (#11303) (#11325)
+  * Manage port in submodule refurl (#11305) (#11323)
+  * api.Context.NotFound(...) should tolerate nil (#11288) (#11306)
+  * Show pull request selection even when unrelated branches (#11239) (#11283)
+  * Repo: milestone: make /milestone/:id endpoint accessible (#11264) (#11282)
+  * Fix GetContents(): Dont't ignore Executables (#11192) (#11209)
+  * Fix submodule paths when AppSubUrl is not root (#11098) (#11176)
+  * Prevent clones and pushes to disabled wiki (#11131) (#11134)
+  * Remove errant third closing curly-bracket from account.tmpl and send account ID in account.tmpl (#11130)
+  * On Repo Deletion: Delete related TrackedTimes too (#11110) (#11125)
+  * Refresh codemirror on show pull comment tab (#11100) (#11122)
+  * Fix merge dialog on protected branch with missing required statuses (#11074) (#11084)
+  * Load pr Issue Poster on API too (#11033) (#11039)
+  * Fix release counter on API repository info (#10968) (#10996)
+  * Generate Diff and Patch direct from Pull head (#10936) (#10938)
+  * Fix rebase conflict detection in git 2.26 (#10929) (#10930)
+* ENHANCEMENT
+  * Fix 404 and 500 image size in small size screen (#11043) (#11049)
+  * Multiple Gitea Doctor improvements (#10943) (#10990) (#10064) (#9095) (#10991)
+
+## [1.11.4](https://github.com/go-gitea/gitea/releases/tag/v1.11.4) - 2020-04-01
+
+* BUGFIXES
+  * Only update merge_base if not already merged (#10909)
+  * Fix milestones too many SQL variables bug (#10880) (#10904)
+  * Protect against NPEs in notifications list (#10879) (#10883)
+  * Convert plumbing.ErrObjectNotFound to git.ErrNotExist in getCommit (#10862) (#10868)
+  * Convert plumbing.ErrReferenceNotFound to git.ErrNotExist in GetRefCommitID (#10676) (#10797)
+  * Account for empty lines in receive-hook message (#10773) (#10784)
+  * Fix bug on branch API (#10767) (#10775)
+  * Migrate to go-git/go-git v5.0.0 (#10735) (#10753)
+  * Fix hiding of fields in authorization source page (#10734) (#10752)
+  * Prevent default for linkAction (#10742) (#10743)
+
+## [1.11.3](https://github.com/go-gitea/gitea/releases/tag/v1.11.3) - 2020-03-10
+
+* BUGFIXES
+  * Prevent panic in stopwatch (#10670) (#10673)
+  * Fix bug on pull view when required status check no ci result (#10648) (#10651)
+  * Build explicitly with Go 1.13 (#10684)
+
+## [1.11.2](https://github.com/go-gitea/gitea/releases/tag/v1.11.2) - 2020-03-06
+
+* BREAKING
+  * Various fixes in login sources (#10428) (#10429)
+* SECURITY
+  * Ensure only own addresses are updated (#10397) (#10399)
+  * Logout POST action (#10582) (#10585)
+  * Org action fixes and form cleanup (#10512) (#10514)
+  * Change action GETs to POST (#10462) (#10464)
+  * Fix admin notices (#10480) (#10483)
+  * Change admin dashboard to POST (#10465) (#10466)
+  * Update markbates/goth (#10444) (#10445)
+  * Update crypto vendors (#10385) (#10398)
+* BUGFIXES
+  * Allow users with write permissions to modify issue descriptions and comments. (#10623) (#10626)
+  * Handle deleted base branch in PR (#10618) (#10619)
+  * Delete dependencies when deleting a repository (#10608) (#10616)
+  * Ensure executable bit is kept on the web editor (#10607) (#10614)
+  * Update mergebase in pr checker (#10586) (#10605)
+  * Fix release attachments being deleted while upgrading (#10572) (#10573)
+  * Fix redirection path if Slack webhook channel is invalid (#10566)
+  * Fix head.tmpl og:image picture location (#10531) (#10556)
+  * Fix 404 after activating secondary email (#10547) (#10553)
+  * Show Signer in commit lists and add basic trust (#10425 & #10511) (#10524)
+  * Fix potential bugs (#10513) (#10518)
+  * Use \[:space:\] instead of \\s (#10508) (#10509)
+  * Avoid mailing users that have explicitly unwatched an issue (#10475) (#10500)
+  * Handle push rejection message in Merge & Web Editor (#10373) (#10497)
+  * Fix SQLite concurrency problems by using BEGIN IMMEDIATE (#10368) (#10493)
+  * Fix double PR notification from API (#10482) (#10486)
+  * Show the username as a fallback on feeds if full name is blank (#10461)
+  * Trigger webhooks on issue label-change via API too (#10421) (#10439)
+  * Fix git reference type in webhooks (#10427) (#10432)
+  * Prevent panic on merge to PR (#10403) (#10408)
+  * Fix wrong num closed issues on repository when close issue via commit… (#10364) (#10380)
+  * Reading pull attachments should depend on read UnitTypePullRequests (#10346) (#10354)
+  * Set max-width on review-box comment box (#10348) (#10353)
+  * Prevent nil pointer in GetPullRequestCommitStatusState (#10342) (#10344)
+  * Fix protected branch status check settings (#10341) (#10343)
+  * Truncate long commit message header (#10301) (#10319)
+  * Set the initial commit status to Success otherwise it will always be Pending (#10317) (#10318)
+  * Don't manually replace whitespace during render (#10291) (#10315)
+* ENHANCEMENT
+  * Admin page for managing user e-mail activation (#10557) (#10579)
+
 ## [1.11.1](https://github.com/go-gitea/gitea/releases/tag/v1.11.1) - 2020-02-15

 * BUGFIXES

@@ -467,6 +596,17 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Update CodeMirror to version 5.49.0 (#8381)
   * Wiki editor: enable side-by-side button (#7242)

+## [1.10.6](https://github.com/go-gitea/gitea/releases/tag/v1.10.6) - 2020-03-10
+
+This is a re-tag version of v1.10.5 and also explicitly built with Go 1.13.
+
+WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be used.
+
+## [1.10.5](https://github.com/go-gitea/gitea/releases/tag/v1.10.5) - 2020-03-06
+
+* BUGFIXES
+  * Fix release attachments being deleted while upgrading (#10572) (#10574)
+
 ## [1.10.4](https://github.com/go-gitea/gitea/releases/tag/v1.10.4) - 2020-02-16

 * FEATURE
Makefile: 52 lines changed

@@ -29,6 +29,8 @@ EXTRA_GOFLAGS ?=
 MAKE_VERSION := $(shell $(MAKE) -v | head -n 1)

+STORED_VERSION_FILE := VERSION
+
 ifneq ($(DRONE_TAG),)
 	VERSION ?= $(subst v,,$(DRONE_TAG))
 	GITEA_VERSION ?= $(VERSION)

@@ -38,7 +40,13 @@ else
 	else
 		VERSION ?= master
 	endif
+
+	STORED_VERSION=$(shell cat $(STORED_VERSION_FILE) 2>/dev/null)
+	ifneq ($(STORED_VERSION),)
+		GITEA_VERSION ?= $(STORED_VERSION)
+	else
 		GITEA_VERSION ?= $(shell git describe --tags --always | sed 's/-/+/' | sed 's/^v//')
+	endif
 endif

 LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"

@@ -96,13 +104,15 @@ include docker/Makefile
 help:
 	@echo "Make Routines:"
 	@echo " - \"\" equivalent to \"build\""
-	@echo " - build creates the entire project"
-	@echo " - clean delete integration files and build files but not css and js files"
-	@echo " - clean-all delete all generated files (integration test, build, css and js files)"
+	@echo " - build build everything"
+	@echo " - frontend build frontend files"
+	@echo " - backend build backend files"
+	@echo " - clean delete backend and integration files"
+	@echo " - clean-all delete backend, frontend and integration files"
 	@echo " - css rebuild only css files"
 	@echo " - js rebuild only js files"
-	@echo " - generate run \"make css js\" and \"go generate\""
-	@echo " - fmt format the code"
+	@echo " - generate run \"go generate\""
+	@echo " - fmt format the Go code"
 	@echo " - generate-swagger generate the swagger spec from code comments"
 	@echo " - swagger-validate check if the swagger spec is valide"
 	@echo " - revive run code linter revive"

@@ -113,7 +123,7 @@ help:

 .PHONY: go-check
 go-check:
-	$(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell go version | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?\s' | tr '.' ' ');))
+	$(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell go version | grep -Eo '[0-9]+\.?[0-9]+?\.?[0-9]?[[:space:]]' | tr '.' ' ');))
 	@if [ "$(GO_VERSION)" -lt "001011000" ]; then \
 		echo "Gitea requires Go 1.11.0 or greater to build. You can get it at https://golang.org/dl/"; \
 		exit 1; \

@@ -156,10 +166,6 @@ fmt:
 vet:
 	$(GO) vet $(PACKAGES)

-.PHONY: generate
-generate: js css
-	GO111MODULE=on $(GO) generate -mod=vendor $(PACKAGES)
-
 .PHONY: generate-swagger
 generate-swagger:
 	$(SWAGGER) generate spec -o './$(SWAGGER_SPEC)'

@@ -414,13 +420,23 @@ install: $(wildcard *.go)
 	$(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)'

 .PHONY: build
-build: go-check generate $(EXECUTABLE)
+build: frontend backend
+
+.PHONY: frontend
+frontend: node-check js css
+
+.PHONY: backend
+backend: go-check generate $(EXECUTABLE)
+
+.PHONY: generate
+generate:
+	GO111MODULE=on $(GO) generate -mod=vendor $(PACKAGES)

 $(EXECUTABLE): $(GO_SOURCES)
 	GO111MODULE=on $(GO) build -mod=vendor $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@

 .PHONY: release
-release: generate release-dirs release-windows release-linux release-darwin release-copy release-compress release-sources release-check
+release: frontend generate release-dirs release-windows release-linux release-darwin release-copy release-compress release-sources release-check

 .PHONY: release-dirs
 release-dirs:

@@ -431,7 +447,7 @@ release-windows:
 	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
 		$(GO) get -u src.techknowlogick.com/xgo; \
 	fi
-	xgo -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
+	xgo -go go-1.13 -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) .
 ifeq ($(CI),drone)
 	cp /build/* $(DIST)/binaries
 endif

@@ -441,7 +457,7 @@ release-linux:
 	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
 		$(GO) get -u src.techknowlogick.com/xgo; \
 	fi
-	xgo -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64,linux/mips64le,linux/mips,linux/mipsle' -out gitea-$(VERSION) .
+	xgo -go go-1.13 -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64,linux/mips64le,linux/mips,linux/mipsle' -out gitea-$(VERSION) .
 ifeq ($(CI),drone)
 	cp /build/* $(DIST)/binaries
 endif

@@ -451,7 +467,7 @@ release-darwin:
 	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
 		$(GO) get -u src.techknowlogick.com/xgo; \
 	fi
-	xgo -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out gitea-$(VERSION) .
+	xgo -go go-1.13 -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out gitea-$(VERSION) .
 ifeq ($(CI),drone)
 	cp /build/* $(DIST)/binaries
 endif

@@ -472,8 +488,10 @@ release-compress:
 	cd $(DIST)/release/; for file in `find . -type f -name "*"`; do echo "compressing $${file}" && gxz -k -9 $${file}; done;

 .PHONY: release-sources
-release-sources:
-	tar cvzf $(DIST)/release/gitea-src-$(VERSION).tar.gz --exclude $(DIST) --exclude .git .
+release-sources: | node_modules
+	echo $(VERSION) > $(STORED_VERSION_FILE)
+	tar --exclude=./$(DIST) --exclude=./.git --exclude=./node_modules/.cache -czf $(DIST)/release/gitea-src-$(VERSION).tar.gz .
+	rm -f $(STORED_VERSION_FILE)

 node_modules: package-lock.json
 	npm install --no-save
@@ -33,6 +33,15 @@ From the root of the source tree, run:

 	TAGS="bindata" make build

+The `build` target is split into two sub-targets:
+
+- `make backend` which requires [Go 1.11](https://golang.org/dl/) or greater.
+- `make frontend` which requires [Node.js 10.0.0](https://nodejs.org/en/download/) or greater.
+
+If pre-built frontend files are present it is possible to only build the backend:
+
+	TAGS="bindata" make backend
+
 More info: https://docs.gitea.io/en-us/install-from-source/

 ## Using
cmd/doctor.go (new file): 529 lines

@@ -0,0 +1,529 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package cmd

import (
	"bufio"
	"bytes"
	"context"
	"fmt"
	"io/ioutil"
	golog "log"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"text/tabwriter"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/migrations"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/options"
	"code.gitea.io/gitea/modules/setting"
	"xorm.io/builder"

	"github.com/urfave/cli"
)

// CmdDoctor represents the available doctor sub-command.
var CmdDoctor = cli.Command{
	Name:        "doctor",
	Usage:       "Diagnose problems",
	Description: "A command to diagnose problems with the current Gitea instance according to the given configuration.",
	Action:      runDoctor,
	Flags: []cli.Flag{
		cli.BoolFlag{
			Name:  "list",
			Usage: "List the available checks",
		},
		cli.BoolFlag{
			Name:  "default",
			Usage: "Run the default checks (if neither --run or --all is set, this is the default behaviour)",
		},
		cli.StringSliceFlag{
			Name:  "run",
			Usage: "Run the provided checks - (if --default is set, the default checks will also run)",
		},
		cli.BoolFlag{
			Name:  "all",
			Usage: "Run all the available checks",
		},
		cli.BoolFlag{
			Name:  "fix",
			Usage: "Automatically fix what we can",
		},
		cli.StringFlag{
			Name:  "log-file",
			Usage: `Name of the log file (default: "doctor.log"). Set to "-" to output to stdout, set to "" to disable`,
		},
	},
}

type check struct {
	title            string
	name             string
	isDefault        bool
	f                func(ctx *cli.Context) ([]string, error)
	abortIfFailed    bool
	skipDatabaseInit bool
}

// checklist represents list for all checks
var checklist = []check{
	{
		// NOTE: this check should be the first in the list
		title:            "Check paths and basic configuration",
		name:             "paths",
		isDefault:        true,
		f:                runDoctorPathInfo,
		abortIfFailed:    true,
		skipDatabaseInit: true,
	},
	{
		title:         "Check Database Version",
		name:          "check-db-version",
		isDefault:     true,
		f:             runDoctorCheckDBVersion,
		abortIfFailed: true,
	},
	{
		title:     "Check if OpenSSH authorized_keys file is up-to-date",
		name:      "authorized_keys",
		isDefault: true,
		f:         runDoctorAuthorizedKeys,
	},
	{
		title:     "Check if SCRIPT_TYPE is available",
		name:      "script-type",
		isDefault: false,
		f:         runDoctorScriptType,
	},
	{
		title:     "Check if hook files are up-to-date and executable",
		name:      "hooks",
		isDefault: false,
		f:         runDoctorHooks,
	},
	{
		title:     "Recalculate merge bases",
		name:      "recalculate_merge_bases",
		isDefault: false,
		f:         runDoctorPRMergeBase,
	},
	{
		title:     "Check consistency of database",
		name:      "check-db-consistency",
		isDefault: true,
		f:         runDoctorCheckDBConsistency,
	},
	// more checks please append here
}

func runDoctor(ctx *cli.Context) error {

	// Silence the default loggers
	log.DelNamedLogger("console")
	log.DelNamedLogger(log.DEFAULT)

	// Now setup our own
	logFile := ctx.String("log-file")
	if !ctx.IsSet("log-file") {
		logFile = "doctor.log"
	}

	if len(logFile) == 0 {
		log.NewLogger(1000, "doctor", "console", `{"level":"NONE","stacktracelevel":"NONE","colorize":"%t"}`)
	} else if logFile == "-" {
		log.NewLogger(1000, "doctor", "console", `{"level":"trace","stacktracelevel":"NONE"}`)
	} else {
		log.NewLogger(1000, "doctor", "file", fmt.Sprintf(`{"filename":%q,"level":"trace","stacktracelevel":"NONE"}`, logFile))
	}

	// Finally redirect the default golog to here
	golog.SetFlags(0)
	golog.SetPrefix("")
	golog.SetOutput(log.NewLoggerAsWriter("INFO", log.GetLogger(log.DEFAULT)))

	if ctx.IsSet("list") {
		w := tabwriter.NewWriter(os.Stdout, 0, 8, 0, '\t', 0)
		_, _ = w.Write([]byte("Default\tName\tTitle\n"))
		for _, check := range checklist {
			if check.isDefault {
				_, _ = w.Write([]byte{'*'})
			}
			_, _ = w.Write([]byte{'\t'})
			_, _ = w.Write([]byte(check.name))
			_, _ = w.Write([]byte{'\t'})
			_, _ = w.Write([]byte(check.title))
			_, _ = w.Write([]byte{'\n'})
		}
		return w.Flush()
	}

	var checks []check
	if ctx.Bool("all") {
		checks = checklist
	} else if ctx.IsSet("run") {
		addDefault := ctx.Bool("default")
		names := ctx.StringSlice("run")
		for i, name := range names {
			names[i] = strings.ToLower(strings.TrimSpace(name))
		}

		for _, check := range checklist {
			if addDefault && check.isDefault {
				checks = append(checks, check)
				continue
			}
			for _, name := range names {
				if name == check.name {
					checks = append(checks, check)
					break
				}
			}
		}
	} else {
		for _, check := range checklist {
			if check.isDefault {
				checks = append(checks, check)
			}
		}
	}

	dbIsInit := false

	for i, check := range checks {
		if !dbIsInit && !check.skipDatabaseInit {
			// Only open database after the most basic configuration check
			setting.EnableXORMLog = false
			if err := initDBDisableConsole(true); err != nil {
				fmt.Println(err)
				fmt.Println("Check if you are using the right config file. You can use a --config directive to specify one.")
				return nil
			}
			dbIsInit = true
		}
		fmt.Println("[", i+1, "]", check.title)
		messages, err := check.f(ctx)
		for _, message := range messages {
			fmt.Println("-", message)
		}
		if err != nil {
			fmt.Println("Error:", err)
			if check.abortIfFailed {
				return nil
			}
		} else {
			fmt.Println("OK.")
		}
		fmt.Println()
	}
	return nil
}

func runDoctorPathInfo(ctx *cli.Context) ([]string, error) {

	res := make([]string, 0, 10)

	if fi, err := os.Stat(setting.CustomConf); err != nil || !fi.Mode().IsRegular() {
		res = append(res, fmt.Sprintf("Failed to find configuration file at '%s'.", setting.CustomConf))
		res = append(res, fmt.Sprintf("If you've never ran Gitea yet, this is normal and '%s' will be created for you on first run.", setting.CustomConf))
		res = append(res, "Otherwise check that you are running this command from the correct path and/or provide a `--config` parameter.")
		return res, fmt.Errorf("can't proceed without a configuration file")
	}

	setting.NewContext()

	fail := false

	check := func(name, path string, is_dir, required, is_write bool) {
		res = append(res, fmt.Sprintf("%-25s '%s'", name+":", path))
		fi, err := os.Stat(path)
		if err != nil {
			if os.IsNotExist(err) && ctx.Bool("fix") && is_dir {
				if err := os.MkdirAll(path, 0777); err != nil {
					res = append(res, fmt.Sprintf(" ERROR: %v", err))
					fail = true
					return
				}
				fi, err = os.Stat(path)
			}
		}
		if err != nil {
			if required {
				res = append(res, fmt.Sprintf(" ERROR: %v", err))
				fail = true
				return
			}
			res = append(res, fmt.Sprintf(" NOTICE: not accessible (%v)", err))
			return
		}

		if is_dir && !fi.IsDir() {
			res = append(res, " ERROR: not a directory")
			fail = true
			return
		} else if !is_dir && !fi.Mode().IsRegular() {
			res = append(res, " ERROR: not a regular file")
			fail = true
		} else if is_write {
			if err := runDoctorWritableDir(path); err != nil {
				res = append(res, fmt.Sprintf(" ERROR: not writable: %v", err))
				fail = true
			}
		}
	}

	// Note print paths inside quotes to make any leading/trailing spaces evident
	check("Configuration File Path", setting.CustomConf, false, true, false)
	check("Repository Root Path", setting.RepoRootPath, true, true, true)
	check("Data Root Path", setting.AppDataPath, true, true, true)
	check("Custom File Root Path", setting.CustomPath, true, false, false)
	check("Work directory", setting.AppWorkPath, true, true, false)
	check("Log Root Path", setting.LogRootPath, true, true, true)

	if options.IsDynamic() {
		// Do not check/report on StaticRootPath if data is embedded in Gitea (-tags bindata)
		check("Static File Root Path", setting.StaticRootPath, true, true, false)
	}

	if fail {
		return res, fmt.Errorf("please check your configuration file and try again")
	}

	return res, nil
}

func runDoctorWritableDir(path string) error {
	// There's no platform-independent way of checking if a directory is writable
	// https://stackoverflow.com/questions/20026320/how-to-tell-if-folder-exists-and-is-writable

	tmpFile, err := ioutil.TempFile(path, "doctors-order")
	if err != nil {
		return err
	}
	if err := os.Remove(tmpFile.Name()); err != nil {
		fmt.Printf("Warning: can't remove temporary file: '%s'\n", tmpFile.Name())
	}
	tmpFile.Close()
	return nil
}

const tplCommentPrefix = `# gitea public key`

func runDoctorAuthorizedKeys(ctx *cli.Context) ([]string, error) {
	if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
		return nil, nil
	}

	fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
	f, err := os.Open(fPath)
	if err != nil {
		if ctx.Bool("fix") {
			return []string{fmt.Sprintf("Error whilst opening authorized_keys: %v. Attempting regeneration", err)}, models.RewriteAllPublicKeys()
		}
		return nil, err
	}
	defer f.Close()

	linesInAuthorizedKeys := map[string]bool{}

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, tplCommentPrefix) {
			continue
		}
		linesInAuthorizedKeys[line] = true
	}
	f.Close()

	// now we regenerate and check if there are any lines missing
	regenerated := &bytes.Buffer{}
	if err := models.RegeneratePublicKeys(regenerated); err != nil {
		return nil, err
	}
	scanner = bufio.NewScanner(regenerated)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, tplCommentPrefix) {
			continue
		}
		if ok := linesInAuthorizedKeys[line]; ok {
			continue
		}
		if ctx.Bool("fix") {
			return []string{"authorized_keys is out of date, attempting regeneration"}, models.RewriteAllPublicKeys()
		}
		return nil, fmt.Errorf("authorized_keys is out of date and should be regenerated with gitea admin regenerate keys")
	}
	return nil, nil
}

func runDoctorCheckDBVersion(ctx *cli.Context) ([]string, error) {
	if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
		if ctx.Bool("fix") {
			return []string{fmt.Sprintf("WARN: Got Error %v during ensure up to date", err), "Attempting to migrate to the latest DB version to fix this."}, models.NewEngine(context.Background(), migrations.Migrate)
		}
		return nil, err
	}
	return nil, nil
}

func iterateRepositories(each func(*models.Repository) ([]string, error)) ([]string, error) {
	results := []string{}
	err := models.Iterate(
		models.DefaultDBContext(),
		new(models.Repository),
		builder.Gt{"id": 0},
		func(idx int, bean interface{}) error {
			res, err := each(bean.(*models.Repository))
			results = append(results, res...)
			return err
		},
	)
	return results, err
}

func iteratePRs(repo *models.Repository, each func(*models.Repository, *models.PullRequest) ([]string, error)) ([]string, error) {
	results := []string{}
	err := models.Iterate(
		models.DefaultDBContext(),
		new(models.PullRequest),
		builder.Eq{"base_repo_id": repo.ID},
		func(idx int, bean interface{}) error {
			res, err := each(repo, bean.(*models.PullRequest))
			results = append(results, res...)
			return err
		},
	)
	return results, err
}

func runDoctorHooks(ctx *cli.Context) ([]string, error) {
	// Need to iterate across all of the repositories
	return iterateRepositories(func(repo *models.Repository) ([]string, error) {
		results, err := models.CheckDelegateHooks(repo.RepoPath())
		if err != nil {
			return nil, err
		}
		if len(results) > 0 && ctx.Bool("fix") {
			return []string{fmt.Sprintf("regenerated hooks for %s", repo.FullName())}, models.CreateDelegateHooks(repo.RepoPath())
		}

		return results, nil
	})
}

func runDoctorPRMergeBase(ctx *cli.Context) ([]string, error) {
	numRepos := 0
	numPRs := 0
	numPRsUpdated := 0
	results, err := iterateRepositories(func(repo *models.Repository) ([]string, error) {
		numRepos++
		return iteratePRs(repo, func(repo *models.Repository, pr *models.PullRequest) ([]string, error) {
			numPRs++
			results := []string{}
			pr.BaseRepo = repo
			repoPath := repo.RepoPath()

			oldMergeBase := pr.MergeBase

			if !pr.HasMerged {
				var err error
				pr.MergeBase, err = git.NewCommand("merge-base", "--", pr.BaseBranch, pr.GetGitRefName()).RunInDir(repoPath)
				if err != nil {
					var err2 error
					pr.MergeBase, err2 = git.NewCommand("rev-parse", git.BranchPrefix+pr.BaseBranch).RunInDir(repoPath)
					if err2 != nil {
						results = append(results, fmt.Sprintf("WARN: Unable to get merge base for PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
						log.Error("Unable to get merge base for PR ID %d, Index %d in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
						return results, nil
					}
				}
			} else {
				parentsString, err := git.NewCommand("rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath)
				if err != nil {
					results = append(results, fmt.Sprintf("WARN: Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
					log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
					return results, nil
				}
				parents := strings.Split(strings.TrimSpace(parentsString), " ")
				if len(parents) < 2 {
					return results, nil
				}

				args := append([]string{"merge-base", "--"}, parents[1:]...)
				args = append(args, pr.GetGitRefName())

				pr.MergeBase, err = git.NewCommand(args...).RunInDir(repoPath)
				if err != nil {
					results = append(results, fmt.Sprintf("WARN: Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
					log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
					return results, nil
				}
			}
			pr.MergeBase = strings.TrimSpace(pr.MergeBase)
			if pr.MergeBase != oldMergeBase {
				if ctx.Bool("fix") {
					if err := pr.UpdateCols("merge_base"); err != nil {
						return results, err
					}
				} else {
					results = append(results, fmt.Sprintf("#%d onto %s in %s/%s: MergeBase should be %s but is %s", pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, oldMergeBase, pr.MergeBase))
				}
				numPRsUpdated++
			}
			return results, nil
		})
	})

	if ctx.Bool("fix") {
		results = append(results, fmt.Sprintf("%d PR mergebases updated of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
	} else {
		if numPRsUpdated > 0 && err == nil {
			return results, fmt.Errorf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos)
		}
		results = append(results, fmt.Sprintf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
	}

	return results, err
}

func runDoctorScriptType(ctx *cli.Context) ([]string, error) {
	path, err := exec.LookPath(setting.ScriptType)
	if err != nil {
		return []string{fmt.Sprintf("ScriptType %s is not on the current PATH", setting.ScriptType)}, err
	}
	return []string{fmt.Sprintf("ScriptType %s is on the current PATH at %s", setting.ScriptType, path)}, nil
}

func runDoctorCheckDBConsistency(ctx *cli.Context) ([]string, error) {
	var results []string

	// make sure DB version is uptodate
	if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
		return nil, fmt.Errorf("model version on the database does not match the current Gitea version. Model consistency will not be checked until the database is upgraded")
	}

	//find tracked times without existing issues/pulls
	count, err := models.CountOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id")
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			if err = models.DeleteOrphanedObjects("tracked_time", "issue", "tracked_time.issue_id=issue.id"); err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d tracked times without existing issue deleted", count))
		} else {
			results = append(results, fmt.Sprintf("%d tracked times without existing issue", count))
		}
	}

	return results, nil
}
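The doctor command above is data-driven: each entry in `checklist` pairs a human-readable title with a `name` selectable via `--run` and a function returning messages plus an error. A minimal sketch of how a further check could be registered, following the `check` struct defined in the file; the check name, title, and body below are hypothetical and not part of this changeset:

```go
// Hypothetical example only: appending one more entry to the checklist slice
// defined in cmd/doctor.go above.
func init() {
	checklist = append(checklist, check{
		title:     "Example: verify something else",
		name:      "example-check",
		isDefault: false,
		f: func(ctx *cli.Context) ([]string, error) {
			// Return human-readable messages plus a non-nil error when the check fails.
			return []string{"nothing to report"}, nil
		},
	})
}
```

Because `isDefault` is false, such a check would only run when selected explicitly with `--run example-check` or together with `--all`.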
cmd/hook.go: 12 lines changed

@@ -19,6 +19,7 @@ import (
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/private"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"

 	"github.com/urfave/cli"
 )

@@ -113,15 +114,8 @@ func (d *delayWriter) Close() error {
 	if d == nil {
 		return nil
 	}
-	stopped := d.timer.Stop()
-	if stopped {
-		return nil
-	}
-	select {
-	case <-d.timer.C:
-	default:
-	}
-	if d.buf == nil {
+	stopped := util.StopTimer(d.timer)
+	if stopped || d.buf == nil {
 		return nil
 	}
 	_, err := d.internal.Write(d.buf.Bytes())
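The `delayWriter.Close` change above replaces an inline stop-and-drain dance with `util.StopTimer`. A minimal sketch of that idiom, assuming `util.StopTimer` wraps the standard pattern for safely stopping a `time.Timer` whose channel may already hold a tick; the body below is an illustration, not the actual code.gitea.io/gitea/modules/util implementation:

```go
package util

import "time"

// StopTimer stops t and reports whether it was stopped before it fired
// (a sketch of the assumed behaviour). If the timer already fired, the
// pending tick is drained so a later read of t.C cannot pick up a stale value.
func StopTimer(t *time.Timer) bool {
	stopped := t.Stop()
	if !stopped {
		select {
		case <-t.C: // discard the tick that already fired
		default: // channel already empty
		}
	}
	return stopped
}
```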
@@ -28,11 +28,11 @@ import (
 	"code.gitea.io/gitea/routers"
 	"code.gitea.io/gitea/routers/routes"

+	"github.com/go-git/go-git/v5"
+	"github.com/go-git/go-git/v5/config"
+	"github.com/go-git/go-git/v5/plumbing"
 	context2 "github.com/gorilla/context"
 	"github.com/unknwon/com"
-	"gopkg.in/src-d/go-git.v4"
-	"gopkg.in/src-d/go-git.v4/config"
-	"gopkg.in/src-d/go-git.v4/plumbing"
 	"gopkg.in/testfixtures.v2"
 	"xorm.io/xorm"
 )
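The import block above swaps the archived `gopkg.in/src-d/go-git.v4` packages for their maintained forks under `github.com/go-git/go-git/v5`, matching the new `go.mod` entries further down. A minimal, self-contained sketch of the v5 import paths in use; the repository path `"."` is only an illustration:

```go
package main

import (
	"fmt"
	"log"

	git "github.com/go-git/go-git/v5"
)

func main() {
	// Open an existing repository in the current directory.
	repo, err := git.PlainOpen(".")
	if err != nil {
		log.Fatal(err)
	}
	// Resolve HEAD; the API surface is unchanged apart from the import path.
	head, err := repo.Head()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(head.Hash())
}
```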
@@ -18,7 +18,7 @@ params:
   description: Git with a cup of tea
   author: The Gitea Authors
   website: https://docs.gitea.io
-  version: 1.11.0
+  version: 1.11.5

 outputs:
   home:
@@ -60,7 +60,7 @@ _Symbols used in table:_
 | Git LFS 2.0 | ✓ | ✘ | ✓ | ✓ | ✓ | ⁄ | ✓ |
 | Group Milestones | ✘ | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ |
 | Granular user roles (Code, Issues, Wiki etc) | ✓ | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ |
-| Verified Committer | ✘ | ✘ | ? | ✓ | ✓ | ✓ | ✘ |
+| Verified Committer | ⁄ | ✘ | ? | ✓ | ✓ | ✓ | ✘ |
 | GPG Signed Commits | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
 | Reject unsigned commits | [✘](https://github.com/go-gitea/gitea/issues/2770) | ✘ | ✓ | ✓ | ✓ | ✘ | ✓ |
 | Repository Activity page | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ |
@@ -114,6 +114,17 @@ recommended way to build from source is therefore:
 TAGS="bindata sqlite sqlite_unlock_notify" make build
 ```

+The `build` target is split into two sub-targets:
+
+- `make backend` which requires [Go 1.11](https://golang.org/dl/) or greater.
+- `make frontend` which requires [Node.js 10.0.0](https://nodejs.org/en/download/) or greater.
+
+If pre-built frontend files are present it is possible to only build the backend:
+
+```bash
+TAGS="bindata" make backend
+```
+
 ## Test

 After following the steps above, a `gitea` binary will be available in the working directory.
go.mod: 14 lines changed

@@ -41,6 +41,8 @@ require (
 	github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870 // indirect
 	github.com/gliderlabs/ssh v0.2.2
 	github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a // indirect
+	github.com/go-git/go-billy/v5 v5.0.0
+	github.com/go-git/go-git/v5 v5.0.0
 	github.com/go-openapi/jsonreference v0.19.3 // indirect
 	github.com/go-redis/redis v6.15.2+incompatible
 	github.com/go-sql-driver/mysql v1.4.1

@@ -62,7 +64,7 @@ require (
 	github.com/lib/pq v1.2.0
 	github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96
 	github.com/mailru/easyjson v0.7.0 // indirect
-	github.com/markbates/goth v1.56.0
+	github.com/markbates/goth v1.61.2
 	github.com/mattn/go-isatty v0.0.7
 	github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d // indirect
 	github.com/mattn/go-sqlite3 v1.11.0

@@ -80,7 +82,7 @@ require (
 	github.com/quasoft/websspi v1.0.0
 	github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect
 	github.com/satori/go.uuid v1.2.0
-	github.com/sergi/go-diff v1.0.0
+	github.com/sergi/go-diff v1.1.0
 	github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect
 	github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd
 	github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 // indirect

@@ -95,10 +97,10 @@ require (
 	github.com/yohcop/openid-go v0.0.0-20160914080427-2c050d2dae53
 	github.com/yuin/goldmark v1.1.19
 	go.etcd.io/bbolt v1.3.3 // indirect
-	golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876
-	golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9
+	golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073
+	golang.org/x/net v0.0.0-20200301022130-244492dfa37a
 	golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
-	golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2
+	golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527
 	golang.org/x/text v0.3.2
 	golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 // indirect
 	gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect

@@ -106,8 +108,6 @@ require (
 	gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
 	gopkg.in/ini.v1 v1.51.1
 	gopkg.in/ldap.v3 v3.0.2
-	gopkg.in/src-d/go-billy.v4 v4.3.2
-	gopkg.in/src-d/go-git.v4 v4.13.1
 	gopkg.in/testfixtures.v2 v2.5.0
 	mvdan.cc/xurls/v2 v2.1.0
 	strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
go.sum: 59 lines changed

@@ -110,7 +110,7 @@ github.com/couchbase/vellum v0.0.0-20190829182332-ef2e028c01fd/go.mod h1:xbc8Ff/
 github.com/couchbaselabs/go-couchbase v0.0.0-20190708161019-23e7ca2ce2b7 h1:1XjEY/gnjQ+AfXef2U6dxCquhiRzkEpxZuWqs+QxTL8=
 github.com/couchbaselabs/go-couchbase v0.0.0-20190708161019-23e7ca2ce2b7/go.mod h1:mby/05p8HE5yHEAKiIH/555NoblMs7PtW6NrYshDruc=
 github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
-github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76/go.mod h1:vYwsqCOLxGiisLwp9rITslkFNpZD5rz43tf41QFkTWY=
 github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d h1:SwD98825d6bdB+pEuTxWOXiSjBrHdOl/UVp75eI7JT8=
 github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d/go.mod h1:URriBxXwVq5ijiJ12C7iIZqlA69nTlI+LgI6/pwftG8=

@@ -164,6 +164,14 @@ github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a h1:FQqo
 github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
 github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 h1:gclg6gY70GLy3PbkQ1AERPfmLMMagS60DKF78eWwLn8=
 github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
+github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
+github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
+github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM=
+github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
+github.com/go-git/go-git-fixtures/v4 v4.0.1 h1:q+IFMfLx200Q3scvt2hN79JsEzy4AmBTp/pqnefH+Bc=
+github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw=
+github.com/go-git/go-git/v5 v5.0.0 h1:k5RWPm4iJwYtfWoxIJy4wJX9ON7ihPeZZYC1fLYDnpg=
+github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmClfZwtUVA=
 github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=

@@ -346,11 +354,13 @@ github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
-github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
 github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/lafriks/xormstore v1.3.2 h1:hqi3F8s/B4rz8GuEZZDuHuOxRjeuOpEI/cC7vcnWwH4=
 github.com/lafriks/xormstore v1.3.2/go.mod h1:mVNIwIa25QIr8rfR7YlVjrqN/apswHkVdtLCyVYBzXw=
+github.com/lestrrat-go/jwx v0.9.0/go.mod h1:iEoxlYfZjvoGpuWwxUz+eR5e6KTJGsaRcy/YNA/UnBk=
 github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
 github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=

@@ -370,8 +380,8 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN
 github.com/mailru/easyjson v0.7.0 h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM=
 github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
 github.com/markbates/going v1.0.0/go.mod h1:I6mnB4BPnEeqo85ynXIx1ZFLLbtiLHNXVgWeFO9OGOA=
-github.com/markbates/goth v1.56.0 h1:XEYedCgMNz5pi3ojXI8z2XUmXtBnMeuKUpx4Z6HlNj8=
-github.com/markbates/goth v1.56.0/go.mod h1:zZmAw0Es0Dpm7TT/4AdN14QrkiWLMrrU9Xei1o+/mdA=
+github.com/markbates/goth v1.61.2 h1:jDowrUH5qw8KGuQdKwFhLzkXkTYCIPfz3LHADJsiPIs=
+github.com/markbates/goth v1.61.2/go.mod h1:qh2QfwZoWRucQ+DR5KVKC6dUGkNCToWh4vS45GIzFsY=
 github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
 github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||||
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d h1:m+dSK37rFf2fqppZhg15yI2IwC9BtucBiRwSDm9VL8g=
|
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d h1:m+dSK37rFf2fqppZhg15yI2IwC9BtucBiRwSDm9VL8g=
|
||||||
@@ -402,6 +412,8 @@ github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc/go.mod h1:np1wUFZ6ty
|
|||||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
|
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
|
||||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
||||||
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||||
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||||
github.com/niklasfasching/go-org v0.1.8 h1:Kjvs6lP+LIILHhc9zIJ4Gu90a/pVY483if2Qmu8v4Fg=
|
github.com/niklasfasching/go-org v0.1.8 h1:Kjvs6lP+LIILHhc9zIJ4Gu90a/pVY483if2Qmu8v4Fg=
|
||||||
github.com/niklasfasching/go-org v0.1.8/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
|
github.com/niklasfasching/go-org v0.1.8/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
|
||||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||||
@@ -416,7 +428,6 @@ github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo=
|
|||||||
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||||
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
|
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
|
||||||
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||||
github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo=
|
|
||||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||||
github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
|
github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
|
||||||
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
||||||
@@ -462,12 +473,11 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqn
|
|||||||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ=
|
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||||
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
|
|
||||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||||
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
||||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b h1:4kg1wyftSKxLtnPAvcRWakIPpokB9w780/KwrNLnfPA=
|
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b h1:4kg1wyftSKxLtnPAvcRWakIPpokB9w780/KwrNLnfPA=
|
||||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd h1:ug7PpSOB5RBPK1Kg6qskGBoP3Vnj/aNYFTznWvlkGo0=
|
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd h1:ug7PpSOB5RBPK1Kg6qskGBoP3Vnj/aNYFTznWvlkGo0=
|
||||||
@@ -502,13 +512,10 @@ github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn
|
|||||||
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
||||||
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
|
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
|
||||||
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
|
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
|
||||||
github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=
|
|
||||||
github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
|
|
||||||
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 h1:JNEGSiWg6D3lcBCMCBqN3ELniXujt+0QNHLhNnO0w3s=
|
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 h1:JNEGSiWg6D3lcBCMCBqN3ELniXujt+0QNHLhNnO0w3s=
|
||||||
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2/go.mod h1:mjqs7N0Q6m5HpR7QfXVBZXZWSqTjQLeTujjA/xUp2uw=
|
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2/go.mod h1:mjqs7N0Q6m5HpR7QfXVBZXZWSqTjQLeTujjA/xUp2uw=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
|
|
||||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
@@ -532,7 +539,6 @@ github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGr
|
|||||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||||
github.com/unknwon/cae v0.0.0-20190822084630-55a0b64484a1 h1:SpoCl3+Pta5/ubQyF+Fmx65obtpfkyzeaOIneCE3MTw=
|
github.com/unknwon/cae v0.0.0-20190822084630-55a0b64484a1 h1:SpoCl3+Pta5/ubQyF+Fmx65obtpfkyzeaOIneCE3MTw=
|
||||||
github.com/unknwon/cae v0.0.0-20190822084630-55a0b64484a1/go.mod h1:QaSeRctcea9fK6piJpAMCCPKxzJ01+xFcr2k1m3WRPU=
|
github.com/unknwon/cae v0.0.0-20190822084630-55a0b64484a1/go.mod h1:QaSeRctcea9fK6piJpAMCCPKxzJ01+xFcr2k1m3WRPU=
|
||||||
github.com/unknwon/com v0.0.0-20190804042917-757f69c95f3e h1:GSGeB9EAKY2spCABz6xOX5DbxZEXolK+nBSvmsQwRjM=
|
|
||||||
github.com/unknwon/com v0.0.0-20190804042917-757f69c95f3e/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
github.com/unknwon/com v0.0.0-20190804042917-757f69c95f3e/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
||||||
github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
|
github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs=
|
||||||
github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM=
|
||||||
@@ -578,11 +584,10 @@ golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8U
|
|||||||
golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190907121410-71b5226ff739/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190907121410-71b5226ff739/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad h1:5E5raQxcv+6CZ11RrBYQe5WRbUIWpScjh0kvHZkZIrQ=
|
|
||||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876 h1:sKJQZMuxjOAR/Uo2LBfU90onWEf1dF4C+0hPJCc9Mpc=
|
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM=
|
||||||
golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||||
@@ -614,8 +619,8 @@ golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLL
|
|||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9 h1:DPz9iiH3YoKiKhX/ijjoZvT0VFwK2c6CWYWQ7Zyr8TU=
|
golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0=
|
||||||
golang.org/x/net v0.0.0-20191101175033-0deb6923b6d9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
@@ -645,15 +650,13 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20190730183949-1393eb018365/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190730183949-1393eb018365/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190907184412-d223b2b6db03/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190907184412-d223b2b6db03/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY=
|
|
||||||
golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2 h1:/J2nHFg1MTqaRLFO7M+J78ASNsJoz3r0cvHBPQ77fsE=
|
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
|
||||||
golang.org/x/sys v0.0.0-20191127021746-63cb32ae39b2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
||||||
@@ -675,7 +678,6 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw
|
|||||||
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
|
|
||||||
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 h1:kJQZhwFzSwJS2BxboKjdZzWczQOZx8VuH7Y8hhuGUtM=
|
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 h1:kJQZhwFzSwJS2BxboKjdZzWczQOZx8VuH7Y8hhuGUtM=
|
||||||
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||||
@@ -715,8 +717,9 @@ gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 h1:nn6Zav2sOQHCFJHEspya8
|
|||||||
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
|
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
|
||||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
|
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
|
||||||
@@ -731,12 +734,6 @@ gopkg.in/ldap.v3 v3.0.2 h1:R6RBtabK6e1GO0eQKtkyOFbAHO73QesLzI2w2DZ6b9w=
|
|||||||
gopkg.in/ldap.v3 v3.0.2/go.mod h1:oxD7NyBuxchC+SgJDE1Q5Od05eGt29SDQVBmV+HYbzw=
|
gopkg.in/ldap.v3 v3.0.2/go.mod h1:oxD7NyBuxchC+SgJDE1Q5Od05eGt29SDQVBmV+HYbzw=
|
||||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||||
gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
|
gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
|
||||||
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
|
|
||||||
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
|
|
||||||
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
|
|
||||||
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
|
|
||||||
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
|
|
||||||
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
|
|
||||||
gopkg.in/testfixtures.v2 v2.5.0 h1:N08B7l2GzFQenyYbzqthDnKAA+cmb17iAZhhFxr7JHw=
|
gopkg.in/testfixtures.v2 v2.5.0 h1:N08B7l2GzFQenyYbzqthDnKAA+cmb17iAZhhFxr7JHw=
|
||||||
gopkg.in/testfixtures.v2 v2.5.0/go.mod h1:vyAq+MYCgNpR29qitQdLZhdbLFf4mR/2MFJRFoQZZ2M=
|
gopkg.in/testfixtures.v2 v2.5.0/go.mod h1:vyAq+MYCgNpR29qitQdLZhdbLFf4mR/2MFJRFoQZZ2M=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||||
@@ -745,8 +742,9 @@ gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
|||||||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||||
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
@@ -760,7 +758,6 @@ xorm.io/builder v0.3.6 h1:ha28mQ2M+TFx96Hxo+iq6tQgnkC9IZkM6D8w9sKHHF8=
|
|||||||
xorm.io/builder v0.3.6/go.mod h1:LEFAPISnRzG+zxaxj2vPicRwz67BdhFreKg8yv8/TgU=
|
xorm.io/builder v0.3.6/go.mod h1:LEFAPISnRzG+zxaxj2vPicRwz67BdhFreKg8yv8/TgU=
|
||||||
xorm.io/core v0.7.2 h1:mEO22A2Z7a3fPaZMk6gKL/jMD80iiyNwRrX5HOv3XLw=
|
xorm.io/core v0.7.2 h1:mEO22A2Z7a3fPaZMk6gKL/jMD80iiyNwRrX5HOv3XLw=
|
||||||
xorm.io/core v0.7.2/go.mod h1:jJfd0UAEzZ4t87nbQYtVjmqpIODugN6PD2D9E+dJvdM=
|
xorm.io/core v0.7.2/go.mod h1:jJfd0UAEzZ4t87nbQYtVjmqpIODugN6PD2D9E+dJvdM=
|
||||||
xorm.io/xorm v0.8.0 h1:iALxgJrX8O00f8Jk22GbZwPmxJNgssV5Mv4uc2HL9PM=
|
|
||||||
xorm.io/xorm v0.8.0/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
xorm.io/xorm v0.8.0/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
||||||
xorm.io/xorm v0.8.1 h1:4f2KXuQxVdaX3RdI3Fw81NzMiSpZeyCZt8m3sEVeIkQ=
|
xorm.io/xorm v0.8.1 h1:4f2KXuQxVdaX3RdI3Fw81NzMiSpZeyCZt8m3sEVeIkQ=
|
||||||
xorm.io/xorm v0.8.1/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
xorm.io/xorm v0.8.1/go.mod h1:ZkJLEYLoVyg7amJK/5r779bHyzs2AU8f8VMiP6BM7uY=
|
||||||
|
|||||||
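The gopkg.in/src-d modules removed above are replaced by the github.com/go-git family added earlier in the same go.sum diff. As a minimal, hedged sketch of what that import-path move looks like for calling code (assuming go-git v5's in-memory clone API; the repository URL is illustrative only):

package main

import (
    "fmt"
    "log"

    git "github.com/go-git/go-git/v5" // previously gopkg.in/src-d/go-git.v4
    "github.com/go-git/go-git/v5/storage/memory"
)

func main() {
    // Clone into memory; only the import paths change, the API shape stays close to v4.
    repo, err := git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
        URL: "https://github.com/go-gitea/gitea", // illustrative URL
    })
    if err != nil {
        log.Fatal(err)
    }
    head, err := repo.Head()
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("HEAD is at", head.Hash())
}
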
@@ -28,6 +28,8 @@ func testAPIGetBranch(t *testing.T, branchName string, exists bool) {
     var branch api.Branch
     DecodeJSON(t, resp, &branch)
     assert.EqualValues(t, branchName, branch.Name)
+    assert.True(t, branch.UserCanPush)
+    assert.True(t, branch.UserCanMerge)
 }

 func TestAPIGetBranch(t *testing.T) {

@@ -39,12 +39,12 @@ func TestAPICreateIssue(t *testing.T) {
     defer prepareTestEnv(t)()
     const body, title = "apiTestBody", "apiTestTitle"

-    repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
-    owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
+    repoBefore := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
+    owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repoBefore.OwnerID}).(*models.User)

     session := loginUser(t, owner.Name)
     token := getTokenForLoggedInUser(t, session)
-    urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repo.Name, token)
+    urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues?state=all&token=%s", owner.Name, repoBefore.Name, token)
     req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateIssueOption{
         Body:  body,
         Title: title,
@@ -57,19 +57,23 @@ func TestAPICreateIssue(t *testing.T) {
     assert.Equal(t, apiIssue.Title, title)

     models.AssertExistsAndLoadBean(t, &models.Issue{
-        RepoID:     repo.ID,
+        RepoID:     repoBefore.ID,
         AssigneeID: owner.ID,
         Content:    body,
         Title:      title,
     })
+
+    repoAfter := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
+    assert.Equal(t, repoBefore.NumIssues+1, repoAfter.NumIssues)
+    assert.Equal(t, repoBefore.NumClosedIssues, repoAfter.NumClosedIssues)
 }

 func TestAPIEditIssue(t *testing.T) {
     defer prepareTestEnv(t)()

     issueBefore := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
-    repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
-    owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
+    repoBefore := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)
+    owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repoBefore.OwnerID}).(*models.User)
     assert.NoError(t, issueBefore.LoadAttributes())
     assert.Equal(t, int64(1019307200), int64(issueBefore.DeadlineUnix))
     assert.Equal(t, api.StateOpen, issueBefore.State())
@@ -84,7 +88,7 @@ func TestAPIEditIssue(t *testing.T) {
     body := "new content!"
     title := "new title from api set"

-    urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d?token=%s", owner.Name, repo.Name, issueBefore.Index, token)
+    urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d?token=%s", owner.Name, repoBefore.Name, issueBefore.Index, token)
     req := NewRequestWithJSON(t, "PATCH", urlStr, api.EditIssueOption{
         State:          &issueState,
         RemoveDeadline: &removeDeadline,
@@ -99,6 +103,7 @@ func TestAPIEditIssue(t *testing.T) {
     DecodeJSON(t, resp, &apiIssue)

     issueAfter := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue)
+    repoAfter := models.AssertExistsAndLoadBean(t, &models.Repository{ID: issueBefore.RepoID}).(*models.Repository)

     // check deleted user
     assert.Equal(t, int64(500), issueAfter.PosterID)
@@ -107,6 +112,9 @@ func TestAPIEditIssue(t *testing.T) {
     assert.Equal(t, int64(-1), issueBefore.PosterID)
     assert.Equal(t, int64(-1), apiIssue.Poster.ID)

+    // check repo change
+    assert.Equal(t, repoBefore.NumClosedIssues+1, repoAfter.NumClosedIssues)
+
     // API response
     assert.Equal(t, api.StateClosed, apiIssue.State)
     assert.Equal(t, milestone, apiIssue.Milestone.ID)

@@ -60,17 +60,17 @@ func TestAPIDeleteTrackedTime(t *testing.T) {
     //Deletion not allowed
     req := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/%d/times/%d?token=%s", user2.Name, issue2.Repo.Name, issue2.Index, time6.ID, token)
     session.MakeRequest(t, req, http.StatusForbidden)
-    /* Delete own time <-- ToDo: timout without reason
+
     time3 := models.AssertExistsAndLoadBean(t, &models.TrackedTime{ID: 3}).(*models.TrackedTime)
     req = NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/%d/times/%d?token=%s", user2.Name, issue2.Repo.Name, issue2.Index, time3.ID, token)
     session.MakeRequest(t, req, http.StatusNoContent)
     //Delete non existing time
-    session.MakeRequest(t, req, http.StatusInternalServerError) */
+    session.MakeRequest(t, req, http.StatusNotFound)

     //Reset time of user 2 on issue 2
     trackedSeconds, err := models.GetTrackedSeconds(models.FindTrackedTimesOptions{IssueID: 2, UserID: 2})
     assert.NoError(t, err)
-    assert.Equal(t, int64(3662), trackedSeconds)
+    assert.Equal(t, int64(3661), trackedSeconds)

     req = NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/issues/%d/times?token=%s", user2.Name, issue2.Repo.Name, issue2.Index, token)
     session.MakeRequest(t, req, http.StatusNoContent)

@@ -209,13 +209,31 @@ func getRepo(t *testing.T, repoID int64) *models.Repository {
 func TestAPIViewRepo(t *testing.T) {
     defer prepareTestEnv(t)()

+    var repo api.Repository
+
     req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1")
     resp := MakeRequest(t, req, http.StatusOK)
-
-    var repo api.Repository
     DecodeJSON(t, resp, &repo)
     assert.EqualValues(t, 1, repo.ID)
     assert.EqualValues(t, "repo1", repo.Name)
+    assert.EqualValues(t, 1, repo.Releases)
+    assert.EqualValues(t, 1, repo.OpenIssues)
+    assert.EqualValues(t, 2, repo.OpenPulls)
+
+    req = NewRequest(t, "GET", "/api/v1/repos/user12/repo10")
+    resp = MakeRequest(t, req, http.StatusOK)
+    DecodeJSON(t, resp, &repo)
+    assert.EqualValues(t, 10, repo.ID)
+    assert.EqualValues(t, "repo10", repo.Name)
+    assert.EqualValues(t, 1, repo.OpenPulls)
+    assert.EqualValues(t, 1, repo.Forks)
+
+    req = NewRequest(t, "GET", "/api/v1/repos/user5/repo4")
+    resp = MakeRequest(t, req, http.StatusOK)
+    DecodeJSON(t, resp, &repo)
+    assert.EqualValues(t, 4, repo.ID)
+    assert.EqualValues(t, "repo4", repo.Name)
+    assert.EqualValues(t, 1, repo.Stars)
 }

 func TestAPIOrgRepos(t *testing.T) {

@@ -71,7 +71,6 @@ func testGit(t *testing.T, u *url.URL) {
         t.Run("BranchProtectMerge", doBranchProtectPRMerge(&httpContext, dstPath))
         t.Run("MergeFork", func(t *testing.T) {
             t.Run("CreatePRAndMerge", doMergeFork(httpContext, forkedUserCtx, "master", httpContext.Username+":master"))
-            t.Run("DeleteRepository", doAPIDeleteRepository(httpContext))
             rawTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
             mediaTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
         })
@@ -111,7 +110,6 @@ func testGit(t *testing.T, u *url.URL) {
         t.Run("BranchProtectMerge", doBranchProtectPRMerge(&sshContext, dstPath))
         t.Run("MergeFork", func(t *testing.T) {
             t.Run("CreatePRAndMerge", doMergeFork(sshContext, forkedUserCtx, "master", sshContext.Username+":master"))
-            t.Run("DeleteRepository", doAPIDeleteRepository(sshContext))
             rawTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
             mediaTest(t, &forkedUserCtx, little, big, littleLFS, bigLFS)
         })
@@ -351,6 +349,17 @@ func doBranchProtectPRMerge(baseCtx *APITestContext, dstPath string) func(t *tes
             pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "protected", "unprotected")(t)
             assert.NoError(t, err)
         })
+        t.Run("GenerateCommit", func(t *testing.T) {
+            _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+            assert.NoError(t, err)
+        })
+        t.Run("PushToUnprotectedBranch", doGitPushTestRepository(dstPath, "origin", "protected:unprotected-2"))
+        var pr2 api.PullRequest
+        t.Run("CreatePullRequest", func(t *testing.T) {
+            pr2, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "unprotected", "unprotected-2")(t)
+            assert.NoError(t, err)
+        })
+        t.Run("MergePR2", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr2.Index))
         t.Run("MergePR", doAPIMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index))
         t.Run("PullProtected", doGitPull(dstPath, "origin", "protected"))
         t.Run("ProtectProtectedBranchWhitelist", doProtectBranch(ctx, "protected", baseCtx.Username))
@@ -408,8 +417,62 @@ func doMergeFork(ctx, baseCtx APITestContext, baseBranch, headBranch string) fun
             pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, baseBranch, headBranch)(t)
             assert.NoError(t, err)
         })
+        t.Run("EnsureCanSeePull", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+        })
+        var diffStr string
+        t.Run("GetDiff", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
+            diffStr = resp.Body.String()
+        })
         t.Run("MergePR", doAPIMergePullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index))
+        t.Run("EnsureCanSeePull", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+        })
+        t.Run("EnsureDiffNoChange", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
+            assert.Equal(t, diffStr, resp.Body.String())
+        })
+        t.Run("DeleteHeadBranch", doBranchDelete(baseCtx, baseCtx.Username, baseCtx.Reponame, headBranch))
+        t.Run("EnsureCanSeePull", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+        })
+        t.Run("EnsureDiffNoChange", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
+            assert.Equal(t, diffStr, resp.Body.String())
+        })
+        t.Run("DeleteRepository", doAPIDeleteRepository(ctx))
+        t.Run("EnsureCanSeePull", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/files", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+            req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            ctx.Session.MakeRequest(t, req, http.StatusOK)
+        })
+        t.Run("EnsureDiffNoChange", func(t *testing.T) {
+            req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
+            resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
+            assert.Equal(t, diffStr, resp.Body.String())
+        })
     }
 }

@@ -482,3 +545,14 @@ func doPushCreate(ctx APITestContext, u *url.URL) func(t *testing.T) {
         assert.True(t, repo.IsPrivate)
     }
 }
+
+func doBranchDelete(ctx APITestContext, owner, repo, branch string) func(*testing.T) {
+    return func(t *testing.T) {
+        csrf := GetCSRF(t, ctx.Session, fmt.Sprintf("/%s/%s/branches", url.PathEscape(owner), url.PathEscape(repo)))
+
+        req := NewRequestWithValues(t, "POST", fmt.Sprintf("/%s/%s/branches/delete?name=%s", url.PathEscape(owner), url.PathEscape(repo), url.QueryEscape(branch)), map[string]string{
+            "_csrf": csrf,
+        })
+        ctx.Session.MakeRequest(t, req, http.StatusOK)
+    }
+}

@@ -126,7 +126,7 @@ func restoreOldDB(t *testing.T, version string) bool {
     err := os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
     assert.NoError(t, err)

-    db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d", setting.Database.Path, setting.Database.Timeout))
+    db, err := sql.Open("sqlite3", fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", setting.Database.Path, setting.Database.Timeout))
     assert.NoError(t, err)
     defer db.Close()

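For context on the DSN change above: with the mattn/go-sqlite3 driver this test uses, _busy_timeout makes the driver wait on a locked database, and _txlock=immediate starts transactions with BEGIN IMMEDIATE so the write lock is taken up front. A minimal standalone sketch, with the file name and timeout chosen purely for illustration:

package main

import (
    "database/sql"
    "fmt"
    "log"

    _ "github.com/mattn/go-sqlite3"
)

func main() {
    // Illustrative path and timeout; the test builds the same DSN from its settings.
    dsn := fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", "gitea.db", 500)
    db, err := sql.Open("sqlite3", dsn)
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()
    if err := db.Ping(); err != nil {
        log.Fatal(err)
    }
}
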
@@ -76,6 +76,53 @@ nARUPZ9SqaUmRm+KGsSyoYnvN9apiDk5KVQoyfrmweNN7DCIIcoh/B9Ax8nmouKz
 yBB2fjCM/bJNtN/AsgYbZIScuYK/xqTkwNtbe5WdCyD/QJOHTsPJzx59hgSVo6gf
 Fe8VBnxHtrY8gPSUU3gkhYLvLzyVX+YLNzRcffobd8gJbfumwFJUkz91oGvYz7xg
 XN2qmsgBNCbTIzWZMpRDMAbY+n2QFImGf+EJZlMdj6gOrIYq8N4+nMW1FwJivsOb
-muqySyjZnD2AYjEA6OYPXfCVhaB5fTfhQXbIrZbgsEh4ob/eIdM=
-=oSDR
+muqySyjZnD2AYjEA6OYPXfCVhaB5fTfhQXbIrZbgsEh4ob/eIdOdBVgEXta5egEM
+AMYlmZ47NqBMBeaN0o/ahYMe8eIMaroWkufMfC9VRBSMAkpbDl34oNp0cflmnMYo
+AFAl8ucRMFTiUnjiWpo27q14tjSyDVsn/CqwbnrgJgCFNV/MGsYsToEkb4JwDIRC
+bky+1BvqvI8RMlO3MlwzrlIaMrlQfx5NtUb9TyO7S4xZTz864+Ty5p3HhRwbdZMe
+Ko8sfXFhCcCHFXosI0mX83EyzsrXlbkGRawId7jvrdOAUg/cYP8f/XmV6z1NHHH9
+cvz+3oLOGuVxUdG0KuS/jigHrLWdRuKM3xfEeesp870yZU3AbyFdoHnGXROJePTl
+FV8j2P5Ahf/yuVhjdyJSKdZC2h6+HtLG9RiGgLviLLYhtlZG2H6pYyKY5Ud3php+
+qw1aYL1xtdxrHYkQlAa0vLY/mwpuPfMke9I+rtnrwlLRMCstdiN34ybZ4sRD+gL1
+w5VIZ/aM6/Gsczd3s/T8psIi09TKPfEU2gWLMGvlDsgz+aSDdVP7XYQpNglaEPet
+PwARAQABAAv8CHg6+hnV2pblTwGTlTU7V8DO3gwMfn/QhQ/8ju66G5a7J6p/ZreQ
+nfCJnqYq4AgoW0SuqVSBbbTENF6YjixNmiSlb9iHMZ+ilms24xG0Y3lOMBYYCY3Y
+nTSNf6nXyconz31TW7jLmTdG9hpykKEKO9WFgt5UpgWe+2CAgtUoBDZyaLrVBZ2h
+te99WmziDbPQZeZPm7UQ0aX0iRBclxy4+dxjcnrcmi1mdQAM/glgs2sHbEjN7JnV
+dTOvUSN7/8ixj6I719Wx6MN6jE+BNd0ytZOun6tcDl0vamfT5fBpqbQoJMib2ggo
++FGg9VFnzEMLqyI47LfOKUjCIhwVsxS4q9HXa2FtpO8UfRMPjDKgDZQzRTRJScrP
+s1NJ9HiM/eCHS1YjRmgroo60HygxkoLVCHp+Rz/hi0tG/ptv4q6mdnm8Mwb5JJtV
+48EvmZoNTWl9xOez1wmQn6caVHipc0qDqn/veoe8N5wdc+3hoMEXbSXqU+kx2KUa
+cVxCCVoUeURhBgDUGWtx34j1y17zE92BYhtVJTCU89dDe4wOEqGPyCGvRtgTmZ+1
+KwWr66pij91MV9mlY+7Ue2QHUSmgav2EFGIjVes956p4/F/CJ6qaYoekirMSnmX5
+jhRt4p6RW7m4omha3LAQ+gN4Fqa4acZUywENBvv1x3v+IWbjGJGn3eBnRrP3o9P+
+QUAtyMifiRm0ZN8J767o+bzUVmscXrkh7Qml47lQfDToyRI1UZZQmP2izpwHcwbZ
+NtfkgRUdeEq4GJUGAO8o4Oebbt0ALZ54E2LHhk8xi4ofKkFBDCkUFjcqS3bJJNck
+rkhfqEkMLETNhPbiC4TRNiunI5PXOinwNPkKI8P/hfp4S49WdIvnARazCoxjZNtl
+0Cbo+F1wtOH9FZaaWzNlU2lCQ2JJ3MCpLHz+nEmdYWOIWGQu2/s7smLODVEFbYKR
+50VWVRL7mB83v1XdfMFvExdQ7i5MOX4hFvmwi/WJIKClJfhNwTrHp6Jrm9jA66RL
++dNyPKfwcFcYrqt1gwYAruZzP7QgTYVL+cmvGtCaHY4KoR8hanbpqR4YbzzyEXwS
+ll2FUCaVSokuRAdH3+/CHF9bqog3Zvn6HYcCS/A/rHVGIU9a+7s5IbRe0Ysc2FAN
+Nm9AsC5YnuyoAjW3cJGaZLYxp2WOZcMEXZeLPFYrNz22R1nRoxnUIPRpsKICXcK0
+aC4rSMk479jc/8WprWx4d45EVG+6Gsh1AT8LVhDL9yHFrh50ss2jCe1Fnftet6DI
+V5zHcxBx4sCs91aPxxe12UiJA2wEGAEKACAWIQQ4G/p4KVUOUEVu5g5R68KXFICq
+DwUCXta5egIbAgHACRBR68KXFICqD8D0IAQZAQoAHRYhBKAm5ShdO9gmF/o8jan0
+RkmWoKbKBQJe1rl6AAoJEKn0RkmWoKbKacUL/3YYKmiVvcr5LYFzMdwdahkla+6m
+hEEkL0l3dJNuU97Ou71tA1ieF0fjbVRSWjXKsntKwhyPoXjaZEZwMmv7iZ8BXV+b
+oO/EG5sg2/6iukJFXZqGnQwMdLVo1jPoXDteZU1qYiCoxLHhGhHL7ivtD1ygEi6w
+/cMbbOEB5Le1vOWIwqazs8dDcAYyy1PKthRl0ygvh8CpqPwy+AK3uLm0TVwetQAp
+taux0bDYWCb5Aft1r1nlV44gU4RiC131TDo+TKd754+UuI+UHk1D+LjTmZxRX2S6
+fXgoMXzrWmthGPdqvVOgKWm7Ef18hmaBECvPnp/tUJeDVVe02KrYQi8Bf2kxveSd
+8T0N/ExcydU9HgzTL8MuyPI+yp086elQzKJu6vb9tpgxCcglQZrUNT9Uy82pzTRY
+z9MmhnCDI2SD5L/CW5PsNpPTPy7s3f9DOV0G5Vka4LTSBOCK64NvAGBmRf8rFjJU
+lPtRPhC7h6uHdUIx3Q550Xogvq5sQm8UBCsbG8OJDADT3FJSIulR9Sh96OsES3sc
+H09juN4KcbpS03MAeUFwXqw3jBMhDoGKlsjX17Jf31qh/nI/XjigS3XWyj1BLSMG
+rJfH0NyYoGDCnff37tf+8lD9km9TlnV4Qjd9ujYbDRsefhaSjLVcy/gqdxZEuNBC
+BWmGwsmLI3nyZ4KDtNsa5JUHUNNZLBN20hvmE41Eszmz4Yg9Ho9DxKiFKvzUULMc
+bnMHaVHseHHq6+NVUnN1SAcOA0ygjnEid8D57RtdBCD90LXjLB7vlR+HaSMZYOnr
+DtseivHvqqy4+rxhwV2S3avnls9vRwE4bV6GCiqhoBnWIZRrARLZc2OTBIya82vS
+BIS1eyhjif1mE7Lqhs6aPD+eqQK2mBtQ/sidN8P/IfKfVF5siXfFbuGZLz5nRIho
+Yp1z7oO3OZ09lpUk0G1h+ouIFF6goDP48M/AKtbvs9OWk3QKxnOUZD8sRncq95x6
+m4q1MVb+aJyxwBqDRGaFY+3TVArB1b+kG1JsAvV5dag=
+=511T
 -----END PGP PRIVATE KEY BLOCK-----

@@ -20,7 +20,7 @@ func createNewRelease(t *testing.T, session *TestSession, repoURL, tag, title st
     resp := session.MakeRequest(t, req, http.StatusOK)
     htmlDoc := NewHTMLParser(t, resp.Body)

-    link, exists := htmlDoc.doc.Find("form").Attr("action")
+    link, exists := htmlDoc.doc.Find("form.ui.form").Attr("action")
     assert.True(t, exists, "The template has changed")

     postData := map[string]string{

@@ -14,7 +14,7 @@ func TestSignOut(t *testing.T) {

     session := loginUser(t, "user2")

-    req := NewRequest(t, "GET", "/user/logout")
+    req := NewRequest(t, "POST", "/user/logout")
     session.MakeRequest(t, req, http.StatusFound)

     // try to view a private repo, should fail

main.go
@@ -68,6 +68,7 @@ arguments - which can alternatively be run by running the subcommand web.`
         cmd.CmdMigrate,
         cmd.CmdKeys,
         cmd.CmdConvert,
+        cmd.CmdDoctor,
     }
     // Now adjust these commands to add our global configuration options

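The new cmd.CmdDoctor entry registers a "doctor" subcommand alongside the existing ones. A minimal sketch of the same registration pattern with urfave/cli, the CLI library Gitea's main.go is built on; the command body here is a placeholder, not the real implementation:

package main

import (
    "log"
    "os"

    "github.com/urfave/cli"
)

func main() {
    app := cli.NewApp()
    app.Name = "gitea"
    app.Commands = []cli.Command{
        {
            Name:  "doctor",
            Usage: "Diagnose problems with the current installation",
            Action: func(ctx *cli.Context) error {
                // Placeholder: the real command runs a series of self-checks.
                return nil
            },
        },
    }
    if err := app.Run(os.Args); err != nil {
        log.Fatal(err)
    }
}
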
@@ -122,10 +122,13 @@ func (a *Action) ShortActUserName() string {
     return base.EllipsisString(a.GetActUserName(), 20)
 }

-// GetDisplayName gets the action's display name based on DEFAULT_SHOW_FULL_NAME
+// GetDisplayName gets the action's display name based on DEFAULT_SHOW_FULL_NAME, or falls back to the username if it is blank.
 func (a *Action) GetDisplayName() string {
     if setting.UI.DefaultShowFullName {
-        return a.GetActFullName()
+        trimmedFullName := strings.TrimSpace(a.GetActFullName())
+        if len(trimmedFullName) > 0 {
+            return trimmedFullName
+        }
     }
     return a.ShortActUserName()
 }
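A standalone sketch of the fallback rule introduced above, with hypothetical inputs: when showing full names is enabled but the full name is empty or whitespace, the display name falls back to the (possibly shortened) username.

package main

import (
    "fmt"
    "strings"
)

// displayName mirrors the new GetDisplayName logic in isolation.
func displayName(showFullName bool, fullName, userName string) string {
    if showFullName {
        if trimmed := strings.TrimSpace(fullName); trimmed != "" {
            return trimmed
        }
    }
    return userName
}

func main() {
    fmt.Println(displayName(true, "Jane Doe", "jane"))  // "Jane Doe"
    fmt.Println(displayName(true, "   ", "jane"))       // "jane": blank full name falls back
    fmt.Println(displayName(false, "Jane Doe", "jane")) // "jane": full names disabled
}
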
@@ -212,7 +215,7 @@ func (a *Action) getCommentLink(e Engine) string {
         return "#"
     }
     if a.Comment == nil && a.CommentID != 0 {
-        a.Comment, _ = GetCommentByID(a.CommentID)
+        a.Comment, _ = getCommentByID(e, a.CommentID)
     }
     if a.Comment != nil {
         return a.Comment.HTMLURL()

@@ -79,7 +79,11 @@ func (a *Attachment) LinkedRepository() (*Repository, UnitType, error) {
             return nil, UnitTypeIssues, err
         }
         repo, err := GetRepositoryByID(iss.RepoID)
-        return repo, UnitTypeIssues, err
+        unitType := UnitTypeIssues
+        if iss.IsPull {
+            unitType = UnitTypePullRequests
+        }
+        return repo, unitType, err
     } else if a.ReleaseID != 0 {
         rel, err := GetReleaseByID(a.ReleaseID)
         if err != nil {
@@ -132,9 +136,8 @@ func GetAttachmentByID(id int64) (*Attachment, error) {
 }

 func getAttachmentByID(e Engine, id int64) (*Attachment, error) {
-    attach := &Attachment{ID: id}
-    if has, err := e.Get(attach); err != nil {
+    attach := &Attachment{}
+    if has, err := e.ID(id).Get(attach); err != nil {
         return nil, err
     } else if !has {
         return nil, ErrAttachmentNotExist{ID: id, UUID: ""}
@@ -143,8 +146,8 @@ func getAttachmentByID(e Engine, id int64) (*Attachment, error) {
 }

 func getAttachmentByUUID(e Engine, uuid string) (*Attachment, error) {
-    attach := &Attachment{UUID: uuid}
-    has, err := e.Get(attach)
+    attach := &Attachment{}
+    has, err := e.Where("uuid=?", uuid).Get(attach)
     if err != nil {
         return nil, err
     } else if !has {
@@ -195,7 +198,7 @@ func GetAttachmentsByCommentID(commentID int64) ([]*Attachment, error) {
|
|||||||
|
|
||||||
func getAttachmentsByCommentID(e Engine, commentID int64) ([]*Attachment, error) {
|
func getAttachmentsByCommentID(e Engine, commentID int64) ([]*Attachment, error) {
|
||||||
attachments := make([]*Attachment, 0, 10)
|
attachments := make([]*Attachment, 0, 10)
|
||||||
return attachments, x.Where("comment_id=?", commentID).Find(&attachments)
|
return attachments, e.Where("comment_id=?", commentID).Find(&attachments)
|
||||||
}
|
}
|
||||||
|
|
||||||
// getAttachmentByReleaseIDFileName return a file based on the the following infos:
|
// getAttachmentByReleaseIDFileName return a file based on the the following infos:
|
||||||
|
|||||||
@@ -138,7 +138,7 @@ func TestLinkedRepository(t *testing.T) {
|
|||||||
expectedUnitType UnitType
|
expectedUnitType UnitType
|
||||||
}{
|
}{
|
||||||
{"LinkedIssue", 1, &Repository{ID: 1}, UnitTypeIssues},
|
{"LinkedIssue", 1, &Repository{ID: 1}, UnitTypeIssues},
|
||||||
{"LinkedComment", 3, &Repository{ID: 1}, UnitTypeIssues},
|
{"LinkedComment", 3, &Repository{ID: 1}, UnitTypePullRequests},
|
||||||
{"LinkedRelease", 9, &Repository{ID: 1}, UnitTypeReleases},
|
{"LinkedRelease", 9, &Repository{ID: 1}, UnitTypeReleases},
|
||||||
{"Notlinked", 10, nil, -1},
|
{"Notlinked", 10, nil, -1},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -113,6 +113,28 @@ func (protectBranch *ProtectedBranch) CanUserMerge(userID int64) bool {
 return in
 }

+// IsUserMergeWhitelisted checks if some user is whitelisted to merge to this branch
+func (protectBranch *ProtectedBranch) IsUserMergeWhitelisted(userID int64) bool {
+if !protectBranch.EnableMergeWhitelist {
+return true
+}
+
+if base.Int64sContains(protectBranch.MergeWhitelistUserIDs, userID) {
+return true
+}
+
+if len(protectBranch.MergeWhitelistTeamIDs) == 0 {
+return false
+}
+
+in, err := IsUserInTeams(userID, protectBranch.MergeWhitelistTeamIDs)
+if err != nil {
+log.Error("IsUserInTeams: %v", err)
+return false
+}
+return in
+}
+
 // IsUserOfficialReviewer check if user is official reviewer for the branch (counts towards required approvals)
 func (protectBranch *ProtectedBranch) IsUserOfficialReviewer(user *User) (bool, error) {
 return protectBranch.isUserOfficialReviewer(x, user)
@@ -209,8 +231,8 @@ func getProtectedBranchBy(e Engine, repoID int64, branchName string) (*Protected

 // GetProtectedBranchByID getting protected branch by ID
 func GetProtectedBranchByID(id int64) (*ProtectedBranch, error) {
-rel := &ProtectedBranch{ID: id}
-has, err := x.Get(rel)
+rel := &ProtectedBranch{}
+has, err := x.ID(id).Get(rel)
 if err != nil {
 return nil, err
 }
@@ -499,9 +521,9 @@ func (repo *Repository) GetDeletedBranches() ([]*DeletedBranch, error) {
 }

 // GetDeletedBranchByID get a deleted branch by its ID
-func (repo *Repository) GetDeletedBranchByID(ID int64) (*DeletedBranch, error) {
-deletedBranch := &DeletedBranch{ID: ID}
-has, err := x.Get(deletedBranch)
+func (repo *Repository) GetDeletedBranchByID(id int64) (*DeletedBranch, error) {
+deletedBranch := &DeletedBranch{}
+has, err := x.ID(id).Get(deletedBranch)
 if err != nil {
 return nil, err
 }
@@ -10,6 +10,7 @@ import (
 "testing"

 "github.com/stretchr/testify/assert"
+"xorm.io/builder"
 )

 // consistencyCheckable a type that can be tested for database consistency
@@ -167,3 +168,23 @@ func (action *Action) checkForConsistency(t *testing.T) {
 repo := AssertExistsAndLoadBean(t, &Repository{ID: action.RepoID}).(*Repository)
 assert.Equal(t, repo.IsPrivate, action.IsPrivate, "action: %+v", action)
 }

+// CountOrphanedObjects count subjects with have no existing refobject anymore
+func CountOrphanedObjects(subject, refobject, joinCond string) (int64, error) {
+var ids []int64
+
+return int64(len(ids)), x.Table("`"+subject+"`").
+Join("LEFT", refobject, joinCond).
+Where(builder.IsNull{"`" + refobject + "`.id"}).
+Select("id").Find(&ids)
+}
+
+// DeleteOrphanedObjects delete subjects with have no existing refobject anymore
+func DeleteOrphanedObjects(subject, refobject, joinCond string) error {
+_, err := x.In("id", builder.Select("`"+subject+"`.id").
+From("`"+subject+"`").
+Join("LEFT", "`"+refobject+"`", joinCond).
+Where(builder.IsNull{"`" + refobject + "`.id"})).
+Delete("`" + subject + "`")
+return err
+}
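For illustration only (not part of this changeset), a minimal Go sketch of how a maintenance task might call the new orphan helpers; the table names and join condition are assumptions chosen for the example:

package example // illustrative only

import "code.gitea.io/gitea/models"

// countOrphanedLabels is a hypothetical caller of the new helper.
func countOrphanedLabels() (int64, error) {
	return models.CountOrphanedObjects("label", "repository", "label.repo_id=repository.id")
}

// removeOrphanedLabels would delete the same rows the count reports.
func removeOrphanedLabels() error {
	return models.DeleteOrphanedObjects("label", "repository", "label.repo_id=repository.id")
}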
@@ -4,6 +4,11 @@

 package models

+import (
+"code.gitea.io/gitea/modules/setting"
+"xorm.io/builder"
+)
+
 // DBContext represents a db context
 type DBContext struct {
 e Engine
@@ -53,3 +58,10 @@ func WithTx(f func(ctx DBContext) error) error {
 sess.Close()
 return err
 }

+// Iterate iterates the databases and doing something
+func Iterate(ctx DBContext, tableBean interface{}, cond builder.Cond, fun func(idx int, bean interface{}) error) error {
+return ctx.e.Where(cond).
+BufferSize(setting.Database.IterateBufferSize).
+Iterate(tableBean, fun)
+}
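For illustration only, a sketch of how the new Iterate helper might be used; the condition and the callback body are assumptions for the example:

package example // illustrative only

import (
	"code.gitea.io/gitea/models"
	"xorm.io/builder"
)

// forEachPrivateRepo walks private repositories in batches of IterateBufferSize.
func forEachPrivateRepo(ctx models.DBContext) error {
	return models.Iterate(ctx, new(models.Repository), builder.Eq{"is_private": true}, func(idx int, bean interface{}) error {
		repo := bean.(*models.Repository)
		_ = repo // process one repository per callback invocation
		return nil
	})
}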
@@ -7,6 +7,7 @@ package models

 import (
 "fmt"
+"strings"

 "code.gitea.io/gitea/modules/git"
 )
@@ -56,6 +57,21 @@ func (err ErrNamePatternNotAllowed) Error() string {
 return fmt.Sprintf("name pattern is not allowed [pattern: %s]", err.Pattern)
 }

+// ErrNameCharsNotAllowed represents a "character not allowed in name" error.
+type ErrNameCharsNotAllowed struct {
+Name string
+}
+
+// IsErrNameCharsNotAllowed checks if an error is an ErrNameCharsNotAllowed.
+func IsErrNameCharsNotAllowed(err error) bool {
+_, ok := err.(ErrNameCharsNotAllowed)
+return ok
+}
+
+func (err ErrNameCharsNotAllowed) Error() string {
+return fmt.Sprintf("User name is invalid [%s]: must be valid alpha or numeric or dash(-_) or dot characters", err.Name)
+}
+
 // ErrSSHDisabled represents an "SSH disabled" error.
 type ErrSSHDisabled struct {
 }
@@ -1355,6 +1371,53 @@ func (err ErrMergePushOutOfDate) Error() string {
 return fmt.Sprintf("Merge PushOutOfDate Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
 }

+// ErrPushRejected represents an error if merging fails due to rejection from a hook
+type ErrPushRejected struct {
+Style MergeStyle
+Message string
+StdOut string
+StdErr string
+Err error
+}
+
+// IsErrPushRejected checks if an error is a ErrPushRejected.
+func IsErrPushRejected(err error) bool {
+_, ok := err.(ErrPushRejected)
+return ok
+}
+
+func (err ErrPushRejected) Error() string {
+return fmt.Sprintf("Merge PushRejected Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
+}
+
+// GenerateMessage generates the remote message from the stderr
+func (err *ErrPushRejected) GenerateMessage() {
+messageBuilder := &strings.Builder{}
+i := strings.Index(err.StdErr, "remote: ")
+if i < 0 {
+err.Message = ""
+return
+}
+for {
+if len(err.StdErr) <= i+8 {
+break
+}
+if err.StdErr[i:i+8] != "remote: " {
+break
+}
+i += 8
+nl := strings.IndexByte(err.StdErr[i:], '\n')
+if nl >= 0 {
+messageBuilder.WriteString(err.StdErr[i : i+nl+1])
+i = i + nl + 1
+} else {
+messageBuilder.WriteString(err.StdErr[i:])
+i = len(err.StdErr)
+}
+}
+err.Message = strings.TrimSpace(messageBuilder.String())
+}
+
 // ErrRebaseConflicts represents an error if rebase fails with a conflict
 type ErrRebaseConflicts struct {
 Style MergeStyle
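For illustration only, a sketch of how a caller might surface the hook output carried by the new error type; the wrapper name is hypothetical:

package example // illustrative only

import "code.gitea.io/gitea/models"

// pushRejectionMessage extracts the "remote:" lines from a rejected push, or returns "".
func pushRejectionMessage(err error) string {
	if models.IsErrPushRejected(err) {
		pushErr := err.(models.ErrPushRejected)
		pushErr.GenerateMessage()
		return pushErr.Message
	}
	return ""
}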
@@ -369,6 +369,7 @@ type CommitVerification struct {
 CommittingUser *User
 SigningEmail string
 SigningKey *GPGKey
+TrustStatus string
 }

 // SignCommit represents a commit with validation of signature.
@@ -735,6 +736,21 @@ func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature,
 CanSign: pubkey.CanSign(),
 KeyID: pubkey.KeyIdString(),
 }
+for _, subKey := range ekey.Subkeys {
+content, err := base64EncPubKey(subKey.PublicKey)
+if err != nil {
+return &CommitVerification{
+CommittingUser: committer,
+Verified: false,
+Reason: "gpg.error.generate_hash",
+}
+}
+k.SubsKey = append(k.SubsKey, &GPGKey{
+Content: content,
+CanSign: subKey.PublicKey.CanSign(),
+KeyID: subKey.PublicKey.KeyIdString(),
+})
+}
 if commitVerification := hashAndVerifyWithSubKeys(sig, payload, k, committer, &User{
 Name: gpgSettings.Name,
 Email: gpgSettings.Email,
@@ -754,18 +770,54 @@ func verifyWithGPGSettings(gpgSettings *git.GPGSettings, sig *packet.Signature,
 }

 // ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys.
-func ParseCommitsWithSignature(oldCommits *list.List) *list.List {
+func ParseCommitsWithSignature(oldCommits *list.List, repository *Repository) *list.List {
 var (
 newCommits = list.New()
 e = oldCommits.Front()
 )
+memberMap := map[int64]bool{}
+
 for e != nil {
 c := e.Value.(UserCommit)
-newCommits.PushBack(SignCommit{
+signCommit := SignCommit{
 UserCommit: &c,
 Verification: ParseCommitWithSignature(c.Commit),
-})
+}
+
+_ = CalculateTrustStatus(signCommit.Verification, repository, &memberMap)
+
+newCommits.PushBack(signCommit)
 e = e.Next()
 }
 return newCommits
 }

+// CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository
+func CalculateTrustStatus(verification *CommitVerification, repository *Repository, memberMap *map[int64]bool) (err error) {
+if verification.Verified {
+verification.TrustStatus = "trusted"
+if verification.SigningUser.ID != 0 {
+var isMember bool
+if memberMap != nil {
+var has bool
+isMember, has = (*memberMap)[verification.SigningUser.ID]
+if !has {
+isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID)
+(*memberMap)[verification.SigningUser.ID] = isMember
+}
+} else {
+isMember, err = repository.IsOwnerMemberCollaborator(verification.SigningUser.ID)
+}
+
+if !isMember {
+verification.TrustStatus = "untrusted"
+if verification.CommittingUser.ID != verification.SigningUser.ID {
+// The committing user and the signing user are not the same and are not the default key
+// This should be marked as questionable unless the signing user is a collaborator/team member etc.
+verification.TrustStatus = "unmatched"
+}
+}
+}
+}
+return
+}
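For illustration only, a sketch of classifying a single verification with the new helper; passing nil skips the shared member cache, and the function name is hypothetical:

package example // illustrative only

import "code.gitea.io/gitea/models"

// trustLabel classifies one commit verification for a repository.
func trustLabel(v *models.CommitVerification, repo *models.Repository) string {
	if err := models.CalculateTrustStatus(v, repo, nil); err != nil {
		return "unknown"
	}
	return v.TrustStatus // "", "trusted", "untrusted" or "unmatched"
}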
@@ -74,8 +74,8 @@ var (
 issueTasksDonePat *regexp.Regexp
 )

-const issueTasksRegexpStr = `(^\s*[-*]\s\[[\sx]\]\s.)|(\n\s*[-*]\s\[[\sx]\]\s.)`
-const issueTasksDoneRegexpStr = `(^\s*[-*]\s\[[x]\]\s.)|(\n\s*[-*]\s\[[x]\]\s.)`
+const issueTasksRegexpStr = `(^\s*[-*]\s\[[\sxX]\]\s.)|(\n\s*[-*]\s\[[\sxX]\]\s.)`
+const issueTasksDoneRegexpStr = `(^\s*[-*]\s\[[xX]\]\s.)|(\n\s*[-*]\s\[[xX]\]\s.)`
 const issueMaxDupIndexAttempts = 3

 func init() {
@@ -241,7 +241,7 @@ func (issue *Issue) loadReactions(e Engine) (err error) {
 }

 func (issue *Issue) loadMilestone(e Engine) (err error) {
-if issue.Milestone == nil && issue.MilestoneID > 0 {
+if (issue.Milestone == nil || issue.Milestone.ID != issue.MilestoneID) && issue.MilestoneID > 0 {
 issue.Milestone, err = getMilestoneByRepoID(e, issue.RepoID, issue.MilestoneID)
 if err != nil && !IsErrMilestoneNotExist(err) {
 return fmt.Errorf("getMilestoneByRepoID [repo_id: %d, milestone_id: %d]: %v", issue.RepoID, issue.MilestoneID, err)
@@ -673,6 +673,10 @@ func (issue *Issue) changeStatus(e *xorm.Session, doer *User, isClosed bool) (*C
 return nil, err
 }

+if err := issue.updateClosedNum(e); err != nil {
+return nil, err
+}
+
 // New action comment
 cmtType := CommentTypeClose
 if !issue.IsClosed {
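For illustration only, a standalone check of the widened task-list pattern; the constant is unexported in the package, so it is copied here as an assumption:

package main

import (
	"fmt"
	"regexp"
)

const issueTasksRegexpStr = `(^\s*[-*]\s\[[\sxX]\]\s.)|(\n\s*[-*]\s\[[\sxX]\]\s.)`

func main() {
	pat := regexp.MustCompile(issueTasksRegexpStr)
	body := "- [X] upper-case item\n- [ ] open item"
	fmt.Println(len(pat.FindAllString(body, -1))) // 2: an upper-case [X] now counts as a task
}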
@@ -749,8 +749,12 @@ func CreateRefComment(doer *User, repo *Repository, issue *Issue, content, commi

 // GetCommentByID returns the comment by given ID.
 func GetCommentByID(id int64) (*Comment, error) {
+return getCommentByID(x, id)
+}
+
+func getCommentByID(e Engine, id int64) (*Comment, error) {
 c := new(Comment)
-has, err := x.ID(id).Get(c)
+has, err := e.ID(id).Get(c)
 if err != nil {
 return nil, err
 } else if !has {
@@ -212,11 +212,8 @@ func getLabelInRepoByName(e Engine, repoID int64, labelName string) (*Label, err
 return nil, ErrLabelNotExist{0, repoID}
 }

-l := &Label{
-Name: labelName,
-RepoID: repoID,
-}
-has, err := e.Get(l)
+l := &Label{}
+has, err := e.Where("name=? AND repo_id=?", labelName, repoID).Get(l)
 if err != nil {
 return nil, err
 } else if !has {
@@ -521,10 +521,12 @@ func DeleteMilestoneByRepoID(repoID, id int64) error {
 return sess.Commit()
 }

-// CountMilestonesByRepoIDs map from repoIDs to number of milestones matching the options`
-func CountMilestonesByRepoIDs(repoIDs []int64, isClosed bool) (map[int64]int64, error) {
+// CountMilestones map from repo conditions to number of milestones matching the options`
+func CountMilestones(repoCond builder.Cond, isClosed bool) (map[int64]int64, error) {
 sess := x.Where("is_closed = ?", isClosed)
-sess.In("repo_id", repoIDs)
+if repoCond.IsValid() {
+sess.In("repo_id", builder.Select("id").From("repository").Where(repoCond))
+}

 countsSlice := make([]*struct {
 RepoID int64
@@ -544,11 +546,21 @@ func CountMilestonesByRepoIDs(repoIDs []int64, isClosed bool) (map[int64]int64,
 return countMap, nil
 }

-// GetMilestonesByRepoIDs returns a list of milestones of given repositories and status.
-func GetMilestonesByRepoIDs(repoIDs []int64, page int, isClosed bool, sortType string) (MilestoneList, error) {
+// CountMilestonesByRepoIDs map from repoIDs to number of milestones matching the options`
+func CountMilestonesByRepoIDs(repoIDs []int64, isClosed bool) (map[int64]int64, error) {
+return CountMilestones(
+builder.In("repo_id", repoIDs),
+isClosed,
+)
+}
+
+// SearchMilestones search milestones
+func SearchMilestones(repoCond builder.Cond, page int, isClosed bool, sortType string) (MilestoneList, error) {
 miles := make([]*Milestone, 0, setting.UI.IssuePagingNum)
 sess := x.Where("is_closed = ?", isClosed)
-sess.In("repo_id", repoIDs)
+if repoCond.IsValid() {
+sess.In("repo_id", builder.Select("id").From("repository").Where(repoCond))
+}
 if page > 0 {
 sess = sess.Limit(setting.UI.IssuePagingNum, (page-1)*setting.UI.IssuePagingNum)
 }
@@ -570,25 +582,45 @@ func GetMilestonesByRepoIDs(repoIDs []int64, page int, isClosed bool, sortType s
 return miles, sess.Find(&miles)
 }

+// GetMilestonesByRepoIDs returns a list of milestones of given repositories and status.
+func GetMilestonesByRepoIDs(repoIDs []int64, page int, isClosed bool, sortType string) (MilestoneList, error) {
+return SearchMilestones(
+builder.In("repo_id", repoIDs),
+page,
+isClosed,
+sortType,
+)
+}
+
 // MilestonesStats represents milestone statistic information.
 type MilestonesStats struct {
 OpenCount, ClosedCount int64
 }

+// Total returns the total counts of milestones
+func (m MilestonesStats) Total() int64 {
+return m.OpenCount + m.ClosedCount
+}
+
 // GetMilestonesStats returns milestone statistic information for dashboard by given conditions.
-func GetMilestonesStats(userRepoIDs []int64) (*MilestonesStats, error) {
+func GetMilestonesStats(repoCond builder.Cond) (*MilestonesStats, error) {
 var err error
 stats := &MilestonesStats{}

-stats.OpenCount, err = x.Where("is_closed = ?", false).
-And(builder.In("repo_id", userRepoIDs)).
-Count(new(Milestone))
+sess := x.Where("is_closed = ?", false)
+if repoCond.IsValid() {
+sess.And(builder.In("repo_id", builder.Select("id").From("repository").Where(repoCond)))
+}
+stats.OpenCount, err = sess.Count(new(Milestone))
 if err != nil {
 return nil, err
 }
-stats.ClosedCount, err = x.Where("is_closed = ?", true).
-And(builder.In("repo_id", userRepoIDs)).
-Count(new(Milestone))
+sess = x.Where("is_closed = ?", true)
+if repoCond.IsValid() {
+sess.And(builder.In("repo_id", builder.Select("id").From("repository").Where(repoCond)))
+}
+stats.ClosedCount, err = sess.Count(new(Milestone))
 if err != nil {
 return nil, err
 }
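For illustration only, a sketch of calling the condition-based milestone API; the wrapper name is hypothetical and the empty sortType is assumed to fall back to the default ordering:

package example // illustrative only

import (
	"code.gitea.io/gitea/models"
	"xorm.io/builder"
)

// openMilestonesForRepos lists the first page of open milestones for the given repositories,
// mirroring what the old repo-ID based call did.
func openMilestonesForRepos(repoIDs []int64) (models.MilestoneList, error) {
	return models.SearchMilestones(builder.In("repo_id", repoIDs), 1, false, "")
}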
@@ -11,6 +11,7 @@ import (

 api "code.gitea.io/gitea/modules/structs"
 "code.gitea.io/gitea/modules/timeutil"
+"xorm.io/builder"

 "github.com/stretchr/testify/assert"
 )
@@ -370,7 +371,7 @@ func TestGetMilestonesStats(t *testing.T) {
 repo1 := AssertExistsAndLoadBean(t, &Repository{ID: 1}).(*Repository)
 repo2 := AssertExistsAndLoadBean(t, &Repository{ID: 2}).(*Repository)

-milestoneStats, err := GetMilestonesStats([]int64{repo1.ID, repo2.ID})
+milestoneStats, err := GetMilestonesStats(builder.In("repo_id", []int64{repo1.ID, repo2.ID}))
 assert.NoError(t, err)
 assert.EqualValues(t, repo1.NumOpenMilestones+repo2.NumOpenMilestones, milestoneStats.OpenCount)
 assert.EqualValues(t, repo1.NumClosedMilestones+repo2.NumClosedMilestones, milestoneStats.ClosedCount)
@@ -273,6 +273,10 @@ func DeleteTime(t *TrackedTime) error {
 return err
 }

+if err := t.loadAttributes(sess); err != nil {
+return err
+}
+
 if err := deleteTime(sess, t); err != nil {
 return err
 }
@@ -312,10 +316,8 @@ func deleteTime(e Engine, t *TrackedTime) error {

 // GetTrackedTimeByID returns raw TrackedTime without loading attributes by id
 func GetTrackedTimeByID(id int64) (*TrackedTime, error) {
-time := &TrackedTime{
-ID: id,
-}
-has, err := x.Get(time)
+time := new(TrackedTime)
+has, err := x.ID(id).Get(time)
 if err != nil {
 return nil, err
 } else if !has {
@@ -64,14 +64,18 @@ func getIssueWatch(e Engine, userID, issueID int64) (iw *IssueWatch, exists bool
 return
 }

-// GetIssueWatchersIDs returns IDs of subscribers to a given issue id
+// GetIssueWatchersIDs returns IDs of subscribers or explicit unsubscribers to a given issue id
 // but avoids joining with `user` for performance reasons
 // User permissions must be verified elsewhere if required
-func GetIssueWatchersIDs(issueID int64) ([]int64, error) {
+func GetIssueWatchersIDs(issueID int64, watching bool) ([]int64, error) {
+return getIssueWatchersIDs(x, issueID, watching)
+}
+
+func getIssueWatchersIDs(e Engine, issueID int64, watching bool) ([]int64, error) {
 ids := make([]int64, 0, 64)
-return ids, x.Table("issue_watch").
+return ids, e.Table("issue_watch").
 Where("issue_id=?", issueID).
-And("is_watching = ?", true).
+And("is_watching = ?", watching).
 Select("user_id").
 Find(&ids)
 }
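For illustration only, a sketch of the two calls enabled by the new boolean parameter; the helper name is hypothetical:

package example // illustrative only

import "code.gitea.io/gitea/models"

// issueSubscriptionIDs returns explicit subscribers and explicit unsubscribers for one issue.
func issueSubscriptionIDs(issueID int64) (watchers, unwatchers []int64, err error) {
	if watchers, err = models.GetIssueWatchersIDs(issueID, true); err != nil {
		return nil, nil, err
	}
	if unwatchers, err = models.GetIssueWatchersIDs(issueID, false); err != nil {
		return nil, nil, err
	}
	return watchers, unwatchers, nil
}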
@@ -12,7 +12,6 @@ import (
 "fmt"
 "net/smtp"
 "net/textproto"
-"regexp"
 "strings"

 "code.gitea.io/gitea/modules/auth/ldap"
@@ -301,7 +300,7 @@ func (source *LoginSource) SSPI() *SSPIConfig {
 // CreateLoginSource inserts a LoginSource in the DB if not already
 // existing with the given name.
 func CreateLoginSource(source *LoginSource) error {
-has, err := x.Get(&LoginSource{Name: source.Name})
+has, err := x.Where("name=?", source.Name).Exist(new(LoginSource))
 if err != nil {
 return err
 } else if has {
@@ -455,10 +454,6 @@ func composeFullName(firstname, surname, username string) string {
 }
 }

-var (
-alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
-)
-
 // LoginViaLDAP queries if login/password is valid against the LDAP directory pool,
 // and create a local user if success when enabled.
 func LoginViaLDAP(user *User, login, password string, source *LoginSource) (*User, error) {
@@ -503,10 +498,6 @@ func LoginViaLDAP(user *User, login, password string, source *LoginSource) (*Use
 if len(sr.Username) == 0 {
 sr.Username = login
 }
-// Validate username make sure it satisfies requirement.
-if alphaDashDotPattern.MatchString(sr.Username) {
-return nil, fmt.Errorf("Invalid pattern for attribute 'username' [%s]: must be valid alpha or numeric or dash(-_) or dot characters", sr.Username)
-}

 if len(sr.Mail) == 0 {
 sr.Mail = fmt.Sprintf("%s@localhost", sr.Username)
@@ -666,7 +657,8 @@ func LoginViaSMTP(user *User, login, password string, sourceID int64, cfg *SMTPC
 // LoginViaPAM queries if login/password is valid against the PAM,
 // and create a local user if success when enabled.
 func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMConfig) (*User, error) {
-if err := pam.Auth(cfg.ServiceName, login, password); err != nil {
+pamLogin, err := pam.Auth(cfg.ServiceName, login, password)
+if err != nil {
 if strings.Contains(err.Error(), "Authentication failure") {
 return nil, ErrUserNotExist{0, login, 0}
 }
@@ -677,14 +669,21 @@ func LoginViaPAM(user *User, login, password string, sourceID int64, cfg *PAMCon
 return user, nil
 }

+// Allow PAM sources with `@` in their name, like from Active Directory
+username := pamLogin
+idx := strings.Index(pamLogin, "@")
+if idx > -1 {
+username = pamLogin[:idx]
+}
+
 user = &User{
-LowerName: strings.ToLower(login),
-Name: login,
-Email: login,
+LowerName: strings.ToLower(username),
+Name: username,
+Email: pamLogin,
 Passwd: password,
 LoginType: LoginPAM,
 LoginSource: sourceID,
-LoginName: login,
+LoginName: login, // This is what the user typed in
 IsActive: true,
 }
 return user, CreateUser(user)
@@ -292,6 +292,52 @@ var migrations = []Migration{
 NewMigration("Add block on rejected reviews branch protection", addBlockOnRejectedReviews),
 }

+// GetCurrentDBVersion returns the current db version
+func GetCurrentDBVersion(x *xorm.Engine) (int64, error) {
+if err := x.Sync(new(Version)); err != nil {
+return -1, fmt.Errorf("sync: %v", err)
+}
+
+currentVersion := &Version{ID: 1}
+has, err := x.Get(currentVersion)
+if err != nil {
+return -1, fmt.Errorf("get: %v", err)
+}
+if !has {
+return -1, nil
+}
+return currentVersion.Version, nil
+}
+
+// ExpectedVersion returns the expected db version
+func ExpectedVersion() int64 {
+return int64(minDBVersion + len(migrations))
+}
+
+// EnsureUpToDate will check if the db is at the correct version
+func EnsureUpToDate(x *xorm.Engine) error {
+currentDB, err := GetCurrentDBVersion(x)
+if err != nil {
+return err
+}
+
+if currentDB < 0 {
+return fmt.Errorf("Database has not been initialised")
+}
+
+if minDBVersion > currentDB {
+return fmt.Errorf("DB version %d (<= %d) is too old for auto-migration. Upgrade to Gitea 1.6.4 first then upgrade to this version", currentDB, minDBVersion)
+}
+
+expected := ExpectedVersion()
+
+if currentDB != expected {
+return fmt.Errorf(`Current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expected)
+}
+
+return nil
+}
+
 // Migrate database to current version
 func Migrate(x *xorm.Engine) error {
 if err := x.Sync(new(Version)); err != nil {
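For illustration only, a sketch of using the new version helpers as a startup guard; the wrapper name is hypothetical:

package example // illustrative only

import (
	"code.gitea.io/gitea/models/migrations"
	"xorm.io/xorm"
)

// checkDBVersion fails fast when the schema is older or newer than this binary expects.
func checkDBVersion(x *xorm.Engine) error {
	return migrations.EnsureUpToDate(x)
}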
@@ -26,23 +26,38 @@ func deleteOrphanedAttachments(x *xorm.Engine) error {
 sess := x.NewSession()
 defer sess.Close()

-err := sess.BufferSize(setting.Database.IterateBufferSize).
-Where("`issue_id` = 0 and (`release_id` = 0 or `release_id` not in (select `id` from `release`))").Cols("uuid").
-Iterate(new(Attachment),
-func(idx int, bean interface{}) error {
-attachment := bean.(*Attachment)
+var limit = setting.Database.IterateBufferSize
+if limit <= 0 {
+limit = 50
+}
+
+for {
+attachements := make([]Attachment, 0, limit)
+if err := sess.Where("`issue_id` = 0 and (`release_id` = 0 or `release_id` not in (select `id` from `release`))").
+Cols("id, uuid").Limit(limit).
+Asc("id").
+Find(&attachements); err != nil {
+return err
+}
+if len(attachements) == 0 {
+return nil
+}
+
+var ids = make([]int64, 0, limit)
+for _, attachment := range attachements {
+ids = append(ids, attachment.ID)
+}
+if _, err := sess.In("id", ids).Delete(new(Attachment)); err != nil {
+return err
+}
+
+for _, attachment := range attachements {
 if err := os.RemoveAll(models.AttachmentLocalPath(attachment.UUID)); err != nil {
 return err
 }
-
-_, err := sess.ID(attachment.ID).NoAutoCondition().Delete(attachment)
-return err
-})
-
-if err != nil {
-return err
-}
 }
-
-return sess.Commit()
+if len(attachements) < limit {
+return nil
+}
+}
 }
@@ -7,6 +7,7 @@ package models
 import (
 "fmt"

+"code.gitea.io/gitea/modules/log"
 "code.gitea.io/gitea/modules/timeutil"
 )

@@ -281,9 +282,9 @@ func (nl NotificationList) getPendingRepoIDs() []int64 {
 }

 // LoadRepos loads repositories from database
-func (nl NotificationList) LoadRepos() (RepositoryList, error) {
+func (nl NotificationList) LoadRepos() (RepositoryList, []int, error) {
 if len(nl) == 0 {
-return RepositoryList{}, nil
+return RepositoryList{}, []int{}, nil
 }

 var repoIDs = nl.getPendingRepoIDs()
@@ -298,7 +299,7 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
 In("id", repoIDs[:limit]).
 Rows(new(Repository))
 if err != nil {
-return nil, err
+return nil, nil, err
 }

 for rows.Next() {
@@ -306,7 +307,7 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
 err = rows.Scan(&repo)
 if err != nil {
 rows.Close()
-return nil, err
+return nil, nil, err
 }

 repos[repo.ID] = &repo
@@ -317,14 +318,21 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
 repoIDs = repoIDs[limit:]
 }

+failed := []int{}
+
 var reposList = make(RepositoryList, 0, len(repoIDs))
-for _, notification := range nl {
+for i, notification := range nl {
 if notification.Repository == nil {
 notification.Repository = repos[notification.RepoID]
 }
+if notification.Repository == nil {
+log.Error("Notification[%d]: RepoID: %d not found", notification.ID, notification.RepoID)
+failed = append(failed, i)
+continue
+}
 var found bool
 for _, r := range reposList {
-if r.ID == notification.Repository.ID {
+if r.ID == notification.RepoID {
 found = true
 break
 }
@@ -333,7 +341,7 @@ func (nl NotificationList) LoadRepos() (RepositoryList, error) {
 reposList = append(reposList, notification.Repository)
 }
 }
-return reposList, nil
+return reposList, failed, nil
 }

 func (nl NotificationList) getPendingIssueIDs() []int64 {
@@ -350,9 +358,9 @@ func (nl NotificationList) getPendingIssueIDs() []int64 {
 }

 // LoadIssues loads issues from database
-func (nl NotificationList) LoadIssues() error {
+func (nl NotificationList) LoadIssues() ([]int, error) {
 if len(nl) == 0 {
-return nil
+return []int{}, nil
 }

 var issueIDs = nl.getPendingIssueIDs()
@@ -367,7 +375,7 @@ func (nl NotificationList) LoadIssues() error {
 In("id", issueIDs[:limit]).
 Rows(new(Issue))
 if err != nil {
-return err
+return nil, err
 }

 for rows.Next() {
@@ -375,7 +383,7 @@ func (nl NotificationList) LoadIssues() error {
 err = rows.Scan(&issue)
 if err != nil {
 rows.Close()
-return err
+return nil, err
 }

 issues[issue.ID] = &issue
@@ -386,13 +394,38 @@ func (nl NotificationList) LoadIssues() error {
 issueIDs = issueIDs[limit:]
 }

-for _, notification := range nl {
+failures := []int{}
+
+for i, notification := range nl {
 if notification.Issue == nil {
 notification.Issue = issues[notification.IssueID]
+if notification.Issue == nil {
+log.Error("Notification[%d]: IssueID: %d Not Found", notification.ID, notification.IssueID)
+failures = append(failures, i)
+continue
+}
 notification.Issue.Repo = notification.Repository
 }
 }
-return nil
+return failures, nil
+}
+
+// Without returns the notification list without the failures
+func (nl NotificationList) Without(failures []int) NotificationList {
+if len(failures) == 0 {
+return nl
+}
+remaining := make([]*Notification, 0, len(nl))
+last := -1
+var i int
+for _, i = range failures {
+remaining = append(remaining, nl[last+1:i]...)
+last = i
+}
+if len(nl) > i {
+remaining = append(remaining, nl[i+1:]...)
+}
+return remaining
 }

 func (nl NotificationList) getPendingCommentIDs() []int64 {
@@ -409,9 +442,9 @@ func (nl NotificationList) getPendingCommentIDs() []int64 {
 }

 // LoadComments loads comments from database
-func (nl NotificationList) LoadComments() error {
+func (nl NotificationList) LoadComments() ([]int, error) {
 if len(nl) == 0 {
-return nil
+return []int{}, nil
 }

 var commentIDs = nl.getPendingCommentIDs()
@@ -426,7 +459,7 @@ func (nl NotificationList) LoadComments() error {
 In("id", commentIDs[:limit]).
 Rows(new(Comment))
 if err != nil {
-return err
+return nil, err
 }

 for rows.Next() {
@@ -434,7 +467,7 @@ func (nl NotificationList) LoadComments() error {
 err = rows.Scan(&comment)
 if err != nil {
 rows.Close()
-return err
+return nil, err
 }

 comments[comment.ID] = &comment
@@ -445,13 +478,19 @@ func (nl NotificationList) LoadComments() error {
 commentIDs = commentIDs[limit:]
 }

-for _, notification := range nl {
+failures := []int{}
+for i, notification := range nl {
 if notification.CommentID > 0 && notification.Comment == nil && comments[notification.CommentID] != nil {
 notification.Comment = comments[notification.CommentID]
+if notification.Comment == nil {
+log.Error("Notification[%d]: CommentID[%d] failed to load", notification.ID, notification.CommentID)
+failures = append(failures, i)
+continue
+}
 notification.Comment.Issue = notification.Issue
 }
 }
-return nil
+return failures, nil
 }

 // GetNotificationCount returns the notification count for user
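For illustration only, a sketch of the intended call pattern for the new failure indices; the wrapper name is hypothetical and the loaders already log the rows they cannot resolve:

package example // illustrative only

import "code.gitea.io/gitea/models"

// loadAndPrune loads related rows and drops notifications whose repository,
// issue or comment could not be resolved.
func loadAndPrune(nl models.NotificationList) (models.NotificationList, error) {
	_, failed, err := nl.LoadRepos()
	if err != nil {
		return nil, err
	}
	nl = nl.Without(failed)
	if failed, err = nl.LoadIssues(); err != nil {
		return nil, err
	}
	nl = nl.Without(failed)
	if failed, err = nl.LoadComments(); err != nil {
		return nil, err
	}
	return nl.Without(failed), nil
}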
@@ -470,12 +470,12 @@ func GetOwnedOrgsByUserIDDesc(userID int64, desc string) ([]*User, error) {
 func GetOrgsCanCreateRepoByUserID(userID int64) ([]*User, error) {
 orgs := make([]*User, 0, 10)

-return orgs, x.Join("INNER", "`team_user`", "`team_user`.org_id=`user`.id").
-Join("INNER", "`team`", "`team`.id=`team_user`.team_id").
-Where("`team_user`.uid=?", userID).
-And(builder.Eq{"`team`.authorize": AccessModeOwner}.Or(builder.Eq{"`team`.can_create_org_repo": true})).
-Desc("`user`.updated_unix").
-Find(&orgs)
+return orgs, x.Where(builder.In("id", builder.Select("`user`.id").From("`user`").
+Join("INNER", "`team_user`", "`team_user`.org_id = `user`.id").
+Join("INNER", "`team`", "`team`.id = `team_user`.team_id").
+Where(builder.Eq{"`team_user`.uid": userID}).
+And(builder.Eq{"`team`.authorize": AccessModeOwner}.Or(builder.Eq{"`team`.can_create_org_repo": true})))).
+Desc("`user`.updated_unix").Find(&orgs)
 }

 // GetOrgUsersByUserID returns all organization-user relations by user ID.
@@ -399,7 +399,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {

 // Create org.
 org := &User{
-Name: "All repo",
+Name: "All_repo",
 IsActive: true,
 Type: UserTypeOrganization,
 Visibility: structs.VisibleTypePublic,
133
models/repo.go
133
models/repo.go
@@ -173,7 +173,6 @@ type Repository struct {
|
|||||||
NumMilestones int `xorm:"NOT NULL DEFAULT 0"`
|
NumMilestones int `xorm:"NOT NULL DEFAULT 0"`
|
||||||
NumClosedMilestones int `xorm:"NOT NULL DEFAULT 0"`
|
NumClosedMilestones int `xorm:"NOT NULL DEFAULT 0"`
|
||||||
NumOpenMilestones int `xorm:"-"`
|
NumOpenMilestones int `xorm:"-"`
|
||||||
NumReleases int `xorm:"-"`
|
|
||||||
|
|
||||||
IsPrivate bool `xorm:"INDEX"`
|
IsPrivate bool `xorm:"INDEX"`
|
||||||
IsEmpty bool `xorm:"INDEX"`
|
IsEmpty bool `xorm:"INDEX"`
|
||||||
@@ -364,6 +363,8 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool)
|
|||||||
allowSquash = config.AllowSquash
|
allowSquash = config.AllowSquash
|
||||||
}
|
}
|
||||||
|
|
||||||
|
numReleases, _ := GetReleaseCountByRepoID(repo.ID, FindReleasesOptions{IncludeDrafts: false, IncludeTags: true})
|
||||||
|
|
||||||
return &api.Repository{
|
return &api.Repository{
|
||||||
ID: repo.ID,
|
ID: repo.ID,
|
||||||
Owner: repo.Owner.APIFormat(),
|
Owner: repo.Owner.APIFormat(),
|
||||||
@@ -387,7 +388,7 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool)
|
|||||||
Watchers: repo.NumWatches,
|
Watchers: repo.NumWatches,
|
||||||
OpenIssues: repo.NumOpenIssues,
|
OpenIssues: repo.NumOpenIssues,
|
||||||
OpenPulls: repo.NumOpenPulls,
|
OpenPulls: repo.NumOpenPulls,
|
||||||
Releases: repo.NumReleases,
|
Releases: int(numReleases),
|
||||||
DefaultBranch: repo.DefaultBranch,
|
DefaultBranch: repo.DefaultBranch,
|
||||||
Created: repo.CreatedUnix.AsTime(),
|
Created: repo.CreatedUnix.AsTime(),
|
||||||
Updated: repo.UpdatedUnix.AsTime(),
|
Updated: repo.UpdatedUnix.AsTime(),
|
||||||
@@ -968,6 +969,21 @@ func CheckCreateRepository(doer, u *User, name string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) {
|
||||||
|
hookNames = []string{"pre-receive", "update", "post-receive"}
|
||||||
|
hookTpls = []string{
|
||||||
|
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||||
|
fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||||
|
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
||||||
|
}
|
||||||
|
giteaHookTpls = []string{
|
||||||
|
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' pre-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||||
|
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||||
|
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// CreateDelegateHooks creates all the hooks scripts for the repo
|
// CreateDelegateHooks creates all the hooks scripts for the repo
|
||||||
func CreateDelegateHooks(repoPath string) error {
|
func CreateDelegateHooks(repoPath string) error {
|
||||||
return createDelegateHooks(repoPath)
|
return createDelegateHooks(repoPath)
|
||||||
@@ -975,21 +991,7 @@ func CreateDelegateHooks(repoPath string) error {
|
|||||||
|
|
||||||
// createDelegateHooks creates all the hooks scripts for the repo
|
// createDelegateHooks creates all the hooks scripts for the repo
|
||||||
func createDelegateHooks(repoPath string) (err error) {
|
func createDelegateHooks(repoPath string) (err error) {
|
||||||
|
hookNames, hookTpls, giteaHookTpls := getHookTemplates()
|
||||||
var (
|
|
||||||
hookNames = []string{"pre-receive", "update", "post-receive"}
|
|
||||||
hookTpls = []string{
|
|
||||||
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
|
||||||
fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
|
||||||
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
|
|
||||||
}
|
|
||||||
giteaHookTpls = []string{
|
|
||||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' pre-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
|
||||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
|
||||||
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
hookDir := filepath.Join(repoPath, "hooks")
|
hookDir := filepath.Join(repoPath, "hooks")
|
||||||
|
|
||||||
for i, hookName := range hookNames {
|
for i, hookName := range hookNames {
|
||||||
@@ -1008,16 +1010,94 @@ func createDelegateHooks(repoPath string) (err error) {
|
 			return fmt.Errorf("write old hook file '%s': %v", oldHookPath, err)
 		}
 
+		if err = ensureExecutable(oldHookPath); err != nil {
+			return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
+		}
+
 		if err = os.Remove(newHookPath); err != nil && !os.IsNotExist(err) {
 			return fmt.Errorf("unable to pre-remove new hook file '%s' prior to rewriting: %v", newHookPath, err)
 		}
 		if err = ioutil.WriteFile(newHookPath, []byte(giteaHookTpls[i]), 0777); err != nil {
 			return fmt.Errorf("write new hook file '%s': %v", newHookPath, err)
 		}
+
+		if err = ensureExecutable(newHookPath); err != nil {
+			return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
+		}
 	}
 
 	return nil
 }
 
+func checkExecutable(filename string) bool {
+	fileInfo, err := os.Stat(filename)
+	if err != nil {
+		return false
+	}
+	return (fileInfo.Mode() & 0100) > 0
+}
+
+func ensureExecutable(filename string) error {
+	fileInfo, err := os.Stat(filename)
+	if err != nil {
+		return err
+	}
+	if (fileInfo.Mode() & 0100) > 0 {
+		return nil
+	}
+	mode := fileInfo.Mode() | 0100
+	return os.Chmod(filename, mode)
+}
+
+// CheckDelegateHooks checks the hooks scripts for the repo
+func CheckDelegateHooks(repoPath string) ([]string, error) {
+	hookNames, hookTpls, giteaHookTpls := getHookTemplates()
+
+	hookDir := filepath.Join(repoPath, "hooks")
+	results := make([]string, 0, 10)
+
+	for i, hookName := range hookNames {
+		oldHookPath := filepath.Join(hookDir, hookName)
+		newHookPath := filepath.Join(hookDir, hookName+".d", "gitea")
+
+		cont := false
+		if !com.IsExist(oldHookPath) {
+			results = append(results, fmt.Sprintf("old hook file %s does not exist", oldHookPath))
+			cont = true
+		}
+		if !com.IsExist(oldHookPath + ".d") {
+			results = append(results, fmt.Sprintf("hooks directory %s does not exist", oldHookPath+".d"))
+			cont = true
+		}
+		if !com.IsExist(newHookPath) {
+			results = append(results, fmt.Sprintf("new hook file %s does not exist", newHookPath))
+			cont = true
+		}
+		if cont {
+			continue
+		}
+		contents, err := ioutil.ReadFile(oldHookPath)
+		if err != nil {
+			return results, err
+		}
+		if string(contents) != hookTpls[i] {
+			results = append(results, fmt.Sprintf("old hook file %s is out of date", oldHookPath))
+		}
+		if !checkExecutable(oldHookPath) {
+			results = append(results, fmt.Sprintf("old hook file %s is not executable", oldHookPath))
+		}
+		contents, err = ioutil.ReadFile(newHookPath)
+		if err != nil {
+			return results, err
+		}
+		if string(contents) != giteaHookTpls[i] {
+			results = append(results, fmt.Sprintf("new hook file %s is out of date", newHookPath))
+		}
+		if !checkExecutable(newHookPath) {
+			results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath))
+		}
+	}
+	return results, nil
+}
+
 // initRepoCommit temporarily changes with work directory.
 func initRepoCommit(tmpPath string, repo *Repository, u *User) (err error) {
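The two helpers introduced above both hinge on the owner-execute bit of the file mode. A small, hedged standalone sketch of the same check outside Gitea; the hook path used here is made up:

package main

import (
	"fmt"
	"os"
)

// hasExecBit mirrors checkExecutable from the diff above: it reports whether
// the owner-execute bit (0100) is set on the file's mode.
func hasExecBit(path string) (bool, error) {
	fi, err := os.Stat(path)
	if err != nil {
		return false, err
	}
	return fi.Mode()&0100 > 0, nil
}

func main() {
	// "hooks/pre-receive" is purely illustrative.
	ok, err := hasExecBit("hooks/pre-receive")
	fmt.Println(ok, err)
}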
@@ -1528,7 +1608,7 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error
 	}
 
 	if newOwner.IsOrganization() {
-		if err := newOwner.GetTeams(); err != nil {
+		if err := newOwner.getTeams(sess); err != nil {
 			return fmt.Errorf("GetTeams: %v", err)
 		}
 		for _, t := range newOwner.Teams {
@@ -1856,6 +1936,18 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
 		return err
 	}
 
+	// Dependencies for issues in this repository
+	if _, err = sess.In("issue_id", deleteCond).
+		Delete(&IssueDependency{}); err != nil {
+		return err
+	}
+
+	// Delete dependencies for issues in other repositories
+	if _, err = sess.In("dependency_id", deleteCond).
+		Delete(&IssueDependency{}); err != nil {
+		return err
+	}
+
 	if _, err = sess.In("issue_id", deleteCond).
 		Delete(&IssueUser{}); err != nil {
 		return err
@@ -1876,6 +1968,11 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
 		return err
 	}
 
+	if _, err = sess.In("issue_id", deleteCond).
+		Delete(&TrackedTime{}); err != nil {
+		return err
+	}
+
 	attachments = attachments[:0]
 	if err = sess.Join("INNER", "issue", "issue.id = attachment.issue_id").
 		Where("issue.repo_id = ?", repoID).
@@ -202,3 +202,23 @@ func (repo *Repository) getRepoTeams(e Engine) (teams []*Team, err error) {
 func (repo *Repository) GetRepoTeams() ([]*Team, error) {
 	return repo.getRepoTeams(x)
 }
+
+// IsOwnerMemberCollaborator checks if a provided user is the owner, a collaborator or a member of a team in a repository
+func (repo *Repository) IsOwnerMemberCollaborator(userID int64) (bool, error) {
+	if repo.OwnerID == userID {
+		return true, nil
+	}
+	teamMember, err := x.Join("INNER", "team_repo", "team_repo.team_id = team_user.team_id").
+		Join("INNER", "team_unit", "team_unit.team_id = team_user.team_id").
+		Where("team_repo.repo_id = ?", repo.ID).
+		And("team_unit.`type` = ?", UnitTypeCode).
+		And("team_user.uid = ?", userID).Table("team_user").Exist(&TeamUser{})
+	if err != nil {
+		return false, err
+	}
+	if teamMember {
+		return true, nil
+	}
+
+	return x.Get(&Collaboration{RepoID: repo.ID, UserID: userID})
+}
@@ -144,6 +144,10 @@ type SearchRepoOptions struct {
 	TopicOnly bool
 	// include description in keyword search
 	IncludeDescription bool
+	// None -> include has milestones AND has no milestone
+	// True -> include just has milestones
+	// False -> include just has no milestone
+	HasMilestones util.OptionalBool
 }
 
 //SearchOrderBy is used to sort the result
@@ -171,12 +175,9 @@ const (
 	SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
 )
 
-// SearchRepository returns repositories based on search options,
+// SearchRepositoryCondition returns repositories based on search options,
 // it returns results in given range and number of total results.
-func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
-	if opts.Page <= 0 {
-		opts.Page = 1
-	}
+func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
 	var cond = builder.NewCond()
 
 	if opts.Private {
@@ -213,14 +214,35 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
 	}
 
 	if opts.Collaborate != util.OptionalBoolFalse {
+		// A Collaboration is:
 		collaborateCond := builder.And(
+			// 1. Repository we don't own
+			builder.Neq{"owner_id": opts.OwnerID},
+			// 2. But we can see because of:
 			builder.Or(
-				builder.Expr("repository.id IN (SELECT repo_id FROM `access` WHERE access.user_id = ?)", opts.OwnerID),
-				builder.In("id", builder.Select("`team_repo`.repo_id").
+				// A. We have access
+				builder.In("`repository`.id",
+					builder.Select("`access`.repo_id").
+						From("access").
+						Where(builder.Eq{"`access`.user_id": opts.OwnerID})),
+				// B. We are in a team for
+				builder.In("`repository`.id", builder.Select("`team_repo`.repo_id").
 					From("team_repo").
 					Where(builder.Eq{"`team_user`.uid": opts.OwnerID}).
-					Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id"))),
-			builder.Neq{"owner_id": opts.OwnerID})
+					Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id")),
+				// C. Public repositories in private organizations that we are member of
+				builder.And(
+					builder.Eq{"`repository`.is_private": false},
+					builder.In("`repository`.owner_id",
+						builder.Select("`org_user`.org_id").
+							From("org_user").
+							Join("INNER", "`user`", "`user`.id = `org_user`.org_id").
+							Where(builder.Eq{
+								"`org_user`.uid":    opts.OwnerID,
+								"`user`.type":       UserTypeOrganization,
+								"`user`.visibility": structs.VisibleTypePrivate,
+							})))),
+		)
 		if !opts.Private {
 			collaborateCond = collaborateCond.And(builder.Expr("owner_id NOT IN (SELECT org_id FROM org_user WHERE org_user.uid = ? AND org_user.is_public = ?)", opts.OwnerID, false))
 		}
@@ -276,6 +298,29 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
 		cond = cond.And(builder.Eq{"is_mirror": opts.Mirror == util.OptionalBoolTrue})
 	}
 
+	switch opts.HasMilestones {
+	case util.OptionalBoolTrue:
+		cond = cond.And(builder.Gt{"num_milestones": 0})
+	case util.OptionalBoolFalse:
+		cond = cond.And(builder.Eq{"num_milestones": 0}.Or(builder.IsNull{"num_milestones"}))
+	}
+
+	return cond
+}
+
+// SearchRepository returns repositories based on search options,
+// it returns results in given range and number of total results.
+func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
+	cond := SearchRepositoryCondition(opts)
+	return SearchRepositoryByCondition(opts, cond)
+}
+
+// SearchRepositoryByCondition search repositories by condition
+func SearchRepositoryByCondition(opts *SearchRepoOptions, cond builder.Cond) (RepositoryList, int64, error) {
+	if opts.Page <= 0 {
+		opts.Page = 1
+	}
+
 	if len(opts.OrderBy) == 0 {
 		opts.OrderBy = SearchOrderByAlphabetically
 	}
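Splitting condition building (SearchRepositoryCondition) from querying (SearchRepositoryByCondition) means the same builder.Cond value can be reused or inspected on its own. A minimal, hedged sketch of that idea outside Gitea, using xorm's builder package; the table and column names are invented:

package main

import (
	"fmt"

	"xorm.io/builder"
)

func main() {
	ownerID := int64(2) // illustrative value

	// Compose a reusable condition, roughly in the shape SearchRepositoryCondition builds.
	cond := builder.NewCond().
		And(builder.Eq{"owner_id": ownerID}).
		And(builder.Or(
			builder.Eq{"is_private": false},
			builder.In("id", builder.Select("repo_id").From("access").
				Where(builder.Eq{"user_id": ownerID})),
		))

	// The condition can be handed to a session (sess.Where(cond)) or rendered directly.
	sql, args, err := builder.ToSQL(cond)
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args)
}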
@@ -296,11 +341,11 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
 	}
 
 	repos := make(RepositoryList, 0, opts.PageSize)
-	if err = sess.
-		Where(cond).
-		OrderBy(opts.OrderBy.String()).
-		Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).
-		Find(&repos); err != nil {
+	sess.Where(cond).OrderBy(opts.OrderBy.String())
+	if opts.PageSize > 0 {
+		sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
+	}
+	if err = sess.Find(&repos); err != nil {
 		return nil, 0, fmt.Errorf("Repo: %v", err)
 	}
 
@@ -316,41 +361,39 @@ func SearchRepository(opts *SearchRepoOptions) (RepositoryList, int64, error) {
 // accessibleRepositoryCondition takes a user a returns a condition for checking if a repository is accessible
 func accessibleRepositoryCondition(userID int64) builder.Cond {
 	if userID <= 0 {
+		// Public repositories that are not in private or limited organizations
 		return builder.And(
 			builder.Eq{"`repository`.is_private": false},
-			builder.Or(
-				// A. Aren't in organisations __OR__
-				builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization})),
-				// B. Is a public organisation.
-				builder.In("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"visibility": structs.VisibleTypePublic}))),
-		)
+			builder.NotIn("`repository`.owner_id",
+				builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization}).And(builder.Neq{"visibility": structs.VisibleTypePublic})))
 	}
 
 	return builder.Or(
-		// 1. Be able to see all non-private repositories that either:
+		// 1. All public repositories that are not in private organizations
 		builder.And(
 			builder.Eq{"`repository`.is_private": false},
-			builder.Or(
-				// A. Aren't in organisations __OR__
-				builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization})),
-				// B. Isn't a private organisation. (Limited is OK because we're logged in)
-				builder.NotIn("`repository`.owner_id", builder.Select("id").From("`user`").Where(builder.Eq{"visibility": structs.VisibleTypePrivate}))),
-		),
-		// 2. Be able to see all repositories that we have access to
-		builder.Or(
+			builder.NotIn("`repository`.owner_id",
+				builder.Select("id").From("`user`").Where(builder.Eq{"type": UserTypeOrganization}).And(builder.Eq{"visibility": structs.VisibleTypePrivate}))),
+		// 2. Be able to see all repositories that we own
+		builder.Eq{"`repository`.owner_id": userID},
+		// 3. Be able to see all repositories that we have access to
 		builder.In("`repository`.id", builder.Select("repo_id").
 			From("`access`").
 			Where(builder.And(
 				builder.Eq{"user_id": userID},
 				builder.Gt{"mode": int(AccessModeNone)}))),
-		builder.In("`repository`.id", builder.Select("id").
-			From("`repository`").
-			Where(builder.Eq{"owner_id": userID}))),
-		// 3. Be able to see all repositories that we are in a team
+		// 4. Be able to see all repositories that we are in a team
 		builder.In("`repository`.id", builder.Select("`team_repo`.repo_id").
 			From("team_repo").
 			Where(builder.Eq{"`team_user`.uid": userID}).
-			Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id")))
+			Join("INNER", "team_user", "`team_user`.team_id = `team_repo`.team_id")),
+		// 5. Be able to see all public repos in private organizations that we are an org_user of
+		builder.And(builder.Eq{"`repository`.is_private": false},
+			builder.In("`repository`.owner_id",
+				builder.Select("`org_user`.org_id").
+					From("org_user").
+					Where(builder.Eq{"`org_user`.uid": userID}))),
+	)
 }
 
 // SearchRepositoryByName takes keyword and part of repository name to search,
@@ -15,6 +15,7 @@ import (
 	"encoding/pem"
 	"errors"
 	"fmt"
+	"io"
 	"io/ioutil"
 	"math/big"
 	"os"
@@ -687,14 +688,29 @@ func rewriteAllPublicKeys(e Engine) error {
 		}
 	}
 
-	err = e.Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
-		_, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
+	if err := regeneratePublicKeys(e, t); err != nil {
+		return err
+	}
+
+	t.Close()
+	return os.Rename(tmpPath, fPath)
+}
+
+// RegeneratePublicKeys regenerates the authorized_keys file
+func RegeneratePublicKeys(t io.Writer) error {
+	return regeneratePublicKeys(x, t)
+}
+
+func regeneratePublicKeys(e Engine, t io.Writer) error {
+	err := e.Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
+		_, err = t.Write([]byte((bean.(*PublicKey)).AuthorizedString()))
 		return err
 	})
 	if err != nil {
 		return err
 	}
 
+	fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
 	if com.IsExist(fPath) {
 		f, err := os.Open(fPath)
 		if err != nil {
@@ -707,7 +723,7 @@ func rewriteAllPublicKeys(e Engine) error {
 				scanner.Scan()
 				continue
 			}
-			_, err = t.WriteString(line + "\n")
+			_, err = t.Write([]byte(line + "\n"))
 			if err != nil {
 				f.Close()
 				return err
@@ -715,9 +731,7 @@ func rewriteAllPublicKeys(e Engine) error {
 		}
 		f.Close()
 	}
-
-	t.Close()
-	return os.Rename(tmpPath, fPath)
+	return nil
 }
 
 // ________ .__ ____ __.
@@ -142,8 +142,8 @@ func UpdateTwoFactor(t *TwoFactor) error {
 // GetTwoFactorByUID returns the two-factor authentication token associated with
 // the user, if any.
 func GetTwoFactorByUID(uid int64) (*TwoFactor, error) {
-	twofa := &TwoFactor{UID: uid}
-	has, err := x.Get(twofa)
+	twofa := &TwoFactor{}
+	has, err := x.Where("uid=?", uid).Get(twofa)
 	if err != nil {
 		return nil, err
 	} else if !has {
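Several lookups in this range (TwoFactor here, plus Upload, UserOpenID and the no-reply User below) switch from passing a partially filled bean to x.Get over to an explicit Where clause. xorm ignores zero-valued fields when it derives conditions from a bean, so looking a row up by a value that can legitimately be zero or false can silently match the wrong row. A minimal, hedged sketch of the safer pattern outside Gitea; the schema, driver choice and values are illustrative:

package main

import (
	"fmt"

	_ "github.com/mattn/go-sqlite3"
	"xorm.io/xorm"
)

// TwoFactor is a stand-in table; only its shape matters for the example.
type TwoFactor struct {
	ID  int64
	UID int64
}

// getByUID filters explicitly, so it still works when uid is 0 and never
// picks up other non-zero fields that happen to be set on the bean.
func getByUID(x *xorm.Engine, uid int64) (*TwoFactor, bool, error) {
	t := &TwoFactor{}
	has, err := x.Where("uid=?", uid).Get(t)
	return t, has, err
}

func main() {
	x, err := xorm.NewEngine("sqlite3", "file::memory:?cache=shared")
	if err != nil {
		panic(err)
	}
	if err := x.Sync2(new(TwoFactor)); err != nil {
		panic(err)
	}
	fmt.Println(getByUID(x, 1))
}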
@@ -84,7 +84,7 @@ func MainTest(m *testing.M, pathToGiteaRoot string) {
 
 func createTestEngine(fixturesDir string) error {
 	var err error
-	x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared")
+	x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared&_txlock=immediate")
 	if err != nil {
 		return err
 	}
@@ -76,8 +76,8 @@ func NewUpload(name string, buf []byte, file multipart.File) (_ *Upload, err err
 
 // GetUploadByUUID returns the Upload by UUID
 func GetUploadByUUID(uuid string) (*Upload, error) {
-	upload := &Upload{UUID: uuid}
-	has, err := x.Get(upload)
+	upload := &Upload{}
+	has, err := x.Where("uuid=?", uuid).Get(upload)
 	if err != nil {
 		return nil, err
 	} else if !has {
@@ -18,6 +18,7 @@ import (
 	"image/png"
 	"os"
 	"path/filepath"
+	"regexp"
 	"strconv"
 	"strings"
 	"time"
@@ -87,6 +88,9 @@ var (
 
 	// ErrUnsupportedLoginType login source is unknown error
 	ErrUnsupportedLoginType = errors.New("Login source is unknown")
+
+	// Characters prohibited in a user name (anything except A-Za-z0-9_.-)
+	alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
 )
 
 // User represents the object of individual and member of organization.
@@ -708,10 +712,12 @@ func (u *User) DisplayName() string {
 // GetDisplayName returns full name if it's not empty and DEFAULT_SHOW_FULL_NAME is set,
 // returns username otherwise.
 func (u *User) GetDisplayName() string {
+	if setting.UI.DefaultShowFullName {
 		trimmed := strings.TrimSpace(u.FullName)
-		if len(trimmed) > 0 && setting.UI.DefaultShowFullName {
+		if len(trimmed) > 0 {
 			return trimmed
 		}
+	}
 	return u.Name
 }
 
@@ -870,6 +876,11 @@ func isUsableName(names, patterns []string, name string) error {
 
 // IsUsableUsername returns an error when a username is reserved
 func IsUsableUsername(name string) error {
+	// Validate username make sure it satisfies requirement.
+	if alphaDashDotPattern.MatchString(name) {
+		// Note: usually this error is normally caught up earlier in the UI
+		return ErrNameCharsNotAllowed{Name: name}
+	}
 	return isUsableName(reservedUsernames, reservedUserPatterns, name)
 }
 
@@ -989,7 +1000,7 @@ func VerifyActiveEmailCode(code, email string) *EmailAddress {
 	data := com.ToStr(user.ID) + email + user.LowerName + user.Passwd + user.Rands
 
 	if base.VerifyTimeLimitCode(data, minutes, prefix) {
-		emailAddress := &EmailAddress{Email: email}
+		emailAddress := &EmailAddress{UID: user.ID, Email: email}
 		if has, _ := x.Get(emailAddress); has {
 			return emailAddress
 		}
@@ -1446,8 +1457,8 @@ func GetUserByEmail(email string) (*User, error) {
 	// Finally, if email address is the protected email address:
 	if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
 		username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
-		user := &User{LowerName: username}
-		has, err := x.Get(user)
+		user := &User{}
+		has, err := x.Where("lower_name=?", username).Get(user)
 		if err != nil {
 			return nil, err
 		}
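For reference, the alphaDashDotPattern check added to IsUsableUsername above rejects any character outside A-Za-z0-9_.-, so a match means the name contains a prohibited character. A tiny standalone sketch; the sample names are invented:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the diff: anything that is not A-Za-z0-9_.- is prohibited.
var alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)

func main() {
	for _, name := range []string{"good_name-1.2", "bad name!", "überuser"} {
		fmt.Printf("%q contains prohibited characters: %v\n", name, alphaDashDotPattern.MatchString(name))
	}
}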
@@ -1,4 +1,5 @@
 // Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
 // Use of this source code is governed by a MIT-style
 // license that can be found in the LICENSE file.
 
@@ -8,6 +9,12 @@ import (
 	"errors"
 	"fmt"
 	"strings"
+
+	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"
+
+	"xorm.io/builder"
 )
 
 var (
@@ -54,19 +61,72 @@ func GetEmailAddresses(uid int64) ([]*EmailAddress, error) {
 	if !isPrimaryFound {
 		emails = append(emails, &EmailAddress{
 			Email:       u.Email,
-			IsActivated: true,
+			IsActivated: u.IsActive,
 			IsPrimary:   true,
 		})
 	}
 	return emails, nil
 }
 
+// GetEmailAddressByID gets a user's email address by ID
+func GetEmailAddressByID(uid, id int64) (*EmailAddress, error) {
+	// User ID is required for security reasons
+	email := &EmailAddress{UID: uid}
+	if has, err := x.ID(id).Get(email); err != nil {
+		return nil, err
+	} else if !has {
+		return nil, nil
+	}
+	return email, nil
+}
+
+func isEmailActive(e Engine, email string, userID, emailID int64) (bool, error) {
+	if len(email) == 0 {
+		return true, nil
+	}
+
+	// Can't filter by boolean field unless it's explicit
+	cond := builder.NewCond()
+	cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": emailID})
+	if setting.Service.RegisterEmailConfirm {
+		// Inactive (unvalidated) addresses don't count as active if email validation is required
+		cond = cond.And(builder.Eq{"is_activated": true})
+	}
+
+	em := EmailAddress{}
+
+	if has, err := e.Where(cond).Get(&em); has || err != nil {
+		if has {
+			log.Info("isEmailActive('%s',%d,%d) found duplicate in email ID %d", email, userID, emailID, em.ID)
+		}
+		return has, err
+	}
+
+	// Can't filter by boolean field unless it's explicit
+	cond = builder.NewCond()
+	cond = cond.And(builder.Eq{"email": email}, builder.Neq{"id": userID})
+	if setting.Service.RegisterEmailConfirm {
+		cond = cond.And(builder.Eq{"is_active": true})
+	}
+
+	us := User{}
+
+	if has, err := e.Where(cond).Get(&us); has || err != nil {
+		if has {
+			log.Info("isEmailActive('%s',%d,%d) found duplicate in user ID %d", email, userID, emailID, us.ID)
+		}
+		return has, err
+	}
+
+	return false, nil
+}
+
 func isEmailUsed(e Engine, email string) (bool, error) {
 	if len(email) == 0 {
 		return true, nil
 	}
 
-	return e.Get(&EmailAddress{Email: email})
+	return e.Where("email=?", email).Get(&EmailAddress{})
 }
 
 // IsEmailUsed returns true if the email has been used.
@@ -118,31 +178,30 @@ func AddEmailAddresses(emails []*EmailAddress) error {
|
|||||||
|
|
||||||
// Activate activates the email address to given user.
|
// Activate activates the email address to given user.
|
||||||
func (email *EmailAddress) Activate() error {
|
func (email *EmailAddress) Activate() error {
|
||||||
user, err := GetUserByID(email.UID)
|
sess := x.NewSession()
|
||||||
|
defer sess.Close()
|
||||||
|
if err := sess.Begin(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := email.updateActivation(sess, true); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return sess.Commit()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (email *EmailAddress) updateActivation(e Engine, activate bool) error {
|
||||||
|
user, err := getUserByID(e, email.UID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if user.Rands, err = GetUserSalt(); err != nil {
|
if user.Rands, err = GetUserSalt(); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
email.IsActivated = activate
|
||||||
sess := x.NewSession()
|
if _, err := e.ID(email.ID).Cols("is_activated").Update(email); err != nil {
|
||||||
defer sess.Close()
|
|
||||||
if err = sess.Begin(); err != nil {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
return updateUserCols(e, user, "rands")
|
||||||
email.IsActivated = true
|
|
||||||
if _, err := sess.
|
|
||||||
ID(email.ID).
|
|
||||||
Cols("is_activated").
|
|
||||||
Update(email); err != nil {
|
|
||||||
return err
|
|
||||||
} else if err = updateUserCols(sess, user, "rands"); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return sess.Commit()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteEmailAddress deletes an email address of given user.
|
// DeleteEmailAddress deletes an email address of given user.
|
||||||
@@ -192,8 +251,8 @@ func MakeEmailPrimary(email *EmailAddress) error {
|
|||||||
return ErrEmailNotActivated
|
return ErrEmailNotActivated
|
||||||
}
|
}
|
||||||
|
|
||||||
user := &User{ID: email.UID}
|
user := &User{}
|
||||||
has, err = x.Get(user)
|
has, err = x.ID(email.UID).Get(user)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
} else if !has {
|
} else if !has {
|
||||||
@@ -201,7 +260,7 @@ func MakeEmailPrimary(email *EmailAddress) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Make sure the former primary email doesn't disappear.
|
// Make sure the former primary email doesn't disappear.
|
||||||
formerPrimaryEmail := &EmailAddress{Email: user.Email}
|
formerPrimaryEmail := &EmailAddress{UID: user.ID, Email: user.Email}
|
||||||
has, err = x.Get(formerPrimaryEmail)
|
has, err = x.Get(formerPrimaryEmail)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -228,3 +287,199 @@ func MakeEmailPrimary(email *EmailAddress) error {
|
|||||||
|
|
||||||
return sess.Commit()
|
return sess.Commit()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SearchEmailOrderBy is used to sort the results from SearchEmails()
|
||||||
|
type SearchEmailOrderBy string
|
||||||
|
|
||||||
|
func (s SearchEmailOrderBy) String() string {
|
||||||
|
return string(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strings for sorting result
|
||||||
|
const (
|
||||||
|
SearchEmailOrderByEmail SearchEmailOrderBy = "emails.email ASC, is_primary DESC, sortid ASC"
|
||||||
|
SearchEmailOrderByEmailReverse SearchEmailOrderBy = "emails.email DESC, is_primary ASC, sortid DESC"
|
||||||
|
SearchEmailOrderByName SearchEmailOrderBy = "`user`.lower_name ASC, is_primary DESC, sortid ASC"
|
||||||
|
SearchEmailOrderByNameReverse SearchEmailOrderBy = "`user`.lower_name DESC, is_primary ASC, sortid DESC"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SearchEmailOptions are options to search e-mail addresses for the admin panel
|
||||||
|
type SearchEmailOptions struct {
|
||||||
|
Page int
|
||||||
|
PageSize int // Can be smaller than or equal to setting.UI.ExplorePagingNum
|
||||||
|
Keyword string
|
||||||
|
SortType SearchEmailOrderBy
|
||||||
|
IsPrimary util.OptionalBool
|
||||||
|
IsActivated util.OptionalBool
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchEmailResult is an e-mail address found in the user or email_address table
|
||||||
|
type SearchEmailResult struct {
|
||||||
|
UID int64
|
||||||
|
Email string
|
||||||
|
IsActivated bool
|
||||||
|
IsPrimary bool
|
||||||
|
// From User
|
||||||
|
Name string
|
||||||
|
FullName string
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchEmails takes options i.e. keyword and part of email name to search,
|
||||||
|
// it returns results in given range and number of total results.
|
||||||
|
func SearchEmails(opts *SearchEmailOptions) ([]*SearchEmailResult, int64, error) {
|
||||||
|
// Unfortunately, UNION support for SQLite in xorm is currently broken, so we must
|
||||||
|
// build the SQL ourselves.
|
||||||
|
where := make([]string, 0, 5)
|
||||||
|
args := make([]interface{}, 0, 5)
|
||||||
|
|
||||||
|
emailsSQL := "(SELECT id as sortid, uid, email, is_activated, 0 as is_primary " +
|
||||||
|
"FROM email_address " +
|
||||||
|
"UNION ALL " +
|
||||||
|
"SELECT id as sortid, id AS uid, email, is_active AS is_activated, 1 as is_primary " +
|
||||||
|
"FROM `user` " +
|
||||||
|
"WHERE type = ?) AS emails"
|
||||||
|
args = append(args, UserTypeIndividual)
|
||||||
|
|
||||||
|
if len(opts.Keyword) > 0 {
|
||||||
|
// Note: % can be injected in the Keyword parameter, but it won't do any harm.
|
||||||
|
where = append(where, "(lower(`user`.full_name) LIKE ? OR `user`.lower_name LIKE ? OR emails.email LIKE ?)")
|
||||||
|
likeStr := "%" + strings.ToLower(opts.Keyword) + "%"
|
||||||
|
args = append(args, likeStr)
|
||||||
|
args = append(args, likeStr)
|
||||||
|
args = append(args, likeStr)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case opts.IsPrimary.IsTrue():
|
||||||
|
where = append(where, "emails.is_primary = ?")
|
||||||
|
args = append(args, true)
|
||||||
|
case opts.IsPrimary.IsFalse():
|
||||||
|
where = append(where, "emails.is_primary = ?")
|
||||||
|
args = append(args, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case opts.IsActivated.IsTrue():
|
||||||
|
where = append(where, "emails.is_activated = ?")
|
||||||
|
args = append(args, true)
|
||||||
|
case opts.IsActivated.IsFalse():
|
||||||
|
where = append(where, "emails.is_activated = ?")
|
||||||
|
args = append(args, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
var whereStr string
|
||||||
|
if len(where) > 0 {
|
||||||
|
whereStr = "WHERE " + strings.Join(where, " AND ")
|
||||||
|
}
|
||||||
|
|
||||||
|
joinSQL := "FROM " + emailsSQL + " INNER JOIN `user` ON `user`.id = emails.uid " + whereStr
|
||||||
|
|
||||||
|
count, err := x.SQL("SELECT count(*) "+joinSQL, args...).Count()
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("Count: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
orderby := opts.SortType.String()
|
||||||
|
if orderby == "" {
|
||||||
|
orderby = SearchEmailOrderByEmail.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
querySQL := "SELECT emails.uid, emails.email, emails.is_activated, emails.is_primary, " +
|
||||||
|
"`user`.name, `user`.full_name " + joinSQL + " ORDER BY " + orderby
|
||||||
|
|
||||||
|
if opts.PageSize == 0 || opts.PageSize > setting.UI.ExplorePagingNum {
|
||||||
|
opts.PageSize = setting.UI.ExplorePagingNum
|
||||||
|
}
|
||||||
|
if opts.Page <= 0 {
|
||||||
|
opts.Page = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := x.SQL(querySQL, args...).Rows(new(SearchEmailResult))
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, fmt.Errorf("Emails: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Page manually because xorm can't handle Limit() with raw SQL
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
emails := make([]*SearchEmailResult, 0, opts.PageSize)
|
||||||
|
skip := (opts.Page - 1) * opts.PageSize
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var email SearchEmailResult
|
||||||
|
if err := rows.Scan(&email); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
if skip > 0 {
|
||||||
|
skip--
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
emails = append(emails, &email)
|
||||||
|
if len(emails) == opts.PageSize {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return emails, count, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ActivateUserEmail will change the activated state of an email address,
|
||||||
|
// either primary (in the user table) or secondary (in the email_address table)
|
||||||
|
func ActivateUserEmail(userID int64, email string, primary, activate bool) (err error) {
|
||||||
|
sess := x.NewSession()
|
||||||
|
defer sess.Close()
|
||||||
|
if err = sess.Begin(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if primary {
|
||||||
|
// Activate/deactivate a user's primary email address
|
||||||
|
user := User{ID: userID, Email: email}
|
||||||
|
if has, err := sess.Get(&user); err != nil {
|
||||||
|
return err
|
||||||
|
} else if !has {
|
||||||
|
return fmt.Errorf("no such user: %d (%s)", userID, email)
|
||||||
|
}
|
||||||
|
if user.IsActive == activate {
|
||||||
|
// Already in the desired state; no action
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if activate {
|
||||||
|
if used, err := isEmailActive(sess, email, userID, 0); err != nil {
|
||||||
|
return fmt.Errorf("isEmailActive(): %v", err)
|
||||||
|
} else if used {
|
||||||
|
return ErrEmailAlreadyUsed{Email: email}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
user.IsActive = activate
|
||||||
|
if user.Rands, err = GetUserSalt(); err != nil {
|
||||||
|
return fmt.Errorf("generate salt: %v", err)
|
||||||
|
}
|
||||||
|
if err = updateUserCols(sess, &user, "is_active", "rands"); err != nil {
|
||||||
|
return fmt.Errorf("updateUserCols(): %v", err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Activate/deactivate a user's secondary email address
|
||||||
|
// First check if there's another user active with the same address
|
||||||
|
addr := EmailAddress{UID: userID, Email: email}
|
||||||
|
if has, err := sess.Get(&addr); err != nil {
|
||||||
|
return err
|
||||||
|
} else if !has {
|
||||||
|
return fmt.Errorf("no such email: %d (%s)", userID, email)
|
||||||
|
}
|
||||||
|
if addr.IsActivated == activate {
|
||||||
|
// Already in the desired state; no action
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if activate {
|
||||||
|
if used, err := isEmailActive(sess, email, 0, addr.ID); err != nil {
|
||||||
|
return fmt.Errorf("isEmailActive(): %v", err)
|
||||||
|
} else if used {
|
||||||
|
return ErrEmailAlreadyUsed{Email: email}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err = addr.updateActivation(sess, activate); err != nil {
|
||||||
|
return fmt.Errorf("updateActivation(): %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sess.Commit()
|
||||||
|
}
|
||||||
|
@@ -7,6 +7,8 @@ package models
 import (
 	"testing"
 
+	"code.gitea.io/gitea/modules/util"
+
 	"github.com/stretchr/testify/assert"
 )
 
@@ -169,3 +171,65 @@ func TestActivate(t *testing.T) {
|
|||||||
assert.True(t, emails[2].IsActivated)
|
assert.True(t, emails[2].IsActivated)
|
||||||
assert.True(t, emails[2].IsPrimary)
|
assert.True(t, emails[2].IsPrimary)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestListEmails(t *testing.T) {
|
||||||
|
assert.NoError(t, PrepareTestDatabase())
|
||||||
|
|
||||||
|
// Must find all users and their emails
|
||||||
|
opts := &SearchEmailOptions{}
|
||||||
|
emails, count, err := SearchEmails(opts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.NotEqual(t, int64(0), count)
|
||||||
|
assert.True(t, count > 5)
|
||||||
|
|
||||||
|
contains := func(match func(s *SearchEmailResult) bool) bool {
|
||||||
|
for _, v := range emails {
|
||||||
|
if match(v) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 18 }))
|
||||||
|
// 'user3' is an organization
|
||||||
|
assert.False(t, contains(func(s *SearchEmailResult) bool { return s.UID == 3 }))
|
||||||
|
|
||||||
|
// Must find no records
|
||||||
|
opts = &SearchEmailOptions{Keyword: "NOTFOUND"}
|
||||||
|
emails, count, err = SearchEmails(opts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, int64(0), count)
|
||||||
|
|
||||||
|
// Must find users 'user2', 'user28', etc.
|
||||||
|
opts = &SearchEmailOptions{Keyword: "user2"}
|
||||||
|
emails, count, err = SearchEmails(opts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.NotEqual(t, int64(0), count)
|
||||||
|
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 2 }))
|
||||||
|
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.UID == 27 }))
|
||||||
|
|
||||||
|
// Must find only primary addresses (i.e. from the `user` table)
|
||||||
|
opts = &SearchEmailOptions{IsPrimary: util.OptionalBoolTrue}
|
||||||
|
emails, count, err = SearchEmails(opts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, contains(func(s *SearchEmailResult) bool { return s.IsPrimary }))
|
||||||
|
assert.False(t, contains(func(s *SearchEmailResult) bool { return !s.IsPrimary }))
|
||||||
|
|
||||||
|
// Must find only inactive addresses (i.e. not validated)
|
||||||
|
opts = &SearchEmailOptions{IsActivated: util.OptionalBoolFalse}
|
||||||
|
emails, count, err = SearchEmails(opts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, contains(func(s *SearchEmailResult) bool { return !s.IsActivated }))
|
||||||
|
assert.False(t, contains(func(s *SearchEmailResult) bool { return s.IsActivated }))
|
||||||
|
|
||||||
|
// Must find more than one page, but retrieve only one
|
||||||
|
opts = &SearchEmailOptions{
|
||||||
|
PageSize: 5,
|
||||||
|
Page: 1,
|
||||||
|
}
|
||||||
|
emails, count, err = SearchEmails(opts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, 5, len(emails))
|
||||||
|
assert.True(t, count > int64(len(emails)))
|
||||||
|
}
|
||||||
|
|||||||
@@ -111,8 +111,8 @@ func GetUserByOpenID(uri string) (*User, error) {
 		log.Trace("Normalized OpenID URI: " + uri)
 
 		// Otherwise, check in openid table
-		oid := &UserOpenID{URI: uri}
-		has, err := x.Get(oid)
+		oid := &UserOpenID{}
+		has, err := x.Where("uri=?", uri).Get(oid)
 		if err != nil {
 			return nil, err
 		}
@@ -47,3 +47,13 @@ type AdminEditUserForm struct {
 func (f *AdminEditUserForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
 	return validate(errs, ctx.Data, f, ctx.Locale)
 }
+
+// AdminDashboardForm form for admin dashboard operations
+type AdminDashboardForm struct {
+	Op int `binding:"required"`
+}
+
+// Validate validates form fields
+func (f *AdminDashboardForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
+	return validate(errs, ctx.Data, f, ctx.Locale)
+}
|
|||||||
)
|
)
|
||||||
|
|
||||||
// Auth pam auth service
|
// Auth pam auth service
|
||||||
func Auth(serviceName, userName, passwd string) error {
|
func Auth(serviceName, userName, passwd string) (string, error) {
|
||||||
t, err := pam.StartFunc(serviceName, userName, func(s pam.Style, msg string) (string, error) {
|
t, err := pam.StartFunc(serviceName, userName, func(s pam.Style, msg string) (string, error) {
|
||||||
switch s {
|
switch s {
|
||||||
case pam.PromptEchoOff:
|
case pam.PromptEchoOff:
|
||||||
@@ -25,12 +25,14 @@ func Auth(serviceName, userName, passwd string) error {
|
|||||||
})
|
})
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = t.Authenticate(0); err != nil {
|
if err = t.Authenticate(0); err != nil {
|
||||||
return err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
// PAM login names might suffer transformations in the PAM stack.
|
||||||
|
// We should take whatever the PAM stack returns for it.
|
||||||
|
return t.GetItem(pam.User)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,6 +11,6 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// Auth not supported lack of pam tag
|
// Auth not supported lack of pam tag
|
||||||
func Auth(serviceName, userName, passwd string) error {
|
func Auth(serviceName, userName, passwd string) (string, error) {
|
||||||
return errors.New("PAM not supported")
|
return "", errors.New("PAM not supported")
|
||||||
}
|
}
|
||||||
|
|||||||
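With the new signature, Auth reports the login name as the PAM stack resolved it, and callers are expected to use that value rather than the raw input. A hedged sketch of a caller; the import path and the service and credential values are assumptions, only the Auth signature comes from the diff:

package main

import (
	"fmt"

	// Assumed import path for the package changed above.
	"code.gitea.io/gitea/modules/auth/pam"
)

func login(service, name, password string) (string, error) {
	// Auth now returns the (possibly transformed) PAM user name on success.
	pamUser, err := pam.Auth(service, name, password)
	if err != nil {
		return "", err
	}
	return pamUser, nil
}

func main() {
	// Values are illustrative only.
	user, err := login("gitea", "someuser", "secret")
	fmt.Println(user, err)
}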
@@ -7,6 +7,7 @@ package context
 
 import (
 	"fmt"
+	"net/http"
 	"net/url"
 	"strings"
 
@@ -64,18 +65,18 @@ type APINotFound struct{}
 // swagger:response redirect
 type APIRedirect struct{}
 
-// Error responses error message to client with given message.
+// Error responds with an error message to client with given obj as the message.
 // If status is 500, also it prints error to log.
 func (ctx *APIContext) Error(status int, title string, obj interface{}) {
 	var message string
 	if err, ok := obj.(error); ok {
 		message = err.Error()
 	} else {
-		message = obj.(string)
+		message = fmt.Sprintf("%s", obj)
 	}
 
-	if status == 500 {
-		log.Error("%s: %s", title, message)
+	if status == http.StatusInternalServerError {
+		log.ErrorWithSkip(1, "%s: %s", title, message)
 	}
 
 	ctx.JSON(status, APIError{
@@ -84,6 +85,22 @@ func (ctx *APIContext) Error(status int, title string, obj interface{}) {
 	})
 }
 
+// InternalServerError responds with an error message to the client with the error as a message
+// and the file and line of the caller.
+func (ctx *APIContext) InternalServerError(err error) {
+	log.ErrorWithSkip(1, "InternalServerError: %v", err)
+
+	var message string
+	if macaron.Env != macaron.PROD {
+		message = err.Error()
+	}
+
+	ctx.JSON(http.StatusInternalServerError, APIError{
+		Message: message,
+		URL:     setting.API.SwaggerURL,
+	})
+}
+
 func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string {
 	page := NewPagination(total, pageSize, curPage, 0)
 	paginater := page.Paginater
@@ -212,6 +229,11 @@ func (ctx *APIContext) NotFound(objs ...interface{}) {
 	var message = "Not Found"
 	var errors []string
 	for _, obj := range objs {
+		// Ignore nil
+		if obj == nil {
+			continue
+		}
+
 		if err, ok := obj.(error); ok {
 			errors = append(errors, err.Error())
 		} else {
@@ -396,7 +396,7 @@ func RepoAssignment() macaron.Handler {
 			ctx.Data["RepoExternalIssuesLink"] = unit.ExternalTrackerConfig().ExternalTrackerURL
 		}
 
-		count, err := models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{
+		ctx.Data["NumReleases"], err = models.GetReleaseCountByRepoID(ctx.Repo.Repository.ID, models.FindReleasesOptions{
 			IncludeDrafts: false,
 			IncludeTags:   true,
 		})
@@ -404,7 +404,6 @@ func RepoAssignment() macaron.Handler {
 			ctx.ServerError("GetReleaseCountByRepoID", err)
 			return
 		}
-		ctx.Repo.Repository.NumReleases = int(count)
 
 		ctx.Data["Title"] = owner.Name + "/" + repo.Name
 		ctx.Data["Repository"] = repo
@@ -30,8 +30,17 @@ func ToEmail(email *models.EmailAddress) *api.Email {
 }
 
 // ToBranch convert a git.Commit and git.Branch to an api.Branch
-func ToBranch(repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.ProtectedBranch, user *models.User) *api.Branch {
+func ToBranch(repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.ProtectedBranch, user *models.User) (*api.Branch, error) {
 	if bp == nil {
+		var hasPerm bool
+		var err error
+		if user != nil {
+			hasPerm, err = models.HasAccessUnit(user, repo, models.UnitTypeCode, models.AccessModeWrite)
+			if err != nil {
+				return nil, err
+			}
+		}
+
 		return &api.Branch{
 			Name:   b.Name,
 			Commit: ToCommit(repo, c),
@@ -39,20 +48,25 @@ func ToBranch(repo *models.Repository, b *git.Branch, c *git.Commit, bp *models.
 			RequiredApprovals:   0,
 			EnableStatusCheck:   false,
 			StatusCheckContexts: []string{},
-			UserCanPush:         true,
-			UserCanMerge:        true,
-		}
+			UserCanPush:         hasPerm,
+			UserCanMerge:        hasPerm,
+		}, nil
 	}
-	return &api.Branch{
+
+	branch := &api.Branch{
 		Name:                b.Name,
 		Commit:              ToCommit(repo, c),
 		Protected:           true,
 		RequiredApprovals:   bp.RequiredApprovals,
 		EnableStatusCheck:   bp.EnableStatusCheck,
 		StatusCheckContexts: bp.StatusCheckContexts,
-		UserCanPush:         bp.CanUserPush(user.ID),
-		UserCanMerge:        bp.CanUserMerge(user.ID),
 	}
+
+	if user != nil {
+		branch.UserCanPush = bp.CanUserPush(user.ID)
+		branch.UserCanMerge = bp.IsUserMergeWhitelisted(user.ID)
+	}
+	return branch, nil
 }
 
 // ToTag convert a git.Tag to an api.Tag
@@ -11,7 +11,7 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing"
|
"github.com/go-git/go-git/v5/plumbing"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Blob represents a Git object.
|
// Blob represents a Git object.
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/object"
|
"github.com/go-git/go-git/v5/plumbing/object"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Commit represents a git commit.
|
// Commit represents a git commit.
|
||||||
|
|||||||
@@ -6,9 +6,9 @@ package git
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/emirpasic/gods/trees/binaryheap"
|
"github.com/emirpasic/gods/trees/binaryheap"
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing"
|
"github.com/go-git/go-git/v5/plumbing"
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/object"
|
"github.com/go-git/go-git/v5/plumbing/object"
|
||||||
cgobject "gopkg.in/src-d/go-git.v4/plumbing/object/commitgraph"
|
cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph"
|
||||||
)
|
)
|
||||||
|
|
||||||
// GetCommitsInfo gets information of all commits that are corresponding to these entries
|
// GetCommitsInfo gets information of all commits that are corresponding to these entries
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ package git
|
|||||||
import (
|
import (
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/object"
|
"github.com/go-git/go-git/v5/plumbing/object"
|
||||||
)
|
)
|
||||||
|
|
||||||
// NotesRef is the git ref where Gitea will look for git-notes data.
|
// NotesRef is the git ref where Gitea will look for git-notes data.
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/filemode"
|
"github.com/go-git/go-git/v5/plumbing/filemode"
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/object"
|
"github.com/go-git/go-git/v5/plumbing/object"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ParseTreeEntries parses the output of a `git ls-tree` command.
|
// ParseTreeEntries parses the output of a `git ls-tree` command.
|
||||||
|
|||||||
@@ -7,9 +7,9 @@ package git
|
|||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/go-git/go-git/v5/plumbing/filemode"
|
||||||
|
"github.com/go-git/go-git/v5/plumbing/object"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/filemode"
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/object"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParseTreeEntries(t *testing.T) {
|
func TestParseTreeEntries(t *testing.T) {
|
||||||
|
|||||||
@@ -18,11 +18,11 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
gitealog "code.gitea.io/gitea/modules/log"
|
gitealog "code.gitea.io/gitea/modules/log"
|
||||||
|
"github.com/go-git/go-billy/v5/osfs"
|
||||||
|
gogit "github.com/go-git/go-git/v5"
|
||||||
|
"github.com/go-git/go-git/v5/plumbing/cache"
|
||||||
|
"github.com/go-git/go-git/v5/storage/filesystem"
|
||||||
"github.com/unknwon/com"
|
"github.com/unknwon/com"
|
||||||
"gopkg.in/src-d/go-billy.v4/osfs"
|
|
||||||
gogit "gopkg.in/src-d/go-git.v4"
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/cache"
|
|
||||||
"gopkg.in/src-d/go-git.v4/storage/filesystem"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Repository represents a Git repository.
|
// Repository represents a Git repository.
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
package git
|
package git
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing"
|
"github.com/go-git/go-git/v5/plumbing"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (repo *Repository) getBlob(id SHA1) (*Blob, error) {
|
func (repo *Repository) getBlob(id SHA1) (*Blob, error) {
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing"
|
"github.com/go-git/go-git/v5/plumbing"
|
||||||
)
|
)
|
||||||
|
|
||||||
// BranchPrefix base dir of the branch information file store on git
|
// BranchPrefix base dir of the branch information file store on git
|
||||||
|
|||||||
@@ -12,15 +12,20 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/go-git/go-git/v5/plumbing"
|
||||||
|
"github.com/go-git/go-git/v5/plumbing/object"
|
||||||
"github.com/mcuadros/go-version"
|
"github.com/mcuadros/go-version"
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing"
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/object"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// GetRefCommitID returns the last commit ID string of given reference (branch or tag).
|
// GetRefCommitID returns the last commit ID string of given reference (branch or tag).
|
||||||
func (repo *Repository) GetRefCommitID(name string) (string, error) {
|
func (repo *Repository) GetRefCommitID(name string) (string, error) {
|
||||||
ref, err := repo.gogitRepo.Reference(plumbing.ReferenceName(name), true)
|
ref, err := repo.gogitRepo.Reference(plumbing.ReferenceName(name), true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
if err == plumbing.ErrReferenceNotFound {
|
||||||
|
return "", ErrNotExist{
|
||||||
|
ID: name,
|
||||||
|
}
|
||||||
|
}
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -89,9 +94,15 @@ func (repo *Repository) getCommit(id SHA1) (*Commit, error) {
|
|||||||
gogitCommit, err := repo.gogitRepo.CommitObject(id)
|
gogitCommit, err := repo.gogitRepo.CommitObject(id)
|
||||||
if err == plumbing.ErrObjectNotFound {
|
if err == plumbing.ErrObjectNotFound {
|
||||||
tagObject, err = repo.gogitRepo.TagObject(id)
|
tagObject, err = repo.gogitRepo.TagObject(id)
|
||||||
|
if err == plumbing.ErrObjectNotFound {
|
||||||
|
return nil, ErrNotExist{
|
||||||
|
ID: id.String(),
|
||||||
|
}
|
||||||
|
}
|
||||||
if err == nil {
|
if err == nil {
|
||||||
gogitCommit, err = repo.gogitRepo.CommitObject(tagObject.Target)
|
gogitCommit, err = repo.gogitRepo.CommitObject(tagObject.Target)
|
||||||
}
|
}
|
||||||
|
// if we get a plumbing.ErrObjectNotFound here then the repository is broken and it should be 500
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ import (
|
|||||||
|
|
||||||
gitealog "code.gitea.io/gitea/modules/log"
|
gitealog "code.gitea.io/gitea/modules/log"
|
||||||
|
|
||||||
"gopkg.in/src-d/go-git.v4/plumbing/format/commitgraph"
|
"github.com/go-git/go-git/v5/plumbing/format/commitgraph"
|
||||||
cgobject "gopkg.in/src-d/go-git.v4/plumbing/object/commitgraph"
|
cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph"
|
||||||
)
|
)
|
||||||
|
|
||||||
// CommitNodeIndex returns the index for walking commit graph
|
// CommitNodeIndex returns the index for walking commit graph
|
||||||
|
|||||||
@@ -7,8 +7,8 @@ package git
 import (
     "strings"
 
-    "gopkg.in/src-d/go-git.v4"
-    "gopkg.in/src-d/go-git.v4/plumbing"
+    "github.com/go-git/go-git/v5"
+    "github.com/go-git/go-git/v5/plumbing"
 )
 
 // GetRefs returns all references of the repository.
@@ -9,8 +9,8 @@ import (
     "fmt"
     "strings"
 
+    "github.com/go-git/go-git/v5/plumbing"
     "github.com/mcuadros/go-version"
-    "gopkg.in/src-d/go-git.v4/plumbing"
 )
 
 // TagPrefix tags prefix path on the repository
@@ -10,7 +10,7 @@ import (
     "fmt"
     "strings"
 
-    "gopkg.in/src-d/go-git.v4/plumbing"
+    "github.com/go-git/go-git/v5/plumbing"
 )
 
 // EmptySHA defines empty git SHA
@@ -10,7 +10,7 @@ import (
     "strconv"
     "time"
 
-    "gopkg.in/src-d/go-git.v4/plumbing/object"
+    "github.com/go-git/go-git/v5/plumbing/object"
 )
 
 // Signature represents the Author or Committer information.
@@ -9,6 +9,7 @@ import (
     "fmt"
     "net"
     "net/url"
+    "path"
     "regexp"
     "strings"
 )
@@ -38,7 +39,7 @@ func NewSubModuleFile(c *Commit, refURL, refID string) *SubModuleFile {
     }
 }
 
-func getRefURL(refURL, urlPrefix, parentPath string) string {
+func getRefURL(refURL, urlPrefix, repoFullName string) string {
     if refURL == "" {
         return ""
     }
@@ -51,14 +52,14 @@ func getRefURL(refURL, urlPrefix, parentPath string) string {
         urlPrefixHostname = prefixURL.Host
     }
 
+    if strings.HasSuffix(urlPrefix, "/") {
+        urlPrefix = urlPrefix[:len(urlPrefix)-1]
+    }
+
+    // FIXME: Need to consider branch - which will require changes in modules/git/commit.go:GetSubModules
     // Relative url prefix check (according to git submodule documentation)
     if strings.HasPrefix(refURI, "./") || strings.HasPrefix(refURI, "../") {
-        // ...construct and return correct submodule url here...
-        idx := strings.Index(parentPath, "/src/")
-        if idx == -1 {
-            return refURI
-        }
-        return strings.TrimSuffix(urlPrefix, "/") + parentPath[:idx] + "/" + refURI
+        return urlPrefix + path.Clean(path.Join("/", repoFullName, refURI))
     }
 
     if !strings.Contains(refURI, "://") {
@@ -69,16 +70,16 @@ func getRefURL(refURL, urlPrefix, parentPath string) string {
 
         m := match[0]
         refHostname := m[2]
-        path := m[3]
+        pth := m[3]
 
-        if !strings.HasPrefix(path, "/") {
-            path = "/" + path
+        if !strings.HasPrefix(pth, "/") {
+            pth = "/" + pth
         }
 
         if urlPrefixHostname == refHostname {
-            return prefixURL.Scheme + "://" + urlPrefixHostname + path
+            return urlPrefix + path.Clean(path.Join("/", pth))
         }
-        return "http://" + refHostname + path
+        return "http://" + refHostname + pth
     }
 }
 
@@ -97,7 +98,7 @@ func getRefURL(refURL, urlPrefix, parentPath string) string {
     for _, scheme := range supportedSchemes {
         if ref.Scheme == scheme {
             if urlPrefixHostname == refHostname {
-                return prefixURL.Scheme + "://" + prefixURL.Host + ref.Path
+                return urlPrefix + path.Clean(path.Join("/", ref.Path))
             } else if ref.Scheme == "http" || ref.Scheme == "https" {
                 if len(ref.User.Username()) > 0 {
                     return ref.Scheme + "://" + fmt.Sprintf("%v", ref.User) + "@" + ref.Host + ref.Path
@@ -113,8 +114,8 @@ func getRefURL(refURL, urlPrefix, parentPath string) string {
 }
 
 // RefURL guesses and returns reference URL.
-func (sf *SubModuleFile) RefURL(urlPrefix string, parentPath string) string {
-    return getRefURL(sf.refURL, urlPrefix, parentPath)
+func (sf *SubModuleFile) RefURL(urlPrefix string, repoFullName string) string {
+    return getRefURL(sf.refURL, urlPrefix, repoFullName)
 }
 
 // RefID returns reference ID.
@@ -17,21 +17,21 @@ func TestGetRefURL(t *testing.T) {
         parentPath string
         expect string
     }{
-        {"git://github.com/user1/repo1", "/", "/", "http://github.com/user1/repo1"},
-        {"https://localhost/user1/repo1.git", "/", "/", "https://localhost/user1/repo1"},
-        {"http://localhost/user1/repo1.git", "/", "/", "http://localhost/user1/repo1"},
-        {"git@github.com:user1/repo1.git", "/", "/", "http://github.com/user1/repo1"},
-        {"ssh://git@git.zefie.net:2222/zefie/lge_g6_kernel_scripts.git", "/", "/", "http://git.zefie.net/zefie/lge_g6_kernel_scripts"},
-        {"git@git.zefie.net:2222/zefie/lge_g6_kernel_scripts.git", "/", "/", "http://git.zefie.net/2222/zefie/lge_g6_kernel_scripts"},
-        {"git@try.gitea.io:go-gitea/gitea", "https://try.gitea.io/go-gitea/gitea", "/", "https://try.gitea.io/go-gitea/gitea"},
-        {"ssh://git@try.gitea.io:9999/go-gitea/gitea", "https://try.gitea.io/go-gitea/gitea", "/", "https://try.gitea.io/go-gitea/gitea"},
-        {"git://git@try.gitea.io:9999/go-gitea/gitea", "https://try.gitea.io/go-gitea/log", "/", "https://try.gitea.io/go-gitea/gitea"},
-        {"ssh://git@127.0.0.1:9999/go-gitea/gitea", "https://127.0.0.1:3000/go-gitea/log", "/", "https://127.0.0.1:3000/go-gitea/gitea"},
-        {"https://gitea.com:3000/user1/repo1.git", "https://127.0.0.1:3000/go-gitea/gitea", "/", "https://gitea.com:3000/user1/repo1"},
-        {"https://username:password@github.com/username/repository.git", "/", "/", "https://username:password@github.com/username/repository"},
+        {"git://github.com/user1/repo1", "/", "user1/repo2", "http://github.com/user1/repo1"},
+        {"https://localhost/user1/repo1.git", "/", "user1/repo2", "https://localhost/user1/repo1"},
+        {"http://localhost/user1/repo1.git", "/", "owner/reponame", "http://localhost/user1/repo1"},
+        {"git@github.com:user1/repo1.git", "/", "owner/reponame", "http://github.com/user1/repo1"},
+        {"ssh://git@git.zefie.net:2222/zefie/lge_g6_kernel_scripts.git", "/", "zefie/lge_g6_kernel", "http://git.zefie.net/zefie/lge_g6_kernel_scripts"},
+        {"git@git.zefie.net:2222/zefie/lge_g6_kernel_scripts.git", "/", "zefie/lge_g6_kernel", "http://git.zefie.net/2222/zefie/lge_g6_kernel_scripts"},
+        {"git@try.gitea.io:go-gitea/gitea", "https://try.gitea.io/", "go-gitea/sdk", "https://try.gitea.io/go-gitea/gitea"},
+        {"ssh://git@try.gitea.io:9999/go-gitea/gitea", "https://try.gitea.io/", "go-gitea/sdk", "https://try.gitea.io/go-gitea/gitea"},
+        {"git://git@try.gitea.io:9999/go-gitea/gitea", "https://try.gitea.io/", "go-gitea/sdk", "https://try.gitea.io/go-gitea/gitea"},
+        {"ssh://git@127.0.0.1:9999/go-gitea/gitea", "https://127.0.0.1:3000/", "go-gitea/sdk", "https://127.0.0.1:3000/go-gitea/gitea"},
+        {"https://gitea.com:3000/user1/repo1.git", "https://127.0.0.1:3000/", "user/repo2", "https://gitea.com:3000/user1/repo1"},
+        {"https://username:password@github.com/username/repository.git", "/", "username/repository2", "https://username:password@github.com/username/repository"},
         {"somethingbad", "https://127.0.0.1:3000/go-gitea/gitea", "/", ""},
-        {"git@localhost:user/repo", "https://localhost/user/repo2", "/", "https://localhost/user/repo"},
-        {"../path/to/repo.git/", "https://localhost/user/repo2/src/branch/master/test", "/", "../path/to/repo.git/"},
+        {"git@localhost:user/repo", "https://localhost/", "user2/repo1", "https://localhost/user/repo"},
+        {"../path/to/repo.git/", "https://localhost/", "user/repo2", "https://localhost/user/path/to/repo.git"},
     }
 
     for _, kase := range kases {
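The updated test table reflects the new getRefURL signature: relative submodule URLs are now resolved against the instance prefix and the repository's full name with path.Join and path.Clean. A self-contained sketch of that branch, using the values from the last test case above:

package main

import (
	"fmt"
	"path"
	"strings"
)

func main() {
	// Values taken from the final test case in the table above.
	urlPrefix := "https://localhost/"
	repoFullName := "user/repo2"
	refURI := "../path/to/repo.git/"

	// Mirrors the relative-URL branch of getRefURL: trim the trailing slash,
	// then let path.Join/path.Clean collapse the ".." against the repo name.
	urlPrefix = strings.TrimSuffix(urlPrefix, "/")
	resolved := urlPrefix + path.Clean(path.Join("/", repoFullName, refURI))

	fmt.Println(resolved) // https://localhost/user/path/to/repo.git
}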
@@ -9,8 +9,8 @@ import (
     "io"
     "strings"
 
-    "gopkg.in/src-d/go-git.v4/plumbing"
-    "gopkg.in/src-d/go-git.v4/plumbing/object"
+    "github.com/go-git/go-git/v5/plumbing"
+    "github.com/go-git/go-git/v5/plumbing/object"
 )
 
 // Tree represents a flat directory listing.
@@ -9,8 +9,8 @@ import (
     "path"
     "strings"
 
-    "gopkg.in/src-d/go-git.v4/plumbing/filemode"
-    "gopkg.in/src-d/go-git.v4/plumbing/object"
+    "github.com/go-git/go-git/v5/plumbing/filemode"
+    "github.com/go-git/go-git/v5/plumbing/object"
 )
 
 // GetTreeEntryByPath get the tree entries according the sub dir
@@ -10,9 +10,9 @@ import (
     "sort"
     "strings"
 
-    "gopkg.in/src-d/go-git.v4/plumbing"
-    "gopkg.in/src-d/go-git.v4/plumbing/filemode"
-    "gopkg.in/src-d/go-git.v4/plumbing/object"
+    "github.com/go-git/go-git/v5/plumbing"
+    "github.com/go-git/go-git/v5/plumbing/filemode"
+    "github.com/go-git/go-git/v5/plumbing/object"
 )
 
 // EntryMode the type of the object in the git tree
@@ -7,9 +7,9 @@ package git
 import (
     "testing"
 
+    "github.com/go-git/go-git/v5/plumbing/filemode"
+    "github.com/go-git/go-git/v5/plumbing/object"
     "github.com/stretchr/testify/assert"
-    "gopkg.in/src-d/go-git.v4/plumbing/filemode"
-    "gopkg.in/src-d/go-git.v4/plumbing/object"
 )
 
 func getTestEntries() Entries {
@@ -108,7 +108,7 @@ func (s *linkifyParser) Parse(parent ast.Node, block text.Reader, pc parser.Cont
     }
     at := bytes.IndexByte(line, '@')
     m = []int{0, stop, at, stop - 1}
-    if m == nil || bytes.IndexByte(line[m[2]:m[3]], '.') < 0 {
+    if bytes.IndexByte(line[m[2]:m[3]], '.') < 0 {
         return nil
     }
     lastChar := line[m[1]-1]
@@ -290,7 +290,7 @@ func (ctx *postProcessCtx) postProcess(rawHTML []byte) ([]byte, error) {
     }
 
     for _, node := range nodes {
-        ctx.visitNode(node)
+        ctx.visitNode(node, true)
     }
 
     // Create buffer in which the data will be placed again. We know that the
@@ -313,7 +313,7 @@ func (ctx *postProcessCtx) postProcess(rawHTML []byte) ([]byte, error) {
     return res, nil
 }
 
-func (ctx *postProcessCtx) visitNode(node *html.Node) {
+func (ctx *postProcessCtx) visitNode(node *html.Node, visitText bool) {
     // Add user-content- to IDs if they don't already have them
     for idx, attr := range node.Attr {
         if attr.Key == "id" && !(strings.HasPrefix(attr.Val, "user-content-") || blackfridayExtRegex.MatchString(attr.Val)) {
@@ -323,13 +323,37 @@ func (ctx *postProcessCtx) visitNode(node *html.Node) {
     // We ignore code, pre and already generated links.
     switch node.Type {
     case html.TextNode:
+        if visitText {
             ctx.textNode(node)
+        }
     case html.ElementNode:
-        if node.Data == "a" || node.Data == "code" || node.Data == "pre" {
+        if node.Data == "img" {
+            attrs := node.Attr
+            for idx, attr := range attrs {
+                if attr.Key != "src" {
+                    continue
+                }
+                link := []byte(attr.Val)
+                if len(link) > 0 && !IsLink(link) {
+                    prefix := ctx.urlPrefix
+                    if ctx.isWikiMarkdown {
+                        prefix = util.URLJoin(prefix, "wiki", "raw")
+                    }
+                    prefix = strings.Replace(prefix, "/src/", "/media/", 1)
+
+                    lnk := string(link)
+                    lnk = util.URLJoin(prefix, lnk)
+                    link = []byte(lnk)
+                }
+                node.Attr[idx].Val = string(link)
+            }
+        } else if node.Data == "a" {
+            visitText = false
+        } else if node.Data == "code" || node.Data == "pre" {
             return
         }
         for n := node.FirstChild; n != nil; n = n.NextSibling {
-            ctx.visitNode(n)
+            ctx.visitNode(n, visitText)
         }
     }
     // ignore everything else
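The enlarged visitNode hunk rewrites relative <img src> attributes so they point at the repository's raw /media/ path instead of the rendered /src/ page, and stops rewriting text inside <a> nodes. A rough, self-contained sketch of just the prefix rewriting; urlJoin here is a simplified stand-in for util.URLJoin, and the example URL is made up:

package main

import (
	"fmt"
	"strings"
)

// urlJoin is a simplified stand-in for util.URLJoin used in the hunk above.
func urlJoin(base string, elems ...string) string {
	return strings.TrimSuffix(base, "/") + "/" + strings.Join(elems, "/")
}

func main() {
	// A relative image source inside rendered markdown, viewed under /src/.
	urlPrefix := "https://example.com/org/repo/src/branch/master"
	src := "images/diagram.png"

	// The hunk swaps /src/ for /media/ so the raw file is served,
	// then joins the relative src onto that prefix.
	prefix := strings.Replace(urlPrefix, "/src/", "/media/", 1)
	fmt.Println(urlJoin(prefix, src))
	// https://example.com/org/repo/media/branch/master/images/diagram.png
}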
@@ -52,7 +52,6 @@ func (g *GiteaASTTransformer) Transform(node *ast.Document, reader text.Reader,
 
             lnk := string(link)
             lnk = giteautil.URLJoin(prefix, lnk)
-            lnk = strings.Replace(lnk, " ", "+", -1)
             link = []byte(lnk)
         }
         v.Destination = link
@@ -81,7 +81,6 @@ func RenderWiki(filename string, rawBytes []byte, urlPrefix string, metas map[st
 }
 
 func render(parser Parser, rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
-    urlPrefix = strings.Replace(urlPrefix, " ", "+", -1)
     result := parser.Render(rawBytes, urlPrefix, metas, isWiki)
     // TODO: one day the error should be returned.
     result, err := PostProcess(result, urlPrefix, metas, isWiki)
@@ -16,6 +16,7 @@ import (
     "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/migrations/base"
     "code.gitea.io/gitea/modules/structs"
+    "code.gitea.io/gitea/modules/util"
 
     "github.com/google/go-github/v24/github"
     "golang.org/x/oauth2"
@@ -121,7 +122,7 @@ func (g *GithubDownloaderV3) sleep() {
     timer := time.NewTimer(time.Until(g.rate.Reset.Time))
     select {
     case <-g.ctx.Done():
-        timer.Stop()
+        util.StopTimer(timer)
         return
     case <-timer.C:
     }
@@ -124,6 +124,12 @@ func (r *indexerNotifier) NotifyPushCommits(pusher *models.User, repo *models.Re
     }
 }
 
+func (r *indexerNotifier) NotifySyncPushCommits(pusher *models.User, repo *models.Repository, refName, oldCommitID, newCommitID string, commits *models.PushCommits) {
+    if setting.Indexer.RepoIndexerEnabled && refName == git.BranchPrefix+repo.DefaultBranch {
+        code_indexer.UpdateRepoIndexer(repo)
+    }
+}
+
 func (r *indexerNotifier) NotifyIssueChangeContent(doer *models.User, issue *models.Issue, oldContent string) {
     issue_indexer.UpdateIssueIndexer(issue)
 }
@@ -689,12 +689,12 @@ func (m *webhookNotifier) NotifyDeleteRef(pusher *models.User, repo *models.Repo
 
     if err := webhook_module.PrepareWebhooks(repo, models.HookEventDelete, &api.DeletePayload{
         Ref: refName,
-        RefType: "branch",
+        RefType: refType,
         PusherType: api.PusherTypeUser,
         Repo: apiRepo,
         Sender: apiPusher,
     }); err != nil {
-        log.Error("PrepareWebhooks.(delete branch): %v", err)
+        log.Error("PrepareWebhooks.(delete %s): %v", refType, err)
     }
 }
 
@@ -98,3 +98,8 @@ func fileFromDir(name string) ([]byte, error) {
 
     return []byte{}, fmt.Errorf("Asset file does not exist: %s", name)
 }
+
+// IsDynamic will return false when using embedded data (-tags bindata)
+func IsDynamic() bool {
+    return true
+}
@@ -112,3 +112,8 @@ func fileFromDir(name string) ([]byte, error) {
 
     return ioutil.ReadAll(f)
 }
+
+// IsDynamic will return false when using embedded data (-tags bindata)
+func IsDynamic() bool {
+    return false
+}
@@ -12,6 +12,7 @@ import (
     "time"
 
     "code.gitea.io/gitea/modules/log"
+    "code.gitea.io/gitea/modules/util"
 )
 
 // WrappedQueueType is the type for a wrapped delayed starting queue
@@ -77,7 +78,7 @@ func (q *delayedStarter) setInternal(atShutdown func(context.Context, func()), h
         t := time.NewTimer(sleepTime)
         select {
         case <-ctx.Done():
-            t.Stop()
+            util.StopTimer(t)
         case <-t.C:
         }
     }
@@ -10,6 +10,7 @@ import (
     "time"
 
     "code.gitea.io/gitea/modules/log"
+    "code.gitea.io/gitea/modules/util"
 )
 
 // WorkerPool takes
@@ -56,12 +57,7 @@ func (p *WorkerPool) pushBoost(data Data) {
         p.lock.Unlock()
         select {
         case p.dataChan <- data:
-            if timer.Stop() {
-                select {
-                case <-timer.C:
-                default:
-                }
-            }
+            util.StopTimer(timer)
         case <-timer.C:
             p.lock.Lock()
             if p.blockTimeout > ourTimeout || (p.numberOfWorkers > p.maxNumberOfWorkers && p.maxNumberOfWorkers >= 0) {
@@ -277,12 +273,7 @@ func (p *WorkerPool) doWork(ctx context.Context) {
         timer := time.NewTimer(delay)
         select {
        case <-ctx.Done():
-            if timer.Stop() {
-                select {
-                case <-timer.C:
-                default:
-                }
-            }
+            util.StopTimer(timer)
             if len(data) > 0 {
                 log.Trace("Handling: %d data, %v", len(data), data)
                 p.handle(data...)
@@ -290,12 +281,7 @@ func (p *WorkerPool) doWork(ctx context.Context) {
             log.Trace("Worker shutting down")
             return
         case datum, ok := <-p.dataChan:
-            if timer.Stop() {
-                select {
-                case <-timer.C:
-                default:
-                }
-            }
+            util.StopTimer(timer)
             if !ok {
                 // the dataChan has been closed - we should finish up:
                 if len(data) > 0 {
@@ -159,7 +159,7 @@ func GetContents(repo *models.Repository, treePath, ref string, forList bool) (*
     }
 
     // Now populate the rest of the ContentsResponse based on entry type
-    if entry.IsRegular() {
+    if entry.IsRegular() || entry.IsExecutable() {
         contentsResponse.Type = string(ContentTypeRegular)
         if blobResponse, err := GetBlobBySHA(repo, entry.ID.String()); err != nil {
             return nil, err
@@ -242,10 +242,30 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(author, committer *models
 func (t *TemporaryUploadRepository) Push(doer *models.User, commitHash string, branch string) error {
     // Because calls hooks we need to pass in the environment
     env := models.PushingEnvironment(doer, t.repo)
-    if _, err := git.NewCommand("push", t.repo.RepoPath(), strings.TrimSpace(commitHash)+":refs/heads/"+strings.TrimSpace(branch)).RunInDirWithEnv(t.basePath, env); err != nil {
-        log.Error("Unable to push back to repo from temporary repo: %s (%s) Error: %v",
-            t.repo.FullName(), t.basePath, err)
+    stdout := &strings.Builder{}
+    stderr := &strings.Builder{}
+
+    if err := git.NewCommand("push", t.repo.RepoPath(), strings.TrimSpace(commitHash)+":refs/heads/"+strings.TrimSpace(branch)).RunInDirTimeoutEnvPipeline(env, -1, t.basePath, stdout, stderr); err != nil {
+        errString := stderr.String()
+        if strings.Contains(errString, "non-fast-forward") {
+            return models.ErrMergePushOutOfDate{
+                StdOut: stdout.String(),
+                StdErr: errString,
+                Err: err,
+            }
+        } else if strings.Contains(errString, "! [remote rejected]") {
+            log.Error("Unable to push back to repo from temporary repo due to rejection: %s (%s)\nStdout: %s\nStderr: %s\nError: %v",
+                t.repo.FullName(), t.basePath, stdout, errString, err)
+            err := models.ErrPushRejected{
+                StdOut: stdout.String(),
+                StdErr: errString,
+                Err: err,
+            }
+            err.GenerateMessage()
+            return err
+        }
+        log.Error("Unable to push back to repo from temporary repo: %s (%s)\nStdout: %s\nError: %v",
+            t.repo.FullName(), t.basePath, stdout, err)
         return fmt.Errorf("Unable to push back to repo from temporary repo: %s (%s) Error: %v",
             t.repo.FullName(), t.basePath, err)
     }
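The rewritten Push now captures the stdout/stderr of `git push` and branches on the stderr text: a non-fast-forward push surfaces as models.ErrMergePushOutOfDate, a hook rejection as models.ErrPushRejected, and anything else stays a generic error. A simplified, self-contained sketch of that classification; the returned strings merely name the error types used in the hunk:

package main

import (
	"fmt"
	"strings"
)

// classifyPushError mirrors the branching added above: the stderr of a failed
// `git push` decides which error the caller gets back.
func classifyPushError(stderr string) string {
	switch {
	case strings.Contains(stderr, "non-fast-forward"):
		return "ErrMergePushOutOfDate" // the base moved underneath us; retry the merge
	case strings.Contains(stderr, "! [remote rejected]"):
		return "ErrPushRejected" // a pre-receive/update hook declined the push
	default:
		return "generic push failure"
	}
}

func main() {
	fmt.Println(classifyPushError("error: failed to push some refs (non-fast-forward)"))
	fmt.Println(classifyPushError("! [remote rejected] master -> master (pre-receive hook declined)"))
}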
@@ -210,6 +210,7 @@ func CreateOrUpdateRepoFile(repo *models.Repository, doer *models.User, opts *Up
 
     encoding := "UTF-8"
     bom := false
+    executable := false
 
     if !opts.IsNewFile {
         fromEntry, err := commit.GetTreeEntryByPath(fromTreePath)
@@ -245,6 +246,7 @@ func CreateOrUpdateRepoFile(repo *models.Repository, doer *models.User, opts *Up
             return nil, models.ErrSHAOrCommitIDNotProvided{}
         }
         encoding, bom = detectEncodingAndBOM(fromEntry, repo)
+        executable = fromEntry.IsExecutable()
     }
 
     // For the path where this file will be created/updated, we need to make
@@ -368,9 +370,15 @@ func CreateOrUpdateRepoFile(repo *models.Repository, doer *models.User, opts *Up
     }
 
     // Add the object to the index
+    if executable {
+        if err := t.AddObjectToIndex("100755", objectHash, treePath); err != nil {
+            return nil, err
+        }
+    } else {
         if err := t.AddObjectToIndex("100644", objectHash, treePath); err != nil {
             return nil, err
         }
+    }
 
     // Now write the tree
     treeHash, err := t.WriteTree()
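With the executable bit now detected on the existing entry, the update path picks the matching git tree-entry mode when re-adding the object to the index. A tiny sketch of that choice, using only the mode strings that appear in the hunk:

package main

import "fmt"

// indexMode returns the tree-entry mode written to the index above:
// 100755 keeps the executable bit, 100644 is a plain file.
func indexMode(executable bool) string {
	if executable {
		return "100755"
	}
	return "100644"
}

func main() {
	fmt.Println(indexMode(true))  // 100755
	fmt.Println(indexMode(false)) // 100644
}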
@@ -111,6 +111,7 @@ type VirtualStore struct {
     sid string
     lock sync.RWMutex
     data map[interface{}]interface{}
+    released bool
 }
 
 // NewVirtualStore creates and returns a virtual session store.
@@ -164,7 +165,7 @@ func (s *VirtualStore) Release() error {
         // Now ensure that we don't exist!
         realProvider := s.p.provider
 
-        if realProvider.Exist(s.sid) {
+        if !s.released && realProvider.Exist(s.sid) {
             // This is an error!
             return fmt.Errorf("new sid '%s' already exists", s.sid)
         }
@@ -172,12 +173,19 @@ func (s *VirtualStore) Release() error {
         if err != nil {
             return err
         }
+        if err := realStore.Flush(); err != nil {
+            return err
+        }
         for key, value := range s.data {
             if err := realStore.Set(key, value); err != nil {
                 return err
             }
         }
-        return realStore.Release()
+        err = realStore.Release()
+        if err == nil {
+            s.released = true
+        }
+        return err
     }
     return nil
 }
@@ -124,7 +124,7 @@ func DBConnStr() (string, error) {
         if err := os.MkdirAll(path.Dir(Database.Path), os.ModePerm); err != nil {
             return "", fmt.Errorf("Failed to create directories: %v", err)
         }
-        connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d", Database.Path, Database.Timeout)
+        connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", Database.Path, Database.Timeout)
     default:
         return "", fmt.Errorf("Unknown database type: %s", Database.Type)
     }
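The extra `_txlock=immediate` parameter is a DSN option of the mattn/go-sqlite3 driver: transactions then begin with BEGIN IMMEDIATE, taking the write lock up front, which tends to reduce "database is locked" failures under concurrent writers. A small sketch of how the connection string is assembled; the path and timeout values are illustrative:

package main

import "fmt"

func main() {
	// Illustrative values; the real ones come from the [database] settings.
	dbPath := "/var/lib/gitea/data/gitea.db"
	timeoutMS := 500

	connStr := fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", dbPath, timeoutMS)
	fmt.Println(connStr)
}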
modules/util/timer.go (new file, 21 lines)
@@ -0,0 +1,21 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package util
+
+import (
+    "time"
+)
+
+// StopTimer is a utility function to safely stop a time.Timer and clean its channel
+func StopTimer(t *time.Timer) bool {
+    stopped := t.Stop()
+    if !stopped {
+        select {
+        case <-t.C:
+        default:
+        }
+    }
+    return stopped
+}
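The new StopTimer helper stops a timer and, when Stop reports the timer already fired, drains the channel so a stale tick cannot be read later. A self-contained sketch of the same stop-and-drain pattern; the helper is re-declared locally so the example runs on its own:

package main

import (
	"fmt"
	"time"
)

// stopTimer re-declares the StopTimer helper above so the example is standalone.
func stopTimer(t *time.Timer) bool {
	stopped := t.Stop()
	if !stopped {
		select {
		case <-t.C: // drain the value of a timer that already fired
		default:
		}
	}
	return stopped
}

func main() {
	t := time.NewTimer(10 * time.Millisecond)
	time.Sleep(20 * time.Millisecond) // let the timer fire so Stop returns false

	fmt.Println("stopped before firing:", stopTimer(t))

	// The channel is drained either way, so reusing the timer cannot pick up a stale tick.
	t.Reset(10 * time.Millisecond)
	<-t.C
	fmt.Println("reset fired cleanly")
}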
@@ -374,6 +374,7 @@ user_bio = Biography
 
 form.name_reserved = The username '%s' is reserved.
 form.name_pattern_not_allowed = The pattern '%s' is not allowed in a username.
+form.name_chars_not_allowed = User name '%s' contains invalid characters.
 
 [settings]
 profile = Profile
@@ -434,7 +435,11 @@ manage_openid = Manage OpenID Addresses
 email_desc = Your primary email address will be used for notifications and other operations.
 theme_desc = This will be your default theme across the site.
 primary = Primary
+activated = Activated
+requires_activation = Requires activation
 primary_email = Make Primary
+activate_email = Send Activation
+activations_pending = Activations Pending
 delete_email = Remove
 email_deletion = Remove Email Address
 email_deletion_desc = The email address and related information will be removed from your account. Git commits by this email address will remain unchanged. Continue?
@@ -775,6 +780,8 @@ editor.commit_empty_file_header = Commit an empty file
 editor.commit_empty_file_text = The file you're about commit is empty. Proceed?
 editor.no_changes_to_show = There are no changes to show.
 editor.fail_to_update_file = Failed to update/create file '%s' with error: %v
+editor.push_rejected_no_message = The change was rejected by the server without a message. Please check githooks.
+editor.push_rejected = The change was rejected by the server with the following message:<br>%s<br> Please check githooks.
 editor.add_subdir = Add a directory…
 editor.unable_to_upload_files = Failed to upload files to '%s' with error: %v
 editor.upload_file_is_locked = File '%s' is locked by %s.
@@ -794,6 +801,8 @@ commits.date = Date
 commits.older = Older
 commits.newer = Newer
 commits.signed_by = Signed by
+commits.signed_by_untrusted_user = Signed by untrusted user
+commits.signed_by_untrusted_user_unmatched = Signed by untrusted user who does not match committer
 commits.gpg_key_id = GPG Key ID
 
 ext_issues = Ext. Issues
@@ -1053,6 +1062,7 @@ pulls.data_broken = This pull request is broken due to missing fork information.
 pulls.files_conflicted = This pull request has changes conflicting with the target branch.
 pulls.is_checking = "Merge conflict checking is in progress. Try again in few moments."
 pulls.required_status_check_failed = Some required checks were not successful.
+pulls.required_status_check_missing = Some required checks are missing.
 pulls.required_status_check_administrator = As an administrator, you may still merge this pull request.
 pulls.blocked_by_approvals = "This Pull Request doesn't have enough approvals yet. %d of %d approvals granted."
 pulls.blocked_by_rejection = "This Pull Request has changes requested by an official reviewer."
@@ -1074,6 +1084,8 @@ pulls.merge_conflict = Merge Failed: There was a conflict whilst merging: %[1]s<
 pulls.rebase_conflict = Merge Failed: There was a conflict whilst rebasing commit: %[1]s<br>%[2]s<br>%[3]s<br>Hint:Try a different strategy
 pulls.unrelated_histories = Merge Failed: The merge head and base do not share a common history. Hint: Try a different strategy
 pulls.merge_out_of_date = Merge Failed: Whilst generating the merge, the base was updated. Hint: Try again.
+pulls.push_rejected = Merge Failed: The push was rejected with the following message:<br>%s<br>Review the githooks for this repository
+pulls.push_rejected_no_message = Merge Failed: The push was rejected but there was no remote message.<br>Review the githooks for this repository
 pulls.open_unmerged_pull_exists = `You cannot perform a reopen operation because there is a pending pull request (#%d) with identical properties.`
 pulls.status_checking = Some checks are pending
 pulls.status_checks_success = All checks were successful
@@ -1681,6 +1693,7 @@ organizations = Organizations
 repositories = Repositories
 hooks = Default Webhooks
 authentication = Authentication Sources
+emails = User Emails
 config = Configuration
 notices = System Notices
 monitor = Monitoring
@@ -1750,6 +1763,7 @@ dashboard.gc_times = GC Times
 users.user_manage_panel = User Account Management
 users.new_account = Create User Account
 users.name = Username
+users.full_name = Full Name
 users.activated = Activated
 users.admin = Admin
 users.restricted = Restricted
@@ -1781,6 +1795,19 @@ users.still_own_repo = This user still owns one or more repositories. Delete or
 users.still_has_org = This user is a member of an organization. Remove the user from any organizations first.
 users.deletion_success = The user account has been deleted.
 
+emails.email_manage_panel = User Email Management
+emails.primary = Primary
+emails.activated = Activated
+emails.filter_sort.email = Email
+emails.filter_sort.email_reverse = Email (reverse)
+emails.filter_sort.name = User Name
+emails.filter_sort.name_reverse = User Name (reverse)
+emails.updated = Email updated
+emails.not_updated = Failed to update the requested email address: %v
+emails.duplicate_active = This email address is already active for a different user.
+emails.change_email_header = Update Email Properties
+emails.change_email_text = Are your sure you want to update this email address?
+
 orgs.org_manage_panel = Organization Management
 orgs.name = Name
 orgs.teams = Teams
Some files were not shown because too many files have changed in this diff.