Mirror of https://github.com/go-gitea/gitea.git (synced 2025-11-15 12:33:45 +09:00)
Compare commits
53 Commits
SHA1:
53b89c3474, f9648888f5, dc9f5a7311, da0460dea0, 480efbdb96, 4a71d4de68, 0f2ee77968, 4e10f33349,
2123195b3a, 5a021801d9, e6723ef167, 732f22ad7d, c9e6069970, f1fd8a772f, 9f9a53e361, e6a82047ee,
99fb256411, 09abdb8a65, e0ae0b3b94, f9942add50, d4af0df967, e1ed2a76b1, 2bd7feed37, a0a77c9401,
40a7660038, b05e178138, 96918a442b, aa1d9ef6cb, 20a75f86a1, c1c5e00d20, 3e279dfb0b, e9346fc4a9,
b62e13a001, 470b195da1, 09178300b0, 23aae3274a, a98bf03204, 65aef7b35f, 65ef634d5c, 869fd17b88,
d624e91c0c, 62a3c847cd, 3a02f0896e, 408db95dc1, 6305f07fdc, ff9d99f63d, 37572551d7, 0ee823be0b,
062ea40a79, 7a25441abe, dc71d00393, 0bb56a413d, 2806a312e1
@@ -652,7 +652,6 @@ steps:
 event:
 exclude:
 - pull_request
-
 ---
 kind: pipeline
 name: docker-linux-arm64-dry-run
@@ -682,6 +681,9 @@ steps:
 tags: linux-arm64
 build_args:
 - GOPROXY=off
+environment:
+PLUGIN_MIRROR:
+from_secret: plugin_mirror
 when:
 event:
 - pull_request
@@ -726,11 +728,13 @@ steps:
 from_secret: docker_password
 username:
 from_secret: docker_username
+environment:
+PLUGIN_MIRROR:
+from_secret: plugin_mirror
 when:
 event:
 exclude:
 - pull_request
-
 ---
 kind: pipeline
 name: docker-manifest
102 CHANGELOG.md
@@ -4,6 +4,60 @@ This changelog goes through all the changes that have been made in each release
 without substantial changes to our git log; to see the highlights of what has
 been added to each release, please refer to the [blog](https://blog.gitea.io).
 
+## [1.12.6](https://github.com/go-gitea/gitea/releases/tag/v1.12.6) - 2020-11-11
+
+* SECURITY
+  * Prevent git operations for inactive users (#13527) (#13537)
+  * Disallow urlencoded new lines in git protocol paths if there is a port (#13521) (#13525)
+* BUGFIXES
+  * API should only return Json (#13511) (#13564)
+  * Fix before and since query arguments at API (#13559) (#13560)
+  * Prevent panic on git blame by limiting lines to 4096 bytes at most (#13470) (#13492)
+  * Fix link detection in repository description with tailing '_' (#13407) (#13408)
+  * Remove obsolete change of email on profile page (#13341) (#13348)
+  * Fix permission check on get Reactions API endpoints (#13344) (#13346)
+  * Add migrated pulls to pull request task queue (#13331) (#13335)
+  * API deny wrong pull creation options (#13308) (#13327)
+  * Fix initial commit page & binary munching problem (#13249) (#13259)
+  * Fix diff parsing (#13157) (#13136) (#13139)
+  * Return error 404 not 500 from API if team does not exist (#13118) (#13119)
+  * Prohibit automatic downgrades (#13108) (#13111)
+  * Fix GitLab Migration Option AuthToken (#13101)
+  * GitLab Label Color Normalizer (#12793) (#13100)
+  * Log the underlying panic in runMigrateTask (#13096) (#13098)
+  * Fix attachments list in edit comment (#13036) (#13097)
+  * Fix deadlock when deleting team user (#13093)
+  * Fix error create comment on outdated file (#13041) (#13042)
+  * Fix repository create/delete event webhooks (#13008) (#13027)
+  * Fix internal server error on README in submodule (#13006) (#13016)
+
+## [1.12.5](https://github.com/go-gitea/gitea/releases/tag/v1.12.5) - 2020-10-01
+
+* BUGFIXES
+  * Allow U2F with default settings for gitea in subpath (#12990) (#13001)
+  * Prevent empty div when editing comment (#12404) (#12991)
+  * On mirror update also update address in DB (#12964) (#12967)
+  * Allow extended config on cron settings (#12939) (#12943)
+  * Open transaction when adding Avatar email-hash pairs to the DB (#12577) (#12940)
+  * Fix internal server error from ListUserOrgs API (#12910) (#12915)
+  * Update only the repository columns that need updating (#12900) (#12912)
+  * Fix panic when adding long comment (#12892) (#12894)
+  * Add size limit for content of comment on action ui (#12881) (#12890)
+  * Convert User expose ID each time (#12855) (#12883)
+  * Support slashes in release tags (#12864) (#12882)
+  * Add missing information to CreateRepo API endpoint (#12848) (#12867)
+  * On Migration respect old DefaultBranch (#12843) (#12858)
+  * Fix notifications page links (#12838) (#12853)
+  * Stop cloning unnecessarily on PR update (#12839) (#12852)
+  * Escape more things that are passed through str2html (#12622) (#12850)
+  * Remove double escape on labels addition in comments (#12809) (#12810)
+  * Fix "only mail on mention" bug (#12775) (#12789)
+  * Fix yet another bug with diff file names (#12771) (#12776)
+  * RepoInit Respect AlternateDefaultBranch (#12746) (#12751)
+  * Fix Avatar Resize (resize algo NearestNeighbor -> Bilinear) (#12745) (#12750)
+* ENHANCEMENTS
+  * gitea dump: include version & Check InstallLock (#12760) (#12762)
+
 ## [1.12.4](https://github.com/go-gitea/gitea/releases/tag/v1.12.4) - 2020-09-02
 
 * SECURITY
@@ -103,7 +157,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Add request review from specific reviewers feature in pull request (#10756)
   * Add NextCloud oauth (#10562)
   * System-wide webhooks (#10546)
-  * Relax sanitization as per https://github.com/jch/html-pipeline (#10527)
+  * Relax sanitization as per <https://github.com/jch/html-pipeline> (#10527)
   * Use media links for img in post-process (#10515)
   * Add API endpoints to manage OAuth2 Application (list/create/delete) (#10437)
   * Render READMEs in docs/ .gitea or .github from root (#10361)
@@ -560,6 +614,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
   * Blacklist manifest.json & milestones user (#10292) (#10293)
 
 ## [1.11.0](https://github.com/go-gitea/gitea/releases/tag/v1.11.0) - 2020-02-10
+
 * BREAKING
   * Fix followers and following tabs in profile (#10202) (#10203)
   * Make CertFile and KeyFile relative to CustomPath (#9868) (#9874)
@@ -1033,6 +1088,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Ensure that 2fa is checked on reset-password (#9857) (#9877)
 
 ## [1.10.3](https://github.com/go-gitea/gitea/releases/tag/v1.10.3) - 2020-01-17
+
 * SECURITY
   * Hide credentials when submitting migration (#9102) (#9704)
   * Never allow an empty password to validate (#9682) (#9684)
@@ -1051,6 +1107,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Branches not at ref commit ID should not be listed as Merged (#9614) (#9639)
 
 ## [1.10.2](https://github.com/go-gitea/gitea/releases/tag/v1.10.2) - 2020-01-02
+
 * BUGFIXES
   * Allow only specific Columns to be updated on Issue via API (#9539) (#9580)
   * Add ErrReactionAlreadyExist error (#9550) (#9564)
@@ -1071,6 +1128,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix File Edit: Author/Committer interchanged (#9297) (#9300)
 
 ## [1.10.1](https://github.com/go-gitea/gitea/releases/tag/v1.10.1) - 2019-12-05
+
 * BUGFIXES
   * Fix max length check and limit in multiple repo forms (#9148) (#9204)
   * Properly fix displaying virtual session provider in admin panel (#9137) (#9203)
@@ -1092,6 +1150,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Shadow password correctly for session config (#8984) (#9002)
 
 ## [1.10.0](https://github.com/go-gitea/gitea/releases/tag/v1.10.0) - 2019-11-13
+
 * BREAKING
   * Fix deadline on update issue or PR via API (#8698)
   * Hide some user information via API if user doesn't have enough permission (#8655) (#8657)
@@ -1389,6 +1448,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix Statuses API only shows first 10 statuses: Add paging and extend API GetCommitStatuses (#7141)
 
 ## [1.9.6](https://github.com/go-gitea/gitea/releases/tag/v1.9.6) - 2019-11-13
+
 * BUGFIXES
   * Allow to merge if file path contains " or \ (#8629) (#8772)
   * Fix 500 when edit hook (#8782) (#8790)
@@ -1397,6 +1457,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Add Close() method to gogitRepository (#8901) (#8958)
 
 ## [1.9.5](https://github.com/go-gitea/gitea/releases/tag/v1.9.5) - 2019-10-30
+
 * BREAKING
   * Hide some user information via API if user doesn't have enough permission (#8655) (#8658)
 * BUGFIXES
@@ -1421,6 +1482,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Update heatmap fixtures to restore tests (#8615) (#8617)
 
 ## [1.9.4](https://github.com/go-gitea/gitea/releases/tag/v1.9.4) - 2019-10-08
+
 * BUGFIXES
   * Highlight issue references (#8101) (#8404)
   * Fix bug when migrating a private repository #7917 (#8403)
@@ -1447,6 +1509,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Make show private icon when repo avatar set (#8144) (#8175)
 
 ## [1.9.3](https://github.com/go-gitea/gitea/releases/tag/v1.9.3) - 2019-09-06
+
 * BUGFIXES
   * Fix go get from a private repository with Go 1.13 (#8100)
   * Strict name matching for Repository.GetTagID() (#8082)
@@ -1462,6 +1525,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Keep blame view buttons sequence consistent with normal view when viewing a file (#8007) (#8009)
 
 ## [1.9.2](https://github.com/go-gitea/gitea/releases/tag/v1.9.2) - 2019-08-22
+
 * BUGFIXES
   * Fix wrong sender when send slack webhook (#7918) (#7924)
   * Upload support text/plain; charset=utf8 (#7899)
@@ -1469,18 +1533,19 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix non existent milestone with 500 error (#7867) (#7873)
 * SECURITY
   * Fix No PGP signature on 1.9.1 tag (#7874)
-  * Release built with go 1.12.9 to fix security fixes in golang std lib, ref: https://groups.google.com/forum/#!msg/golang-announce/oeMaeUnkvVE/a49yvTLqAAAJ
+  * Release built with go 1.12.9 to fix security fixes in golang std lib, ref: <https://groups.google.com/forum/#!msg/golang-announce/oeMaeUnkvVE/a49yvTLqAAAJ>
 * ENHANCEMENTS
   * Fix pull creation with empty changes (#7920) (#7926)
 * BUILD
   * Drone/docker: prepare multi-arch release + provide arm64 image (#7571) (#7884)
 
 ## [1.9.1](https://github.com/go-gitea/gitea/releases/tag/v1.9.1) - 2019-08-14
+
 * BREAKING
   * Add pagination for admin api get orgs and fix only list public orgs bug (#7742) (#7752)
 * SECURITY
   * Be more strict with git arguments (#7715) (#7762)
-  * Release built with go 1.12.8 to fix security fixes in golang std lib, ref: https://groups.google.com/forum/#!topic/golang-nuts/fCQWxqxP8aA
+  * Release built with go 1.12.8 to fix security fixes in golang std lib, ref: <https://groups.google.com/forum/#!topic/golang-nuts/fCQWxqxP8aA>
 * BUGFIXES
   * Fix local runs of ssh-requiring integration tests (#7855) (#7857)
   * Fix hook problem (#7856) (#7754)
@@ -1503,6 +1568,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Correct wrong datetime format for git (#7689) (#7690)
 
 ## [1.9.0](https://github.com/go-gitea/gitea/releases/tag/v1.9.0) - 2019-07-30
+
 * BREAKING
   * Better logging (#6038) (#6095)
 * SECURITY
@@ -1859,6 +1925,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Added docker example for backup (#5846)
 
 ## [1.8.3](https://github.com/go-gitea/gitea/releases/tag/v1.8.3) - 2019-06-17
+
 * BUGFIXES
   * Always set userID on LFS authentication (#7224) (Part of #6993)
   * Fix LFS Locks over SSH (#6999) (#7223)
@@ -1869,6 +1936,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix GCArgs load from ini (#7156) (#7157)
 
 ## [1.8.2](https://github.com/go-gitea/gitea/releases/tag/v1.8.2) - 2019-05-29
+
 * BUGFIXES
   * Fix possbile mysql invalid connnection error (#7051) (#7071)
   * Handle invalid administrator username on install page (#7060) (#7063)
@@ -1884,6 +1952,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix wrong init dependency on markup extensions (#7038) (#7074)
 
 ## [1.8.1](https://github.com/go-gitea/gitea/releases/tag/v1.8.1) - 2019-05-08
+
 * BUGFIXES
   * Fix 404 when sending pull requests in some situations (#6871) (#6873)
   * Enforce osusergo build tag for releases (#6862) (#6869)
@@ -1910,6 +1979,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix config ui error about cache ttl (#6861) (#6865)
 
 ## [1.8.0](https://github.com/go-gitea/gitea/releases/tag/v1.8.0) - 2019-04-20
+
 * SECURITY
   * Prevent remote code execution vulnerability with mirror repo URL settings (#6593) (#6594)
   * Resolve 2FA bypass on API (#6676) (#6674)
@@ -2144,18 +2214,21 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Migrate database if app.ini found (#5290)
 
 ## [1.7.6](https://github.com/go-gitea/gitea/releases/tag/v1.7.6) - 2019-04-12
+
 * SECURITY
   * Prevent remote code execution vulnerability with mirror repo URL settings (#6593) (#6595)
 * BUGFIXES
   * Allow resend of confirmation email when logged in (#6482) (#6487)
 
 ## [1.7.5](https://github.com/go-gitea/gitea/releases/tag/v1.7.5) - 2019-03-27
+
 * BUGFIXES
   * Fix unitTypeCode not being used in accessLevelUnit (#6419) (#6423)
   * Fix bug where manifest.json was being requested without cookies and continuously creating new sessions (#6372) (#6383)
   * Fix ParsePatch function to work with quoted diff --git strings (#6323) (#6332)
 
 ## [1.7.4](https://github.com/go-gitea/gitea/releases/tag/v1.7.4) - 2019-03-12
+
 * SECURITY
   * Fix potential XSS vulnerability in repository description. (#6306) (#6308)
 * BUGFIXES
@@ -2165,6 +2238,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix displaying dashboard even if required to change password (#6214) (#6215)
 
 ## [1.7.3](https://github.com/go-gitea/gitea/releases/tag/v1.7.3) - 2019-02-27
+
 * BUGFIXES
   * Fix server 500 when trying to migrate to an already existing repository (#6188) (#6197)
   * Load Issue attributes for API /repos/{owner}/{repo}/issues/{index} (#6122) (#6185)
@@ -2179,6 +2253,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Recover panic in orgmode.Render if bad orgfile (#4982) (#5903) (#6097)
 
 ## [1.7.2](https://github.com/go-gitea/gitea/releases/tag/v1.7.2) - 2019-02-14
+
 * BUGFIXES
   * Remove all CommitStatus when a repo is deleted (#5940) (#5941)
   * Fix notifications on pushing with deploy keys by setting hook environment variables (#5935) (#5944)
@@ -2195,6 +2270,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * In basic auth check for tokens before call UserSignIn (#5725) (#6083)
 
 ## [1.7.1](https://github.com/go-gitea/gitea/releases/tag/v1.7.1) - 2019-01-31
+
 * SECURITY
   * Disable redirect for i18n (#5910) (#5916)
   * Only allow local login if password is non-empty (#5906) (#5908)
@@ -2216,6 +2292,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Include Go toolchain to --version (#5832) (#5830)
 
 ## [1.7.0](https://github.com/go-gitea/gitea/releases/tag/v1.7.0) - 2019-01-22
+
 * SECURITY
   * Do not display the raw OpenID error in the UI (#5705) (#5712)
   * When redirecting clean the path to avoid redirecting to external site (#5669) (#5679)
@@ -2372,18 +2449,21 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Only chown directories during docker setup if necessary. Fix #4425 (#5064)
 
 ## [1.6.4](https://github.com/go-gitea/gitea/releases/tag/v1.6.4) - 2019-01-15
+
 * BUGFIX
   * Fix SSH key now can be reused as public key after deleting as deploy key (#5671) (#5685)
   * When redirecting clean the path to avoid redirecting to external site (#5669) (#5703)
   * Fix to use correct value for "MSpan Structures Obtained" (#5706) (#5715)
 
 ## [1.6.3](https://github.com/go-gitea/gitea/releases/tag/v1.6.3) - 2019-01-04
+
 * SECURITY
   * Prevent DeleteFilePost doing arbitrary deletion (#5631)
 * BUGFIX
   * Fix wrong text getting saved on editing second comment on an issue (#5608)
 
 ## [1.6.2](https://github.com/go-gitea/gitea/releases/tag/v1.6.2) - 2018-12-21
+
 * SECURITY
   * Sanitize uploaded file names (#5571) (#5573)
   * HTMLEncode user added text (#5570) (#5575)
@@ -2398,6 +2478,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix empty wiki (#5504) (#5508)
 
 ## [1.6.1](https://github.com/go-gitea/gitea/releases/tag/v1.6.1) - 2018-12-08
+
 * BUGFIXES
   * Fix dependent issue searching when gitea is run in subpath (#5392) (#5400)
   * API: '/orgs/:org/repos': return private repos with read access (#5393)
@@ -2408,6 +2489,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix topic name length on database (#5493) (#5495)
 
 ## [1.6.0](https://github.com/go-gitea/gitea/releases/tag/v1.6.0) - 2018-11-22
+
 * BREAKING
   * Respect email privacy option in user search via API (#4512)
   * Simply remove tidb and deps (#3993)
@@ -2561,10 +2643,12 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix translation (#4355)
 
 ## [1.5.3](https://github.com/go-gitea/gitea/releases/tag/v1.5.3) - 2018-10-31
+
 * SECURITY
   * Fix remote command execution vulnerability in upstream library (#5177) (#5196)
 
 ## [1.5.2](https://github.com/go-gitea/gitea/releases/tag/v1.5.2) - 2018-10-09
+
 * SECURITY
   * Enforce token on api routes (#4840) (#4905)
 * BUGFIXES
@@ -2581,6 +2665,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix trimming of markup section names (#4864)
 
 ## [1.5.1](https://github.com/go-gitea/gitea/releases/tag/v1.5.1) - 2018-09-03
+
 * SECURITY
   * Don't disclose emails of all users when sending out emails (#4784)
   * Improve URL validation for external wiki and external issues (#4710) (#4740)
@@ -2595,6 +2680,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix incorrect caption of webhook setting (#4701) (#4718)
 
 ## [1.5.0](https://github.com/go-gitea/gitea/releases/tag/v1.5.0) - 2018-08-10
+
 * SECURITY
   * Check that repositories can only be migrated to own user or organizations (#4366) (#4370)
   * Limit uploaded avatar image-size to 4096px x 3072px by default (#4353)
@@ -2658,6 +2744,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Sign release binaries (#4188)
 
 ## [1.4.3](https://github.com/go-gitea/gitea/releases/tag/v1.4.3) - 2018-06-26
+
 * SECURITY
   * HTML-escape plain-text READMEs (#4192) (#4214)
   * Fix open redirect vulnerability on login screen (#4312) (#4312)
@@ -2670,6 +2757,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix webhook type conflation (#4285) (#4285)
 
 ## [1.4.2](https://github.com/go-gitea/gitea/releases/tag/v1.4.2) - 2018-06-04
+
 * BUGFIXES
   * Adjust z-index for floating labels (#3939) (#3950)
   * Add missing token validation on application settings page (#3976) #3978
@@ -2685,6 +2773,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Respository's home page not updated after first push (#4075)
 
 ## [1.4.1](https://github.com/go-gitea/gitea/releases/tag/v1.4.1) - 2018-05-03
+
 * BREAKING
   * Add "error" as reserved username (#3882) (#3886)
 * SECURITY
@@ -2702,6 +2791,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Show clipboard button if disable HTTP of git protocol (#3773) (#3774)
 
 ## [1.4.0](https://github.com/go-gitea/gitea/releases/tag/v1.4.0) - 2018-03-25
+
 * BREAKING
   * Drop deprecated GOGS\_WORK\_DIR use (#2946)
   * Fix API status code for hook creation (#2814)
@@ -2821,6 +2911,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Add owner to delete repo message (#2886)
 
 ## [1.3.1](https://github.com/go-gitea/gitea/releases/tag/v1.3.1) - 2017-12-08
+
 * BUGFIXES
   * Sanitize logs for mirror sync (#3057, #3082) (#3078)
   * Fix missing branch in release bug (#3108) (#3117)
@@ -2831,6 +2922,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix missing password length check when change password (#3039) (#3071)
 
 ## [1.3.0](https://github.com/go-gitea/gitea/releases/tag/v1.3.0) - 2017-11-29
+
 * BREAKING
   * Make URL scheme unambiguous (#2408)
 * FEATURES
@@ -3058,11 +3150,13 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Added vendor dir for js/css libs; Documented sources (#1484) (#2241)
 
 ## [1.2.3](https://github.com/go-gitea/gitea/releases/tag/v1.2.3) - 2017-11-03
+
 * BUGFIXES
   * Only require one email when validating GPG key (#2266, #2467, #2663) (#2788)
   * Fix order of comments (#2835) (#2839)
 
 ## [1.2.2](https://github.com/go-gitea/gitea/releases/tag/v1.2.2) - 2017-10-26
+
 * BUGFIXES
   * Add checks for commits with missing author and time (#2771) (#2785)
   * Fix sending mail with a non-latin display name (#2559) (#2783)
@@ -3071,6 +3165,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix emojify image URL (#2769) (#2773)
 
 ## [1.2.1](https://github.com/go-gitea/gitea/releases/tag/v1.2.1) - 2017-10-16
+
 * BUGFIXES
   * Fix PR, milestone and label functionality if issue unit is disabled (#2710) (#2714)
   * Fix plain readme didn't render correctly on repo home page (#2705) (#2712)
@@ -3079,6 +3174,7 @@ WARNING: v1.10.5 is incorrectly tagged targeting 1.12-dev and should **not** be
   * Fix slice out of bounds error in mailer (#2479) (#2696)
 
 ## [1.2.0](https://github.com/go-gitea/gitea/releases/tag/v1.2.0) - 2017-10-10
+
 * SECURITY
   * Sanitation fix from Gogs (#1461)
 * BREAKING
@@ -66,6 +66,10 @@ func fatal(format string, args ...interface{}) {
 
 func runDump(ctx *cli.Context) error {
     setting.NewContext()
+    if !setting.InstallLock {
+        log.Error("Is '%s' really the right config path?\n", setting.CustomConf)
+        return fmt.Errorf("gitea is not initialized")
+    }
     setting.NewServices() // cannot access session settings otherwise
 
     err := models.SetEngine()
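The hunk above makes `gitea dump` refuse to run before the install step has finished. A minimal, self-contained sketch of the same fail-fast pattern; the `settings` type and `runDump` signature here are illustrative stand-ins, not Gitea's actual API:

```go
package main

import (
	"errors"
	"fmt"
	"log"
)

// settings stands in for Gitea's setting package; InstallLock is only
// true once installation has written a complete app.ini.
type settings struct {
	InstallLock bool
	CustomConf  string
}

// runDump refuses to do any work when the configuration was never
// initialized, mirroring the guard added in the hunk above.
func runDump(cfg settings) error {
	if !cfg.InstallLock {
		log.Printf("Is '%s' really the right config path?", cfg.CustomConf)
		return errors.New("gitea is not initialized")
	}
	fmt.Println("dumping...")
	return nil
}

func main() {
	if err := runDump(settings{CustomConf: "/etc/gitea/app.ini"}); err != nil {
		log.Println("dump aborted:", err)
	}
}
```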
4 go.mod
@@ -75,7 +75,7 @@ require (
     github.com/microcosm-cc/bluemonday v1.0.3-0.20191119130333-0a75d7616912
     github.com/mitchellh/go-homedir v1.1.0
     github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
-    github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5
+    github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
     github.com/niklasfasching/go-org v0.1.9
     github.com/oliamb/cutter v0.2.2
     github.com/olivere/elastic/v7 v7.0.9
@@ -117,7 +117,7 @@ require (
     gopkg.in/ldap.v3 v3.0.2
     gopkg.in/testfixtures.v2 v2.5.0
     gopkg.in/yaml.v2 v2.2.8
-    mvdan.cc/xurls/v2 v2.1.0
+    mvdan.cc/xurls/v2 v2.2.0
     strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
     xorm.io/builder v0.3.7
     xorm.io/xorm v1.0.1
9 go.sum
@@ -483,8 +483,8 @@ github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOl
 github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc h1:z1PgdCCmYYVL0BoJTUgmAq1p7ca8fzYIPsNyfsN3xAU=
 github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc/go.mod h1:np1wUFZ6tyoke22qDJZY40URn9Ae51gX7ljIWXN5TJs=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
-github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
+github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
+github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
 github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
 github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
 github.com/niklasfasching/go-org v0.1.9 h1:Toz8WMIt+qJb52uYEk1YD/muLuOOmRt1CfkV+bKVMkI=
@@ -554,6 +554,7 @@ github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqn
 github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ=
 github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
 github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
+github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
 github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
 github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
@@ -852,6 +853,7 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogR
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
@@ -885,8 +887,7 @@ honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWh
 honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
-mvdan.cc/xurls/v2 v2.1.0 h1:KaMb5GLhlcSX+e+qhbRJODnUUBvlw01jt4yrjFIHAuA=
-mvdan.cc/xurls/v2 v2.1.0/go.mod h1:5GrSd9rOnKOpZaji1OZLYL/yeAAtGDlo/cFe+8K5n8E=
+mvdan.cc/xurls/v2 v2.2.0/go.mod h1:EV1RMtya9D6G5DMYPGD8zTQzaHet6Jh8gFlRgGRJeO8=
 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
 strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs=
 strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY=
@@ -5,14 +5,17 @@
 package integrations
 
 import (
+    "context"
     "encoding/json"
     "fmt"
     "io/ioutil"
     "net/http"
     "testing"
+    "time"
 
     "code.gitea.io/gitea/models"
     "code.gitea.io/gitea/modules/auth"
+    "code.gitea.io/gitea/modules/queue"
     api "code.gitea.io/gitea/modules/structs"
 
     "github.com/stretchr/testify/assert"
@@ -224,11 +227,25 @@ func doAPIMergePullRequest(ctx APITestContext, owner, repo string, index int64)
             Do: string(models.MergeStyleMerge),
         })
 
-        if ctx.ExpectedCode != 0 {
-            ctx.Session.MakeRequest(t, req, ctx.ExpectedCode)
-            return
+        resp := ctx.Session.MakeRequest(t, req, NoExpectedStatus)
+
+        if resp.Code == http.StatusMethodNotAllowed {
+            err := api.APIError{}
+            DecodeJSON(t, resp, &err)
+            assert.EqualValues(t, "Please try again later", err.Message)
+            queue.GetManager().FlushAll(context.Background(), 5*time.Second)
+            resp = ctx.Session.MakeRequest(t, req, NoExpectedStatus)
+        }
+
+        expected := ctx.ExpectedCode
+        if expected == 0 {
+            expected = 200
+        }
+
+        if !assert.EqualValues(t, expected, resp.Code,
+            "Request: %s %s", req.Method, req.URL.String()) {
+            logUnexpectedResponse(t, resp)
         }
-        ctx.Session.MakeRequest(t, req, 200)
     }
 }
 
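The updated `doAPIMergePullRequest` no longer demands an immediate success: when the merge endpoint answers 405 with "Please try again later" (the pull request is still being checked in the background), it flushes Gitea's queues and retries exactly once. A rough sketch of that retry shape against a plain `net/http` client; the URL, the `settle` hook, and the function names are placeholders rather than the test framework's real helpers:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// mergeOnce posts to the merge endpoint and reports the status code.
func mergeOnce(client *http.Client, url string) (int, error) {
	resp, err := client.Post(url, "application/json", nil)
	if err != nil {
		return 0, err
	}
	defer resp.Body.Close()
	return resp.StatusCode, nil
}

// mergeWithRetry mirrors the test helper's shape: if the first attempt
// is rejected with 405 (merge not yet possible), wait for background
// work to settle and try exactly once more.
func mergeWithRetry(client *http.Client, url string, settle func()) (int, error) {
	code, err := mergeOnce(client, url)
	if err != nil {
		return 0, err
	}
	if code == http.StatusMethodNotAllowed {
		settle() // in the real test this flushes Gitea's task queues
		return mergeOnce(client, url)
	}
	return code, nil
}

func main() {
	client := &http.Client{Timeout: 10 * time.Second}
	code, err := mergeWithRetry(client,
		"http://localhost:3000/api/v1/repos/owner/repo/pulls/1/merge",
		func() { time.Sleep(5 * time.Second) })
	fmt.Println(code, err)
}
```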
@@ -43,7 +43,7 @@ func TestAPIPullReview(t *testing.T) {
     assert.EqualValues(t, 10, reviews[5].ID)
     assert.EqualValues(t, "REQUEST_CHANGES", reviews[5].State)
     assert.EqualValues(t, 1, reviews[5].CodeCommentsCount)
-    assert.EqualValues(t, 0, reviews[5].Reviewer.ID) // ghost user
+    assert.EqualValues(t, -1, reviews[5].Reviewer.ID) // ghost user
     assert.EqualValues(t, false, reviews[5].Stale)
     assert.EqualValues(t, true, reviews[5].Official)
 
@@ -26,7 +26,7 @@ func TestUserHeatmap(t *testing.T) {
     var heatmap []*models.UserHeatmapData
     DecodeJSON(t, resp, &heatmap)
     var dummyheatmap []*models.UserHeatmapData
-    dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1571616000, Contributions: 1})
+    dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603152000, Contributions: 1})
 
     assert.Equal(t, dummyheatmap, heatmap)
 }
@@ -5,9 +5,12 @@
 package integrations
 
 import (
+    "fmt"
     "net/http"
     "testing"
 
+    "code.gitea.io/gitea/models"
+    "code.gitea.io/gitea/modules/setting"
     api "code.gitea.io/gitea/modules/structs"
 
     "github.com/stretchr/testify/assert"
@@ -45,8 +48,14 @@ func TestAPIUserSearchNotLoggedIn(t *testing.T) {
     var results SearchResults
     DecodeJSON(t, resp, &results)
     assert.NotEmpty(t, results.Data)
+    var modelUser *models.User
     for _, user := range results.Data {
         assert.Contains(t, user.UserName, query)
-        assert.Empty(t, user.Email)
+        modelUser = models.AssertExistsAndLoadBean(t, &models.User{ID: user.ID}).(*models.User)
+        if modelUser.KeepEmailPrivate {
+            assert.EqualValues(t, fmt.Sprintf("%s@%s", modelUser.LowerName, setting.Service.NoReplyAddress), user.Email)
+        } else {
+            assert.EqualValues(t, modelUser.Email, user.Email)
+        }
     }
 }
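The search test now expects the API to expose a real address only when the user has not hidden it; users with `KeepEmailPrivate` set get a synthetic no-reply address built from their lower-cased name. A small sketch of that rule, using the field and setting names from the test above; the `visibleEmail` helper itself is hypothetical:

```go
package main

import (
	"fmt"
	"strings"
)

type user struct {
	LowerName        string
	Email            string
	KeepEmailPrivate bool
}

// visibleEmail returns the address the API should expose for a user:
// the stored email, or a no-reply placeholder when the user keeps
// their email private.
func visibleEmail(u user, noReplyAddress string) string {
	if u.KeepEmailPrivate {
		return fmt.Sprintf("%s@%s", strings.ToLower(u.LowerName), noReplyAddress)
	}
	return u.Email
}

func main() {
	fmt.Println(visibleEmail(user{LowerName: "user2", Email: "user2@example.com"}, "noreply.example.org"))
	fmt.Println(visibleEmail(user{LowerName: "user31", KeepEmailPrivate: true}, "noreply.example.org"))
}
```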
@@ -141,7 +141,7 @@ func TestLDAPUserSignin(t *testing.T) {
 
     assert.Equal(t, u.UserName, htmlDoc.GetInputValueByName("name"))
     assert.Equal(t, u.FullName, htmlDoc.GetInputValueByName("full_name"))
-    assert.Equal(t, u.Email, htmlDoc.GetInputValueByName("email"))
+    assert.Equal(t, u.Email, htmlDoc.Find(`label[for="email"]`).Siblings().First().Text())
 }
 
 func TestLDAPUserSync(t *testing.T) {
@@ -37,6 +37,13 @@ func (doc *HTMLDoc) GetInputValueByName(name string) string {
     return text
 }
 
+// Find gets the descendants of each element in the current set of
+// matched elements, filtered by a selector. It returns a new Selection
+// object containing these matched elements.
+func (doc *HTMLDoc) Find(selector string) *goquery.Selection {
+    return doc.doc.Find(selector)
+}
+
 // GetCSRF for get CSRC token value from input
 func (doc *HTMLDoc) GetCSRF() string {
     return doc.GetInputValueByName("_csrf")
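The new `Find` wrapper simply exposes the underlying goquery selection, which is what lets the LDAP test read the e-mail that is now rendered as plain text next to its label instead of inside an input. A hedged usage sketch against goquery directly; the markup here is invented for illustration:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

const page = `<form>
  <label for="email">Email</label>
  <span>user@example.com</span>
</form>`

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}
	// Same traversal as the updated test: find the label, then take the
	// text of its first sibling element.
	email := doc.Find(`label[for="email"]`).Siblings().First().Text()
	fmt.Println(email) // user@example.com
}
```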
@@ -152,6 +152,7 @@ func restoreOldDB(t *testing.T, version string) bool {
 
         _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name))
         assert.NoError(t, err)
+        db.Close()
 
         db, err = sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/%s?multiStatements=true",
             setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name))
@@ -182,6 +183,8 @@ func restoreOldDB(t *testing.T, version string) bool {
         if !assert.NoError(t, err) {
             return false
         }
+        defer db.Close()
+
         schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema))
         if !assert.NoError(t, err) || !assert.NotEmpty(t, schrows) {
             return false
@@ -41,7 +41,18 @@ func AvatarLink(email string) string {
             Email: lowerEmail,
             Hash: sum,
         }
-        _, _ = x.Insert(emailHash)
+        // OK we're going to open a session just because I think that that might hide away any problems with postgres reporting errors
+        sess := x.NewSession()
+        defer sess.Close()
+        if err := sess.Begin(); err != nil {
+            // we don't care about any DB problem just return the lowerEmail
+            return lowerEmail, nil
+        }
+        _, _ = sess.Insert(emailHash)
+        if err := sess.Commit(); err != nil {
+            // Seriously we don't care about any DB problems just return the lowerEmail - we expect the transaction to fail most of the time
+            return lowerEmail, nil
+        }
         return lowerEmail, nil
     })
     return setting.AppSubURL + "/avatar/" + url.PathEscape(sum)
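Wrapping the `email_hash` insert in its own short transaction keeps an expected failure (the row usually already exists) from tainting whatever statement runs next on the same connection, which appears to be the PostgreSQL-specific problem the comments in the hunk allude to. A generic sketch of the same best-effort pattern with database/sql; the table name, columns, driver, and DSN are placeholders:

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq" // driver choice is an assumption for this sketch
)

// bestEffortInsert tries to record an email/hash pair but never lets a
// failure propagate: it runs in its own transaction so that a duplicate
// key error is rolled back in isolation instead of aborting the
// caller's connection state.
func bestEffortInsert(db *sql.DB, email, hash string) {
	tx, err := db.Begin()
	if err != nil {
		return // we don't care; the caller only needs the hash
	}
	if _, err := tx.Exec("INSERT INTO email_hash (email, hash) VALUES ($1, $2)", email, hash); err != nil {
		_ = tx.Rollback()
		return
	}
	_ = tx.Commit() // a failed commit is equally ignorable here
}

func main() {
	db, err := sql.Open("postgres", "postgres://gitea:gitea@localhost/gitea?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	bestEffortInsert(db, "someone@example.com", "d41d8cd98f00b204e9800998ecf8427e")
}
```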
@@ -5,7 +5,7 @@
 act_user_id: 2
 repo_id: 2
 is_private: true
-created_unix: 1571686356
+created_unix: 1603228283
 
 -
 id: 2
@@ -7,6 +7,7 @@ package migrations
 
 import (
     "fmt"
+    "os"
     "regexp"
     "strings"
 
@@ -290,12 +291,16 @@ Please try upgrading to a lower version first (suggested v1.6.4), then upgrade t
         return nil
     }
 
+    // Downgrading Gitea's database version not supported
     if int(v-minDBVersion) > len(migrations) {
-        // User downgraded Gitea.
-        currentVersion.Version = int64(len(migrations) + minDBVersion)
-        _, err = x.ID(1).Update(currentVersion)
-        return err
+        msg := fmt.Sprintf("Downgrading database version from '%d' to '%d' is not supported and may result in loss of data integrity.\nIf you really know what you're doing, execute `UPDATE version SET version=%d WHERE id=1;`\n",
+            v, minDBVersion+len(migrations), minDBVersion+len(migrations))
+        fmt.Fprint(os.Stderr, msg)
+        log.Fatal(msg)
+        return nil
     }
+
+    // Migrate
     for i, m := range migrations[v-minDBVersion:] {
         log.Info("Migration[%d]: %s", v+int64(i), m.Description())
         if err = m.Migrate(x); err != nil {
@@ -11,7 +11,6 @@ import (
 )
 
 func addBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error {
 
 	type ProtectedBranch struct {
 		CanPush                  bool `xorm:"NOT NULL DEFAULT false"`
 		EnableApprovalsWhitelist bool `xorm:"NOT NULL DEFAULT false"`
@@ -23,29 +22,26 @@ func addBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error {
 		Official                 bool `xorm:"NOT NULL DEFAULT false"`
 	}
 
-	sess := x.NewSession()
-	defer sess.Close()
-
-	if err := sess.Sync2(new(ProtectedBranch)); err != nil {
+	if err := x.Sync2(new(ProtectedBranch)); err != nil {
 		return err
 	}
 
-	if err := sess.Sync2(new(Review)); err != nil {
+	if err := x.Sync2(new(Review)); err != nil {
 		return err
 	}
 
-	if _, err := sess.Exec("UPDATE `protected_branch` SET `enable_whitelist` = ? WHERE enable_whitelist IS NULL", false); err != nil {
+	if _, err := x.Exec("UPDATE `protected_branch` SET `enable_whitelist` = ? WHERE enable_whitelist IS NULL", false); err != nil {
 		return err
 	}
-	if _, err := sess.Exec("UPDATE `protected_branch` SET `can_push` = `enable_whitelist`"); err != nil {
+	if _, err := x.Exec("UPDATE `protected_branch` SET `can_push` = `enable_whitelist`"); err != nil {
 		return err
 	}
-	if _, err := sess.Exec("UPDATE `protected_branch` SET `enable_approvals_whitelist` = ? WHERE `required_approvals` > ?", true, 0); err != nil {
+	if _, err := x.Exec("UPDATE `protected_branch` SET `enable_approvals_whitelist` = ? WHERE `required_approvals` > ?", true, 0); err != nil {
 		return err
 	}
 
 	var pageSize int64 = 20
-	qresult, err := sess.QueryInterface("SELECT max(id) as max_id FROM issue")
+	qresult, err := x.QueryInterface("SELECT max(id) as max_id FROM issue")
 	if err != nil {
 		return err
 	}
@@ -57,10 +53,19 @@ func addBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error {
 	}
 	totalPages := totalIssues / pageSize
 
+	sess := x.NewSession()
+	defer sess.Close()
+
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
 	// Find latest review of each user in each pull request, and set official field if appropriate
-	reviews := []*models.Review{}
 	var page int64
+	var count int
 	for page = 0; page <= totalPages; page++ {
+		reviews := []*models.Review{}
 		if err := sess.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id > ? AND issue_id <= ? AND type in (?, ?) GROUP BY issue_id, reviewer_id)",
 			page*pageSize, (page+1)*pageSize, models.ReviewTypeApprove, models.ReviewTypeReject).
 			Find(&reviews); err != nil {
@@ -68,23 +73,37 @@ func addBranchProtectionCanPushAndEnableWhitelist(x *xorm.Engine) error {
 		}
 
 		for _, review := range reviews {
-			if err := review.LoadAttributes(); err != nil {
+			if err := review.LoadAttributesX(sess); err != nil {
 				// Error might occur if user or issue doesn't exist, ignore it.
 				continue
 			}
-			official, err := models.IsOfficialReviewer(review.Issue, review.Reviewer)
+			official, err := models.IsOfficialReviewerX(sess, review.Issue, review.Reviewer)
 			if err != nil {
 				// Branch might not be proteced or other error, ignore it.
 				continue
 			}
			review.Official = official
+			count++
 
 			if _, err := sess.ID(review.ID).Cols("official").Update(review); err != nil {
 				return err
 			}
+
+			if count == 100 {
+				if err := sess.Commit(); err != nil {
+					return err
+				}
+				count = 0
+				if err := sess.Begin(); err != nil {
+					return err
+				}
+			}
 		}
 	}
 
-	return sess.Commit()
+	if count > 0 {
+		return sess.Commit()
+	}
+	return nil
 }
@@ -264,6 +264,17 @@ func DumpDatabase(filePath string, dbType string) error {
 		}
 		tbs = append(tbs, t)
 	}
+
+	type Version struct {
+		ID      int64 `xorm:"pk autoincr"`
+		Version int64
+	}
+	t, err := x.TableInfo(Version{})
+	if err != nil {
+		return err
+	}
+	tbs = append(tbs, t)
+
 	if len(dbType) > 0 {
 		return x.DumpTablesToFile(tbs, filePath, schemas.DBType(dbType))
 	}
@@ -21,6 +21,12 @@ func TestDumpDatabase(t *testing.T) {
 	dir, err := ioutil.TempDir(os.TempDir(), "dump")
 	assert.NoError(t, err)
 
+	type Version struct {
+		ID      int64 `xorm:"pk autoincr"`
+		Version int64
+	}
+	assert.NoError(t, x.Sync2(Version{}))
+
 	for _, dbName := range setting.SupportedDatabases {
 		dbType := setting.GetDBTypeByName(dbName)
 		assert.NoError(t, DumpDatabase(filepath.Join(dir, dbType+".sql"), dbType))
@@ -178,7 +178,7 @@ func getUserRepoPermission(e Engine, repo *Repository, user *User) (perm Permiss
 
 	// Prevent strangers from checking out public repo of private orginization
 	// Allow user if they are collaborator of a repo within a private orginization but not a member of the orginization itself
-	if repo.Owner.IsOrganization() && !HasOrgVisible(repo.Owner, user) && !isCollaborator {
+	if repo.Owner.IsOrganization() && !hasOrgVisible(e, repo.Owner, user) && !isCollaborator {
 		perm.AccessMode = AccessModeNone
 		return
 	}
@@ -110,7 +110,8 @@ func (r *Review) LoadReviewer() error {
 	return r.loadReviewer(x)
 }
 
-func (r *Review) loadAttributes(e Engine) (err error) {
+// LoadAttributesX loads all attributes except CodeComments with an Engine parameter
+func (r *Review) LoadAttributesX(e Engine) (err error) {
 	if err = r.loadIssue(e); err != nil {
 		return
 	}
@@ -125,7 +126,7 @@ func (r *Review) loadAttributes(e Engine) (err error) {
 
 // LoadAttributes loads all attributes except CodeComments
 func (r *Review) LoadAttributes() error {
-	return r.loadAttributes(x)
+	return r.LoadAttributesX(x)
 }
 
 func getReviewByID(e Engine, id int64) (*Review, error) {
@@ -203,6 +204,12 @@ func IsOfficialReviewer(issue *Issue, reviewer *User) (bool, error) {
 	return isOfficialReviewer(x, issue, reviewer)
 }
 
+// IsOfficialReviewerX check if reviewer can make official reviews in issue (counts towards required approvals)
+// with an Engine parameter
+func IsOfficialReviewerX(e Engine, issue *Issue, reviewer *User) (bool, error) {
+	return isOfficialReviewer(x, issue, reviewer)
+}
+
 func isOfficialReviewer(e Engine, issue *Issue, reviewer *User) (bool, error) {
 	pr, err := getPullRequestByIssueID(e, issue.ID)
 	if err != nil {
@@ -1418,11 +1418,21 @@ func getUserEmailsByNames(e Engine, names []string) []string {
 }
 
 // GetMaileableUsersByIDs gets users from ids, but only if they can receive mails
-func GetMaileableUsersByIDs(ids []int64) ([]*User, error) {
+func GetMaileableUsersByIDs(ids []int64, isMention bool) ([]*User, error) {
 	if len(ids) == 0 {
 		return nil, nil
 	}
 	ous := make([]*User, 0, len(ids))
+
+	if isMention {
+		return ous, x.In("id", ids).
+			Where("`type` = ?", UserTypeIndividual).
+			And("`prohibit_login` = ?", false).
+			And("`is_active` = ?", true).
+			And("`email_notifications_preference` IN ( ?, ?)", EmailNotificationsEnabled, EmailNotificationsOnMention).
+			Find(&ous)
+	}
+
 	return ous, x.In("id", ids).
 		Where("`type` = ?", UserTypeIndividual).
 		And("`prohibit_login` = ?", false).
@@ -17,7 +17,7 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
 		CountResult int
 		JSONResult  string
 	}{
-		{2, 1, `[{"timestamp":1571616000,"contributions":1}]`},
+		{2, 1, `[{"timestamp":1603152000,"contributions":1}]`},
 		{3, 0, `[]`},
 	}
 	// Prepare
@@ -389,3 +389,20 @@ func TestGetUserIDsByNames(t *testing.T) {
 	assert.Error(t, err)
 	assert.Equal(t, []int64(nil), IDs)
 }
+
+func TestGetMaileableUsersByIDs(t *testing.T) {
+	results, err := GetMaileableUsersByIDs([]int64{1, 4}, false)
+	assert.NoError(t, err)
+	assert.Equal(t, 1, len(results))
+	if len(results) > 1 {
+		assert.Equal(t, results[0].ID, 1)
+	}
+
+	results, err = GetMaileableUsersByIDs([]int64{1, 4}, true)
+	assert.NoError(t, err)
+	assert.Equal(t, 2, len(results))
+	if len(results) > 2 {
+		assert.Equal(t, results[0].ID, 1)
+		assert.Equal(t, results[1].ID, 4)
+	}
+}
@@ -97,6 +97,9 @@ func (f MigrateRepoForm) ParseRemoteAddr(user *models.User) (string, error) {
 			u.User = url.UserPassword(f.AuthUsername, f.AuthPassword)
 		}
 		remoteAddr = u.String()
+		if u.Scheme == "git" && u.Port() != "" && (strings.Contains(remoteAddr, "%0d") || strings.Contains(remoteAddr, "%0a")) {
+			return "", models.ErrInvalidCloneAddr{IsURLError: true}
+		}
 	} else if !user.CanImportLocal() {
 		return "", models.ErrInvalidCloneAddr{IsPermissionDenied: true}
 	} else if !com.IsDir(remoteAddr) {
@@ -198,12 +198,12 @@ func (f *AccessTokenForm) Validate(ctx *macaron.Context, errs binding.Errors) bi
 type UpdateProfileForm struct {
 	Name                string `binding:"AlphaDashDot;MaxSize(40)"`
 	FullName            string `binding:"MaxSize(100)"`
-	Email               string `binding:"Required;Email;MaxSize(254)"`
 	KeepEmailPrivate    bool
 	Website             string `binding:"ValidUrl;MaxSize(255)"`
 	Location            string `binding:"MaxSize(50)"`
 	Language            string `binding:"Size(5)"`
 	Description         string `binding:"MaxSize(255)"`
+	KeepActivityPrivate bool
 }
 
 // Validate validates the fields
@@ -89,6 +89,6 @@ func Prepare(data []byte) (*image.Image, error) {
 		}
 	}
 
-	img = resize.Resize(AvatarSize, AvatarSize, img, resize.NearestNeighbor)
+	img = resize.Resize(AvatarSize, AvatarSize, img, resize.Bilinear)
 	return &img, nil
 }
@@ -251,3 +251,61 @@ func (ctx *APIContext) NotFound(objs ...interface{}) {
 		"errors": errors,
 	})
 }
+
+// RepoRefForAPI handles repository reference names when the ref name is not explicitly given
+func RepoRefForAPI() macaron.Handler {
+	return func(ctx *APIContext) {
+		// Empty repository does not have reference information.
+		if ctx.Repo.Repository.IsEmpty {
+			return
+		}
+
+		var err error
+
+		if ctx.Repo.GitRepo == nil {
+			repoPath := models.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
+			ctx.Repo.GitRepo, err = git.OpenRepository(repoPath)
+			if err != nil {
+				ctx.InternalServerError(err)
+				return
+			}
+			// We opened it, we should close it
+			defer func() {
+				// If it's been set to nil then assume someone else has closed it.
+				if ctx.Repo.GitRepo != nil {
+					ctx.Repo.GitRepo.Close()
+				}
+			}()
+		}
+
+		refName := getRefName(ctx.Context, RepoRefAny)
+
+		if ctx.Repo.GitRepo.IsBranchExist(refName) {
+			ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
+			if err != nil {
+				ctx.InternalServerError(err)
+				return
+			}
+			ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+		} else if ctx.Repo.GitRepo.IsTagExist(refName) {
+			ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
+			if err != nil {
+				ctx.InternalServerError(err)
+				return
+			}
+			ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
+		} else if len(refName) == 40 {
+			ctx.Repo.CommitID = refName
+			ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
+			if err != nil {
+				ctx.NotFound("GetCommit", err)
+				return
+			}
+		} else {
+			ctx.NotFound(fmt.Errorf("not exist: '%s'", ctx.Params("*")))
+			return
+		}
+
+		ctx.Next()
+	}
+}
@@ -690,7 +690,6 @@ func RepoRefByType(refType RepoRefType) macaron.Handler {
 			err error
 		)
 
-		// For API calls.
 		if ctx.Repo.GitRepo == nil {
 			repoPath := models.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
 			ctx.Repo.GitRepo, err = git.OpenRepository(repoPath)
@@ -759,7 +758,7 @@ func RepoRefByType(refType RepoRefType) macaron.Handler {
 
 			ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
 			if err != nil {
-				ctx.NotFound("GetCommit", nil)
+				ctx.NotFound("GetCommit", err)
 				return
 			}
 		} else {
@@ -1,4 +1,5 @@
 // Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
 // Use of this source code is governed by a MIT-style
 // license that can be found in the LICENSE file.
 
@@ -335,9 +336,11 @@ func ToTeam(team *models.Team) *api.Team {
 // signed shall only be set if requester is logged in. authed shall only be set if user is site admin or user himself
 func ToUser(user *models.User, signed, authed bool) *api.User {
 	result := &api.User{
+		ID:        user.ID,
 		UserName:  user.Name,
-		AvatarURL: user.AvatarLink(),
 		FullName:  markup.Sanitize(user.FullName),
+		Email:     user.GetEmail(),
+		AvatarURL: user.AvatarLink(),
 		Created:   user.CreatedUnix.AsTime(),
 	}
 	// hide primary email if API caller is anonymous or user keep email private
@@ -346,7 +349,6 @@ func ToUser(user *models.User, signed, authed bool) *api.User {
 	}
 	// only site admin will get these information and possibly user himself
 	if authed {
-		result.ID = user.ID
 		result.IsAdmin = user.IsAdmin
 		result.LastLogin = user.LastLoginUnix.AsTime()
 		result.Language = user.Language
@@ -27,7 +27,7 @@ type BlameReader struct {
 	cmd     *exec.Cmd
 	pid     int64
 	output  io.ReadCloser
-	scanner *bufio.Scanner
+	reader  *bufio.Reader
 	lastSha *string
 	cancel  context.CancelFunc
 }
@@ -38,23 +38,30 @@ var shaLineRegex = regexp.MustCompile("^([a-z0-9]{40})")
 func (r *BlameReader) NextPart() (*BlamePart, error) {
 	var blamePart *BlamePart
 
-	scanner := r.scanner
+	reader := r.reader
 
 	if r.lastSha != nil {
 		blamePart = &BlamePart{*r.lastSha, make([]string, 0)}
 	}
 
-	for scanner.Scan() {
-		line := scanner.Text()
+	var line []byte
+	var isPrefix bool
+	var err error
+
+	for err != io.EOF {
+		line, isPrefix, err = reader.ReadLine()
+		if err != nil && err != io.EOF {
+			return blamePart, err
+		}
 
-		// Skip empty lines
 		if len(line) == 0 {
+			// isPrefix will be false
 			continue
 		}
 
-		lines := shaLineRegex.FindStringSubmatch(line)
+		lines := shaLineRegex.FindSubmatch(line)
 		if lines != nil {
-			sha1 := lines[1]
+			sha1 := string(lines[1])
 
 			if blamePart == nil {
 				blamePart = &BlamePart{sha1, make([]string, 0)}
@@ -62,12 +69,27 @@ func (r *BlameReader) NextPart() (*BlamePart, error) {
 
 			if blamePart.Sha != sha1 {
 				r.lastSha = &sha1
+				// need to munch to end of line...
+				for isPrefix {
+					_, isPrefix, err = reader.ReadLine()
+					if err != nil && err != io.EOF {
+						return blamePart, err
+					}
+				}
 				return blamePart, nil
 			}
 		} else if line[0] == '\t' {
 			code := line[1:]
 
-			blamePart.Lines = append(blamePart.Lines, code)
+			blamePart.Lines = append(blamePart.Lines, string(code))
+		}
+
+		// need to munch to end of line...
+		for isPrefix {
+			_, isPrefix, err = reader.ReadLine()
+			if err != nil && err != io.EOF {
+				return blamePart, err
+			}
 		}
 	}
 
@@ -121,13 +143,13 @@ func createBlameReader(ctx context.Context, dir string, command ...string) (*Bla
 
 	pid := process.GetManager().Add(fmt.Sprintf("GetBlame [repo_path: %s]", dir), cancel)
 
-	scanner := bufio.NewScanner(stdout)
+	reader := bufio.NewReader(stdout)
 
 	return &BlameReader{
 		cmd,
 		pid,
 		stdout,
-		scanner,
+		reader,
 		nil,
 		cancel,
 	}, nil
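The switch from bufio.Scanner to bufio.Reader above matters because Scanner gives up once a single line exceeds its token limit (64 KiB by default), while Reader.ReadLine hands back a partial line and sets isPrefix so the caller can drain the remainder of the same line. A minimal standalone sketch of that draining pattern, using only the standard library (not Gitea code):

package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// One 20-byte line read through a deliberately tiny 16-byte buffer.
	r := bufio.NewReaderSize(strings.NewReader("0123456789abcdefghij\n"), 16)

	line, isPrefix, err := r.ReadLine()
	fmt.Printf("%q isPrefix=%v err=%v\n", line, isPrefix, err) // "0123456789abcdef" isPrefix=true

	// Drain the remainder of the same over-long line, as BlameReader.NextPart does above.
	for isPrefix && err == nil {
		line, isPrefix, err = r.ReadLine()
		fmt.Printf("%q isPrefix=%v err=%v\n", line, isPrefix, err) // "ghij" isPrefix=false
	}
}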
@@ -9,6 +9,7 @@ import (
 	"path"
 	"strings"
 
+	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/plumbing/filemode"
 	"github.com/go-git/go-git/v5/plumbing/object"
 )
@@ -35,6 +36,11 @@ func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) {
 		if i == len(parts)-1 {
 			entries, err := tree.ListEntries()
 			if err != nil {
+				if err == plumbing.ErrObjectNotFound {
+					return nil, ErrNotExist{
+						RelPath: relpath,
+					}
+				}
 				return nil, err
 			}
 			for _, v := range entries {
@@ -45,6 +51,11 @@ func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) {
 		} else {
 			tree, err = tree.SubTree(name)
 			if err != nil {
+				if err == plumbing.ErrObjectNotFound {
+					return nil, ErrNotExist{
+						RelPath: relpath,
+					}
+				}
 				return nil, err
 			}
 		}
@@ -16,4 +16,5 @@ type Repository struct {
 	AuthPassword  string
 	CloneURL      string
 	OriginalURL   string
+	DefaultBranch string
 }
@@ -28,6 +28,7 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	"code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/timeutil"
+	"code.gitea.io/gitea/services/pull"
 
 	gouuid "github.com/google/uuid"
 )
@@ -93,12 +94,15 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
 	}
 
 	var remoteAddr = repo.CloneURL
-	if len(opts.AuthUsername) > 0 {
+	if len(opts.AuthToken) > 0 || len(opts.AuthUsername) > 0 {
 		u, err := url.Parse(repo.CloneURL)
 		if err != nil {
 			return err
 		}
 		u.User = url.UserPassword(opts.AuthUsername, opts.AuthPassword)
+		if len(opts.AuthToken) > 0 {
+			u.User = url.UserPassword("oauth2", opts.AuthToken)
+		}
 		remoteAddr = u.String()
 	}
 
@@ -119,6 +123,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
 	if err != nil {
 		return err
 	}
+	r.DefaultBranch = repo.DefaultBranch
 
 	r, err = repository.MigrateRepositoryGitData(g.doer, owner, r, structs.MigrateRepoOption{
 		RepoName: g.repoName,
@@ -524,6 +529,7 @@ func (g *GiteaLocalUploader) CreatePullRequests(prs ...*base.PullRequest) error
 	}
 	for _, pr := range gprs {
 		g.issues.Store(pr.Issue.Index, pr.Issue.ID)
+		pull.AddToTaskQueue(pr)
 	}
 	return nil
 }
@@ -26,7 +26,7 @@ func TestGiteaUploadRepo(t *testing.T) {
 	user := models.AssertExistsAndLoadBean(t, &models.User{ID: 1}).(*models.User)
 
 	var (
-		downloader = NewGithubDownloaderV3("", "", "go-xorm", "builder")
+		downloader = NewGithubDownloaderV3("", "", "", "go-xorm", "builder")
 		repoName   = "builder-" + time.Now().Format("2006-01-02-15-04-05")
 		uploader   = NewGiteaLocalUploader(graceful.GetManager().HammerContext(), user, user.Name, repoName)
 	)
@@ -60,7 +60,7 @@ func (f *GithubDownloaderV3Factory) New(opts base.MigrateOptions) (base.Download
 
 	log.Trace("Create github downloader: %s/%s", oldOwner, oldName)
 
-	return NewGithubDownloaderV3(opts.AuthUsername, opts.AuthPassword, oldOwner, oldName), nil
+	return NewGithubDownloaderV3(opts.AuthUsername, opts.AuthPassword, opts.AuthToken, oldOwner, oldName), nil
 }
 
 // GitServiceType returns the type of git service
@@ -81,7 +81,7 @@ type GithubDownloaderV3 struct {
 }
 
 // NewGithubDownloaderV3 creates a github Downloader via github v3 API
-func NewGithubDownloaderV3(userName, password, repoOwner, repoName string) *GithubDownloaderV3 {
+func NewGithubDownloaderV3(userName, password, token, repoOwner, repoName string) *GithubDownloaderV3 {
 	var downloader = GithubDownloaderV3{
 		userName: userName,
 		password: password,
@@ -90,15 +90,7 @@ func NewGithubDownloaderV3(userName, password, repoOwner, repoName string) *Gith
 		repoName: repoName,
 	}
 
-	var client *http.Client
-	if userName != "" {
-		if password == "" {
-			ts := oauth2.StaticTokenSource(
-				&oauth2.Token{AccessToken: userName},
-			)
-			client = oauth2.NewClient(downloader.ctx, ts)
-		} else {
-			client = &http.Client{
+	client := &http.Client{
 		Transport: &http.Transport{
 			Proxy: func(req *http.Request) (*url.URL, error) {
 				req.SetBasicAuth(userName, password)
@@ -106,7 +98,11 @@ func NewGithubDownloaderV3(userName, password, repoOwner, repoName string) *Gith
 			},
 		},
 	}
-	}
+	if token != "" {
+		ts := oauth2.StaticTokenSource(
+			&oauth2.Token{AccessToken: token},
+		)
+		client = oauth2.NewClient(downloader.ctx, ts)
 	}
 	downloader.client = github.NewClient(client)
 	return &downloader
@@ -154,6 +150,11 @@ func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
 	}
 	g.rate = &resp.Rate
 
+	defaultBranch := ""
+	if gr.DefaultBranch != nil {
+		defaultBranch = *gr.DefaultBranch
+	}
+
 	// convert github repo to stand Repo
 	return &base.Repository{
 		Owner: g.repoOwner,
@@ -162,6 +163,7 @@ func (g *GithubDownloaderV3) GetRepoInfo() (*base.Repository, error) {
 		Description:   gr.GetDescription(),
 		OriginalURL:   gr.GetHTMLURL(),
 		CloneURL:      gr.GetCloneURL(),
+		DefaultBranch: defaultBranch,
 	}, nil
 }
 
@@ -64,7 +64,7 @@ func assertLabelEqual(t *testing.T, name, color, description string, label *base
 
 func TestGitHubDownloadRepo(t *testing.T) {
 	GithubLimitRateRemaining = 3 //Wait at 3 remaining since we could have 3 CI in //
-	downloader := NewGithubDownloaderV3(os.Getenv("GITHUB_READ_TOKEN"), "", "go-gitea", "test_repo")
+	downloader := NewGithubDownloaderV3("", "", os.Getenv("GITHUB_READ_TOKEN"), "go-gitea", "test_repo")
 	err := downloader.RefreshRate()
 	assert.NoError(t, err)
 
@@ -76,6 +76,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
 		Description:   "Test repository for testing migration from github to gitea",
 		CloneURL:      "https://github.com/go-gitea/test_repo.git",
 		OriginalURL:   "https://github.com/go-gitea/test_repo",
+		DefaultBranch: "master",
 	}, repo)
 
 	topics, err := downloader.GetTopics()
@@ -56,10 +56,11 @@ func (f *GitlabDownloaderFactory) New(opts base.MigrateOptions) (base.Downloader
 
 	baseURL := u.Scheme + "://" + u.Host
 	repoNameSpace := strings.TrimPrefix(u.Path, "/")
+	repoNameSpace = strings.TrimSuffix(repoNameSpace, ".git")
 
 	log.Trace("Create gitlab downloader. BaseURL: %s RepoName: %s", baseURL, repoNameSpace)
 
-	return NewGitlabDownloader(baseURL, repoNameSpace, opts.AuthUsername, opts.AuthPassword), nil
+	return NewGitlabDownloader(baseURL, repoNameSpace, opts.AuthUsername, opts.AuthPassword, opts.AuthToken), nil
 }
 
 // GitServiceType returns the type of git service
@@ -85,16 +86,13 @@ type GitlabDownloader struct {
 // NewGitlabDownloader creates a gitlab Downloader via gitlab API
 // Use either a username/password, personal token entered into the username field, or anonymous/public access
 // Note: Public access only allows very basic access
-func NewGitlabDownloader(baseURL, repoPath, username, password string) *GitlabDownloader {
-	var gitlabClient *gitlab.Client
-	var err error
-	if username != "" {
-		if password == "" {
-			gitlabClient, err = gitlab.NewClient(username, gitlab.WithBaseURL(baseURL))
-		} else {
+func NewGitlabDownloader(baseURL, repoPath, username, password, token string) *GitlabDownloader {
+	gitlabClient, err := gitlab.NewClient(token, gitlab.WithBaseURL(baseURL))
+	// Only use basic auth if token is blank and password is NOT
+	// Basic auth will fail with empty strings, but empty token will allow anonymous public API usage
+	if token == "" && password != "" {
 		gitlabClient, err = gitlab.NewBasicAuthClient(username, password, gitlab.WithBaseURL(baseURL))
 	}
-	}
 
 	if err != nil {
 		log.Trace("Error logging into gitlab: %v", err)
@@ -163,6 +161,7 @@ func (g *GitlabDownloader) GetRepoInfo() (*base.Repository, error) {
 		Description:   gr.Description,
 		OriginalURL:   gr.WebURL,
 		CloneURL:      gr.HTTPURLToRepo,
+		DefaultBranch: gr.DefaultBranch,
 	}, nil
 }
 
@@ -240,6 +239,19 @@ func (g *GitlabDownloader) GetMilestones() ([]*base.Milestone, error) {
 	return milestones, nil
 }
 
+func (g *GitlabDownloader) normalizeColor(val string) string {
+	val = strings.TrimLeft(val, "#")
+	val = strings.ToLower(val)
+	if len(val) == 3 {
+		c := []rune(val)
+		val = fmt.Sprintf("%c%c%c%c%c%c", c[0], c[0], c[1], c[1], c[2], c[2])
+	}
+	if len(val) != 6 {
+		return ""
+	}
+	return val
+}
+
 // GetLabels returns labels
 func (g *GitlabDownloader) GetLabels() ([]*base.Label, error) {
 	if g == nil {
@@ -258,7 +270,7 @@ func (g *GitlabDownloader) GetLabels() ([]*base.Label, error) {
 	for _, label := range ls {
 		baseLabel := &base.Label{
 			Name:        label.Name,
-			Color:       strings.TrimLeft(label.Color, "#)"),
+			Color:       g.normalizeColor(label.Color),
 			Description: label.Description,
 		}
 		labels = append(labels, baseLabel)
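The normalizeColor helper added above converts GitLab's CSS-style label colors (with or without a leading '#', optionally in 3-digit shorthand) into the six-character lowercase form the uploader stores. A standalone sketch of the same rules and what they produce:

package main

import (
	"fmt"
	"strings"
)

// normalizeColor mirrors the helper above: strip the leading '#', lower-case,
// expand 3-digit shorthand, and return "" for anything that is not six characters.
func normalizeColor(val string) string {
	val = strings.TrimLeft(val, "#")
	val = strings.ToLower(val)
	if len(val) == 3 {
		c := []rune(val)
		val = fmt.Sprintf("%c%c%c%c%c%c", c[0], c[0], c[1], c[1], c[2], c[2])
	}
	if len(val) != 6 {
		return ""
	}
	return val
}

func main() {
	fmt.Println(normalizeColor("#F93"))        // ff9933
	fmt.Println(normalizeColor("#00AABB"))     // 00aabb
	fmt.Println(normalizeColor("not-a-color")) // "" (rejected)
}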
@@ -27,7 +27,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
 		t.Skipf("Can't access test repo, skipping %s", t.Name())
 	}
 
-	downloader := NewGitlabDownloader("https://gitlab.com", "gitea/test_repo", gitlabPersonalAccessToken, "")
+	downloader := NewGitlabDownloader("https://gitlab.com", "gitea/test_repo", "", "", gitlabPersonalAccessToken)
 	if downloader == nil {
 		t.Fatal("NewGitlabDownloader is nil")
 	}
@@ -40,6 +40,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
 		Description:   "Test repository for testing migration from gitlab to gitea",
 		CloneURL:      "https://gitlab.com/gitea/test_repo.git",
 		OriginalURL:   "https://gitlab.com/gitea/test_repo",
+		DefaultBranch: "master",
 	}, repo)
 
 	topics, err := downloader.GetTopics()
@@ -36,6 +36,12 @@ func MigrateRepository(ctx context.Context, doer *models.User, ownerName string,
 		theFactory base.DownloaderFactory
 	)
 
+	// determine if user is token
+	if len(opts.AuthUsername) != 0 && len(opts.AuthPassword) == 0 {
+		opts.AuthToken = opts.AuthUsername
+		opts.AuthUsername = ""
+	}
+
 	for _, factory := range factories {
 		if match, err := factory.Match(opts); err != nil {
 			return nil, err
@@ -78,7 +84,7 @@ func MigrateRepository(ctx context.Context, doer *models.User, ownerName string,
 		}
 
 		if err2 := models.CreateRepositoryNotice(fmt.Sprintf("Migrate repository from %s failed: %v", opts.OriginalURL, err)); err2 != nil {
-			log.Error("create respotiry notice failed: ", err2)
+			log.Error("create repository notice failed: ", err2)
 		}
 		return nil, err
 	}
@@ -92,13 +92,22 @@ func (a *actionNotifier) NotifyCreateIssueComment(doer *models.User, repo *model
 	act := &models.Action{
 		ActUserID: doer.ID,
 		ActUser:   doer,
-		Content:   fmt.Sprintf("%d|%s", issue.Index, comment.Content),
 		RepoID:    issue.Repo.ID,
 		Repo:      issue.Repo,
 		Comment:   comment,
 		CommentID: comment.ID,
 		IsPrivate: issue.Repo.IsPrivate,
 	}
+
+	content := ""
+
+	if len(comment.Content) > 200 {
+		content = comment.Content[:strings.LastIndex(comment.Content[0:200], " ")] + "…"
+	} else {
+		content = comment.Content
+	}
+	act.Content = fmt.Sprintf("%d|%s", issue.Index, content)
+
 	if issue.IsPull {
 		act.OpType = models.ActionCommentPull
 	} else {
|
|||||||
|
|
||||||
func (m *webhookNotifier) NotifyCreateRepository(doer *models.User, u *models.User, repo *models.Repository) {
|
func (m *webhookNotifier) NotifyCreateRepository(doer *models.User, u *models.User, repo *models.Repository) {
|
||||||
// Add to hook queue for created repo after session commit.
|
// Add to hook queue for created repo after session commit.
|
||||||
if u.IsOrganization() {
|
|
||||||
if err := webhook_module.PrepareWebhooks(repo, models.HookEventRepository, &api.RepositoryPayload{
|
if err := webhook_module.PrepareWebhooks(repo, models.HookEventRepository, &api.RepositoryPayload{
|
||||||
Action: api.HookRepoCreated,
|
Action: api.HookRepoCreated,
|
||||||
Repository: repo.APIFormat(models.AccessModeOwner),
|
Repository: repo.APIFormat(models.AccessModeOwner),
|
||||||
@@ -108,13 +107,11 @@ func (m *webhookNotifier) NotifyCreateRepository(doer *models.User, u *models.Us
|
|||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
|
log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *webhookNotifier) NotifyDeleteRepository(doer *models.User, repo *models.Repository) {
|
func (m *webhookNotifier) NotifyDeleteRepository(doer *models.User, repo *models.Repository) {
|
||||||
u := repo.MustOwner()
|
u := repo.MustOwner()
|
||||||
|
|
||||||
if u.IsOrganization() {
|
|
||||||
if err := webhook_module.PrepareWebhooks(repo, models.HookEventRepository, &api.RepositoryPayload{
|
if err := webhook_module.PrepareWebhooks(repo, models.HookEventRepository, &api.RepositoryPayload{
|
||||||
Action: api.HookRepoDeleted,
|
Action: api.HookRepoDeleted,
|
||||||
Repository: repo.APIFormat(models.AccessModeOwner),
|
Repository: repo.APIFormat(models.AccessModeOwner),
|
||||||
@@ -123,7 +120,6 @@ func (m *webhookNotifier) NotifyDeleteRepository(doer *models.User, repo *models
|
|||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
|
log.Error("PrepareWebhooks [repo_id: %d]: %v", repo.ID, err)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *webhookNotifier) NotifyIssueChangeAssignee(doer *models.User, issue *models.Issue, assignee *models.User, removed bool, comment *models.Comment) {
|
func (m *webhookNotifier) NotifyIssueChangeAssignee(doer *models.User, issue *models.Issue, assignee *models.User, removed bool, comment *models.Comment) {
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ package public
 
 import (
 	"encoding/base64"
+	"fmt"
 	"log"
 	"net/http"
 	"path"
@@ -159,7 +160,7 @@ func (opts *Options) handle(ctx *macaron.Context, log *log.Logger, opt *Options)
 	// Add an Expires header to the static content
 	if opt.ExpiresAfter > 0 {
 		ctx.Resp.Header().Set("Expires", time.Now().Add(opt.ExpiresAfter).UTC().Format(http.TimeFormat))
-		tag := GenerateETag(string(fi.Size()), fi.Name(), fi.ModTime().UTC().Format(http.TimeFormat))
+		tag := GenerateETag(fmt.Sprint(fi.Size()), fi.Name(), fi.ModTime().UTC().Format(http.TimeFormat))
 		ctx.Resp.Header().Set("ETag", tag)
 		if ctx.Req.Header.Get("If-None-Match") == tag {
 			ctx.Resp.WriteHeader(304)
@@ -8,6 +8,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"html"
+	"time"
 
 	"code.gitea.io/gitea/models"
 	"code.gitea.io/gitea/modules/git"
@@ -176,7 +177,7 @@ func CommitRepoAction(optsList ...*CommitRepoActionOptions) error {
 	var err error
 	if repo != nil {
 		// Change repository empty status and update last updated time.
-		if err := models.UpdateRepository(repo, false); err != nil {
+		if err := models.UpdateRepositoryUpdatedTime(repo.ID, time.Now()); err != nil {
 			return fmt.Errorf("UpdateRepository: %v", err)
 		}
 	}
@@ -204,6 +205,10 @@ func CommitRepoAction(optsList ...*CommitRepoActionOptions) error {
 			}
 			gitRepo.Close()
 		}
+		// Update the is empty and default_branch columns
+		if err := models.UpdateRepositoryCols(repo, "default_branch", "is_empty"); err != nil {
+			return fmt.Errorf("UpdateRepositoryCols: %v", err)
+		}
 	}
 
 	opType := models.ActionCommitRepo
@@ -274,7 +279,7 @@ func CommitRepoAction(optsList ...*CommitRepoActionOptions) error {
 
 	if repo != nil {
 		// Change repository empty status and update last updated time.
-		if err := models.UpdateRepository(repo, false); err != nil {
+		if err := models.UpdateRepositoryUpdatedTime(repo.ID, time.Now()); err != nil {
 			return fmt.Errorf("UpdateRepository: %v", err)
 		}
 	}
@@ -278,7 +278,7 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
 	var diff *gitdiff.Diff
 	var finalErr error
 
-	if err := git.NewCommand("diff-index", "--cached", "-p", "HEAD").
+	if err := git.NewCommand("diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD").
 		RunInDirTimeoutEnvFullPipelineFunc(nil, 30*time.Second, t.basePath, stdoutWriter, stderr, nil, func(ctx context.Context, cancel context.CancelFunc) error {
 			_ = stdoutWriter.Close()
 			diff, finalErr = gitdiff.ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, stdoutReader)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(opts.DefaultBranch) == 0 {
|
||||||
|
opts.DefaultBranch = setting.Repository.DefaultBranch
|
||||||
|
}
|
||||||
|
|
||||||
repo := &models.Repository{
|
repo := &models.Repository{
|
||||||
OwnerID: u.ID,
|
OwnerID: u.ID,
|
||||||
Owner: u,
|
Owner: u,
|
||||||
|
|||||||
@@ -102,7 +102,8 @@ func MigrateRepositoryGitData(doer, u *models.User, repo *models.Repository, opt
 		return repo, fmt.Errorf("git.IsEmpty: %v", err)
 	}
 
-	if !opts.Releases && !repo.IsEmpty {
+	if !repo.IsEmpty {
+		if len(repo.DefaultBranch) == 0 {
 			// Try to get HEAD branch and set it as default branch.
 			headBranch, err := gitRepo.GetHEADBranch()
 			if err != nil {
@@ -111,11 +112,14 @@ func MigrateRepositoryGitData(doer, u *models.User, repo *models.Repository, opt
 			if headBranch != nil {
 				repo.DefaultBranch = headBranch.Name
 			}
+		}
+
+		if !opts.Releases {
 			if err = SyncReleasesWithTags(repo, gitRepo); err != nil {
 				log.Error("Failed to synchronize tags to releases for repository: %v", err)
 			}
 		}
+	}
 
 	if err = repo.UpdateSize(models.DefaultDBContext()); err != nil {
 		log.Error("Failed to update size for repository: %v", err)
@@ -4,8 +4,26 @@
 
 package setting
 
+import "reflect"
+
 // GetCronSettings maps the cron subsection to the provided config
 func GetCronSettings(name string, config interface{}) (interface{}, error) {
-	err := Cfg.Section("cron." + name).MapTo(config)
+	if err := Cfg.Section("cron." + name).MapTo(config); err != nil {
 		return config, err
+	}
+
+	typ := reflect.TypeOf(config).Elem()
+	val := reflect.ValueOf(config).Elem()
+
+	for i := 0; i < typ.NumField(); i++ {
+		field := val.Field(i)
+		tpField := typ.Field(i)
+		if tpField.Type.Kind() == reflect.Struct && tpField.Anonymous {
+			if err := Cfg.Section("cron." + name).MapTo(field.Addr().Interface()); err != nil {
+				return config, err
+			}
+		}
+	}
+
+	return config, nil
 }
modules/setting/cron_test.go (new file, 47 lines)
@@ -0,0 +1,47 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package setting
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	ini "gopkg.in/ini.v1"
+)
+
+func Test_GetCronSettings(t *testing.T) {
+
+	type BaseStruct struct {
+		Base   bool
+		Second string
+	}
+
+	type Extended struct {
+		BaseStruct
+		Extend bool
+	}
+
+	iniStr := `
+[cron.test]
+Base = true
+Second = white rabbit
+Extend = true
+`
+	Cfg, _ = ini.Load([]byte(iniStr))
+
+	extended := &Extended{
+		BaseStruct: BaseStruct{
+			Second: "queen of hearts",
+		},
+	}
+
+	_, err := GetCronSettings("test", extended)
+
+	assert.NoError(t, err)
+	assert.True(t, extended.Base)
+	assert.EqualValues(t, extended.Second, "white rabbit")
+	assert.True(t, extended.Extend)
+
+}
@@ -1032,8 +1032,8 @@ func NewContext() {
 	newMarkup()
 
 	sec = Cfg.Section("U2F")
-	U2F.TrustedFacets, _ = shellquote.Split(sec.Key("TRUSTED_FACETS").MustString(strings.TrimRight(AppURL, "/")))
-	U2F.AppID = sec.Key("APP_ID").MustString(strings.TrimRight(AppURL, "/"))
+	U2F.TrustedFacets, _ = shellquote.Split(sec.Key("TRUSTED_FACETS").MustString(strings.TrimSuffix(AppURL, AppSubURL+"/")))
+	U2F.AppID = sec.Key("APP_ID").MustString(strings.TrimSuffix(AppURL, "/"))
 
 	zip.Verbose = false
 
|
|||||||
CloneAddr string `json:"clone_addr" binding:"Required"`
|
CloneAddr string `json:"clone_addr" binding:"Required"`
|
||||||
AuthUsername string `json:"auth_username"`
|
AuthUsername string `json:"auth_username"`
|
||||||
AuthPassword string `json:"auth_password"`
|
AuthPassword string `json:"auth_password"`
|
||||||
|
AuthToken string `json:"auth_token"`
|
||||||
// required: true
|
// required: true
|
||||||
UID int `json:"uid" binding:"Required"`
|
UID int `json:"uid" binding:"Required"`
|
||||||
// required: true
|
// required: true
|
||||||
|
|||||||
@@ -5,7 +5,6 @@
 package task
 
 import (
-	"bytes"
 	"errors"
 	"fmt"
 	"strings"
@@ -38,10 +37,8 @@ func handleCreateError(owner *models.User, err error, name string) error {
 func runMigrateTask(t *models.Task) (err error) {
 	defer func() {
 		if e := recover(); e != nil {
-			var buf bytes.Buffer
-			fmt.Fprintf(&buf, "Handler crashed with error: %v", log.Stack(2))
-
-			err = errors.New(buf.String())
+			err = fmt.Errorf("PANIC whilst trying to do migrate task: %v\nStacktrace: %v", err, log.Stack(2))
+			log.Critical("PANIC during runMigrateTask[%d] by DoerID[%d] to RepoID[%d] for OwnerID[%d]: %v", t.ID, t.DoerID, t.RepoID, t.OwnerID, err)
 		}
 
 		if err == nil {
@@ -51,14 +48,14 @@ func runMigrateTask(t *models.Task) (err error) {
 				return
 			}
 
-			log.Error("FinishMigrateTask failed: %s", err.Error())
+			log.Error("FinishMigrateTask[%d] by DoerID[%d] to RepoID[%d] for OwnerID[%d] failed: %v", t.ID, t.DoerID, t.RepoID, t.OwnerID, err)
 		}
 
 		t.EndTime = timeutil.TimeStampNow()
 		t.Status = structs.TaskStatusFailed
 		t.Errors = err.Error()
 		if err := t.UpdateCols("status", "errors", "end_time"); err != nil {
-			log.Error("Task UpdateCols failed: %s", err.Error())
+			log.Error("Task UpdateCols failed: %v", err)
 		}
 
 		if t.Repo != nil {
@@ -619,7 +619,7 @@ func ActionContent2Commits(act Actioner) *repository.PushCommits {
 // DiffTypeToStr returns diff type name
 func DiffTypeToStr(diffType int) string {
 	diffTypes := map[int]string{
-		1: "add", 2: "modify", 3: "del", 4: "rename",
+		1: "add", 2: "modify", 3: "del", 4: "rename", 5: "copy",
 	}
 	return diffTypes[diffType]
 }
@@ -184,14 +184,14 @@ func reqToken() macaron.Handler {
 			ctx.RequireCSRF()
 			return
 		}
-		ctx.Context.Error(http.StatusUnauthorized)
+		ctx.Error(http.StatusUnauthorized, "reqToken", "token is required")
 	}
 }
 
 func reqBasicAuth() macaron.Handler {
 	return func(ctx *context.APIContext) {
 		if !ctx.Context.IsBasicAuth {
-			ctx.Context.Error(http.StatusUnauthorized)
+			ctx.Error(http.StatusUnauthorized, "reqBasicAuth", "basic auth required")
 			return
 		}
 		ctx.CheckForOTP()
@@ -200,9 +200,9 @@ func reqBasicAuth() macaron.Handler {
 
 // reqSiteAdmin user should be the site admin
 func reqSiteAdmin() macaron.Handler {
-	return func(ctx *context.Context) {
+	return func(ctx *context.APIContext) {
 		if !ctx.IsUserSiteAdmin() {
-			ctx.Error(http.StatusForbidden)
+			ctx.Error(http.StatusForbidden, "reqSiteAdmin", "user should be the site admin")
 			return
 		}
 	}
@@ -210,9 +210,9 @@ func reqSiteAdmin() macaron.Handler {
 
 // reqOwner user should be the owner of the repo or site admin.
 func reqOwner() macaron.Handler {
-	return func(ctx *context.Context) {
+	return func(ctx *context.APIContext) {
 		if !ctx.IsUserRepoOwner() && !ctx.IsUserSiteAdmin() {
-			ctx.Error(http.StatusForbidden)
+			ctx.Error(http.StatusForbidden, "reqOwner", "user should be the owner of the repo")
 			return
 		}
 	}
@@ -220,9 +220,9 @@ func reqOwner() macaron.Handler {
 
 // reqAdmin user should be an owner or a collaborator with admin write of a repository, or site admin
 func reqAdmin() macaron.Handler {
-	return func(ctx *context.Context) {
+	return func(ctx *context.APIContext) {
 		if !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
-			ctx.Error(http.StatusForbidden)
+			ctx.Error(http.StatusForbidden, "reqAdmin", "user should be an owner or a collaborator with admin write of a repository")
 			return
 		}
 	}
@@ -230,9 +230,9 @@ func reqAdmin() macaron.Handler {
 
 // reqRepoWriter user should have a permission to write to a repo, or be a site admin
 func reqRepoWriter(unitTypes ...models.UnitType) macaron.Handler {
-	return func(ctx *context.Context) {
+	return func(ctx *context.APIContext) {
 		if !ctx.IsUserRepoWriter(unitTypes) && !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
-			ctx.Error(http.StatusForbidden)
+			ctx.Error(http.StatusForbidden, "reqRepoWriter", "user should have a permission to write to a repo")
 			return
 		}
 	}
@@ -240,9 +240,9 @@ func reqRepoWriter(unitTypes ...models.UnitType) macaron.Handler {
|
|||||||
|
|
||||||
// reqRepoReader user should have specific read permission or be a repo admin or a site admin
|
// reqRepoReader user should have specific read permission or be a repo admin or a site admin
|
||||||
func reqRepoReader(unitType models.UnitType) macaron.Handler {
|
func reqRepoReader(unitType models.UnitType) macaron.Handler {
|
||||||
return func(ctx *context.Context) {
|
return func(ctx *context.APIContext) {
|
||||||
if !ctx.IsUserRepoReaderSpecific(unitType) && !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
|
if !ctx.IsUserRepoReaderSpecific(unitType) && !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
|
||||||
ctx.Error(http.StatusForbidden)
|
ctx.Error(http.StatusForbidden, "reqRepoReader", "user should have specific read permission or be a repo admin or a site admin")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -250,9 +250,9 @@ func reqRepoReader(unitType models.UnitType) macaron.Handler {
|
|||||||
|
|
||||||
// reqAnyRepoReader user should have any permission to read repository or permissions of site admin
|
// reqAnyRepoReader user should have any permission to read repository or permissions of site admin
|
||||||
func reqAnyRepoReader() macaron.Handler {
|
func reqAnyRepoReader() macaron.Handler {
|
||||||
return func(ctx *context.Context) {
|
return func(ctx *context.APIContext) {
|
||||||
if !ctx.IsUserRepoReaderAny() && !ctx.IsUserSiteAdmin() {
|
if !ctx.IsUserRepoReaderAny() && !ctx.IsUserSiteAdmin() {
|
||||||
ctx.Error(http.StatusForbidden)
|
ctx.Error(http.StatusForbidden, "reqAnyRepoReader", "user should have any permission to read repository or permissions of site admin")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -397,7 +397,7 @@ func orgAssignment(args ...bool) macaron.Handler {
|
|||||||
if assignTeam {
|
if assignTeam {
|
||||||
ctx.Org.Team, err = models.GetTeamByID(ctx.ParamsInt64(":teamid"))
|
ctx.Org.Team, err = models.GetTeamByID(ctx.ParamsInt64(":teamid"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if models.IsErrUserNotExist(err) {
|
if models.IsErrTeamNotExist(err) {
|
||||||
ctx.NotFound()
|
ctx.NotFound()
|
||||||
} else {
|
} else {
|
||||||
ctx.Error(http.StatusInternalServerError, "GetTeamById", err)
|
ctx.Error(http.StatusInternalServerError, "GetTeamById", err)
|
||||||
@@ -495,7 +495,6 @@ func mustNotBeArchived(ctx *context.APIContext) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// RegisterRoutes registers all v1 APIs routes to web application.
|
// RegisterRoutes registers all v1 APIs routes to web application.
|
||||||
// FIXME: custom form error response
|
|
||||||
func RegisterRoutes(m *macaron.Macaron) {
|
func RegisterRoutes(m *macaron.Macaron) {
|
||||||
bind := binding.Bind
|
bind := binding.Bind
|
||||||
|
|
||||||
@@ -628,7 +627,7 @@ func RegisterRoutes(m *macaron.Macaron) {
|
|||||||
m.Group("/:username/:reponame", func() {
|
m.Group("/:username/:reponame", func() {
|
||||||
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
|
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
|
||||||
Delete(reqToken(), reqOwner(), repo.Delete).
|
Delete(reqToken(), reqOwner(), repo.Delete).
|
||||||
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit)
|
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRefForAPI(), repo.Edit)
|
||||||
m.Post("/transfer", reqOwner(), bind(api.TransferRepoOption{}), repo.Transfer)
|
m.Post("/transfer", reqOwner(), bind(api.TransferRepoOption{}), repo.Transfer)
|
||||||
m.Combo("/notifications").
|
m.Combo("/notifications").
|
||||||
Get(reqToken(), notify.ListRepoNotifications).
|
Get(reqToken(), notify.ListRepoNotifications).
|
||||||
@@ -640,7 +639,7 @@ func RegisterRoutes(m *macaron.Macaron) {
|
|||||||
m.Combo("").Get(repo.GetHook).
|
m.Combo("").Get(repo.GetHook).
|
||||||
Patch(bind(api.EditHookOption{}), repo.EditHook).
|
Patch(bind(api.EditHookOption{}), repo.EditHook).
|
||||||
Delete(repo.DeleteHook)
|
Delete(repo.DeleteHook)
|
||||||
m.Post("/tests", context.RepoRef(), repo.TestHook)
|
m.Post("/tests", context.RepoRefForAPI(), repo.TestHook)
|
||||||
})
|
})
|
||||||
m.Group("/git", func() {
|
m.Group("/git", func() {
|
||||||
m.Combo("").Get(repo.ListGitHooks)
|
m.Combo("").Get(repo.ListGitHooks)
|
||||||
@@ -657,14 +656,14 @@ func RegisterRoutes(m *macaron.Macaron) {
|
|||||||
Put(reqAdmin(), bind(api.AddCollaboratorOption{}), repo.AddCollaborator).
|
Put(reqAdmin(), bind(api.AddCollaboratorOption{}), repo.AddCollaborator).
|
||||||
Delete(reqAdmin(), repo.DeleteCollaborator)
|
Delete(reqAdmin(), repo.DeleteCollaborator)
|
||||||
}, reqToken())
|
}, reqToken())
|
||||||
m.Get("/raw/*", context.RepoRefByType(context.RepoRefAny), reqRepoReader(models.UnitTypeCode), repo.GetRawFile)
|
m.Get("/raw/*", context.RepoRefForAPI(), reqRepoReader(models.UnitTypeCode), repo.GetRawFile)
|
||||||
m.Get("/archive/*", reqRepoReader(models.UnitTypeCode), repo.GetArchive)
|
m.Get("/archive/*", reqRepoReader(models.UnitTypeCode), repo.GetArchive)
|
||||||
m.Combo("/forks").Get(repo.ListForks).
|
m.Combo("/forks").Get(repo.ListForks).
|
||||||
Post(reqToken(), reqRepoReader(models.UnitTypeCode), bind(api.CreateForkOption{}), repo.CreateFork)
|
Post(reqToken(), reqRepoReader(models.UnitTypeCode), bind(api.CreateForkOption{}), repo.CreateFork)
|
||||||
m.Group("/branches", func() {
|
m.Group("/branches", func() {
|
||||||
m.Get("", repo.ListBranches)
|
m.Get("", repo.ListBranches)
|
||||||
m.Get("/*", context.RepoRefByType(context.RepoRefBranch), repo.GetBranch)
|
m.Get("/*", repo.GetBranch)
|
||||||
m.Delete("/*", reqRepoWriter(models.UnitTypeCode), context.RepoRefByType(context.RepoRefBranch), repo.DeleteBranch)
|
m.Delete("/*", context.ReferencesGitRepo(false), reqRepoWriter(models.UnitTypeCode), repo.DeleteBranch)
|
||||||
}, reqRepoReader(models.UnitTypeCode))
|
}, reqRepoReader(models.UnitTypeCode))
|
||||||
m.Group("/branch_protections", func() {
|
m.Group("/branch_protections", func() {
|
||||||
m.Get("", repo.ListBranchProtections)
|
m.Get("", repo.ListBranchProtections)
|
||||||
@@ -785,7 +784,7 @@ func RegisterRoutes(m *macaron.Macaron) {
|
|||||||
})
|
})
|
||||||
}, reqRepoReader(models.UnitTypeReleases))
|
}, reqRepoReader(models.UnitTypeReleases))
|
||||||
m.Post("/mirror-sync", reqToken(), reqRepoWriter(models.UnitTypeCode), repo.MirrorSync)
|
m.Post("/mirror-sync", reqToken(), reqRepoWriter(models.UnitTypeCode), repo.MirrorSync)
|
||||||
m.Get("/editorconfig/:filename", context.RepoRef(), reqRepoReader(models.UnitTypeCode), repo.GetEditorconfig)
|
m.Get("/editorconfig/:filename", context.RepoRefForAPI(), reqRepoReader(models.UnitTypeCode), repo.GetEditorconfig)
|
||||||
m.Group("/pulls", func() {
|
m.Group("/pulls", func() {
|
||||||
m.Combo("").Get(bind(api.ListPullRequestsOptions{}), repo.ListPullRequests).
|
m.Combo("").Get(bind(api.ListPullRequestsOptions{}), repo.ListPullRequests).
|
||||||
Post(reqToken(), mustNotBeArchived, bind(api.CreatePullRequestOption{}), repo.CreatePullRequest)
|
Post(reqToken(), mustNotBeArchived, bind(api.CreatePullRequestOption{}), repo.CreatePullRequest)
|
||||||
@@ -827,9 +826,9 @@ func RegisterRoutes(m *macaron.Macaron) {
|
|||||||
})
|
})
|
||||||
m.Get("/refs", repo.GetGitAllRefs)
|
m.Get("/refs", repo.GetGitAllRefs)
|
||||||
m.Get("/refs/*", repo.GetGitRefs)
|
m.Get("/refs/*", repo.GetGitRefs)
|
||||||
m.Get("/trees/:sha", context.RepoRef(), repo.GetTree)
|
m.Get("/trees/:sha", context.RepoRefForAPI(), repo.GetTree)
|
||||||
m.Get("/blobs/:sha", context.RepoRef(), repo.GetBlob)
|
m.Get("/blobs/:sha", context.RepoRefForAPI(), repo.GetBlob)
|
||||||
m.Get("/tags/:sha", context.RepoRef(), repo.GetTag)
|
m.Get("/tags/:sha", context.RepoRefForAPI(), repo.GetTag)
|
||||||
}, reqRepoReader(models.UnitTypeCode))
|
}, reqRepoReader(models.UnitTypeCode))
|
||||||
m.Group("/contents", func() {
|
m.Group("/contents", func() {
|
||||||
m.Get("", repo.GetContentsList)
|
m.Get("", repo.GetContentsList)
|
||||||
|
|||||||
@@ -101,7 +101,7 @@ func ListRepoNotifications(ctx *context.APIContext) {
|
|||||||
|
|
||||||
before, since, err := utils.GetQueryBeforeSince(ctx)
|
before, since, err := utils.GetQueryBeforeSince(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.InternalServerError(err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
opts := models.FindNotificationOptions{
|
opts := models.FindNotificationOptions{
|
||||||
|
|||||||
@@ -63,7 +63,7 @@ func ListNotifications(ctx *context.APIContext) {
|
|||||||
|
|
||||||
before, since, err := utils.GetQueryBeforeSince(ctx)
|
before, since, err := utils.GetQueryBeforeSince(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.InternalServerError(err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
opts := models.FindNotificationOptions{
|
opts := models.FindNotificationOptions{
|
||||||
|
|||||||
@@ -84,7 +84,7 @@ func ListUserOrgs(ctx *context.APIContext) {
|
|||||||
if ctx.Written() {
|
if ctx.Written() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
listUserOrgs(ctx, u, ctx.User.IsAdmin)
|
listUserOrgs(ctx, u, ctx.User != nil && (ctx.User.IsAdmin || ctx.User.ID == u.ID))
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetAll return list of all public organizations
|
// GetAll return list of all public organizations
|
||||||
|
|||||||
@@ -45,15 +45,12 @@ func GetBranch(ctx *context.APIContext) {
|
|||||||
// responses:
|
// responses:
|
||||||
// "200":
|
// "200":
|
||||||
// "$ref": "#/responses/Branch"
|
// "$ref": "#/responses/Branch"
|
||||||
|
// "404":
|
||||||
|
// "$ref": "#/responses/notFound"
|
||||||
|
|
||||||
if ctx.Repo.TreePath != "" {
|
branchName := ctx.Params("*")
|
||||||
// if TreePath != "", then URL contained extra slashes
|
|
||||||
// (i.e. "master/subbranch" instead of "master"), so branch does
|
branch, err := repo_module.GetBranch(ctx.Repo.Repository, branchName)
|
||||||
// not exist
|
|
||||||
ctx.NotFound()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
branch, err := repo_module.GetBranch(ctx.Repo.Repository, ctx.Repo.BranchName)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if git.IsErrBranchNotExist(err) {
|
if git.IsErrBranchNotExist(err) {
|
||||||
ctx.NotFound(err)
|
ctx.NotFound(err)
|
||||||
@@ -69,7 +66,7 @@ func GetBranch(ctx *context.APIContext) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
branchProtection, err := ctx.Repo.Repository.GetBranchProtection(ctx.Repo.BranchName)
|
branchProtection, err := ctx.Repo.Repository.GetBranchProtection(branchName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.Error(http.StatusInternalServerError, "GetBranchProtection", err)
|
ctx.Error(http.StatusInternalServerError, "GetBranchProtection", err)
|
||||||
return
|
return
|
||||||
@@ -112,21 +109,17 @@ func DeleteBranch(ctx *context.APIContext) {
|
|||||||
// "$ref": "#/responses/empty"
|
// "$ref": "#/responses/empty"
|
||||||
// "403":
|
// "403":
|
||||||
// "$ref": "#/responses/error"
|
// "$ref": "#/responses/error"
|
||||||
|
// "404":
|
||||||
|
// "$ref": "#/responses/notFound"
|
||||||
|
|
||||||
if ctx.Repo.TreePath != "" {
|
branchName := ctx.Params("*")
|
||||||
// if TreePath != "", then URL contained extra slashes
|
|
||||||
// (i.e. "master/subbranch" instead of "master"), so branch does
|
|
||||||
// not exist
|
|
||||||
ctx.NotFound()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if ctx.Repo.Repository.DefaultBranch == ctx.Repo.BranchName {
|
if ctx.Repo.Repository.DefaultBranch == branchName {
|
||||||
ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch"))
|
ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
isProtected, err := ctx.Repo.Repository.IsProtectedBranch(ctx.Repo.BranchName, ctx.User)
|
isProtected, err := ctx.Repo.Repository.IsProtectedBranch(branchName, ctx.User)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.InternalServerError(err)
|
ctx.InternalServerError(err)
|
||||||
return
|
return
|
||||||
@@ -136,7 +129,7 @@ func DeleteBranch(ctx *context.APIContext) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
branch, err := repo_module.GetBranch(ctx.Repo.Repository, ctx.Repo.BranchName)
|
branch, err := repo_module.GetBranch(ctx.Repo.Repository, branchName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if git.IsErrBranchNotExist(err) {
|
if git.IsErrBranchNotExist(err) {
|
||||||
ctx.NotFound(err)
|
ctx.NotFound(err)
|
||||||
@@ -152,7 +145,7 @@ func DeleteBranch(ctx *context.APIContext) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := ctx.Repo.GitRepo.DeleteBranch(ctx.Repo.BranchName, git.DeleteBranchOptions{
|
if err := ctx.Repo.GitRepo.DeleteBranch(branchName, git.DeleteBranchOptions{
|
||||||
Force: true,
|
Force: true,
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
ctx.Error(http.StatusInternalServerError, "DeleteBranch", err)
|
ctx.Error(http.StatusInternalServerError, "DeleteBranch", err)
|
||||||
@@ -164,7 +157,7 @@ func DeleteBranch(ctx *context.APIContext) {
|
|||||||
ctx.Repo.Repository,
|
ctx.Repo.Repository,
|
||||||
ctx.Repo.BranchName,
|
ctx.Repo.BranchName,
|
||||||
repofiles.PushUpdateOptions{
|
repofiles.PushUpdateOptions{
|
||||||
RefFullName: git.BranchPrefix + ctx.Repo.BranchName,
|
RefFullName: git.BranchPrefix + branchName,
|
||||||
OldCommitID: c.ID.String(),
|
OldCommitID: c.ID.String(),
|
||||||
NewCommitID: git.EmptySHA,
|
NewCommitID: git.EmptySHA,
|
||||||
PusherID: ctx.User.ID,
|
PusherID: ctx.User.ID,
|
||||||
@@ -175,7 +168,7 @@ func DeleteBranch(ctx *context.APIContext) {
|
|||||||
log.Error("Update: %v", err)
|
log.Error("Update: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := ctx.Repo.Repository.AddDeletedBranch(ctx.Repo.BranchName, c.ID.String(), ctx.User.ID); err != nil {
|
if err := ctx.Repo.Repository.AddDeletedBranch(branchName, c.ID.String(), ctx.User.ID); err != nil {
|
||||||
log.Warn("AddDeletedBranch: %v", err)
|
log.Warn("AddDeletedBranch: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ func ListIssueComments(ctx *context.APIContext) {
|
|||||||
|
|
||||||
before, since, err := utils.GetQueryBeforeSince(ctx)
|
before, since, err := utils.GetQueryBeforeSince(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.Error(http.StatusInternalServerError, "GetQueryBeforeSince", err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
issue, err := models.GetIssueByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
|
issue, err := models.GetIssueByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
|
||||||
@@ -132,7 +132,7 @@ func ListRepoIssueComments(ctx *context.APIContext) {
|
|||||||
|
|
||||||
before, since, err := utils.GetQueryBeforeSince(ctx)
|
before, since, err := utils.GetQueryBeforeSince(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.Error(http.StatusInternalServerError, "GetQueryBeforeSince", err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -56,7 +56,11 @@ func GetIssueCommentReactions(ctx *context.APIContext) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if !ctx.Repo.CanRead(models.UnitTypeIssues) {
|
if err := comment.LoadIssue(); err != nil {
|
||||||
|
ctx.Error(http.StatusInternalServerError, "comment.LoadIssue", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull) {
|
||||||
ctx.Error(http.StatusForbidden, "GetIssueCommentReactions", errors.New("no permission to get reactions"))
|
ctx.Error(http.StatusForbidden, "GetIssueCommentReactions", errors.New("no permission to get reactions"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -270,7 +274,7 @@ func GetIssueReactions(ctx *context.APIContext) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if !ctx.Repo.CanRead(models.UnitTypeIssues) {
|
if !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull) {
|
||||||
ctx.Error(http.StatusForbidden, "GetIssueReactions", errors.New("no permission to get reactions"))
|
ctx.Error(http.StatusForbidden, "GetIssueReactions", errors.New("no permission to get reactions"))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -86,7 +86,7 @@ func ListTrackedTimes(ctx *context.APIContext) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil {
|
if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil {
|
||||||
ctx.InternalServerError(err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -491,7 +491,7 @@ func ListTrackedTimesByRepository(ctx *context.APIContext) {
|
|||||||
|
|
||||||
var err error
|
var err error
|
||||||
if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil {
|
if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil {
|
||||||
ctx.InternalServerError(err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -554,7 +554,7 @@ func ListMyTrackedTimes(ctx *context.APIContext) {
|
|||||||
|
|
||||||
var err error
|
var err error
|
||||||
if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil {
|
if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil {
|
||||||
ctx.InternalServerError(err)
|
ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -201,6 +201,12 @@ func CreatePullRequest(ctx *context.APIContext, form api.CreatePullRequestOption
|
|||||||
// "422":
|
// "422":
|
||||||
// "$ref": "#/responses/validationError"
|
// "$ref": "#/responses/validationError"
|
||||||
|
|
||||||
|
if form.Head == form.Base {
|
||||||
|
ctx.Error(http.StatusUnprocessableEntity, "BaseHeadSame",
|
||||||
|
"Invalid PullRequest: There are no changes between the head and the base")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
repo = ctx.Repo.Repository
|
repo = ctx.Repo.Repository
|
||||||
labelIDs []int64
|
labelIDs []int64
|
||||||
|
|||||||
@@ -256,6 +256,12 @@ func CreateUserRepo(ctx *context.APIContext, owner *models.User, opt api.CreateR
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// reload repo from db to get a real state after creation
|
||||||
|
repo, err = models.GetRepositoryByID(repo.ID)
|
||||||
|
if err != nil {
|
||||||
|
ctx.Error(http.StatusInternalServerError, "GetRepositoryByID", err)
|
||||||
|
}
|
||||||
|
|
||||||
ctx.JSON(http.StatusCreated, repo.APIFormat(models.AccessModeOwner))
|
ctx.JSON(http.StatusCreated, repo.APIFormat(models.AccessModeOwner))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
package utils
|
package utils
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/url"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@@ -15,30 +16,49 @@ import (
|
|||||||
|
|
||||||
// GetQueryBeforeSince return parsed time (unix format) from URL query's before and since
|
// GetQueryBeforeSince return parsed time (unix format) from URL query's before and since
|
||||||
func GetQueryBeforeSince(ctx *context.APIContext) (before, since int64, err error) {
|
func GetQueryBeforeSince(ctx *context.APIContext) (before, since int64, err error) {
|
||||||
qCreatedBefore := strings.Trim(ctx.Query("before"), " ")
|
qCreatedBefore, err := prepareQueryArg(ctx, "before")
|
||||||
if qCreatedBefore != "" {
|
|
||||||
createdBefore, err := time.Parse(time.RFC3339, qCreatedBefore)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, 0, err
|
return 0, 0, err
|
||||||
}
|
}
|
||||||
if !createdBefore.IsZero() {
|
|
||||||
before = createdBefore.Unix()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
qCreatedAfter := strings.Trim(ctx.Query("since"), " ")
|
qCreatedSince, err := prepareQueryArg(ctx, "since")
|
||||||
if qCreatedAfter != "" {
|
|
||||||
createdAfter, err := time.Parse(time.RFC3339, qCreatedAfter)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, 0, err
|
return 0, 0, err
|
||||||
}
|
}
|
||||||
if !createdAfter.IsZero() {
|
|
||||||
since = createdAfter.Unix()
|
before, err = parseTime(qCreatedBefore)
|
||||||
|
if err != nil {
|
||||||
|
return 0, 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
since, err = parseTime(qCreatedSince)
|
||||||
|
if err != nil {
|
||||||
|
return 0, 0, err
|
||||||
}
|
}
|
||||||
return before, since, nil
|
return before, since, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// parseTime parse time and return unix timestamp
|
||||||
|
func parseTime(value string) (int64, error) {
|
||||||
|
if len(value) != 0 {
|
||||||
|
t, err := time.Parse(time.RFC3339, value)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if !t.IsZero() {
|
||||||
|
return t.Unix(), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// prepareQueryArg unescape and trim a query arg
|
||||||
|
func prepareQueryArg(ctx *context.APIContext, name string) (value string, err error) {
|
||||||
|
value, err = url.PathUnescape(ctx.Query(name))
|
||||||
|
value = strings.Trim(value, " ")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// GetListOptions returns list options using the page and limit parameters
|
// GetListOptions returns list options using the page and limit parameters
|
||||||
func GetListOptions(ctx *context.APIContext) models.ListOptions {
|
func GetListOptions(ctx *context.APIContext) models.ListOptions {
|
||||||
return models.ListOptions{
|
return models.ListOptions{
|
||||||
|
|||||||
@@ -61,6 +61,12 @@ func ServNoCommand(ctx *macaron.Context) {
|
|||||||
})
|
})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
if !user.IsActive || user.ProhibitLogin {
|
||||||
|
ctx.JSON(http.StatusForbidden, map[string]interface{}{
|
||||||
|
"err": "Your account is disabled.",
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
results.Owner = user
|
results.Owner = user
|
||||||
}
|
}
|
||||||
ctx.JSON(http.StatusOK, &results)
|
ctx.JSON(http.StatusOK, &results)
|
||||||
@@ -98,9 +104,28 @@ func ServCommand(ctx *macaron.Context) {
|
|||||||
results.RepoName = repoName[:len(repoName)-5]
|
results.RepoName = repoName[:len(repoName)-5]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
owner, err := models.GetUserByName(results.OwnerName)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to get repository owner: %s/%s Error: %v", results.OwnerName, results.RepoName, err)
|
||||||
|
ctx.JSON(http.StatusInternalServerError, map[string]interface{}{
|
||||||
|
"results": results,
|
||||||
|
"type": "InternalServerError",
|
||||||
|
"err": fmt.Sprintf("Unable to get repository owner: %s/%s %v", results.OwnerName, results.RepoName, err),
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !owner.IsActive {
|
||||||
|
ctx.JSON(http.StatusForbidden, map[string]interface{}{
|
||||||
|
"results": results,
|
||||||
|
"type": "ForbiddenError",
|
||||||
|
"err": "Repository cannot be accessed, you could retry it later",
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Now get the Repository and set the results section
|
// Now get the Repository and set the results section
|
||||||
repoExist := true
|
repoExist := true
|
||||||
repo, err := models.GetRepositoryByOwnerAndName(results.OwnerName, results.RepoName)
|
repo, err := models.GetRepositoryByName(owner.ID, results.RepoName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if models.IsErrRepoNotExist(err) {
|
if models.IsErrRepoNotExist(err) {
|
||||||
repoExist = false
|
repoExist = false
|
||||||
@@ -127,6 +152,7 @@ func ServCommand(ctx *macaron.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if repoExist {
|
if repoExist {
|
||||||
|
repo.Owner = owner
|
||||||
repo.OwnerName = ownerName
|
repo.OwnerName = ownerName
|
||||||
results.RepoID = repo.ID
|
results.RepoID = repo.ID
|
||||||
|
|
||||||
@@ -238,6 +264,14 @@ func ServCommand(ctx *macaron.Context) {
|
|||||||
})
|
})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if !user.IsActive || user.ProhibitLogin {
|
||||||
|
ctx.JSON(http.StatusForbidden, map[string]interface{}{
|
||||||
|
"err": "Your account is disabled.",
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
results.UserName = user.Name
|
results.UserName = user.Name
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -104,6 +104,10 @@ func HTTP(ctx *context.Context) {
|
|||||||
ctx.NotFoundOrServerError("GetUserByName", models.IsErrUserNotExist, err)
|
ctx.NotFoundOrServerError("GetUserByName", models.IsErrUserNotExist, err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
if !owner.IsActive {
|
||||||
|
ctx.HandleText(http.StatusForbidden, "Repository cannot be accessed. You cannot push or open issues/pull-requests.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
repoExist := true
|
repoExist := true
|
||||||
repo, err := models.GetRepositoryByName(owner.ID, reponame)
|
repo, err := models.GetRepositoryByName(owner.ID, reponame)
|
||||||
@@ -243,6 +247,11 @@ func HTTP(ctx *context.Context) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if !authUser.IsActive || authUser.ProhibitLogin {
|
||||||
|
ctx.HandleText(http.StatusForbidden, "Your account is disabled.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if repoExist {
|
if repoExist {
|
||||||
perm, err := models.GetUserRepoPermission(repo, authUser)
|
perm, err := models.GetUserRepoPermission(repo, authUser)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -1956,7 +1956,7 @@ func updateAttachments(item interface{}, files []string) error {
|
|||||||
case *models.Comment:
|
case *models.Comment:
|
||||||
attachments = content.Attachments
|
attachments = content.Attachments
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("Unknow Type")
|
return fmt.Errorf("Unknown Type: %T", content)
|
||||||
}
|
}
|
||||||
for i := 0; i < len(attachments); i++ {
|
for i := 0; i < len(attachments); i++ {
|
||||||
if util.IsStringInSlice(attachments[i].UUID, files) {
|
if util.IsStringInSlice(attachments[i].UUID, files) {
|
||||||
@@ -1974,7 +1974,7 @@ func updateAttachments(item interface{}, files []string) error {
|
|||||||
case *models.Comment:
|
case *models.Comment:
|
||||||
err = content.UpdateAttachments(files)
|
err = content.UpdateAttachments(files)
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("Unknow Type")
|
return fmt.Errorf("Unknown Type: %T", content)
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -1986,7 +1986,7 @@ func updateAttachments(item interface{}, files []string) error {
|
|||||||
case *models.Comment:
|
case *models.Comment:
|
||||||
content.Attachments, err = models.GetAttachmentsByCommentID(content.ID)
|
content.Attachments, err = models.GetAttachmentsByCommentID(content.ID)
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("Unknow Type")
|
return fmt.Errorf("Unknown Type: %T", content)
|
||||||
}
|
}
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -132,7 +132,7 @@ func SingleRelease(ctx *context.Context) {
|
|||||||
writeAccess := ctx.Repo.CanWrite(models.UnitTypeReleases)
|
writeAccess := ctx.Repo.CanWrite(models.UnitTypeReleases)
|
||||||
ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived
|
ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived
|
||||||
|
|
||||||
release, err := models.GetRelease(ctx.Repo.Repository.ID, ctx.Params("tag"))
|
release, err := models.GetRelease(ctx.Repo.Repository.ID, ctx.Params("*"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if models.IsErrReleaseNotExist(err) {
|
if models.IsErrReleaseNotExist(err) {
|
||||||
ctx.NotFound("GetRelease", err)
|
ctx.NotFound("GetRelease", err)
|
||||||
|
|||||||
@@ -185,8 +185,8 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) {
|
|||||||
|
|
||||||
address = u.String()
|
address = u.String()
|
||||||
|
|
||||||
if err := mirror_service.SaveAddress(ctx.Repo.Mirror, address); err != nil {
|
if err := mirror_service.UpdateAddress(ctx.Repo.Mirror, address); err != nil {
|
||||||
ctx.ServerError("SaveAddress", err)
|
ctx.ServerError("UpdateAddress", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -814,9 +814,9 @@ func RegisterRoutes(m *macaron.Macaron) {
|
|||||||
m.Group("/:username/:reponame", func() {
|
m.Group("/:username/:reponame", func() {
|
||||||
m.Group("/releases", func() {
|
m.Group("/releases", func() {
|
||||||
m.Get("/", repo.Releases)
|
m.Get("/", repo.Releases)
|
||||||
m.Get("/tag/:tag", repo.SingleRelease)
|
m.Get("/tag/*", repo.SingleRelease)
|
||||||
m.Get("/latest", repo.LatestRelease)
|
m.Get("/latest", repo.LatestRelease)
|
||||||
}, repo.MustBeNotEmpty, context.RepoRef())
|
}, repo.MustBeNotEmpty, context.RepoRefByType(context.RepoRefTag))
|
||||||
m.Group("/releases", func() {
|
m.Group("/releases", func() {
|
||||||
m.Get("/new", repo.NewRelease)
|
m.Get("/new", repo.NewRelease)
|
||||||
m.Post("/new", bindIgnErr(auth.NewReleaseForm{}), repo.NewReleasePost)
|
m.Post("/new", bindIgnErr(auth.NewReleaseForm{}), repo.NewReleasePost)
|
||||||
|
|||||||
@@ -174,6 +174,7 @@ func NotificationStatusPost(c *context.Context) {
|
|||||||
if c.Written() {
|
if c.Written() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
c.Data["Link"] = fmt.Sprintf("%snotifications", setting.AppURL)
|
||||||
|
|
||||||
c.HTML(http.StatusOK, tplNotificationDiv)
|
c.HTML(http.StatusOK, tplNotificationDiv)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ package user
|
|||||||
import (
|
import (
|
||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"html"
|
||||||
"net/url"
|
"net/url"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -271,8 +272,8 @@ func AuthorizeOAuth(ctx *context.Context, form auth.AuthorizationForm) {
|
|||||||
ctx.Data["Application"] = app
|
ctx.Data["Application"] = app
|
||||||
ctx.Data["RedirectURI"] = form.RedirectURI
|
ctx.Data["RedirectURI"] = form.RedirectURI
|
||||||
ctx.Data["State"] = form.State
|
ctx.Data["State"] = form.State
|
||||||
ctx.Data["ApplicationUserLink"] = "<a href=\"" + setting.AppURL + app.User.LowerName + "\">@" + app.User.Name + "</a>"
|
ctx.Data["ApplicationUserLink"] = "<a href=\"" + html.EscapeString(setting.AppURL) + html.EscapeString(url.PathEscape(app.User.LowerName)) + "\">@" + html.EscapeString(app.User.Name) + "</a>"
|
||||||
ctx.Data["ApplicationRedirectDomainHTML"] = "<strong>" + form.RedirectURI + "</strong>"
|
ctx.Data["ApplicationRedirectDomainHTML"] = "<strong>" + html.EscapeString(form.RedirectURI) + "</strong>"
|
||||||
// TODO document SESSION <=> FORM
|
// TODO document SESSION <=> FORM
|
||||||
err = ctx.Session.Set("client_id", app.ClientID)
|
err = ctx.Session.Set("client_id", app.ClientID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -90,7 +90,6 @@ func ProfilePost(ctx *context.Context, form auth.UpdateProfileForm) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
ctx.User.FullName = form.FullName
|
ctx.User.FullName = form.FullName
|
||||||
ctx.User.Email = form.Email
|
|
||||||
ctx.User.KeepEmailPrivate = form.KeepEmailPrivate
|
ctx.User.KeepEmailPrivate = form.KeepEmailPrivate
|
||||||
ctx.User.Website = form.Website
|
ctx.User.Website = form.Website
|
||||||
ctx.User.Location = form.Location
|
ctx.User.Location = form.Location
|
||||||
|
|||||||
@@ -53,6 +53,7 @@ const (
|
|||||||
DiffFileChange
|
DiffFileChange
|
||||||
DiffFileDel
|
DiffFileDel
|
||||||
DiffFileRename
|
DiffFileRename
|
||||||
|
DiffFileCopy
|
||||||
)
|
)
|
||||||
|
|
||||||
// DiffLineExpandDirection represents the DiffLineSection expand direction
|
// DiffLineExpandDirection represents the DiffLineSection expand direction
|
||||||
@@ -404,91 +405,253 @@ func (diff *Diff) NumFiles() int {
|
|||||||
|
|
||||||
const cmdDiffHead = "diff --git "
|
const cmdDiffHead = "diff --git "
|
||||||
|
|
||||||
// ParsePatch builds a Diff object from a io.Reader and some
|
// ParsePatch builds a Diff object from a io.Reader and some parameters.
|
||||||
// parameters.
|
|
||||||
// TODO: move this function to gogits/git-module
|
|
||||||
func ParsePatch(maxLines, maxLineCharacters, maxFiles int, reader io.Reader) (*Diff, error) {
|
func ParsePatch(maxLines, maxLineCharacters, maxFiles int, reader io.Reader) (*Diff, error) {
|
||||||
var (
|
var curFile *DiffFile
|
||||||
diff = &Diff{Files: make([]*DiffFile, 0)}
|
|
||||||
|
|
||||||
curFile = &DiffFile{}
|
diff := &Diff{Files: make([]*DiffFile, 0)}
|
||||||
curSection = &DiffSection{
|
|
||||||
Lines: make([]*DiffLine, 0, 10),
|
sb := strings.Builder{}
|
||||||
|
|
||||||
|
// OK let's set a reasonable buffer size.
|
||||||
|
// This should be let's say at least the size of maxLineCharacters or 4096 whichever is larger.
|
||||||
|
readerSize := maxLineCharacters
|
||||||
|
if readerSize < 4096 {
|
||||||
|
readerSize = 4096
|
||||||
}
|
}
|
||||||
|
|
||||||
leftLine, rightLine int
|
input := bufio.NewReaderSize(reader, readerSize)
|
||||||
lineCount int
|
line, err := input.ReadString('\n')
|
||||||
|
if err != nil {
|
||||||
|
if err == io.EOF {
|
||||||
|
return diff, nil
|
||||||
|
}
|
||||||
|
return diff, err
|
||||||
|
}
|
||||||
|
parsingLoop:
|
||||||
|
for {
|
||||||
|
// 1. A patch file always begins with `diff --git ` + `a/path b/path` (possibly quoted)
|
||||||
|
// if it does not we have bad input!
|
||||||
|
if !strings.HasPrefix(line, cmdDiffHead) {
|
||||||
|
return diff, fmt.Errorf("Invalid first file line: %s", line)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Handle skipping first n files
|
||||||
|
if len(diff.Files) >= maxFiles {
|
||||||
|
diff.IsIncomplete = true
|
||||||
|
_, err := io.Copy(ioutil.Discard, reader)
|
||||||
|
if err != nil {
|
||||||
|
// By the definition of io.Copy this never returns io.EOF
|
||||||
|
return diff, fmt.Errorf("Copy: %v", err)
|
||||||
|
}
|
||||||
|
break parsingLoop
|
||||||
|
}
|
||||||
|
|
||||||
|
curFile = createDiffFile(diff, line)
|
||||||
|
diff.Files = append(diff.Files, curFile)
|
||||||
|
|
||||||
|
// 2. It is followed by one or more extended header lines:
|
||||||
|
//
|
||||||
|
// old mode <mode>
|
||||||
|
// new mode <mode>
|
||||||
|
// deleted file mode <mode>
|
||||||
|
// new file mode <mode>
|
||||||
|
// copy from <path>
|
||||||
|
// copy to <path>
|
||||||
|
// rename from <path>
|
||||||
|
// rename to <path>
|
||||||
|
// similarity index <number>
|
||||||
|
// dissimilarity index <number>
|
||||||
|
// index <hash>..<hash> <mode>
|
||||||
|
//
|
||||||
|
// * <mode> 6-digit octal numbers including the file type and file permission bits.
|
||||||
|
// * <path> does not include the a/ and b/ prefixes
|
||||||
|
// * <number> percentage of unchanged lines for similarity, percentage of changed
|
||||||
|
// lines dissimilarity as integer rounded down with terminal %. 100% => equal files.
|
||||||
|
// * The index line includes the blob object names before and after the change.
|
||||||
|
// The <mode> is included if the file mode does not change; otherwise, separate
|
||||||
|
// lines indicate the old and the new mode.
|
||||||
|
// 3. Following this header the "standard unified" diff format header may be encountered: (but not for every case...)
|
||||||
|
//
|
||||||
|
// --- a/<path>
|
||||||
|
// +++ b/<path>
|
||||||
|
//
|
||||||
|
// With multiple hunks
|
||||||
|
//
|
||||||
|
// @@ <hunk descriptor> @@
|
||||||
|
// +added line
|
||||||
|
// -removed line
|
||||||
|
// unchanged line
|
||||||
|
//
|
||||||
|
// 4. Binary files get:
|
||||||
|
//
|
||||||
|
// Binary files a/<path> and b/<path> differ
|
||||||
|
//
|
||||||
|
// but one of a/<path> and b/<path> could be /dev/null.
|
||||||
|
curFileLoop:
|
||||||
|
for {
|
||||||
|
line, err = input.ReadString('\n')
|
||||||
|
if err != nil {
|
||||||
|
if err != io.EOF {
|
||||||
|
return diff, err
|
||||||
|
}
|
||||||
|
break parsingLoop
|
||||||
|
}
|
||||||
|
switch {
|
||||||
|
case strings.HasPrefix(line, cmdDiffHead):
|
||||||
|
break curFileLoop
|
||||||
|
case strings.HasPrefix(line, "old mode ") ||
|
||||||
|
strings.HasPrefix(line, "new mode "):
|
||||||
|
if strings.HasSuffix(line, " 160000\n") {
|
||||||
|
curFile.IsSubmodule = true
|
||||||
|
}
|
||||||
|
case strings.HasPrefix(line, "copy from "):
|
||||||
|
curFile.IsRenamed = true
|
||||||
|
curFile.Type = DiffFileCopy
|
||||||
|
case strings.HasPrefix(line, "copy to "):
|
||||||
|
curFile.IsRenamed = true
|
||||||
|
curFile.Type = DiffFileCopy
|
||||||
|
case strings.HasPrefix(line, "new file"):
|
||||||
|
curFile.Type = DiffFileAdd
|
||||||
|
curFile.IsCreated = true
|
||||||
|
if strings.HasSuffix(line, " 160000\n") {
|
||||||
|
curFile.IsSubmodule = true
|
||||||
|
}
|
||||||
|
case strings.HasPrefix(line, "deleted"):
|
||||||
|
curFile.Type = DiffFileDel
|
||||||
|
curFile.IsDeleted = true
|
||||||
|
if strings.HasSuffix(line, " 160000\n") {
|
||||||
|
curFile.IsSubmodule = true
|
||||||
|
}
|
||||||
|
case strings.HasPrefix(line, "index"):
|
||||||
|
if strings.HasSuffix(line, " 160000\n") {
|
||||||
|
curFile.IsSubmodule = true
|
||||||
|
}
|
||||||
|
case strings.HasPrefix(line, "similarity index 100%"):
|
||||||
|
curFile.Type = DiffFileRename
|
||||||
|
case strings.HasPrefix(line, "Binary"):
|
||||||
|
curFile.IsBin = true
|
||||||
|
case strings.HasPrefix(line, "--- "):
|
||||||
|
// Do nothing with this line
|
||||||
|
case strings.HasPrefix(line, "+++ "):
|
||||||
|
// Do nothing with this line
|
||||||
|
lineBytes, isFragment, err := parseHunks(curFile, maxLines, maxLineCharacters, input)
|
||||||
|
diff.TotalAddition += curFile.Addition
|
||||||
|
diff.TotalDeletion += curFile.Deletion
|
||||||
|
if err != nil {
|
||||||
|
if err != io.EOF {
|
||||||
|
return diff, err
|
||||||
|
}
|
||||||
|
break parsingLoop
|
||||||
|
}
|
||||||
|
sb.Reset()
|
||||||
|
_, _ = sb.Write(lineBytes)
|
||||||
|
for isFragment {
|
||||||
|
lineBytes, isFragment, err = input.ReadLine()
|
||||||
|
if err != nil {
|
||||||
|
// Now by the definition of ReadLine this cannot be io.EOF
|
||||||
|
return diff, fmt.Errorf("Unable to ReadLine: %v", err)
|
||||||
|
}
|
||||||
|
_, _ = sb.Write(lineBytes)
|
||||||
|
}
|
||||||
|
line = sb.String()
|
||||||
|
sb.Reset()
|
||||||
|
|
||||||
|
break curFileLoop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: There are numerous issues with this:
|
||||||
|
// - we might want to consider detecting encoding while parsing but...
|
||||||
|
// - we're likely to fail to get the correct encoding here anyway as we won't have enough information
|
||||||
|
// - and this doesn't really account for changes in encoding
|
||||||
|
var buf bytes.Buffer
|
||||||
|
for _, f := range diff.Files {
|
||||||
|
buf.Reset()
|
||||||
|
for _, sec := range f.Sections {
|
||||||
|
for _, l := range sec.Lines {
|
||||||
|
if l.Type == DiffLineSection {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
buf.WriteString(l.Content[1:])
|
||||||
|
buf.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
charsetLabel, err := charset.DetectEncoding(buf.Bytes())
|
||||||
|
if charsetLabel != "UTF-8" && err == nil {
|
||||||
|
encoding, _ := stdcharset.Lookup(charsetLabel)
|
||||||
|
if encoding != nil {
|
||||||
|
d := encoding.NewDecoder()
|
||||||
|
for _, sec := range f.Sections {
|
||||||
|
for _, l := range sec.Lines {
|
||||||
|
if l.Type == DiffLineSection {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if c, _, err := transform.String(d, l.Content[1:]); err == nil {
|
||||||
|
l.Content = l.Content[0:1] + c
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return diff, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseHunks(curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio.Reader) (lineBytes []byte, isFragment bool, err error) {
|
||||||
|
sb := strings.Builder{}
|
||||||
|
|
||||||
|
var (
|
||||||
|
curSection *DiffSection
|
||||||
curFileLinesCount int
|
curFileLinesCount int
|
||||||
curFileLFSPrefix bool
|
curFileLFSPrefix bool
|
||||||
)
|
)
|
||||||
|
|
||||||
input := bufio.NewReader(reader)
|
leftLine, rightLine := 1, 1
|
||||||
isEOF := false
|
|
||||||
for !isEOF {
|
|
||||||
var linebuf bytes.Buffer
|
|
||||||
for {
|
for {
|
||||||
b, err := input.ReadByte()
|
sb.Reset()
|
||||||
|
lineBytes, isFragment, err = input.ReadLine()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if err == io.EOF {
|
if err == io.EOF {
|
||||||
isEOF = true
|
return
|
||||||
break
|
|
||||||
} else {
|
|
||||||
return nil, fmt.Errorf("ReadByte: %v", err)
|
|
||||||
}
|
}
|
||||||
|
err = fmt.Errorf("Unable to ReadLine: %v", err)
|
||||||
|
return
|
||||||
}
|
}
|
||||||
if b == '\n' {
|
if lineBytes[0] == 'd' {
|
||||||
break
|
// End of hunks
|
||||||
}
|
return
|
||||||
if linebuf.Len() < maxLineCharacters {
|
|
||||||
linebuf.WriteByte(b)
|
|
||||||
} else if linebuf.Len() == maxLineCharacters {
|
|
||||||
curFile.IsIncomplete = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
line := linebuf.String()
|
|
||||||
|
|
||||||
if strings.HasPrefix(line, "+++ ") || strings.HasPrefix(line, "--- ") || len(line) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
|
|
||||||
trimLine := strings.Trim(line, "+- ")
|
switch lineBytes[0] {
|
||||||
|
case '@':
|
||||||
if trimLine == models.LFSMetaFileIdentifier {
|
|
||||||
curFileLFSPrefix = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if curFileLFSPrefix && strings.HasPrefix(trimLine, models.LFSMetaFileOidPrefix) {
|
|
||||||
oid := strings.TrimPrefix(trimLine, models.LFSMetaFileOidPrefix)
|
|
||||||
|
|
||||||
if len(oid) == 64 {
|
|
||||||
m := &models.LFSMetaObject{Oid: oid}
|
|
||||||
count, err := models.Count(m)
|
|
||||||
|
|
||||||
if err == nil && count > 0 {
|
|
||||||
curFile.IsBin = true
|
|
||||||
curFile.IsLFSFile = true
|
|
||||||
curSection.Lines = nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
curFileLinesCount++
|
|
||||||
lineCount++
|
|
||||||
|
|
||||||
// Diff data too large, we only show the first about maxLines lines
|
|
||||||
if curFileLinesCount >= maxLines {
|
if curFileLinesCount >= maxLines {
|
||||||
curFile.IsIncomplete = true
|
curFile.IsIncomplete = true
|
||||||
}
|
|
||||||
switch {
|
|
||||||
case line[0] == ' ':
|
|
||||||
diffLine := &DiffLine{Type: DiffLinePlain, Content: line, LeftIdx: leftLine, RightIdx: rightLine}
|
|
||||||
leftLine++
|
|
||||||
rightLine++
|
|
||||||
curSection.Lines = append(curSection.Lines, diffLine)
|
|
||||||
continue
|
continue
|
||||||
case line[0] == '@':
|
}
|
||||||
|
|
||||||
|
_, _ = sb.Write(lineBytes)
|
||||||
|
for isFragment {
|
||||||
|
// This is very odd indeed - we're in a section header and the line is too long
|
||||||
|
// This really shouldn't happen...
|
||||||
|
lineBytes, isFragment, err = input.ReadLine()
|
||||||
|
if err != nil {
|
||||||
|
// Now by the definition of ReadLine this cannot be io.EOF
|
||||||
|
err = fmt.Errorf("Unable to ReadLine: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
_, _ = sb.Write(lineBytes)
|
||||||
|
}
|
||||||
|
line := sb.String()
|
||||||
|
|
||||||
|
// Create a new section to represent this hunk
|
||||||
curSection = &DiffSection{}
|
curSection = &DiffSection{}
|
||||||
curFile.Sections = append(curFile.Sections, curSection)
|
curFile.Sections = append(curFile.Sections, curSection)
|
||||||
|
|
||||||
lineSectionInfo := getDiffLineSectionInfo(curFile.Name, line, leftLine-1, rightLine-1)
|
lineSectionInfo := getDiffLineSectionInfo(curFile.Name, line, leftLine-1, rightLine-1)
|
||||||
diffLine := &DiffLine{
|
diffLine := &DiffLine{
|
||||||
Type: DiffLineSection,
|
Type: DiffLineSection,
|
||||||
@@ -500,134 +663,132 @@ func ParsePatch(maxLines, maxLineCharacters, maxFiles int, reader io.Reader) (*D
|
|||||||
leftLine = lineSectionInfo.LeftIdx
|
leftLine = lineSectionInfo.LeftIdx
|
||||||
rightLine = lineSectionInfo.RightIdx
|
rightLine = lineSectionInfo.RightIdx
|
||||||
continue
|
continue
|
||||||
case line[0] == '+':
|
case '\\':
|
||||||
|
if curFileLinesCount >= maxLines {
|
||||||
|
curFile.IsIncomplete = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// This is used only to indicate that the current file does not have a terminal newline
|
||||||
|
if !bytes.Equal(lineBytes, []byte("\\ No newline at end of file")) {
|
||||||
|
err = fmt.Errorf("Unexpected line in hunk: %s", string(lineBytes))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Technically this should be the end the file!
|
||||||
|
// FIXME: we should be putting a marker at the end of the file if there is no terminal new line
|
||||||
|
continue
|
||||||
|
case '+':
|
||||||
|
curFileLinesCount++
|
||||||
curFile.Addition++
|
curFile.Addition++
|
||||||
diff.TotalAddition++
|
if curFileLinesCount >= maxLines {
|
||||||
diffLine := &DiffLine{Type: DiffLineAdd, Content: line, RightIdx: rightLine}
|
curFile.IsIncomplete = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
diffLine := &DiffLine{Type: DiffLineAdd, RightIdx: rightLine}
|
||||||
rightLine++
|
rightLine++
|
||||||
curSection.Lines = append(curSection.Lines, diffLine)
|
curSection.Lines = append(curSection.Lines, diffLine)
|
||||||
continue
|
case '-':
|
||||||
case line[0] == '-':
|
curFileLinesCount++
|
||||||
curFile.Deletion++
|
curFile.Deletion++
|
||||||
diff.TotalDeletion++
|
if curFileLinesCount >= maxLines {
|
||||||
diffLine := &DiffLine{Type: DiffLineDel, Content: line, LeftIdx: leftLine}
|
curFile.IsIncomplete = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
diffLine := &DiffLine{Type: DiffLineDel, LeftIdx: leftLine}
|
||||||
if leftLine > 0 {
|
if leftLine > 0 {
|
||||||
leftLine++
|
leftLine++
|
||||||
}
|
}
|
||||||
curSection.Lines = append(curSection.Lines, diffLine)
|
curSection.Lines = append(curSection.Lines, diffLine)
|
||||||
case strings.HasPrefix(line, "Binary"):
|
case ' ':
|
||||||
curFile.IsBin = true
|
curFileLinesCount++
|
||||||
|
if curFileLinesCount >= maxLines {
|
||||||
|
curFile.IsIncomplete = true
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
diffLine := &DiffLine{Type: DiffLinePlain, LeftIdx: leftLine, RightIdx: rightLine}
|
||||||
|
leftLine++
|
||||||
|
rightLine++
|
||||||
|
curSection.Lines = append(curSection.Lines, diffLine)
|
||||||
|
default:
|
||||||
|
// This is unexpected
|
||||||
|
err = fmt.Errorf("Unexpected line in hunk: %s", string(lineBytes))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Get new file.
|
line := string(lineBytes)
|
||||||
if strings.HasPrefix(line, cmdDiffHead) {
|
if isFragment {
|
||||||
if len(diff.Files) >= maxFiles {
|
curFile.IsIncomplete = true
|
||||||
diff.IsIncomplete = true
|
for isFragment {
|
||||||
_, err := io.Copy(ioutil.Discard, reader)
|
lineBytes, isFragment, err = input.ReadLine()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("Copy: %v", err)
|
// Now by the definition of ReadLine this cannot be io.EOF
|
||||||
|
err = fmt.Errorf("Unable to ReadLine: %v", err)
|
||||||
|
return
|
||||||
}
|
}
|
||||||
break
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
curSection.Lines[len(curSection.Lines)-1].Content = line
|
||||||
|
|
||||||
// Note: In case file name is surrounded by double quotes (it happens only in git-shell).
|
// handle LFS
|
||||||
// e.g. diff --git "a/xxx" "b/xxx"
|
if line[1:] == models.LFSMetaFileIdentifier {
|
||||||
var a string
|
curFileLFSPrefix = true
|
||||||
var b string
|
} else if curFileLFSPrefix && strings.HasPrefix(line[1:], models.LFSMetaFileOidPrefix) {
|
||||||
|
oid := strings.TrimPrefix(line[1:], models.LFSMetaFileOidPrefix)
|
||||||
|
if len(oid) == 64 {
|
||||||
|
m := &models.LFSMetaObject{Oid: oid}
|
||||||
|
count, err := models.Count(m)
|
||||||
|
|
||||||
rd := strings.NewReader(line[len(cmdDiffHead):])
|
if err == nil && count > 0 {
|
||||||
char, _ := rd.ReadByte()
|
curFile.IsBin = true
|
||||||
_ = rd.UnreadByte()
|
curFile.IsLFSFile = true
|
||||||
if char == '"' {
|
curSection.Lines = nil
|
||||||
fmt.Fscanf(rd, "%q ", &a)
|
|
||||||
} else {
|
|
||||||
fmt.Fscanf(rd, "%s ", &a)
|
|
||||||
}
|
}
|
||||||
char, _ = rd.ReadByte()
|
|
||||||
_ = rd.UnreadByte()
|
|
||||||
if char == '"' {
|
|
||||||
fmt.Fscanf(rd, "%q", &b)
|
|
||||||
} else {
|
|
||||||
fmt.Fscanf(rd, "%s", &b)
|
|
||||||
}
|
}
|
||||||
a = a[2:]
|
}
|
||||||
b = b[2:]
|
}
|
||||||
|
}
|
||||||
|
|
||||||
curFile = &DiffFile{
|
func createDiffFile(diff *Diff, line string) *DiffFile {
|
||||||
Name: b,
|
// The a/ and b/ filenames are the same unless rename/copy is involved.
|
||||||
OldName: a,
|
// Especially, even for a creation or a deletion, /dev/null is not used
|
||||||
|
// in place of the a/ or b/ filenames.
|
||||||
|
//
|
||||||
|
// When rename/copy is involved, file1 and file2 show the name of the
|
||||||
|
// source file of the rename/copy and the name of the file that rename/copy
|
||||||
|
// produces, respectively.
|
||||||
|
//
|
||||||
|
// Path names are quoted if necessary.
|
||||||
|
//
|
||||||
|
// This means that you should always be able to determine the file name even when there
|
||||||
|
// there is potential ambiguity...
|
||||||
|
//
|
||||||
|
// but we can be simpler with our heuristics by just forcing git to prefix things nicely
|
||||||
|
curFile := &DiffFile{
|
||||||
Index: len(diff.Files) + 1,
|
Index: len(diff.Files) + 1,
|
||||||
Type: DiffFileChange,
|
Type: DiffFileChange,
|
||||||
Sections: make([]*DiffSection, 0, 10),
|
Sections: make([]*DiffSection, 0, 10),
|
||||||
IsRenamed: a != b,
|
|
||||||
}
|
|
||||||
diff.Files = append(diff.Files, curFile)
|
|
||||||
curFileLinesCount = 0
|
|
||||||
leftLine = 1
|
|
||||||
rightLine = 1
|
|
||||||
curFileLFSPrefix = false
|
|
||||||
|
|
||||||
// Check file diff type and is submodule.
|
|
||||||
for {
|
|
||||||
line, err := input.ReadString('\n')
|
|
||||||
if err != nil {
|
|
||||||
if err == io.EOF {
|
|
||||||
isEOF = true
|
|
||||||
} else {
|
|
||||||
return nil, fmt.Errorf("ReadString: %v", err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
switch {
|
rd := strings.NewReader(line[len(cmdDiffHead):] + " ")
|
||||||
case strings.HasPrefix(line, "new file"):
|
|
||||||
curFile.Type = DiffFileAdd
|
|
||||||
curFile.IsCreated = true
|
|
||||||
case strings.HasPrefix(line, "deleted"):
|
|
||||||
curFile.Type = DiffFileDel
|
|
||||||
curFile.IsDeleted = true
|
|
||||||
case strings.HasPrefix(line, "index"):
|
|
||||||
curFile.Type = DiffFileChange
|
curFile.Type = DiffFileChange
|
||||||
case strings.HasPrefix(line, "similarity index 100%"):
|
curFile.OldName = readFileName(rd)
|
||||||
curFile.Type = DiffFileRename
|
curFile.Name = readFileName(rd)
|
||||||
}
|
curFile.IsRenamed = curFile.Name != curFile.OldName
|
||||||
if curFile.Type > 0 {
|
return curFile
|
||||||
if strings.HasSuffix(line, " 160000\n") {
|
}
|
||||||
curFile.IsSubmodule = true
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// FIXME: detect encoding while parsing.
|
func readFileName(rd *strings.Reader) string {
|
||||||
var buf bytes.Buffer
|
var name string
|
||||||
for _, f := range diff.Files {
|
char, _ := rd.ReadByte()
|
||||||
buf.Reset()
|
_ = rd.UnreadByte()
|
||||||
for _, sec := range f.Sections {
|
if char == '"' {
|
||||||
for _, l := range sec.Lines {
|
fmt.Fscanf(rd, "%q ", &name)
|
||||||
buf.WriteString(l.Content)
|
if name[0] == '\\' {
|
||||||
buf.WriteString("\n")
|
name = name[1:]
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Fscanf(rd, "%s ", &name)
|
||||||
}
|
}
|
||||||
charsetLabel, err := charset.DetectEncoding(buf.Bytes())
|
return name[2:]
|
||||||
if charsetLabel != "UTF-8" && err == nil {
|
|
||||||
encoding, _ := stdcharset.Lookup(charsetLabel)
|
|
||||||
if encoding != nil {
|
|
||||||
d := encoding.NewDecoder()
|
|
||||||
for _, sec := range f.Sections {
|
|
||||||
for _, l := range sec.Lines {
|
|
||||||
if c, _, err := transform.String(d, l.Content); err == nil {
|
|
||||||
l.Content = c
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return diff, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetDiffRange builds a Diff between two commits of a repository.
|
// GetDiffRange builds a Diff between two commits of a repository.
|
||||||
@@ -656,15 +817,22 @@ func GetDiffRangeWithWhitespaceBehavior(repoPath, beforeCommitID, afterCommitID
 	ctx, cancel := context.WithCancel(git.DefaultContext)
 	defer cancel()
 	var cmd *exec.Cmd
-	if len(beforeCommitID) == 0 && commit.ParentCount() == 0 {
-		cmd = exec.CommandContext(ctx, git.GitExecutable, "show", afterCommitID)
+	if (len(beforeCommitID) == 0 || beforeCommitID == git.EmptySHA) && commit.ParentCount() == 0 {
+		diffArgs := []string{"diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M"}
+		if len(whitespaceBehavior) != 0 {
+			diffArgs = append(diffArgs, whitespaceBehavior)
+		}
+		// append empty tree ref
+		diffArgs = append(diffArgs, "4b825dc642cb6eb9a060e54bf8d69288fbee4904")
+		diffArgs = append(diffArgs, afterCommitID)
+		cmd = exec.CommandContext(ctx, git.GitExecutable, diffArgs...)
 	} else {
 		actualBeforeCommitID := beforeCommitID
 		if len(actualBeforeCommitID) == 0 {
 			parentCommit, _ := commit.Parent(0)
 			actualBeforeCommitID = parentCommit.ID.String()
 		}
-		diffArgs := []string{"diff", "-M"}
+		diffArgs := []string{"diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M"}
 		if len(whitespaceBehavior) != 0 {
 			diffArgs = append(diffArgs, whitespaceBehavior)
 		}
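A small sketch of the idea used in the hunk above: for a commit with no parents, `git diff` is run against git's constant empty-tree object `4b825dc642cb6eb9a060e54bf8d69288fbee4904`, so the whole commit shows up as additions. The repository path and helper name below are made up for illustration.

```go
package main

import (
	"fmt"
	"os/exec"
)

// emptyTree is the SHA-1 of git's well-known empty tree object.
const emptyTree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"

// initialCommitDiff produces a unified diff of an initial (parentless) commit.
func initialCommitDiff(repoPath, commitID string) ([]byte, error) {
	args := []string{"-C", repoPath, "diff", "-M", emptyTree, commitID}
	return exec.Command("git", args...).Output()
}

func main() {
	out, err := initialCommitDiff("/tmp/some-repo", "HEAD") // hypothetical repository path
	if err != nil {
		fmt.Println("git diff failed:", err)
		return
	}
	fmt.Printf("%s", out)
}
```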
@@ -6,6 +6,7 @@
 package gitdiff

 import (
+	"encoding/json"
 	"fmt"
 	"html/template"
 	"strings"
@@ -41,7 +42,169 @@ func TestDiffToHTML(t *testing.T) {
 	}, DiffLineDel))
 }

-func TestParsePatch(t *testing.T) {
+func TestParsePatch_singlefile(t *testing.T) {
+	type testcase struct {
+		name        string
+		gitdiff     string
+		wantErr     bool
+		addition    int
+		deletion    int
+		oldFilename string
+		filename    string
+	}
+
+	tests := []testcase{
+		{
+			name: "readme.md2readme.md",
+			gitdiff: `diff --git "\\a/README.md" "\\b/README.md"
+--- "\\a/README.md"
++++ "\\b/README.md"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off
+`,
+			addition:    4,
+			deletion:    1,
+			filename:    "README.md",
+			oldFilename: "README.md",
+		},
+		{
+			name: "A \\ B",
+			gitdiff: `diff --git "a/A \\ B" "b/A \\ B"
+--- "a/A \\ B"
++++ "b/A \\ B"
+@@ -1,3 +1,6 @@
+ # gitea-github-migrator
++
++ Build Status
+- Latest Release
+ Docker Pulls
++ cut off
++ cut off`,
+			addition:    4,
+			deletion:    1,
+			filename:    "A \\ B",
+			oldFilename: "A \\ B",
+		},
+		{
+			name: "really weird filename",
+			gitdiff: `diff --git "\\a/a b/file b/a a/file" "\\b/a b/file b/a a/file"
+index d2186f1..f5c8ed2 100644
+--- "\\a/a b/file b/a a/file"
++++ "\\b/a b/file b/a a/file"
+@@ -1,3 +1,2 @@
+ Create a weird file.
+
+-and what does diff do here?
+\ No newline at end of file`,
+			addition:    0,
+			deletion:    1,
+			filename:    "a b/file b/a a/file",
+			oldFilename: "a b/file b/a a/file",
+		},
+		{
+			name: "delete file with blanks",
+			gitdiff: `diff --git "\\a/file with blanks" "\\b/file with blanks"
+deleted file mode 100644
+index 898651a..0000000
+--- "\\a/file with blanks"
++++ /dev/null
+@@ -1,5 +0,0 @@
+-a blank file
+-
+-has a couple o line
+-
+-the 5th line is the last
+`,
+			addition:    0,
+			deletion:    5,
+			filename:    "file with blanks",
+			oldFilename: "file with blanks",
+		},
+		{
+			name: "rename a—as",
+			gitdiff: `diff --git "a/\360\243\220\265b\342\200\240vs" "b/a\342\200\224as"
+similarity index 100%
+rename from "\360\243\220\265b\342\200\240vs"
+rename to "a\342\200\224as"
+`,
+			addition:    0,
+			deletion:    0,
+			oldFilename: "𣐵b†vs",
+			filename:    "a—as",
+		},
+		{
+			name: "rename with spaces",
+			gitdiff: `diff --git "\\a/a b/file b/a a/file" "\\b/a b/a a/file b/b file"
+similarity index 100%
+rename from a b/file b/a a/file
+rename to a b/a a/file b/b file
+`,
+			oldFilename: "a b/file b/a a/file",
+			filename:    "a b/a a/file b/b file",
+		},
+		{
+			name: "minuses-and-pluses",
+			gitdiff: `diff --git a/minuses-and-pluses b/minuses-and-pluses
+index 6961180..9ba1a00 100644
+--- a/minuses-and-pluses
++++ b/minuses-and-pluses
+@@ -1,4 +1,4 @@
+--- 1st line
+-++ 2nd line
+--- 3rd line
+-++ 4th line
++++ 1st line
++-- 2nd line
++++ 3rd line
++-- 4th line
+`,
+			oldFilename: "minuses-and-pluses",
+			filename:    "minuses-and-pluses",
+			addition:    4,
+			deletion:    4,
+		},
+	}
+
+	for _, testcase := range tests {
+		t.Run(testcase.name, func(t *testing.T) {
+			got, err := ParsePatch(setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff))
+			if (err != nil) != testcase.wantErr {
+				t.Errorf("ParsePatch() error = %v, wantErr %v", err, testcase.wantErr)
+				return
+			}
+			gotMarshaled, _ := json.MarshalIndent(got, " ", " ")
+			if got.NumFiles() != 1 {
+				t.Errorf("ParsePath() did not receive 1 file:\n%s", string(gotMarshaled))
+				return
+			}
+			if got.TotalAddition != testcase.addition {
+				t.Errorf("ParsePath() does not have correct totalAddition %d, wanted %d", got.TotalAddition, testcase.addition)
+			}
+			if got.TotalDeletion != testcase.deletion {
+				t.Errorf("ParsePath() did not have correct totalDeletion %d, wanted %d", got.TotalDeletion, testcase.deletion)
+			}
+			file := got.Files[0]
+			if file.Addition != testcase.addition {
+				t.Errorf("ParsePath() does not have correct file addition %d, wanted %d", file.Addition, testcase.addition)
+			}
+			if file.Deletion != testcase.deletion {
+				t.Errorf("ParsePath() did not have correct file deletion %d, wanted %d", file.Deletion, testcase.deletion)
+			}
+			if file.OldName != testcase.oldFilename {
+				t.Errorf("ParsePath() did not have correct OldName %s, wanted %s", file.OldName, testcase.oldFilename)
+			}
+			if file.Name != testcase.filename {
+				t.Errorf("ParsePath() did not have correct Name %s, wanted %s", file.Name, testcase.filename)
+			}
+		})
+	}

 var diff = `diff --git "a/README.md" "b/README.md"
 --- a/README.md
 +++ b/README.md
@@ -118,7 +118,7 @@ func mailIssueCommentBatch(ctx *mailCommentContext, ids []int64, visited map[int
 			visited[id] = true
 		}
 	}
-	recipients, err := models.GetMaileableUsersByIDs(unique)
+	recipients, err := models.GetMaileableUsersByIDs(unique, fromMention)
 	if err != nil {
 		return err
 	}
|
|||||||
return u.String()
|
return u.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
// SaveAddress writes new address to Git repository config.
|
// UpdateAddress writes new address to Git repository and database
|
||||||
func SaveAddress(m *models.Mirror, addr string) error {
|
func UpdateAddress(m *models.Mirror, addr string) error {
|
||||||
repoPath := m.Repo.RepoPath()
|
repoPath := m.Repo.RepoPath()
|
||||||
// Remove old origin
|
// Remove old origin
|
||||||
_, err := git.NewCommand("remote", "rm", "origin").RunInDir(repoPath)
|
_, err := git.NewCommand("remote", "rm", "origin").RunInDir(repoPath)
|
||||||
@@ -99,8 +99,12 @@ func SaveAddress(m *models.Mirror, addr string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = git.NewCommand("remote", "add", "origin", "--mirror=fetch", addr).RunInDir(repoPath)
|
if _, err = git.NewCommand("remote", "add", "origin", "--mirror=fetch", addr).RunInDir(repoPath); err != nil {
|
||||||
return err
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
m.Repo.OriginalURL = addr
|
||||||
|
return models.UpdateRepositoryCols(m.Repo, "original_url")
|
||||||
}
|
}
|
||||||
|
|
||||||
// gitShortEmptySha Git short empty SHA
|
// gitShortEmptySha Git short empty SHA
|
||||||
|
|||||||
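A sketch of the git side of `UpdateAddress` above, with a made-up repository path: the mirror's `origin` remote is dropped and re-added with `--mirror=fetch` so later fetches track the new address.

```go
package main

import (
	"fmt"
	"os/exec"
)

// resetOrigin re-points a mirror repository's "origin" remote at addr.
func resetOrigin(repoPath, addr string) error {
	if out, err := exec.Command("git", "-C", repoPath, "remote", "rm", "origin").CombinedOutput(); err != nil {
		return fmt.Errorf("remote rm: %v\n%s", err, out)
	}
	if out, err := exec.Command("git", "-C", repoPath, "remote", "add", "origin", "--mirror=fetch", addr).CombinedOutput(); err != nil {
		return fmt.Errorf("remote add: %v\n%s", err, out)
	}
	return nil
}

func main() {
	// Hypothetical mirror repository and new upstream URL.
	if err := resetOrigin("/repos/mirror.git", "https://example.com/new/upstream.git"); err != nil {
		fmt.Println(err)
	}
}
```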
@@ -9,8 +9,6 @@ import (
 	"bytes"
 	"context"
 	"fmt"
-	"os"
-	"path"
 	"strings"
 	"time"

@@ -177,18 +175,6 @@ func checkForInvalidation(requests models.PullRequestList, repoID int64, doer *m
 	return nil
 }

-func addHeadRepoTasks(prs []*models.PullRequest) {
-	for _, pr := range prs {
-		log.Trace("addHeadRepoTasks[%d]: composing new test task", pr.ID)
-		if err := PushToBaseRepo(pr); err != nil {
-			log.Error("PushToBaseRepo: %v", err)
-			continue
-		}
-
-		AddToTaskQueue(pr)
-	}
-}
-
 // AddTestPullRequestTask adds new test tasks by given head/base repository and head/base branch,
 // and generate new patch for testing as needed.
 func AddTestPullRequestTask(doer *models.User, repoID int64, branch string, isSync bool, oldCommitID, newCommitID string) {
@@ -245,7 +231,15 @@ func AddTestPullRequestTask(doer *models.User, repoID int64, branch string, isSy
 		}
 	}

-	addHeadRepoTasks(prs)
+	for _, pr := range prs {
+		log.Trace("Updating PR[%d]: composing new test task", pr.ID)
+		if err := PushToBaseRepo(pr); err != nil {
+			log.Error("PushToBaseRepo: %v", err)
+			continue
+		}
+
+		AddToTaskQueue(pr)
+	}

 	log.Trace("AddTestPullRequestTask [base_repo_id: %d, base_branch: %s]: finding pull requests", repoID, branch)
 	prs, err = models.GetUnmergedPullRequestsByBaseInfo(repoID, branch)
@@ -345,54 +339,17 @@ func checkIfPRContentChanged(pr *models.PullRequest, oldCommitID, newCommitID st
 func PushToBaseRepo(pr *models.PullRequest) (err error) {
 	log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitRefName())

-	// Clone base repo.
-	tmpBasePath, err := models.CreateTemporaryPath("pull")
-	if err != nil {
-		log.Error("CreateTemporaryPath: %v", err)
-		return err
-	}
-	defer func() {
-		err := models.RemoveTemporaryPath(tmpBasePath)
-		if err != nil {
-			log.Error("Error whilst removing temporary path: %s Error: %v", tmpBasePath, err)
-		}
-	}()
-
 	if err := pr.LoadHeadRepo(); err != nil {
 		log.Error("Unable to load head repository for PR[%d] Error: %v", pr.ID, err)
 		return err
 	}
 	headRepoPath := pr.HeadRepo.RepoPath()

-	if err := git.Clone(headRepoPath, tmpBasePath, git.CloneRepoOptions{
-		Bare:   true,
-		Shared: true,
-		Branch: pr.HeadBranch,
-		Quiet:  true,
-	}); err != nil {
-		log.Error("git clone tmpBasePath: %v", err)
-		return err
-	}
-	gitRepo, err := git.OpenRepository(tmpBasePath)
-	if err != nil {
-		return fmt.Errorf("OpenRepository: %v", err)
-	}
-
 	if err := pr.LoadBaseRepo(); err != nil {
 		log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err)
 		return err
 	}
-	if err := gitRepo.AddRemote("base", pr.BaseRepo.RepoPath(), false); err != nil {
-		return fmt.Errorf("tmpGitRepo.AddRemote: %v", err)
-	}
-	defer gitRepo.Close()
-
-	headFile := pr.GetGitRefName()
-
-	// Remove head in case there is a conflict.
-	file := path.Join(pr.BaseRepo.RepoPath(), headFile)
-
-	_ = os.Remove(file)
+	baseRepoPath := pr.BaseRepo.RepoPath()

 	if err = pr.LoadIssue(); err != nil {
 		return fmt.Errorf("unable to load issue %d for pr %d: %v", pr.IssueID, pr.ID, err)
@@ -401,24 +358,26 @@ func PushToBaseRepo(pr *models.PullRequest) (err error) {
 		return fmt.Errorf("unable to load poster %d for pr %d: %v", pr.Issue.PosterID, pr.ID, err)
 	}

-	if err = git.Push(tmpBasePath, git.PushOptions{
-		Remote: "base",
-		Branch: fmt.Sprintf("%s:%s", pr.HeadBranch, headFile),
+	gitRefName := pr.GetGitRefName()
+
+	if err := git.Push(headRepoPath, git.PushOptions{
+		Remote: baseRepoPath,
+		Branch: pr.HeadBranch + ":" + gitRefName,
 		Force:  true,
 		// Use InternalPushingEnvironment here because we know that pre-receive and post-receive do not run on a refs/pulls/...
 		Env: models.InternalPushingEnvironment(pr.Issue.Poster, pr.BaseRepo),
 	}); err != nil {
 		if git.IsErrPushOutOfDate(err) {
 			// This should not happen as we're using force!
-			log.Error("Unable to push PR head for %s#%d (%-v:%s) due to ErrPushOfDate: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, headFile, err)
+			log.Error("Unable to push PR head for %s#%d (%-v:%s) due to ErrPushOfDate: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err)
 			return err
 		} else if git.IsErrPushRejected(err) {
 			rejectErr := err.(*git.ErrPushRejected)
-			log.Info("Unable to push PR head for %s#%d (%-v:%s) due to rejection:\nStdout: %s\nStderr: %s\nError: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, headFile, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err)
+			log.Info("Unable to push PR head for %s#%d (%-v:%s) due to rejection:\nStdout: %s\nStderr: %s\nError: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err)
 			return err
 		}
-		log.Error("Unable to push PR head for %s#%d (%-v:%s) due to Error: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, headFile, err)
-		return fmt.Errorf("Push: %s:%s %s:%s %v", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), headFile, err)
+		log.Error("Unable to push PR head for %s#%d (%-v:%s) due to Error: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err)
+		return fmt.Errorf("Push: %s:%s %s:%s %v", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), gitRefName, err)
 	}

 	return nil
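Sketch only (repository paths and pull index are made up): the rewritten `PushToBaseRepo` above pushes straight from the head repository to the base repository's pull ref instead of going through a temporary clone, which in plain git terms is a force-push of `<headBranch>:refs/pull/<index>/head`.

```go
package main

import (
	"fmt"
	"os/exec"
)

// pushPullRef force-pushes a head branch into the base repository's pull ref.
func pushPullRef(headRepoPath, baseRepoPath, headBranch string, index int64) error {
	refspec := fmt.Sprintf("%s:refs/pull/%d/head", headBranch, index)
	cmd := exec.Command("git", "-C", headRepoPath, "push", "--force", baseRepoPath, refspec)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("push %s: %v\n%s", refspec, err, out)
	}
	return nil
}

func main() {
	// Hypothetical bare repositories on the same host.
	if err := pushPullRef("/repos/fork.git", "/repos/base.git", "feature", 7); err != nil {
		fmt.Println(err)
	}
}
```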
@@ -8,6 +8,7 @@ package pull
 import (
 	"bytes"
 	"fmt"
+	"regexp"
 	"strings"

 	"code.gitea.io/gitea/models"
@@ -104,6 +105,8 @@ func CreateCodeComment(doer *models.User, gitRepo *git.Repository, issue *models
 	return comment, nil
 }

+var notEnoughLines = regexp.MustCompile(`exit status 128 - fatal: file .* has only \d+ lines?`)
+
 // createCodeComment creates a plain code comment at the specified line / path
 func createCodeComment(doer *models.User, repo *models.Repository, issue *models.Issue, content, treePath string, line, reviewID int64) (*models.Comment, error) {
 	var commitID, patch string
@@ -127,7 +130,7 @@ func createCodeComment(doer *models.User, repo *models.Repository, issue *models
 		commit, err := gitRepo.LineBlame(pr.GetGitRefName(), gitRepo.Path, treePath, uint(line))
 		if err == nil {
 			commitID = commit.ID.String()
-		} else if !strings.Contains(err.Error(), "exit status 128 - fatal: no such path") {
+		} else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
 			return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %v", pr.GetGitRefName(), gitRepo.Path, treePath, line, err)
 		}
 	}
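A quick sketch of what the `notEnoughLines` pattern above tolerates: blaming a line past the end of a file makes git fail with a message of the form "fatal: file <path> has only N lines", which is then treated like the "no such path" case instead of failing the review comment. The sample error strings are illustrative.

```go
package main

import (
	"fmt"
	"regexp"
)

var notEnoughLines = regexp.MustCompile(`exit status 128 - fatal: file .* has only \d+ lines?`)

func main() {
	errMsg := "exit status 128 - fatal: file README.md has only 3 lines"
	fmt.Println(notEnoughLines.MatchString(errMsg)) // true: tolerated

	// This one does not match; it is handled by the separate strings.Contains check.
	fmt.Println(notEnoughLines.MatchString("exit status 128 - fatal: no such path")) // false
}
```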
@@ -54,13 +54,11 @@ func DeleteRepository(doer *models.User, repo *models.Repository) error {
 		log.Error("CloseRepoBranchesPulls failed: %v", err)
 	}

-	if err := models.DeleteRepository(doer, repo.OwnerID, repo.ID); err != nil {
-		return err
-	}
-
+	// If the repo itself has webhooks, we need to trigger them before deleting it...
 	notification.NotifyDeleteRepository(doer, repo)

-	return nil
+	err := models.DeleteRepository(doer, repo.OwnerID, repo.ID)
+	return err
 }

 // PushCreateRepo creates a repository when a new repository is pushed to an appropriate namespace
@@ -56,10 +56,12 @@
 				<div id="comment-{{.Issue.ID}}" class="raw-content hide">{{.Issue.Content}}</div>
 				<div class="edit-content-zone hide" data-write="issue-{{.Issue.ID}}-write" data-preview="issue-{{.Issue.ID}}-preview" data-update-url="{{$.RepoLink}}/issues/{{.Issue.Index}}/content" data-context="{{.RepoLink}}" data-attachment-url="{{$.RepoLink}}/issues/{{.Issue.Index}}/attachments" data-view-attachment-url="{{$.RepoLink}}/issues/{{.Issue.Index}}/view-attachments"></div>
 				{{if .Issue.Attachments}}
+					<div class="dropzone-attachments">
 						<div class="ui clearing divider"></div>
 						<div class="ui middle aligned padded grid">
 							{{template "repo/issue/view_content/attachments" Dict "ctx" $ "Attachments" .Issue.Attachments}}
 						</div>
+					</div>
 				{{end}}
 			</div>
 			{{$reactions := .Issue.Reactions.GroupByType}}
@@ -1,8 +1,8 @@
-{{range .Attachments}}
+{{- range .Attachments -}}
 	<div class="twelve wide column" style="padding: 6px;">
 		<a target="_blank" rel="noopener noreferrer" href="{{.DownloadURL}}" title='{{$.ctx.i18n.Tr "repo.issues.attachment.open_tab" .Name}}'>
 			{{if FilenameIsImage .Name}}
-				<span class="ui image">{{svg "octicon-file-media" 16}}</span>
+				<span class="ui image">{{svg "octicon-file" 16}}</span>
 			{{else}}
 				<span class="ui image">{{svg "octicon-desktop-download" 16}}</span>
 			{{end}}
@@ -12,4 +12,4 @@
 	<div class="four wide column" style="padding: 0px;">
 		<span class="ui text grey right">{{.Size | FileSize}}</span>
 	</div>
-{{end}}
+{{end -}}
@@ -53,10 +53,12 @@
 					<div id="comment-{{.ID}}" class="raw-content hide">{{.Content}}</div>
 					<div class="edit-content-zone hide" data-write="issuecomment-{{.ID}}-write" data-preview="issuecomment-{{.ID}}-preview" data-update-url="{{$.RepoLink}}/comments/{{.ID}}" data-context="{{$.RepoLink}}" data-attachment-url="{{$.RepoLink}}/comments/{{.ID}}/attachments"></div>
 					{{if .Attachments}}
+						<div class="dropzone-attachments">
 							<div class="ui clearing divider"></div>
 							<div class="ui middle aligned padded grid">
 								{{template "repo/issue/view_content/attachments" Dict "ctx" $ "Attachments" .Attachments}}
 							</div>
+						</div>
 					{{end}}
 				</div>
 				{{$reactions := .Reactions.GroupByType}}
@@ -106,7 +108,7 @@
 				<span class="text grey">
 					<a class="author" href="{{.Poster.HomeLink}}">{{.Poster.GetDisplayName}}</a>
 					{{$link := printf "%s/commit/%s" $.Repository.HTMLURL $.Issue.PullRequest.MergedCommitID}}
-					{{$.i18n.Tr "repo.issues.pull_merged_at" $link (ShortSha $.Issue.PullRequest.MergedCommitID) $.BaseTarget $createdStr | Str2html}}
+					{{$.i18n.Tr "repo.issues.pull_merged_at" $link (ShortSha $.Issue.PullRequest.MergedCommitID) ($.BaseTarget|Escape) $createdStr | Str2html}}
 				</span>
 			</div>
 		{{else if eq .Type 3 5 6}}
@@ -163,7 +165,7 @@
 			</a>
 			<span class="text grey">
 				<a class="author" href="{{.Poster.HomeLink}}">{{.Poster.GetDisplayName}}</a>
-				{{if .Content}}{{$.i18n.Tr "repo.issues.add_label_at" .Label.ForegroundColor .Label.Color (.Label.Name|Escape|RenderEmoji) $createdStr | Safe}}{{else}}{{$.i18n.Tr "repo.issues.remove_label_at" .Label.ForegroundColor .Label.Color (.Label.Name|Escape|RenderEmoji) $createdStr | Safe}}{{end}}
+				{{if .Content}}{{$.i18n.Tr "repo.issues.add_label_at" .Label.ForegroundColor .Label.Color (.Label.Name|RenderEmoji) $createdStr | Safe}}{{else}}{{$.i18n.Tr "repo.issues.remove_label_at" .Label.ForegroundColor .Label.Color (.Label.Name|RenderEmoji) $createdStr | Safe}}{{end}}
 			</span>
 		</div>
 	{{end}}
@@ -121,7 +121,7 @@
 			{{else if .IsPullWorkInProgress}}
 				<div class="item text grey">
 					<i class="icon icon-octicon">{{svg "octicon-x" 16}}</i>
-					{{$.i18n.Tr "repo.pulls.cannot_merge_work_in_progress" .WorkInProgressPrefix | Str2html}}
+					{{$.i18n.Tr "repo.pulls.cannot_merge_work_in_progress" (.WorkInProgressPrefix|Escape) | Str2html}}
 				</div>
 			{{else if .Issue.PullRequest.IsChecking}}
 				<div class="item text yellow">
@@ -31,18 +31,18 @@
 		{{ $mergedStr:= TimeSinceUnix .Issue.PullRequest.MergedUnix $.Lang }}
 		{{if .Issue.OriginalAuthor }}
 			{{.Issue.OriginalAuthor}}
-			<span class="pull-desc">{{$.i18n.Tr "repo.pulls.merged_title_desc" .NumCommits .HeadTarget .BaseTarget $mergedStr | Str2html}}</span>
+			<span class="pull-desc">{{$.i18n.Tr "repo.pulls.merged_title_desc" .NumCommits (.HeadTarget|Escape) (.BaseTarget|Escape) $mergedStr | Str2html}}</span>
 		{{else}}
 			<a {{if gt .Issue.PullRequest.Merger.ID 0}}href="{{.Issue.PullRequest.Merger.HomeLink}}"{{end}}>{{.Issue.PullRequest.Merger.GetDisplayName}}</a>
-			<span class="pull-desc">{{$.i18n.Tr "repo.pulls.merged_title_desc" .NumCommits .HeadTarget .BaseTarget $mergedStr | Str2html}}</span>
+			<span class="pull-desc">{{$.i18n.Tr "repo.pulls.merged_title_desc" .NumCommits (.HeadTarget|Escape) (.BaseTarget|Escape) $mergedStr | Str2html}}</span>
 		{{end}}
 	{{else}}
 		{{if .Issue.OriginalAuthor }}
-			<span id="pull-desc" class="pull-desc">{{.Issue.OriginalAuthor}} {{$.i18n.Tr "repo.pulls.title_desc" .NumCommits .HeadTarget .BaseTarget | Str2html}}</span>
+			<span id="pull-desc" class="pull-desc">{{.Issue.OriginalAuthor}} {{$.i18n.Tr "repo.pulls.title_desc" .NumCommits (.HeadTarget|Escape) (.BaseTarget|Escape) | Str2html}}</span>
 		{{else}}
 			<span id="pull-desc" class="pull-desc">
 				<a {{if gt .Issue.Poster.ID 0}}href="{{.Issue.Poster.HomeLink}}"{{end}}>{{.Issue.Poster.GetDisplayName}}</a>
-				{{$.i18n.Tr "repo.pulls.title_desc" .NumCommits .HeadTarget .BaseTarget | Str2html}}
+				{{$.i18n.Tr "repo.pulls.title_desc" .NumCommits (.HeadTarget|Escape) (.BaseTarget|Escape) | Str2html}}
 			</span>
 		{{end}}
 		<span id="pull-desc-edit" style="display: none">
@@ -5,7 +5,7 @@
 <div class="ui container">
 	{{template "base/alert" .}}
 	<h4 class="ui top attached header">
-		{{.i18n.Tr "repo.settings.branch_protection" .Branch.BranchName | Str2html}}
+		{{.i18n.Tr "repo.settings.branch_protection" (.Branch.BranchName|Escape) | Str2html}}
 	</h4>
 	<div class="ui attached segment branch-protection">
 		<form class="ui form" action="{{.Link}}" method="post">
@@ -2318,6 +2318,9 @@
         "responses": {
           "200": {
             "$ref": "#/responses/Branch"
+          },
+          "404": {
+            "$ref": "#/responses/notFound"
           }
         }
       },
@@ -2359,6 +2362,9 @@
           },
           "403": {
             "$ref": "#/responses/error"
+          },
+          "404": {
+            "$ref": "#/responses/notFound"
           }
         }
       }
@@ -15,15 +15,15 @@
 		{{else if .ResendLimited}}
 			<p class="center">{{.i18n.Tr "auth.resent_limit_prompt"}}</p>
 		{{else}}
-			<p>{{.i18n.Tr "auth.confirmation_mail_sent_prompt" .SignedUser.Email .ActiveCodeLives | Str2html}}</p>
+			<p>{{.i18n.Tr "auth.confirmation_mail_sent_prompt" (.SignedUser.Email|Escape) .ActiveCodeLives | Str2html}}</p>
 		{{end}}
 	{{else}}
 		{{if .IsSendRegisterMail}}
-			<p>{{.i18n.Tr "auth.confirmation_mail_sent_prompt" .Email .ActiveCodeLives | Str2html}}</p>
+			<p>{{.i18n.Tr "auth.confirmation_mail_sent_prompt" (.Email|Escape) .ActiveCodeLives | Str2html}}</p>
 		{{else if .IsActivateFailed}}
 			<p>{{.i18n.Tr "auth.invalid_code"}}</p>
 		{{else}}
-			<p>{{.i18n.Tr "auth.has_unconfirmed_mail" .SignedUser.Name .SignedUser.Email | Str2html}}</p>
+			<p>{{.i18n.Tr "auth.has_unconfirmed_mail" (.SignedUser.Name|Escape) (.SignedUser.Email|Escape) | Str2html}}</p>
 			<div class="ui divider"></div>
 			<div class="text right">
 				<button class="ui blue button">{{.i18n.Tr "auth.resend_mail"}}</button>
@@ -50,17 +50,17 @@
 		{{$.i18n.Tr "action.reopen_pull_request" .GetRepoLink $index .ShortRepoPath | Str2html}}
 	{{else if eq .GetOpType 16}}
 		{{ $index := index .GetIssueInfos 0}}
-		{{$.i18n.Tr "action.delete_tag" .GetRepoLink .GetBranch .ShortRepoPath | Str2html}}
+		{{$.i18n.Tr "action.delete_tag" .GetRepoLink (.GetBranch|Escape) .ShortRepoPath | Str2html}}
 	{{else if eq .GetOpType 17}}
 		{{ $index := index .GetIssueInfos 0}}
-		{{$.i18n.Tr "action.delete_branch" .GetRepoLink .GetBranch .ShortRepoPath | Str2html}}
+		{{$.i18n.Tr "action.delete_branch" .GetRepoLink (.GetBranch|Escape) .ShortRepoPath | Str2html}}
 	{{else if eq .GetOpType 18}}
 		{{ $branchLink := .GetBranch | EscapePound}}
-		{{$.i18n.Tr "action.mirror_sync_push" .GetRepoLink $branchLink .GetBranch .ShortRepoPath | Str2html}}
+		{{$.i18n.Tr "action.mirror_sync_push" .GetRepoLink $branchLink (.GetBranch|Escape) .ShortRepoPath | Str2html}}
 	{{else if eq .GetOpType 19}}
-		{{$.i18n.Tr "action.mirror_sync_create" .GetRepoLink .GetBranch .ShortRepoPath | Str2html}}
+		{{$.i18n.Tr "action.mirror_sync_create" .GetRepoLink (.GetBranch|Escape) .ShortRepoPath | Str2html}}
 	{{else if eq .GetOpType 20}}
-		{{$.i18n.Tr "action.mirror_sync_delete" .GetRepoLink .GetBranch .ShortRepoPath | Str2html}}
+		{{$.i18n.Tr "action.mirror_sync_delete" .GetRepoLink (.GetBranch|Escape) .ShortRepoPath | Str2html}}
 	{{else if eq .GetOpType 21}}
 		{{ $index := index .GetIssueInfos 0}}
 		{{$.i18n.Tr "action.approve_pull_request" .GetRepoLink $index .ShortRepoPath | Str2html}}
@@ -21,9 +21,9 @@
 			<label for="full_name">{{.i18n.Tr "settings.full_name"}}</label>
 			<input id="full_name" name="full_name" value="{{.SignedUser.FullName}}">
 		</div>
-		<div class="required field {{if .Err_Email}}error{{end}}">
+		<div class="field {{if .Err_Email}}error{{end}}">
 			<label for="email">{{.i18n.Tr "email"}}</label>
-			<input id="email" name="email" value="{{.SignedUser.Email}}">
+			<p>{{.SignedUser.Email}}</p>
 		</div>
 		<div class="inline field">
 			<div class="ui checkbox" id="keep-email-private">
8  vendor/github.com/nfnt/resize/.travis.yml  (generated, vendored)
@@ -1,7 +1,7 @@
 language: go

 go:
-  - 1.1
-  - 1.2
-  - 1.3
-  - tip
+  - "1.x"
+  - "1.1"
+  - "1.4"
+  - "1.10"
2  vendor/github.com/nfnt/resize/README.md  (generated, vendored)
@@ -1,3 +1,5 @@
+# This package is no longer being updated! Please look for alternatives if that bothers you.
+
 Resize
 ======

6  vendor/github.com/nfnt/resize/resize.go  (generated, vendored)
@@ -78,6 +78,7 @@ var blur = 1.0
 // If one of the parameters width or height is set to 0, its size will be calculated so that
 // the aspect ratio is that of the originating image.
 // The resizing algorithm uses channels for parallel computation.
+// If the input image has width or height of 0, it is returned unchanged.
 func Resize(width, height uint, img image.Image, interp InterpolationFunction) image.Image {
 	scaleX, scaleY := calcFactors(width, height, float64(img.Bounds().Dx()), float64(img.Bounds().Dy()))
 	if width == 0 {
@@ -92,6 +93,11 @@ func Resize(width, height uint, img image.Image, interp InterpolationFunction) i
 		return img
 	}

+	// Input image has no pixels
+	if img.Bounds().Dx() <= 0 || img.Bounds().Dy() <= 0 {
+		return img
+	}
+
 	if interp == NearestNeighbor {
 		return resizeNearest(width, height, scaleX, scaleY, img, interp)
 	}
402  vendor/github.com/nfnt/resize/ycc.go  (generated, vendored)
@@ -88,70 +88,34 @@ func newYCC(r image.Rectangle, s image.YCbCrSubsampleRatio) *ycc {
 	return &ycc{Pix: buf, Stride: 3 * w, Rect: r, SubsampleRatio: s}
 }

+// Copy of image.YCbCrSubsampleRatio constants - this allows us to support
+// older versions of Go where these constants are not defined (i.e. Go 1.4)
+const (
+	ycbcrSubsampleRatio444 image.YCbCrSubsampleRatio = iota
+	ycbcrSubsampleRatio422
+	ycbcrSubsampleRatio420
+	ycbcrSubsampleRatio440
+	ycbcrSubsampleRatio411
+	ycbcrSubsampleRatio410
+)
+
 // YCbCr converts ycc to a YCbCr image with the same subsample ratio
 // as the YCbCr image that ycc was generated from.
 func (p *ycc) YCbCr() *image.YCbCr {
 	ycbcr := image.NewYCbCr(p.Rect, p.SubsampleRatio)
-	var off int

 	switch ycbcr.SubsampleRatio {
-	case image.YCbCrSubsampleRatio422:
-		for y := ycbcr.Rect.Min.Y; y < ycbcr.Rect.Max.Y; y++ {
-			yy := (y - ycbcr.Rect.Min.Y) * ycbcr.YStride
-			cy := (y - ycbcr.Rect.Min.Y) * ycbcr.CStride
-			for x := ycbcr.Rect.Min.X; x < ycbcr.Rect.Max.X; x++ {
-				xx := (x - ycbcr.Rect.Min.X)
-				yi := yy + xx
-				ci := cy + xx/2
-				ycbcr.Y[yi] = p.Pix[off+0]
-				ycbcr.Cb[ci] = p.Pix[off+1]
-				ycbcr.Cr[ci] = p.Pix[off+2]
-				off += 3
-			}
-		}
-	case image.YCbCrSubsampleRatio420:
-	case image.YCbCrSubsampleRatio440:
-	default:
-		// Default to 4:4:4 subsampling.
-		// (the 4:2:0, 4:4:0 and default cases used the same copy loop with their
-		//  own chroma row and column indexing)
+	case ycbcrSubsampleRatio422:
+		return p.ycbcr422(ycbcr)
+	case ycbcrSubsampleRatio420:
+		return p.ycbcr420(ycbcr)
+	case ycbcrSubsampleRatio440:
+		return p.ycbcr440(ycbcr)
+	case ycbcrSubsampleRatio444:
+		return p.ycbcr444(ycbcr)
+	case ycbcrSubsampleRatio411:
+		return p.ycbcr411(ycbcr)
+	case ycbcrSubsampleRatio410:
+		return p.ycbcr410(ycbcr)
 	}
 	return ycbcr
 }
@@ -159,69 +123,265 @@ func (p *ycc) YCbCr() *image.YCbCr {
 // imageYCbCrToYCC converts a YCbCr image to a ycc image for resizing.
 func imageYCbCrToYCC(in *image.YCbCr) *ycc {
 	w, h := in.Rect.Dx(), in.Rect.Dy()
-	r := image.Rect(0, 0, w, h)
-	buf := make([]uint8, 3*w*h)
-	p := ycc{Pix: buf, Stride: 3 * w, Rect: r, SubsampleRatio: in.SubsampleRatio}
-	var off int
+	p := ycc{
+		Pix:            make([]uint8, 3*w*h),
+		Stride:         3 * w,
+		Rect:           image.Rect(0, 0, w, h),
+		SubsampleRatio: in.SubsampleRatio,
+	}
 	switch in.SubsampleRatio {
-	case image.YCbCrSubsampleRatio422:
-		for y := in.Rect.Min.Y; y < in.Rect.Max.Y; y++ {
-			yy := (y - in.Rect.Min.Y) * in.YStride
-			cy := (y - in.Rect.Min.Y) * in.CStride
-			for x := in.Rect.Min.X; x < in.Rect.Max.X; x++ {
-				xx := (x - in.Rect.Min.X)
-				yi := yy + xx
-				ci := cy + xx/2
-				p.Pix[off+0] = in.Y[yi]
-				p.Pix[off+1] = in.Cb[ci]
-				p.Pix[off+2] = in.Cr[ci]
-				off += 3
-			}
-		}
-	case image.YCbCrSubsampleRatio420:
-	case image.YCbCrSubsampleRatio440:
-	default:
-		// Default to 4:4:4 subsampling.
-		// (the 4:2:0, 4:4:0 and default cases used the same copy loop with their
-		//  own chroma row and column indexing)
+	case ycbcrSubsampleRatio422:
+		return convertToYCC422(in, &p)
+	case ycbcrSubsampleRatio420:
+		return convertToYCC420(in, &p)
+	case ycbcrSubsampleRatio440:
+		return convertToYCC440(in, &p)
+	case ycbcrSubsampleRatio444:
+		return convertToYCC444(in, &p)
+	case ycbcrSubsampleRatio411:
+		return convertToYCC411(in, &p)
+	case ycbcrSubsampleRatio410:
+		return convertToYCC410(in, &p)
 	}
 	return &p
 }
+
+func (p *ycc) ycbcr422(ycbcr *image.YCbCr) *image.YCbCr {
+	var off int
+	Pix := p.Pix
+	Y := ycbcr.Y
+	Cb := ycbcr.Cb
+	Cr := ycbcr.Cr
+	for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ {
+		yy := y * ycbcr.YStride
+		cy := y * ycbcr.CStride
+		for x := 0; x < ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ {
+			ci := cy + x/2
+			Y[yy+x] = Pix[off+0]
+			Cb[ci] = Pix[off+1]
+			Cr[ci] = Pix[off+2]
+			off += 3
+		}
+	}
+	return ycbcr
+}
+
+// ycbcr420, ycbcr440, ycbcr444, ycbcr411 and ycbcr410 follow the same copy loop,
+// differing only in the chroma row stride (y or y/2) and the chroma column index
+// (x, x/2 or x/4).
+
+func convertToYCC422(in *image.YCbCr, p *ycc) *ycc {
+	var off int
+	Pix := p.Pix
+	Y := in.Y
+	Cb := in.Cb
+	Cr := in.Cr
+	for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ {
+		yy := y * in.YStride
+		cy := y * in.CStride
+		for x := 0; x < in.Rect.Max.X-in.Rect.Min.X; x++ {
+			ci := cy + x/2
+			Pix[off+0] = Y[yy+x]
+			Pix[off+1] = Cb[ci]
+			Pix[off+2] = Cr[ci]
+			off += 3
+		}
+	}
+	return p
+}
+
+// convertToYCC420, convertToYCC440, convertToYCC444, convertToYCC411 and
+// convertToYCC410 mirror convertToYCC422 with the corresponding chroma indexing.
4  vendor/modules.txt  (vendored)
@@ -518,7 +518,7 @@ github.com/mschoch/smat
 # github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
 ## explicit
 github.com/msteinert/pam
-# github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5
+# github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
 ## explicit
 github.com/nfnt/resize
 # github.com/niklasfasching/go-org v0.1.9
@@ -842,7 +842,7 @@ gopkg.in/warnings.v0
 # gopkg.in/yaml.v2 v2.2.8
 ## explicit
 gopkg.in/yaml.v2
-# mvdan.cc/xurls/v2 v2.1.0
+# mvdan.cc/xurls/v2 v2.2.0
 ## explicit
 mvdan.cc/xurls/v2
 # strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
11  vendor/mvdan.cc/xurls/v2/README.md  (vendored)
@@ -2,7 +2,7 @@

 [](https://godoc.org/mvdan.cc/xurls)

-Extract urls from text using regular expressions. Requires Go 1.12 or later.
+Extract urls from text using regular expressions. Requires Go 1.13 or later.

 ```go
 import "mvdan.cc/xurls/v2"
@@ -18,13 +18,18 @@ func main() {
 }
 ```

-Note that the funcs compile regexes, so avoid calling them repeatedly.
+Since API is centered around [regexp.Regexp](https://golang.org/pkg/regexp/#Regexp),
+many other methods are available, such as finding the [byte indexes](https://golang.org/pkg/regexp/#Regexp.FindAllIndex)
+for all matches.
+
+Note that calling the exposed functions means compiling a regular expression, so
+repeated calls should be avoided.

 #### cmd/xurls

 To install the tool globally:

-	go get mvdan.cc/xurls/cmd/xurls
+	cd $(mktemp -d); go mod init tmp; GO111MODULE=on go get mvdan.cc/xurls/v2/cmd/xurls

 ```shell
 $ echo "Do gophers live in http://golang.org?" | xurls
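A short usage sketch for the README note above (it assumes the `mvdan.cc/xurls/v2` module is available): the matcher is compiled once and the returned `*regexp.Regexp` is reused, rather than calling `xurls.Strict()` for every input.

```go
package main

import (
	"fmt"

	"mvdan.cc/xurls/v2"
)

// Compile the strict URL matcher once and reuse it for every input.
var urlRx = xurls.Strict()

func main() {
	inputs := []string{
		"Do gophers live in http://golang.org?",
		"mirror of https://github.com/go-gitea/gitea.git",
	}
	for _, in := range inputs {
		fmt.Println(urlRx.FindAllString(in, -1))
	}
}
```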
7  vendor/mvdan.cc/xurls/v2/go.mod  (vendored)
@@ -1,3 +1,8 @@
 module mvdan.cc/xurls/v2

-go 1.13
+go 1.14
+
+require (
+	github.com/rogpeppe/go-internal v1.5.2
+	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
+)
12  vendor/mvdan.cc/xurls/v2/go.sum  (vendored)
@@ -0,0 +1,12 @@
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/rogpeppe/go-internal v1.5.2 h1:qLvObTrvO/XRCqmkKxUlOBc48bI3efyDuAZe25QiF0w=
+github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/errgo.v2 v2.1.0 h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
vendor/mvdan.cc/xurls/v2/schemes.go (vendored): 3 changed lines

@@ -66,6 +66,7 @@ var Schemes = []string{
 	`dpp`,
 	`drm`,
 	`drop`,
+	`dtmi`,
 	`dtn`,
 	`dvb`,
 	`ed2k`,
@@ -226,6 +227,7 @@ var Schemes = []string{
 	`pack`,
 	`palm`,
 	`paparazzi`,
+	`payment`,
 	`payto`,
 	`pkcs11`,
 	`platform`,
@@ -238,6 +240,7 @@ var Schemes = []string{
 	`pttp`,
 	`qb`,
 	`query`,
+	`quic-transport`,
 	`redis`,
 	`rediss`,
 	`reload`,
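The schemes.go hunks add three entries (`dtmi`, `payment`, `quic-transport`) to the Schemes slice. If, as the hunk context suggests, the strict matcher is built from this list, URLs using those schemes should now be detected. A hedged sketch of that effect with an invented URL:

```go
package main

import (
	"fmt"

	"mvdan.cc/xurls/v2"
)

func main() {
	rx := xurls.Strict()

	// `payment` is one of the schemes added above, so a URL using it
	// should now be picked up by the strict matcher.
	fmt.Println(rx.FindAllString("pay at payment://merchant.example/invoice/42", -1))
}
```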
vendor/mvdan.cc/xurls/v2/tlds.go (vendored): 22 changed lines

@@ -57,6 +57,7 @@ var TLDs = []string{
 	`alsace`,
 	`alstom`,
 	`am`,
+	`amazon`,
 	`americanexpress`,
 	`americanfamily`,
 	`amex`,
@@ -219,7 +220,6 @@ var TLDs = []string{
 	`career`,
 	`careers`,
 	`cars`,
-	`cartier`,
 	`casa`,
 	`case`,
 	`caseih`,
@@ -252,7 +252,6 @@ var TLDs = []string{
 	`chintai`,
 	`christmas`,
 	`chrome`,
-	`chrysler`,
 	`church`,
 	`ci`,
 	`cipriani`,
@@ -366,7 +365,6 @@ var TLDs = []string{
 	`do`,
 	`docs`,
 	`doctor`,
-	`dodge`,
 	`dog`,
 	`domains`,
 	`dot`,
@@ -411,7 +409,6 @@ var TLDs = []string{
 	`eurovision`,
 	`eus`,
 	`events`,
-	`everbank`,
 	`exchange`,
 	`expert`,
 	`exposed`,
@@ -701,12 +698,10 @@ var TLDs = []string{
 	`kz`,
 	`la`,
 	`lacaixa`,
-	`ladbrokes`,
 	`lamborghini`,
 	`lamer`,
 	`lancaster`,
 	`lancia`,
-	`lancome`,
 	`land`,
 	`landrover`,
 	`lanxess`,
@@ -727,7 +722,6 @@ var TLDs = []string{
 	`lexus`,
 	`lgbt`,
 	`li`,
-	`liaison`,
 	`lidl`,
 	`life`,
 	`lifeinsurance`,
@@ -829,7 +823,6 @@ var TLDs = []string{
 	`monash`,
 	`money`,
 	`monster`,
-	`mopar`,
 	`mormon`,
 	`mortgage`,
 	`moscow`,
@@ -837,7 +830,6 @@ var TLDs = []string{
 	`motorcycles`,
 	`mov`,
 	`movie`,
-	`movistar`,
 	`mp`,
 	`mq`,
 	`mr`,
@@ -856,7 +848,6 @@ var TLDs = []string{
 	`mz`,
 	`na`,
 	`nab`,
-	`nadex`,
 	`nagoya`,
 	`name`,
 	`nationwide`,
@@ -958,7 +949,6 @@ var TLDs = []string{
 	`photography`,
 	`photos`,
 	`physio`,
-	`piaget`,
 	`pics`,
 	`pictet`,
 	`pictures`,
@@ -1154,13 +1144,13 @@ var TLDs = []string{
 	`song`,
 	`sony`,
 	`soy`,
+	`spa`,
 	`space`,
 	`sport`,
 	`spot`,
 	`spreadbetting`,
 	`sr`,
 	`srl`,
-	`srt`,
 	`ss`,
 	`st`,
 	`stada`,
@@ -1213,7 +1203,6 @@ var TLDs = []string{
 	`tech`,
 	`technology`,
 	`tel`,
-	`telefonica`,
 	`temasek`,
 	`tennis`,
 	`teva`,
@@ -1273,7 +1262,6 @@ var TLDs = []string{
 	`ua`,
 	`ubank`,
 	`ubs`,
-	`uconnect`,
 	`ug`,
 	`uk`,
 	`unicom`,
@@ -1309,7 +1297,6 @@ var TLDs = []string{
 	`virgin`,
 	`visa`,
 	`vision`,
-	`vistaprint`,
 	`viva`,
 	`vivo`,
 	`vlaanderen`,
@@ -1328,7 +1315,6 @@ var TLDs = []string{
 	`walter`,
 	`wang`,
 	`wanggou`,
-	`warman`,
 	`watch`,
 	`watches`,
 	`weather`,
@@ -1388,6 +1374,7 @@ var TLDs = []string{
 	`zuerich`,
 	`zw`,
 	`ελ`,
+	`ευ`,
 	`бг`,
 	`бел`,
 	`дети`,
@@ -1469,6 +1456,7 @@ var TLDs = []string{
 	`ไทย`,
 	`გე`,
 	`みんな`,
+	`アマゾン`,
 	`クラウド`,
 	`グーグル`,
 	`コム`,
@@ -1481,6 +1469,7 @@ var TLDs = []string{
 	`中国`,
 	`中國`,
 	`中文网`,
+	`亚马逊`,
 	`企业`,
 	`佛山`,
 	`信息`,
@@ -1501,7 +1490,6 @@ var TLDs = []string{
 	`天主教`,
 	`娱乐`,
 	`家電`,
-	`工行`,
 	`广东`,
 	`微博`,
 	`慈善`,
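The TLDs slice drives relaxed matching of scheme-less host names, so the additions and removals above change what Relaxed() picks up. A small hedged sketch with invented host names, assuming relaxed mode treats any name ending in a listed TLD as a URL:

```go
package main

import (
	"fmt"

	"mvdan.cc/xurls/v2"
)

func main() {
	rx := xurls.Relaxed()

	// `amazon` is now in the TLD list, so a bare host name ending in it
	// should be recognised even without a scheme.
	fmt.Println(rx.FindString("details at shop.amazon today"))

	// `mopar` was removed, so a name ending in it should no longer be
	// treated as a URL by the relaxed matcher.
	fmt.Println(rx.FindString("details at parts.mopar today"))
}
```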
vendor/mvdan.cc/xurls/v2/xurls.go (vendored): 4 changed lines

@@ -19,7 +19,7 @@ const (
 	iriChar   = letter + mark + number
 	currency  = `\p{Sc}`
 	otherSymb = `\p{So}`
-	endChar   = iriChar + `/\-+&~%=#` + currency + otherSymb
+	endChar   = iriChar + `/\-_+&~%=#` + currency + otherSymb
 	otherPunc = `\p{Po}`
 	midChar   = endChar + "_*" + otherPunc
 	wellParen = `\([` + midChar + `]*(\([` + midChar + `]*\)[` + midChar + `]*)*\)`
@@ -76,7 +76,7 @@ func relaxedExp() string {
 	knownTLDs := anyOf(append(TLDs, PseudoTLDs...)...)
 	site := domain + `(?i)(` + punycode + `|` + knownTLDs + `)(?-i)`
 	hostName := `(` + site + `|` + ipAddr + `)`
-	webURL := hostName + port + `(/|/` + pathCont + `?|\b|(?m)$)`
+	webURL := hostName + port + `(/|/` + pathCont + `)?`
 	return strictExp() + `|` + webURL
 }
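The first hunk adds `_` to endChar, the set of characters a URL may end with, which is the regex-level change behind the "tailing '_'" link-detection fix listed in this release's changelog; the second simplifies how the optional path is appended in relaxed web URLs. A hedged sketch of the visible effect, using an invented URL:

```go
package main

import (
	"fmt"

	"mvdan.cc/xurls/v2"
)

func main() {
	rx := xurls.Relaxed()

	// With `_` included in endChar, the trailing underscore stays part of
	// the match; previously the match would have stopped just before it.
	fmt.Println(rx.FindString("see https://example.com/my_repo_ for details"))
}
```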
Some files were not shown because too many files have changed in this diff.