Mirror of https://github.com/go-gitea/gitea.git (synced 2025-11-03 08:02:36 +09:00)

Compare commits

124 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | ef2cb41dc3 |  |
|  | 9201068ff9 |  |
|  | bfd33088b4 |  |
|  | 711ca0c410 |  |
|  | 013639b13f |  |
|  | 558b0005ff |  |
|  | 0d7afb02c0 |  |
|  | 1a26f6c7ab |  |
|  | 1062931cf1 |  |
|  | 8d4f8ebf31 |  |
|  | 4f47bf5346 |  |
|  | 6dfa92bb1c |  |
|  | 151bedab52 |  |
|  | 6198403fbc |  |
|  | a6290f603f |  |
|  | 2f09e5775f |  |
|  | b0819efaea |  |
|  | d7a3bcdd70 |  |
|  | 7a85e228d8 |  |
|  | a461d90415 |  |
|  | 70e4134130 |  |
|  | 909f2be99d |  |
|  | 645c0d8abd |  |
|  | 8c461eb261 |  |
|  | fff66eb016 |  |
|  | c965ed6529 |  |
|  | 71a2adbf10 |  |
|  | 3231b70043 |  |
|  | e3c44923d7 |  |
|  | 3e7dccdf47 |  |
|  | 33c2c49627 |  |
|  | 05ac72cf33 |  |
|  | 906ecfd173 |  |
|  | 75496b9ff5 |  |
|  | 8dad47a94a |  |
|  | 8e792986bb |  |
|  | da80e90ac8 |  |
|  | 74dc22358b |  |
|  | 7d3e174906 |  |
|  | 8456700411 |  |
|  | 8a6acbbc12 |  |
|  | 98b3d8d5e1 |  |
|  | e663f7459a |  |
|  | 7e85cba3e5 |  |
|  | 26628aa1d1 |  |
|  | d9d2e8f1e8 |  |
|  | 4558eeb21a |  |
|  | be25afc6de |  |
|  | 90bf1e7961 |  |
|  | 77ce08976d |  |
|  | 8f389c5dfa |  |
|  | edef62e69e |  |
|  | cdff144f76 |  |
|  | ad6084a222 |  |
|  | d3200db041 |  |
|  | f305cffcaf |  |
|  | c0320065b6 |  |
|  | a1b74c5509 |  |
|  | 101fb0d7e2 |  |
|  | 82637c240a |  |
|  | d0174d45ed |  |
|  | da7a525c5c |  |
|  | 014313134f |  |
|  | 7dddf2186b |  |
|  | 446c06b817 |  |
|  | 9569607abb |  |
|  | 8ff4f82e05 |  |
|  | 2595c70868 |  |
|  | 00dc35e2de |  |
|  | 841efac895 |  |
|  | dd827d6f2f |  |
|  | 4d2a6c40f8 |  |
|  | fb274ec54b |  |
|  | 0c3f95034a |  |
|  | 4583caa077 |  |
|  | cf20ebc8ba |  |
|  | 5ee09d3c81 |  |
|  | e846b712fc |  |
|  | 49d113945f |  |
|  | 096aa18249 |  |
|  | bf853db450 |  |
|  | fb656b5124 |  |
|  | 4be59eb5d9 |  |
|  | 450b32c1a1 |  |
|  | 06673cbccb |  |
|  | 2fd708a397 |  |
|  | 7a0a133d7c |  |
|  | 17022f8b62 |  |
|  | 5568dd6475 |  |
|  | 58c105d4bf |  |
|  | afa7f22dd8 |  |
|  | 182be90655 |  |
|  | 4a738a8f16 |  |
|  | 206b66a184 |  |
|  | 205be63bc1 |  |
|  | bf1441b1e1 |  |
|  | fae18bdac0 |  |
|  | 661e3e2bdc |  |
|  | 70038719bf |  |
|  | 55d7e53d99 |  |
|  | 96d41287e5 |  |
|  | df11075389 |  |
|  | b8a2cd9f40 |  |
|  | 4f296f7436 |  |
|  | 78b9ef3586 |  |
|  | 90dfe445c2 |  |
|  | a728d1e046 |  |
|  | 7f85728cf9 |  |
|  | d2b308ae35 |  |
|  | 8e8e8ee150 |  |
|  | 05ee88e576 |  |
|  | 0d7cb2323f |  |
|  | 5cdffc2b0c |  |
|  | a0101c61a4 |  |
|  | c0b1197a64 |  |
|  | e39ed0b1d9 |  |
|  | cb24cbc1fc |  |
|  | 584d01cf2c |  |
|  | 798fdeae45 |  |
|  | 87997cccbb |  |
|  | 0d5111c5c3 |  |
|  | 10fff12da4 |  |
|  | 0d43a2a069 |  |
|  | 8396b792f8 |  |

CHANGELOG.md (150 lines changed)
							@@ -4,6 +4,156 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).

## [1.13.7](https://github.com/go-gitea/gitea/releases/tag/v1.13.7) - 2021-04-07

* SECURITY
  * Update to bluemonday-1.0.6 (#15294) (#15298)
  * Clusterfuzz found another way (#15160) (#15169)
* API
  * Fix wrong user returned in API (#15139) (#15150)
* BUGFIXES
  * Add 'fonts' into 'KnownPublicEntries' (#15188) (#15317)
  * Speed up `enry.IsVendor` (#15213) (#15246)
  * Response 404 for diff/patch of a commit that not exist (#15221) (#15238)
  * Prevent NPE in CommentMustAsDiff if no hunk header (#15199) (#15201)
* MISC
  * Add size to Save function (#15264) (#15271)

## [1.13.6](https://github.com/go-gitea/gitea/releases/tag/v1.13.6) - 2021-03-23

* SECURITY
  * Fix bug on avatar middleware (#15124) (#15125)
  * Fix another clusterfuzz identified issue (#15096) (#15114)
* API
  * Fix nil exeption for get pull reviews API #15104 (#15106)
* BUGFIXES
  * Fix markdown rendering in milestone content (#15056) (#15092)

## [1.13.5](https://github.com/go-gitea/gitea/releases/tag/v1.13.5) - 2021-03-21

* SECURITY
  * Update to goldmark 1.3.3 (#15059) (#15061)
* API
  * Fix set milestone on PR creation (#14981) (#15001)
  * Prevent panic when editing forked repos by API (#14960) (#14963)
* BUGFIXES
  * Fix bug when upload on web (#15042) (#15055)
  * Delete Labels & IssueLabels on Repo Delete too (#15039) (#15051)
  * another clusterfuzz spotted issue (#15032) (#15034)
  * Fix postgres ID sequences broken by recreate-table (#15015) (#15029)
  * Fix several render issues (#14986) (#15013)
  * Make sure sibling images get a link too (#14979) (#14995)
  * Fix Anchor jumping with escaped query components (#14969) (#14977)
  * fix release mail html template (#14976)
  * Fix excluding more than two labels on issues list (#14962) (#14973)
  * don't mark each comment poster as OP (#14971) (#14972)
  * Add "captcha" to list of reserved usernames (#14930)
  * Re-enable import local paths after reversion from #13610 (#14925) (#14927)

## [1.13.4](https://github.com/go-gitea/gitea/releases/tag/v1.13.4) - 2021-03-07

* SECURITY
  * Fix issue popups (#14898) (#14899)
* BUGFIXES
  * Fix race in LFS ContentStore.Put(...) (#14895) (#14913)
  * Fix a couple of issues with a feeds (#14897) (#14903)
  * When transfering repository and database transaction failed, rollback the renames (#14864) (#14902)
  * Fix race in local storage (#14888) (#14901)
  * Fix 500 on pull view page if user is not loged in (#14885) (#14886)
* DOCS
  * Fix how lfs data path is set (#14855) (#14884)

## [1.13.3](https://github.com/go-gitea/gitea/releases/tag/v1.13.3) - 2021-03-04

* BREAKING
  * Turn default hash password algorithm back to pbkdf2 from argon2 until we find a better one (#14673) (#14675)
* BUGFIXES
  * Fix paging of file commit logs (#14831) (#14879)
  * Print useful error if SQLite is used in settings but not supported (#14476) (#14874)
  * Fix display since time round (#14226) (#14873)
  * When Deleting Repository only explicitly close PRs whose base is not this repository (#14823) (#14842)
  * Set HCaptchaSiteKey on Link Account pages (#14834) (#14839)
  * Fix a couple of CommentAsPatch issues. (#14804) (#14820)
  * Disable broken OAuth2 providers at startup (#14802) (#14811)
  * Repo Transfer permission checks (#14792) (#14794)
  * Fix double alert in oauth2 application edit view (#14764) (#14768)
  * Fix broken spans in diffs (#14678) (#14683)
  * Prevent race in PersistableChannelUniqueQueue.Has (#14651) (#14676)
  * HasPreviousCommit causes recursive load of commits unnecessarily (#14598) (#14649)
  * Do not assume all 40 char strings are SHA1s (#14624) (#14648)
  * Allow org labels to be set with issue templates (#14593) (#14647)
  * Accept multiple SSH keys in single LDAP SSHPublicKey attribute (#13989) (#14607)
  * Fix bug about ListOptions and stars/watchers pagnation (#14556) (#14573)
  * Fix GPG key deletion during account deletion (#14561) (#14569)

## [1.13.2](https://github.com/go-gitea/gitea/releases/tag/v1.13.2) - 2021-01-31

* SECURITY
  * Prevent panic on fuzzer provided string (#14405) (#14409)
  * Add secure/httpOnly attributes to the lang cookie (#14279) (#14280)
* API
  * If release publisher is deleted use ghost user (#14375)
* BUGFIXES
  * Internal ssh server respect Ciphers, MACs and KeyExchanges settings (#14523) (#14530)
  * Set the name Mapper in migrations (#14526) (#14529)
  * Fix wiki preview (#14515)
  * Update code.gitea.io/sdk/gitea v0.13.1 -> v0.13.2 (#14497)
  * ChangeUserName: rename user files back on DB issue (#14447)
  * Fix lfs preview bug (#14428) (#14433)
  * Ensure timeout error is shown on u2f timeout (#14417) (#14431)
  * Fix Deadlock & Delete affected reactions on comment deletion (#14392) (#14425)
  * Use path not filepath in routers/editor (#14390) (#14396)
  * Check if label template exist first (#14384) (#14389)
  * Fix migration v141 (#14387) (#14388)
  * Use Request.URL.RequestURI() for fcgi (#14347)
  * Use ServerError provided by Context (#14333) (#14345)
  * Fix edit-label form init (#14337)
  * Fix mailIssueCommentBatch for pull request (#14252) (#14296)
  * Render links for commit hashes followed by comma (#14224) (#14227)
  * Send notifications for mentions in pulls, issues, (code-)comments (#14218) (#14221)
  * Fix avatar bugs (#14217) (#14220)
  * Ensure that schema search path is set with every connection on postgres (#14131) (#14216)
  * Fix dashboard issues labels filter bug (#14210) (#14214)
  * When visit /favicon.ico but the static file is not exist return 404 but not continue to handle the route (#14211) (#14213)
  * Fix branch selector on new issue page (#14194) (#14207)
  * Check for notExist on profile repository page (#14197) (#14203)

## [1.13.1](https://github.com/go-gitea/gitea/releases/tag/v1.13.1) - 2020-12-29

* SECURITY
  * Hide private participation in Orgs (#13994) (#14031)
  * Fix escaping issue in diff (#14153) (#14154)
* BUGFIXES
  * Fix bug of link query order on markdown render (#14156) (#14171)
  * Drop long repo topics during migration (#14152) (#14155)
  * Ensure that search term and page are not lost on adoption page-turn (#14133) (#14143)
  * Fix storage config implementation (#14091) (#14095)
  * Fix panic in BasicAuthDecode (#14046) (#14048)
  * Always wait for the cmd to finish (#14006) (#14039)
  * Don't use simpleMDE editor on mobile devices for 1.13 (#14029)
  * Fix incorrect review comment diffs (#14002) (#14011)
  * Trim the branch prefix from action.GetBranch (#13981) (#13986)
  * Ensure template renderer is available before storage handler (#13164) (#13982)
  * Whenever the password is updated ensure that the hash algorithm is too (#13966) (#13967)
  * Enforce setting HEAD in wiki to master (#13950) (#13961)
  * Fix feishu webhook caused by API changed (#13938)
  * Fix Quote Reply button on review diff (#13830) (#13898)
  * Fix Pull Merge when tag with same name as base branch exist (#13882) (#13896)
  * Fix mermaid chart size (#13865)
  * Fix branch/tag notifications in mirror sync (#13855) (#13862)
  * Fix crash in short link processor (#13839) (#13841)
  * Update font stack to bootstrap's latest (#13834) (#13837)
  * Make sure email recipients can see issue (#13820) (#13827)
  * Reply button is not removed when deleting a code review comment (#13824)
  * When reinitialising DBConfig reset the database use flags (#13796) (#13811)
* ENHANCEMENTS
  * Add emoji in label to project boards (#13978) (#14021)
  * Send webhook when tag is removed via Web UI (#14015) (#14019)
  * Use Process Manager to create own Context (#13792) (#13793)
* API
  * GetCombinedCommitStatusByRef always return json & swagger doc fixes (#14047)
  * Return original URL of Repositories (#13885) (#13886)

## [1.13.0](https://github.com/go-gitea/gitea/releases/tag/v1.13.0) - 2020-12-01
* SECURITY
  * Add Allow-/Block-List for Migrate & Mirrors (#13610) (#13776)

Makefile (2 lines changed)
							@@ -585,7 +585,7 @@ release-darwin: | $(DIST_DIRS)
	@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
		GO111MODULE=off $(GO) get -u src.techknowlogick.com/xgo; \
	fi
	CGO_CFLAGS="$(CGO_CFLAGS)" GO111MODULE=off xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out gitea-$(VERSION) .
	CGO_CFLAGS="$(CGO_CFLAGS)" GO111MODULE=off xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/amd64' -out gitea-$(VERSION) .
ifeq ($(CI),drone)
	cp /build/* $(DIST)/binaries
endif

@@ -283,7 +283,7 @@ func runChangePassword(c *cli.Context) error {
	}
	user.HashPassword(c.String("password"))

	if err := models.UpdateUserCols(user, "passwd", "salt"); err != nil {
	if err := models.UpdateUserCols(user, "passwd", "passwd_hash_algo", "salt"); err != nil {
		return err
	}

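The hunk above widens the column list handed to `models.UpdateUserCols` so that the recorded hash algorithm is persisted together with the new hash and salt (this is the 1.13.1 fix "Whenever the password is updated ensure that the hash algorithm is too"). A minimal, self-contained sketch of the idea — the `User` struct and column list below are simplified stand-ins for illustration, not Gitea's real types:

```go
package main

import (
	"crypto/rand"
	"crypto/sha256"
	"encoding/hex"
	"fmt"

	"golang.org/x/crypto/pbkdf2"
)

// User is a simplified stand-in: the row stores the hash, the salt, and the
// algorithm that produced the hash.
type User struct {
	Passwd         string
	PasswdHashAlgo string
	Salt           string
}

// HashPassword re-hashes the password and records which algorithm was used.
func (u *User) HashPassword(plain string) {
	salt := make([]byte, 16)
	_, _ = rand.Read(salt)
	u.Salt = hex.EncodeToString(salt)
	u.PasswdHashAlgo = "pbkdf2"
	u.Passwd = hex.EncodeToString(pbkdf2.Key([]byte(plain), salt, 10000, 50, sha256.New))
}

func main() {
	u := &User{}
	u.HashPassword("new-password")
	// The point of the fix: persist passwd_hash_algo together with passwd and
	// salt, so the stored algorithm always matches the stored hash.
	updatedCols := []string{"passwd", "passwd_hash_algo", "salt"}
	fmt.Println("algo:", u.PasswdHashAlgo, "columns to update:", updatedCols)
}
```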
@@ -606,6 +606,22 @@ func runDoctorCheckDBConsistency(ctx *cli.Context) ([]string, error) {
		}
	}

	// find IssueLabels without existing label
	count, err = models.CountOrphanedIssueLabels()
	if err != nil {
		return nil, err
	}
	if count > 0 {
		if ctx.Bool("fix") {
			if err = models.DeleteOrphanedIssueLabels(); err != nil {
				return nil, err
			}
			results = append(results, fmt.Sprintf("%d issue_labels without existing label deleted", count))
		} else {
			results = append(results, fmt.Sprintf("%d issue_labels without existing label", count))
		}
	}

	//find issues without existing repository
	count, err = models.CountOrphanedIssues()
	if err != nil {
@@ -670,6 +686,23 @@ func runDoctorCheckDBConsistency(ctx *cli.Context) ([]string, error) {
		}
	}

	if setting.Database.UsePostgreSQL {
		count, err = models.CountBadSequences()
		if err != nil {
			return nil, err
		}
		if count > 0 {
			if ctx.Bool("fix") {
				err := models.FixBadSequences()
				if err != nil {
					return nil, err
				}
				results = append(results, fmt.Sprintf("%d sequences updated", count))
			} else {
				results = append(results, fmt.Sprintf("%d sequences with incorrect values", count))
			}
		}
	}
	//ToDo: function to recalc all counters

	return results, nil

@@ -548,7 +548,7 @@ ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET = true
;Classes include "lower,upper,digit,spec"
PASSWORD_COMPLEXITY = off
; Password Hash algorithm, either "argon2", "pbkdf2", "scrypt" or "bcrypt"
PASSWORD_HASH_ALGO = argon2
PASSWORD_HASH_ALGO = pbkdf2
; Set false to allow JavaScript to read CSRF cookie
CSRF_COOKIE_HTTP_ONLY = true
; Validate against https://haveibeenpwned.com/Passwords to see if a password has been exposed
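The sample-config line above reflects the 1.13.3 breaking change that switched the default hash algorithm back from argon2 to pbkdf2 (the config docs further down note that argon2 uses considerably more memory than the other options). A rough sketch of how a setting like this maps to a key-derivation call — the iteration and memory parameters here are illustrative choices, not Gitea's actual defaults:

```go
package main

import (
	"crypto/sha256"
	"fmt"

	"golang.org/x/crypto/argon2"
	"golang.org/x/crypto/pbkdf2"
)

// hashPassword picks a KDF based on the configured PASSWORD_HASH_ALGO value.
func hashPassword(algo, password, salt string) []byte {
	switch algo {
	case "argon2":
		// time=2, memory=64 MiB, threads=4, 50-byte key (illustrative parameters)
		return argon2.IDKey([]byte(password), []byte(salt), 2, 64*1024, 4, 50)
	default: // "pbkdf2"
		return pbkdf2.Key([]byte(password), []byte(salt), 10000, 50, sha256.New)
	}
}

func main() {
	fmt.Printf("pbkdf2: %x\n", hashPassword("pbkdf2", "secret", "somesalt")[:8])
	fmt.Printf("argon2: %x\n", hashPassword("argon2", "secret", "somesalt")[:8])
}
```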
@@ -850,7 +850,7 @@ MACARON = file
ROUTER_LOG_LEVEL = Info
ROUTER = console
ENABLE_ACCESS_LOG = false
ACCESS_LOG_TEMPLATE = {{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"
ACCESS_LOG_TEMPLATE = {{.Ctx.RemoteAddr}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}\" \"{{.Ctx.Req.UserAgent}}"
ACCESS = file
; Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "Trace"
LEVEL = Info

@@ -276,7 +276,7 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`.
- `LANDING_PAGE`: **home**: Landing page for unauthenticated users \[home, explore, organizations, login\].

- `LFS_START_SERVER`: **false**: Enables git-lfs support.
- `LFS_CONTENT_PATH`: **%(APP_DATA_PATH)/lfs**: Default LFS content path. (if it is on local storage.)
- `LFS_CONTENT_PATH`: **%(APP_DATA_PATH)/lfs**: DEPRECATED: Default LFS content path. (if it is on local storage.)
- `LFS_JWT_SECRET`: **\<empty\>**: LFS authentication secret, change this a unique string.
- `LFS_HTTP_AUTH_EXPIRY`: **20m**: LFS authentication validity period in time.Duration, pushes taking longer than this may fail.
- `LFS_MAX_FILE_SIZE`: **0**: Maximum allowed LFS file size in bytes (Set to 0 for no limit).
@@ -402,7 +402,7 @@ relation to port exhaustion.
- `IMPORT_LOCAL_PATHS`: **false**: Set to `false` to prevent all users (including admin) from importing local path on server.
- `INTERNAL_TOKEN`: **\<random at every install if no uri set\>**: Secret used to validate communication within Gitea binary.
- `INTERNAL_TOKEN_URI`: **<empty>**: Instead of defining internal token in the configuration, this configuration option can be used to give Gitea a path to a file that contains the internal token (example value: `file:/etc/gitea/internal_token`)
- `PASSWORD_HASH_ALGO`: **argon2**: The hash algorithm to use \[argon2, pbkdf2, scrypt, bcrypt\].
- `PASSWORD_HASH_ALGO`: **pbkdf2**: The hash algorithm to use \[argon2, pbkdf2, scrypt, bcrypt\], argon2 will spend more memory than others.
- `CSRF_COOKIE_HTTP_ONLY`: **true**: Set false to allow JavaScript to read CSRF cookie.
- `MIN_PASSWORD_LENGTH`: **6**: Minimum password length for new users.
- `PASSWORD_COMPLEXITY`: **off**: Comma separated list of character classes required to pass minimum complexity. If left empty or no valid values are specified, checking is disabled (off):
@@ -828,7 +828,7 @@ is `data/lfs` and the default of `MINIO_BASE_PATH` is `lfs/`.

- `STORAGE_TYPE`: **local**: Storage type for lfs, `local` for local disk or `minio` for s3 compatible object storage service or other name defined with `[storage.xxx]`
- `SERVE_DIRECT`: **false**: Allows the storage driver to redirect to authenticated URLs to serve files directly. Currently, only Minio/S3 is supported via signed URLs, local does nothing.
- `CONTENT_PATH`: **./data/lfs**: Where to store LFS files, only available when `STORAGE_TYPE` is `local`.
- `PATH`: **./data/lfs**: Where to store LFS files, only available when `STORAGE_TYPE` is `local`. If not set it fall back to deprecated LFS_CONTENT_PATH value in [server] section.
- `MINIO_ENDPOINT`: **localhost:9000**: Minio endpoint to connect only available when `STORAGE_TYPE` is `minio`
- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID to connect only available when `STORAGE_TYPE` is `minio`
- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey to connect only available when `STORAGE_TYPE is` `minio`
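As the `PATH` entry above notes, the new `[lfs]` storage key falls back to the deprecated `LFS_CONTENT_PATH` from the `[server]` section when it is unset. A minimal sketch of that documented precedence rule — the function name and the built-in default used here are illustrative, not Gitea's actual settings loader:

```go
package main

import "fmt"

// resolveLFSPath mirrors the documented precedence: prefer [lfs] PATH, then
// the deprecated [server] LFS_CONTENT_PATH, then the built-in default.
func resolveLFSPath(lfsPath, deprecatedContentPath string) string {
	if lfsPath != "" {
		return lfsPath
	}
	if deprecatedContentPath != "" {
		return deprecatedContentPath
	}
	return "./data/lfs"
}

func main() {
	fmt.Println(resolveLFSPath("", "/var/lib/gitea/lfs")) // deprecated key still honoured
	fmt.Println(resolveLFSPath("/srv/gitea-lfs", ""))     // new key wins when set
}
```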
@@ -73,6 +73,7 @@ menu:

- `LFS_START_SERVER`: Whether to enable git-lfs support. Can be `true` or `false`; the default is `false`.
- `LFS_JWT_SECRET`: LFS authentication secret; change it to your own value.
- `LFS_CONTENT_PATH`: **Deprecated**: where files uploaded via the lfs command are stored; the default is `data/lfs`.

## Database (`database`)

@@ -323,7 +324,7 @@ LFS storage configuration. If `STORAGE_TYPE` is empty, this configuration will be read from `[stora

- `STORAGE_TYPE`: **local**: Storage type for LFS; `local` stores to local disk, `minio` stores to an s3-compatible object storage service.
- `SERVE_DIRECT`: **false**: Allow direct redirects to the storage system. Currently, only Minio/S3 is supported.
- `CONTENT_PATH`: Where files uploaded via the lfs command are stored; the default is `data/lfs`.
- `PATH`: Where files uploaded via the lfs command are stored; the default is `data/lfs`.
- `MINIO_ENDPOINT`: **localhost:9000**: Minio endpoint; only used when `LFS_STORAGE_TYPE` is `minio`.
- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID; only used when `LFS_STORAGE_TYPE` is `minio`.
- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey; only used when `LFS_STORAGE_TYPE` is `minio`.

go.mod (12 lines changed)
							@@ -4,7 +4,7 @@ go 1.14

require (
	code.gitea.io/gitea-vet v0.2.1
	code.gitea.io/sdk/gitea v0.13.1
	code.gitea.io/sdk/gitea v0.13.2
	gitea.com/lunny/levelqueue v0.3.0
	gitea.com/macaron/binding v0.0.0-20190822013154-a5f53841ed2b
	gitea.com/macaron/cache v0.0.0-20190822004001-a6e7fee4ee76
@@ -70,7 +70,7 @@ require (
	github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81
	github.com/mgechev/revive v1.0.3-0.20200921231451-246eac737dc7
	github.com/mholt/archiver/v3 v3.3.0
	github.com/microcosm-cc/bluemonday v1.0.3-0.20191119130333-0a75d7616912
	github.com/microcosm-cc/bluemonday v1.0.6
	github.com/minio/minio-go/v7 v7.0.4
	github.com/mitchellh/go-homedir v1.1.0
	github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
@@ -99,15 +99,15 @@ require (
	github.com/urfave/cli v1.20.0
	github.com/xanzy/go-gitlab v0.37.0
	github.com/yohcop/openid-go v1.0.0
	github.com/yuin/goldmark v1.2.1
	github.com/yuin/goldmark v1.3.3
	github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691
	github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60
	go.jolheiser.com/hcaptcha v0.0.4
	go.jolheiser.com/pwn v0.0.3
	golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a
	golang.org/x/net v0.0.0-20200904194848-62affa334b73
	golang.org/x/crypto v0.0.0-20201217014255-9d1352758620
	golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4
	golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
	golang.org/x/sys v0.0.0-20200918174421-af09f7315aff
	golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44
	golang.org/x/text v0.3.3
	golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect
	golang.org/x/tools v0.0.0-20200921210052-fa0125251cc4

go.sum (30 lines changed)
							@@ -15,8 +15,8 @@ cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2k
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
code.gitea.io/gitea-vet v0.2.1 h1:b30by7+3SkmiftK0RjuXqFvZg2q4p68uoPGuxhzBN0s=
code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
code.gitea.io/sdk/gitea v0.13.1 h1:Y7bpH2iO6Q0KhhMJfjP/LZ0AmiYITeRQlCD8b0oYqhk=
code.gitea.io/sdk/gitea v0.13.1/go.mod h1:z3uwDV/b9Ls47NGukYM9XhnHtqPh/J+t40lsUrR6JDY=
code.gitea.io/sdk/gitea v0.13.2 h1:wAnT/J7Z62q3fJXbgnecoaOBh8CM1Qq0/DakWxiv4yA=
code.gitea.io/sdk/gitea v0.13.2/go.mod h1:lee2y8LeV3kQb2iK+hHlMqoadL4bp27QOkOV/hawLKg=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
gitea.com/lunny/levelqueue v0.3.0 h1:MHn1GuSZkxvVEDMyAPqlc7A3cOW+q8RcGhRgH/xtm6I=
gitea.com/lunny/levelqueue v0.3.0/go.mod h1:HBqmLbz56JWpfEGG0prskAV97ATNRoj5LDmPicD22hU=
@@ -140,8 +140,6 @@ github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668 h1:U/lr3Dgy4WK
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
@@ -649,8 +647,8 @@ github.com/mgechev/revive v1.0.3-0.20200921231451-246eac737dc7 h1:ydVkpU/M4/c45y
github.com/mgechev/revive v1.0.3-0.20200921231451-246eac737dc7/go.mod h1:no/hfevHbndpXR5CaJahkYCfM/FFpmM/dSOwFGU7Z1o=
github.com/mholt/archiver/v3 v3.3.0 h1:vWjhY8SQp5yzM9P6OJ/eZEkmi3UAbRrxCq48MxjAzig=
github.com/mholt/archiver/v3 v3.3.0/go.mod h1:YnQtqsp+94Rwd0D/rk5cnLrxusUBUXg+08Ebtr1Mqao=
github.com/microcosm-cc/bluemonday v1.0.3-0.20191119130333-0a75d7616912 h1:hJde9rA24hlTcAYSwJoXpDUyGtfKQ/jsofw+WaDqGrI=
github.com/microcosm-cc/bluemonday v1.0.3-0.20191119130333-0a75d7616912/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvzaqzCRa1n3/lO3W2w=
github.com/microcosm-cc/bluemonday v1.0.6 h1:ZOvqHKtnx0fUpnbQm3m3zKFWE+DRC+XB1onh8JoEObE=
github.com/microcosm-cc/bluemonday v1.0.6/go.mod h1:HOT/6NaBlR0f9XlxD3zolN6Z3N8Lp4pvhp+jLS5ihnI=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/minio/md5-simd v1.1.0 h1:QPfiOqlZH+Cj9teu0t9b1nTBfPbyTl16Of5MeuShdK4=
github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw=
@@ -885,8 +883,9 @@ github.com/yuin/goldmark v1.1.7/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
github.com/yuin/goldmark v1.1.22/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.3 h1:37BdQwPx8VOSic8eDSWee6QL9mRpZRm9VJp/QugNrW0=
github.com/yuin/goldmark v1.3.3/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691 h1:VWSxtAiQNh3zgHJpdpkpVYjTPqRE3P6UZCOPa1nRDio=
github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691/go.mod h1:YLF3kDffRfUH/bTxOxHhV6lxwIB3Vfj91rEwNMS9MXo=
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 h1:gZucqLjL1eDzVWrXj4uiWeMbAopJlBR2mKQAsTGdPwo=
@@ -937,8 +936,9 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a h1:vclmkQCjlDX5OydZ9wv8rBCcS0QyQY66Mpf/7BZbInM=
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 h1:3wPMTskHO3+O6jqTEXyFcsnuxMQOqYSaHsDxcbUXpqA=
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -994,8 +994,9 @@ golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200904194848-62affa334b73 h1:MXfv8rhZWmFeqX3GNZRsd6vOLoaCHjYEX3qkRo3YBUA=
golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -1051,8 +1052,12 @@ golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200918174421-af09f7315aff h1:1CPUrky56AcgSpxz/KfgzQWzfG09u5YOL8MvPYBlrL8=
golang.org/x/sys v0.0.0-20200918174421-af09f7315aff/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44 h1:Bli41pIlzTzf3KEY06n+xnzK/BESIg2ze4Pgfh/aI8c=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
@@ -1197,6 +1202,7 @@ honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
mvdan.cc/xurls/v2 v2.2.0 h1:NSZPykBXJFCetGZykLAxaL6SIpvbVy/UFEniIfHAa8A=
mvdan.cc/xurls/v2 v2.2.0/go.mod h1:EV1RMtya9D6G5DMYPGD8zTQzaHet6Jh8gFlRgGRJeO8=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs=

@@ -74,8 +74,79 @@ func TestAPICreatePullSuccess(t *testing.T) {
		Base:  "master",
		Title: "create a failure pr",
	})

	session.MakeRequest(t, req, 201)
	session.MakeRequest(t, req, http.StatusUnprocessableEntity) // second request should fail
}

func TestAPICreatePullWithFieldsSuccess(t *testing.T) {
	defer prepareTestEnv(t)()
	// repo10 have code, pulls units.
	repo10 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 10}).(*models.Repository)
	owner10 := models.AssertExistsAndLoadBean(t, &models.User{ID: repo10.OwnerID}).(*models.User)
	// repo11 only have code unit but should still create pulls
	repo11 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 11}).(*models.Repository)
	owner11 := models.AssertExistsAndLoadBean(t, &models.User{ID: repo11.OwnerID}).(*models.User)

	session := loginUser(t, owner11.Name)
	token := getTokenForLoggedInUser(t, session)

	opts := &api.CreatePullRequestOption{
		Head:      fmt.Sprintf("%s:master", owner11.Name),
		Base:      "master",
		Title:     "create a failure pr",
		Body:      "foobaaar",
		Milestone: 5,
		Assignees: []string{owner10.Name},
		Labels:    []int64{5},
	}

	req := NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls?token=%s", owner10.Name, repo10.Name, token), opts)

	res := session.MakeRequest(t, req, 201)
	pull := new(api.PullRequest)
	DecodeJSON(t, res, pull)

	assert.NotNil(t, pull.Milestone)
	assert.EqualValues(t, opts.Milestone, pull.Milestone.ID)
	if assert.Len(t, pull.Assignees, 1) {
		assert.EqualValues(t, opts.Assignees[0], owner10.Name)
	}
	assert.NotNil(t, pull.Labels)
	assert.EqualValues(t, opts.Labels[0], pull.Labels[0].ID)
}

func TestAPICreatePullWithFieldsFailure(t *testing.T) {
	defer prepareTestEnv(t)()
	// repo10 have code, pulls units.
	repo10 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 10}).(*models.Repository)
	owner10 := models.AssertExistsAndLoadBean(t, &models.User{ID: repo10.OwnerID}).(*models.User)
	// repo11 only have code unit but should still create pulls
	repo11 := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 11}).(*models.Repository)
	owner11 := models.AssertExistsAndLoadBean(t, &models.User{ID: repo11.OwnerID}).(*models.User)

	session := loginUser(t, owner11.Name)
	token := getTokenForLoggedInUser(t, session)

	opts := &api.CreatePullRequestOption{
		Head: fmt.Sprintf("%s:master", owner11.Name),
		Base: "master",
	}

	req := NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls?token=%s", owner10.Name, repo10.Name, token), opts)
	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
	opts.Title = "is required"

	opts.Milestone = 666
	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
	opts.Milestone = 5

	opts.Assignees = []string{"qweruqweroiuyqweoiruywqer"}
	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
	opts.Assignees = []string{owner10.LoginName}

	opts.Labels = []int64{55555}
	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
	opts.Labels = []int64{5}
}

func TestAPIEditPull(t *testing.T) {

@@ -445,11 +445,12 @@ func TestAPIRepoTransfer(t *testing.T) {
		expectedStatus int
	}{
		{ctxUserID: 1, newOwner: "user2", teams: nil, expectedStatus: http.StatusAccepted},
		{ctxUserID: 2, newOwner: "user1", teams: nil, expectedStatus: http.StatusAccepted},
		{ctxUserID: 2, newOwner: "user1", teams: nil, expectedStatus: http.StatusForbidden},
		{ctxUserID: 2, newOwner: "user6", teams: nil, expectedStatus: http.StatusForbidden},
		{ctxUserID: 1, newOwner: "user2", teams: &[]int64{2}, expectedStatus: http.StatusUnprocessableEntity},
		{ctxUserID: 1, newOwner: "user3", teams: &[]int64{5}, expectedStatus: http.StatusForbidden},
		{ctxUserID: 1, newOwner: "user3", teams: &[]int64{2}, expectedStatus: http.StatusAccepted},
		{ctxUserID: 2, newOwner: "user2", teams: nil, expectedStatus: http.StatusAccepted},
	}

	defer prepareTestEnv(t)()

@@ -122,7 +122,7 @@ func TestGetAttachment(t *testing.T) {
		t.Run(tc.name, func(t *testing.T) {
			//Write empty file to be available for response
			if tc.createFile {
				_, err := storage.Attachments.Save(models.AttachmentRelativePath(tc.uuid), strings.NewReader("hello world"))
				_, err := storage.Attachments.Save(models.AttachmentRelativePath(tc.uuid), strings.NewReader("hello world"), -1)
				assert.NoError(t, err)
			}
			//Actual test

@@ -237,6 +237,6 @@ func TestLDAPUserSSHKeySync(t *testing.T) {
			syncedKeys[i] = strings.TrimSpace(divs.Eq(i).Text())
		}

		assert.ElementsMatch(t, u.SSHKeys, syncedKeys)
		assert.ElementsMatch(t, u.SSHKeys, syncedKeys, "Unequal number of keys synchronized for user: %s", u.UserName)
	}
}

@@ -111,7 +111,7 @@ func onGiteaRun(t *testing.T, callback func(*testing.T, *url.URL), prepare ...bo

func doGitClone(dstLocalPath string, u *url.URL) func(*testing.T) {
	return func(t *testing.T) {
		assert.NoError(t, git.CloneWithArgs(u.String(), dstLocalPath, allowLFSFilters(), git.CloneRepoOptions{}))
		assert.NoError(t, git.CloneWithArgs(context.Background(), u.String(), dstLocalPath, allowLFSFilters(), git.CloneRepoOptions{}))
		assert.True(t, com.IsExist(filepath.Join(dstLocalPath, "README.md")))
	}
}

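The clone helper now receives a `context.Context` as its first argument (the test above simply passes `context.Background()`). The point of threading a context through is that a hung or slow clone can be cancelled or timed out by the caller; a standard-library-only sketch of the same idea, using `exec.CommandContext` with an illustrative remote URL:

```go
package main

import (
	"context"
	"fmt"
	"os/exec"
	"time"
)

func main() {
	// A clone that stalls (for example, an unreachable remote) is killed when
	// the context deadline expires instead of hanging forever.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()

	cmd := exec.CommandContext(ctx, "git", "clone", "https://example.com/some/repo.git", "/tmp/repo")
	if err := cmd.Run(); err != nil {
		fmt.Println("clone failed or timed out:", err)
	}
}
```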
@@ -13,6 +13,7 @@ import (
	"time"

	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/timeutil"
@@ -243,7 +244,7 @@ func (a *Action) getCommentLink(e Engine) string {

// GetBranch returns the action's repository branch.
func (a *Action) GetBranch() string {
	return a.RefName
	return strings.TrimPrefix(a.RefName, git.BranchPrefix)
}

// GetContent returns the action's content.

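`GetBranch` now strips the ref prefix so callers receive a plain branch name instead of the full ref, which is the 1.13.1 fix "Trim the branch prefix from action.GetBranch" listed in the changelog above. A small illustration of the behaviour, assuming the conventional `refs/heads/` prefix for branch refs (the actual value lives in Gitea's `git.BranchPrefix` constant):

```go
package main

import (
	"fmt"
	"strings"
)

// branchPrefix is the assumed value of git.BranchPrefix for this sketch.
const branchPrefix = "refs/heads/"

func main() {
	refName := "refs/heads/release/v1.13"
	fmt.Println(strings.TrimPrefix(refName, branchPrefix)) // "release/v1.13"
	// Names that don't carry the prefix are returned unchanged.
	fmt.Println(strings.TrimPrefix("main", branchPrefix)) // "main"
}
```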
@@ -77,7 +77,7 @@ func removeStorageWithNotice(e Engine, bucket storage.ObjectStorage, title, path
	if err := bucket.Delete(path); err != nil {
		desc := fmt.Sprintf("%s [%s]: %v", title, path, err)
		log.Warn(title+" [%s]: %v", path, err)
		if err = createNotice(x, NoticeRepository, desc); err != nil {
		if err = createNotice(e, NoticeRepository, desc); err != nil {
			log.Error("CreateRepositoryNotice: %v", err)
		}
	}

@@ -99,7 +99,7 @@ func (a *Attachment) LinkedRepository() (*Repository, UnitType, error) {
func NewAttachment(attach *Attachment, buf []byte, file io.Reader) (_ *Attachment, err error) {
	attach.UUID = gouuid.New().String()

	size, err := storage.Attachments.Save(attach.RelativePath(), io.MultiReader(bytes.NewReader(buf), file))
	size, err := storage.Attachments.Save(attach.RelativePath(), io.MultiReader(bytes.NewReader(buf), file), -1)
	if err != nil {
		return nil, fmt.Errorf("Create: %v", err)
	}

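Callers of `storage.Attachments.Save` now pass a size argument (the 1.13.7 entry "Add size to Save function"); `-1` is used where the size is not known in advance, as in the attachment and test call sites shown above. A sketch of what such an interface change can look like — the `ObjectStorage` interface and in-memory backend below are hypothetical stand-ins, not Gitea's real storage package:

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// ObjectStorage is a hypothetical backend whose Save takes the expected size;
// a negative size means "unknown, read until EOF".
type ObjectStorage interface {
	Save(path string, r io.Reader, size int64) (int64, error)
}

type memStorage struct{ objects map[string]string }

func (m *memStorage) Save(path string, r io.Reader, size int64) (int64, error) {
	var b strings.Builder
	var n int64
	var err error
	if size >= 0 {
		// Knowing the size up front lets a backend pre-allocate or enforce limits.
		n, err = io.CopyN(&b, r, size)
	} else {
		n, err = io.Copy(&b, r)
	}
	if err != nil && err != io.EOF {
		return n, err
	}
	m.objects[path] = b.String()
	return n, nil
}

func main() {
	s := &memStorage{objects: map[string]string{}}
	n, err := s.Save("attachments/uuid", strings.NewReader("hello world"), -1)
	fmt.Println(n, err) // 11 <nil>
}
```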
@@ -18,7 +18,7 @@ func TestGetCommitStatuses(t *testing.T) {

	sha1 := "1234123412341234123412341234123412341234"

	statuses, maxResults, err := GetCommitStatuses(repo1, sha1, &CommitStatusOptions{})
	statuses, maxResults, err := GetCommitStatuses(repo1, sha1, &CommitStatusOptions{ListOptions: ListOptions{Page: 1, PageSize: 50}})
	assert.NoError(t, err)
	assert.Equal(t, int(maxResults), 5)
	assert.Len(t, statuses, 5)

@@ -5,10 +5,13 @@
package models

import (
	"fmt"
	"reflect"
	"regexp"
	"strings"
	"testing"

	"code.gitea.io/gitea/modules/setting"
	"github.com/stretchr/testify/assert"
	"xorm.io/builder"
)
@@ -221,6 +224,24 @@ func DeleteOrphanedLabels() error {
	return nil
}

// CountOrphanedIssueLabels return count of IssueLabels witch have no label behind anymore
func CountOrphanedIssueLabels() (int64, error) {
	return x.Table("issue_label").
		Join("LEFT", "label", "issue_label.label_id = label.id").
		Where(builder.IsNull{"label.id"}).Count()
}

// DeleteOrphanedIssueLabels delete IssueLabels witch have no label behind anymore
func DeleteOrphanedIssueLabels() error {

	_, err := x.In("id", builder.Select("issue_label.id").From("issue_label").
		Join("LEFT", "label", "issue_label.label_id = label.id").
		Where(builder.IsNull{"label.id"})).
		Delete(IssueLabel{})

	return err
}

// CountOrphanedIssues count issues without a repo
func CountOrphanedIssues() (int64, error) {
	return x.Table("issue").
@@ -295,3 +316,61 @@ func FixNullArchivedRepository() (int64, error) {
		IsArchived: false,
	})
}

// CountBadSequences looks for broken sequences from recreate-table mistakes
func CountBadSequences() (int64, error) {
	if !setting.Database.UsePostgreSQL {
		return 0, nil
	}

	sess := x.NewSession()
	defer sess.Close()

	var sequences []string
	schema := sess.Engine().Dialect().URI().Schema

	sess.Engine().SetSchema("")
	if err := sess.Table("information_schema.sequences").Cols("sequence_name").Where("sequence_name LIKE 'tmp_recreate__%_id_seq%' AND sequence_catalog = ?", setting.Database.Name).Find(&sequences); err != nil {
		return 0, err
	}
	sess.Engine().SetSchema(schema)

	return int64(len(sequences)), nil
}

// FixBadSequences fixes for broken sequences from recreate-table mistakes
func FixBadSequences() error {
	if !setting.Database.UsePostgreSQL {
		return nil
	}

	sess := x.NewSession()
	defer sess.Close()
	if err := sess.Begin(); err != nil {
		return err
	}

	var sequences []string
	schema := sess.Engine().Dialect().URI().Schema

	sess.Engine().SetSchema("")
	if err := sess.Table("information_schema.sequences").Cols("sequence_name").Where("sequence_name LIKE 'tmp_recreate__%_id_seq%' AND sequence_catalog = ?", setting.Database.Name).Find(&sequences); err != nil {
		return err
	}
	sess.Engine().SetSchema(schema)

	sequenceRegexp := regexp.MustCompile(`tmp_recreate__(\w+)_id_seq.*`)

	for _, sequence := range sequences {
		tableName := sequenceRegexp.FindStringSubmatch(sequence)[1]
		newSequenceName := tableName + "_id_seq"
		if _, err := sess.Exec(fmt.Sprintf("ALTER SEQUENCE `%s` RENAME TO `%s`", sequence, newSequenceName)); err != nil {
			return err
		}
		if _, err := sess.Exec(fmt.Sprintf("SELECT setval('%s', COALESCE((SELECT MAX(id)+1 FROM `%s`), 1), false)", newSequenceName, tableName)); err != nil {
			return err
		}
	}

	return sess.Commit()
}

@@ -33,3 +33,11 @@
  num_issues: 1
  num_closed_issues: 0

-
  id: 5
  repo_id: 10
  org_id: 0
  name: pull-test-label
  color: '#000000'
  num_issues: 0
  num_closed_issues: 0

@@ -29,3 +29,11 @@
  content: content random
  is_closed: false
  num_issues: 0

-
  id: 5
  repo_id: 10
  name: milestone of repo 10
  content: for testing with PRs
  is_closed: false
  num_issues: 0

@@ -146,6 +146,7 @@
  num_closed_issues: 0
  num_pulls: 1
  num_closed_pulls: 0
  num_milestones: 1
  is_mirror: false
  num_forks: 1
  status: 0

@@ -65,7 +65,11 @@ func (key *GPGKey) AfterLoad(session *xorm.Session) {

// ListGPGKeys returns a list of public keys belongs to given user.
func ListGPGKeys(uid int64, listOptions ListOptions) ([]*GPGKey, error) {
	sess := x.Where("owner_id=? AND primary_key_id=''", uid)
	return listGPGKeys(x, uid, listOptions)
}

func listGPGKeys(e Engine, uid int64, listOptions ListOptions) ([]*GPGKey, error) {
	sess := e.Table(&GPGKey{}).Where("owner_id=? AND primary_key_id=''", uid)
	if listOptions.Page != 0 {
		sess = listOptions.setSessionPagination(sess)
	}

@@ -14,6 +14,7 @@ import (

	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/references"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/structs"
	api "code.gitea.io/gitea/modules/structs"
@@ -1491,6 +1492,7 @@ type UserIssueStatsOptions struct {
	IsPull      bool
	IsClosed    bool
	IssueIDs    []int64
	LabelIDs    []int64
}

// GetUserIssueStats returns issue statistic information for dashboard by given conditions.
@@ -1507,29 +1509,38 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) {
		cond = cond.And(builder.In("issue.id", opts.IssueIDs))
	}

	sess := func(cond builder.Cond) *xorm.Session {
		s := x.Where(cond)
		if len(opts.LabelIDs) > 0 {
			s.Join("INNER", "issue_label", "issue_label.issue_id = issue.id").
				In("issue_label.label_id", opts.LabelIDs)
		}
		return s
	}

	switch opts.FilterMode {
	case FilterModeAll:
		stats.OpenCount, err = x.Where(cond).And("issue.is_closed = ?", false).
		stats.OpenCount, err = sess(cond).And("issue.is_closed = ?", false).
			And(builder.In("issue.repo_id", opts.UserRepoIDs)).
			Count(new(Issue))
		if err != nil {
			return nil, err
		}
		stats.ClosedCount, err = x.Where(cond).And("issue.is_closed = ?", true).
		stats.ClosedCount, err = sess(cond).And("issue.is_closed = ?", true).
			And(builder.In("issue.repo_id", opts.UserRepoIDs)).
			Count(new(Issue))
		if err != nil {
			return nil, err
		}
	case FilterModeAssign:
		stats.OpenCount, err = x.Where(cond).And("issue.is_closed = ?", false).
		stats.OpenCount, err = sess(cond).And("issue.is_closed = ?", false).
			Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id").
			And("issue_assignees.assignee_id = ?", opts.UserID).
			Count(new(Issue))
		if err != nil {
			return nil, err
		}
		stats.ClosedCount, err = x.Where(cond).And("issue.is_closed = ?", true).
		stats.ClosedCount, err = sess(cond).And("issue.is_closed = ?", true).
			Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id").
			And("issue_assignees.assignee_id = ?", opts.UserID).
			Count(new(Issue))
@@ -1537,27 +1548,27 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) {
			return nil, err
		}
	case FilterModeCreate:
		stats.OpenCount, err = x.Where(cond).And("issue.is_closed = ?", false).
		stats.OpenCount, err = sess(cond).And("issue.is_closed = ?", false).
			And("issue.poster_id = ?", opts.UserID).
			Count(new(Issue))
		if err != nil {
			return nil, err
		}
		stats.ClosedCount, err = x.Where(cond).And("issue.is_closed = ?", true).
		stats.ClosedCount, err = sess(cond).And("issue.is_closed = ?", true).
			And("issue.poster_id = ?", opts.UserID).
			Count(new(Issue))
		if err != nil {
			return nil, err
		}
	case FilterModeMention:
		stats.OpenCount, err = x.Where(cond).And("issue.is_closed = ?", false).
		stats.OpenCount, err = sess(cond).And("issue.is_closed = ?", false).
			Join("INNER", "issue_user", "issue.id = issue_user.issue_id and issue_user.is_mentioned = ?", true).
			And("issue_user.uid = ?", opts.UserID).
			Count(new(Issue))
		if err != nil {
			return nil, err
		}
		stats.ClosedCount, err = x.Where(cond).And("issue.is_closed = ?", true).
		stats.ClosedCount, err = sess(cond).And("issue.is_closed = ?", true).
			Join("INNER", "issue_user", "issue.id = issue_user.issue_id and issue_user.is_mentioned = ?", true).
			And("issue_user.uid = ?", opts.UserID).
			Count(new(Issue))
@@ -1567,7 +1578,7 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) {
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	cond = cond.And(builder.Eq{"issue.is_closed": opts.IsClosed})
 | 
			
		||||
	stats.AssignCount, err = x.Where(cond).
 | 
			
		||||
	stats.AssignCount, err = sess(cond).
 | 
			
		||||
		Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id").
 | 
			
		||||
		And("issue_assignees.assignee_id = ?", opts.UserID).
 | 
			
		||||
		Count(new(Issue))
 | 
			
		||||
@@ -1575,14 +1586,14 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) {
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	stats.CreateCount, err = x.Where(cond).
 | 
			
		||||
	stats.CreateCount, err = sess(cond).
 | 
			
		||||
		And("poster_id = ?", opts.UserID).
 | 
			
		||||
		Count(new(Issue))
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	stats.MentionCount, err = x.Where(cond).
 | 
			
		||||
	stats.MentionCount, err = sess(cond).
 | 
			
		||||
		Join("INNER", "issue_user", "issue.id = issue_user.issue_id and issue_user.is_mentioned = ?", true).
 | 
			
		||||
		And("issue_user.uid = ?", opts.UserID).
 | 
			
		||||
		Count(new(Issue))
 | 
			
		||||
@@ -1590,7 +1601,7 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) {
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	stats.YourRepositoriesCount, err = x.Where(cond).
 | 
			
		||||
	stats.YourRepositoriesCount, err = sess(cond).
 | 
			
		||||
		And(builder.In("issue.repo_id", opts.UserRepoIDs)).
 | 
			
		||||
		Count(new(Issue))
 | 
			
		||||
	if err != nil {
 | 
			
		||||
@@ -1829,6 +1840,19 @@ func (issue *Issue) updateClosedNum(e Engine) (err error) {
 	return
 }
 
+// FindAndUpdateIssueMentions finds users mentioned in the given content string, and saves them in the database.
+func (issue *Issue) FindAndUpdateIssueMentions(ctx DBContext, doer *User, content string) (mentions []*User, err error) {
+	rawMentions := references.FindAllMentionsMarkdown(content)
+	mentions, err = issue.ResolveMentionsByVisibility(ctx, doer, rawMentions)
+	if err != nil {
+		return nil, fmt.Errorf("UpdateIssueMentions [%d]: %v", issue.ID, err)
+	}
+	if err = UpdateIssueMentions(ctx, issue.ID, mentions); err != nil {
+		return nil, fmt.Errorf("UpdateIssueMentions [%d]: %v", issue.ID, err)
+	}
+	return
+}
+
 // ResolveMentionsByVisibility returns the users mentioned in an issue, removing those that
 // don't have access to reading it. Teams are expanded into their users, but organizations are ignored.
 func (issue *Issue) ResolveMentionsByVisibility(ctx DBContext, doer *User, mentions []string) (users []*User, err error) {
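
FindAndUpdateIssueMentions is the entry point comment and issue creation can call while a transaction is open. A minimal sketch of such a caller; the surrounding function is hypothetical, only the method itself comes from this diff and ctx is assumed to wrap the caller's open transaction:

// recordMentions resolves and stores mentions found in a freshly written body.
func recordMentions(ctx DBContext, doer *User, issue *Issue, body string) error {
	mentions, err := issue.FindAndUpdateIssueMentions(ctx, doer, body)
	if err != nil {
		return err
	}
	for _, u := range mentions {
		log.Trace("user %d mentioned on issue %d", u.ID, issue.ID)
	}
	return nil
}
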
 
 | 
			
		||||
@@ -82,7 +82,7 @@ func isUserAssignedToIssue(e Engine, issue *Issue, user *User) (isAssigned bool,
 }
 
 // ClearAssigneeByUserID deletes all assignments of an user
-func clearAssigneeByUserID(sess *xorm.Session, userID int64) (err error) {
+func clearAssigneeByUserID(sess Engine, userID int64) (err error) {
 	_, err = sess.Delete(&IssueAssignees{AssigneeID: userID})
 	return
 }
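
Widening the parameter from *xorm.Session to the models Engine interface lets the same helper run against the global engine or inside a caller's transaction. A small illustrative wrapper, hypothetical name, assuming the package-level x *xorm.Engine used throughout these models:

// removeAssignmentsForDeactivatedUser shows the transactional use: a
// *xorm.Session satisfies Engine, so the helper joins the caller's commit.
func removeAssignmentsForDeactivatedUser(userID int64) error {
	sess := x.NewSession()
	defer sess.Close()
	if err := sess.Begin(); err != nil {
		return err
	}
	if err := clearAssigneeByUserID(sess, userID); err != nil {
		return err
	}
	return sess.Commit()
}
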
 
 | 
			
		||||
@@ -1077,6 +1077,10 @@ func DeleteComment(comment *Comment, doer *User) error {
 		return err
 	}
 
+	if err := deleteReaction(sess, &ReactionOptions{Comment: comment}); err != nil {
+		return err
+	}
+
 	return sess.Commit()
 }
 
 
 | 
			
		||||
@@ -47,7 +47,7 @@ type Label struct {
 | 
			
		||||
func GetLabelTemplateFile(name string) ([][3]string, error) {
 | 
			
		||||
	data, err := GetRepoInitFile("label", name)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return nil, fmt.Errorf("GetRepoInitFile: %v", err)
 | 
			
		||||
		return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("GetRepoInitFile: %v", err)}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	lines := strings.Split(string(data), "\n")
 | 
			
		||||
@@ -62,7 +62,7 @@ func GetLabelTemplateFile(name string) ([][3]string, error) {
 | 
			
		||||
 | 
			
		||||
		fields := strings.SplitN(parts[0], " ", 2)
 | 
			
		||||
		if len(fields) != 2 {
 | 
			
		||||
			return nil, fmt.Errorf("line is malformed: %s", line)
 | 
			
		||||
			return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("line is malformed: %s", line)}
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		color := strings.Trim(fields[0], " ")
 | 
			
		||||
@@ -70,7 +70,7 @@ func GetLabelTemplateFile(name string) ([][3]string, error) {
 | 
			
		||||
			color = "#" + color
 | 
			
		||||
		}
 | 
			
		||||
		if !LabelColorPattern.MatchString(color) {
 | 
			
		||||
			return nil, fmt.Errorf("bad HTML color code in line: %s", line)
 | 
			
		||||
			return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("bad HTML color code in line: %s", line)}
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		var description string
 | 
			
		||||
@@ -167,7 +167,7 @@ func (label *Label) ForegroundColor() template.CSS {
 | 
			
		||||
func loadLabels(labelTemplate string) ([]string, error) {
 | 
			
		||||
	list, err := GetLabelTemplateFile(labelTemplate)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return nil, ErrIssueLabelTemplateLoad{labelTemplate, err}
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	labels := make([]string, len(list))
 | 
			
		||||
@@ -186,7 +186,7 @@ func LoadLabelsFormatted(labelTemplate string) (string, error) {
 | 
			
		||||
func initializeLabels(e Engine, id int64, labelTemplate string, isOrg bool) error {
 | 
			
		||||
	list, err := GetLabelTemplateFile(labelTemplate)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return ErrIssueLabelTemplateLoad{labelTemplate, err}
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	labels := make([]*Label, len(list))
 | 
			
		||||
@@ -764,3 +764,15 @@ func DeleteIssueLabel(issue *Issue, label *Label, doer *User) (err error) {
 
 	return sess.Commit()
 }
+
+func deleteLabelsByRepoID(sess Engine, repoID int64) error {
+	deleteCond := builder.Select("id").From("label").Where(builder.Eq{"label.repo_id": repoID})
+
+	if _, err := sess.In("label_id", deleteCond).
+		Delete(&IssueLabel{}); err != nil {
+		return err
+	}
+
+	_, err := sess.Delete(&Label{RepoID: repoID})
+	return err
+}
 
 | 
			
		||||
@@ -178,11 +178,15 @@ func CreateCommentReaction(doer *User, issue *Issue, comment *Comment, content s
 	})
 }
 
-func deleteReaction(e *xorm.Session, opts *ReactionOptions) error {
+func deleteReaction(e Engine, opts *ReactionOptions) error {
 	reaction := &Reaction{
-		Type:    opts.Type,
-		UserID:  opts.Doer.ID,
-		IssueID: opts.Issue.ID,
+		Type: opts.Type,
 	}
+	if opts.Doer != nil {
+		reaction.UserID = opts.Doer.ID
+	}
+	if opts.Issue != nil {
+		reaction.IssueID = opts.Issue.ID
+	}
 	if opts.Comment != nil {
 		reaction.CommentID = opts.Comment.ID
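
With the nil checks added above, a caller may fill in only the parts of ReactionOptions it has; deleting every reaction attached to one comment, as DeleteComment now does, reduces to this sketch:

// removeCommentReactions deletes all reactions attached to a single comment.
func removeCommentReactions(e Engine, comment *Comment) error {
	return deleteReaction(e, &ReactionOptions{Comment: comment})
}
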
 
 | 
			
		||||
@@ -16,13 +16,13 @@ type ListOptions struct {
 	Page     int // start from 1
 }
 
-func (opts ListOptions) getPaginatedSession() *xorm.Session {
+func (opts *ListOptions) getPaginatedSession() *xorm.Session {
 	opts.setDefaultValues()
 
 	return x.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
 }
 
-func (opts ListOptions) setSessionPagination(sess *xorm.Session) *xorm.Session {
+func (opts *ListOptions) setSessionPagination(sess *xorm.Session) *xorm.Session {
 	opts.setDefaultValues()
 
 	if opts.PageSize <= 0 {
@@ -31,21 +31,21 @@ func (opts ListOptions) setSessionPagination(sess *xorm.Session) *xorm.Session {
 	return sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
 }
 
-func (opts ListOptions) setEnginePagination(e Engine) Engine {
+func (opts *ListOptions) setEnginePagination(e Engine) Engine {
 	opts.setDefaultValues()
 
 	return e.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
 }
 
 // GetStartEnd returns the start and end of the ListOptions
-func (opts ListOptions) GetStartEnd() (start, end int) {
+func (opts *ListOptions) GetStartEnd() (start, end int) {
 	opts.setDefaultValues()
 	start = (opts.Page - 1) * opts.PageSize
 	end = start + opts.Page
 	return
 }
 
-func (opts ListOptions) setDefaultValues() {
+func (opts *ListOptions) setDefaultValues() {
 	if opts.PageSize <= 0 {
 		opts.PageSize = setting.API.DefaultPagingNum
 	}
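
The switch to pointer receivers matters because setDefaultValues mutates the options: with a value receiver the defaulted PageSize was applied to a copy and discarded. A sketch of a caller that relies on reading the applied default back; the helper name and the query are illustrative, not part of this change:

// listOpenIssuesPage pages through open issues and reports the page size that
// was actually applied, which works because opts itself is updated now.
func listOpenIssuesPage(page int) ([]*Issue, int, error) {
	opts := &ListOptions{Page: page} // PageSize deliberately left 0
	sess := x.NewSession()
	defer sess.Close()

	issues := make([]*Issue, 0, 10)
	if err := opts.setSessionPagination(sess.Where("is_closed = ?", false)).Find(&issues); err != nil {
		return nil, 0, err
	}
	// opts.PageSize now typically holds setting.API.DefaultPagingNum.
	return issues, opts.PageSize, nil
}
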
 
 | 
			
		||||
@@ -16,6 +16,7 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 
 	"xorm.io/xorm"
+	"xorm.io/xorm/names"
 )
 
 const minDBVersion = 70 // Gitea 1.5.3
@@ -296,6 +297,8 @@ func EnsureUpToDate(x *xorm.Engine) error {
 
 // Migrate database to current version
 func Migrate(x *xorm.Engine) error {
+	// Set a new clean the default mapper to GonicMapper as that is the default for Gitea.
+	x.SetMapper(names.GonicMapper{})
 	if err := x.Sync(new(Version)); err != nil {
 		return fmt.Errorf("sync: %v", err)
 	}
@@ -334,6 +337,8 @@ Please try upgrading to a lower version first (suggested v1.6.4), then upgrade t
 	// Migrate
 	for i, m := range migrations[v-minDBVersion:] {
 		log.Info("Migration[%d]: %s", v+int64(i), m.Description())
+		// Reset the mapper between each migration - migrations are not supposed to depend on each other
+		x.SetMapper(names.GonicMapper{})
 		if err = m.Migrate(x); err != nil {
 			return fmt.Errorf("do migrate: %v", err)
 		}
@@ -511,6 +516,31 @@ func recreateTable(sess *xorm.Session, bean interface{}) error {
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
	case setting.Database.UsePostgreSQL:
 | 
			
		||||
		var originalSequences []string
 | 
			
		||||
		type sequenceData struct {
 | 
			
		||||
			LastValue int  `xorm:"'last_value'"`
 | 
			
		||||
			IsCalled  bool `xorm:"'is_called'"`
 | 
			
		||||
		}
 | 
			
		||||
		sequenceMap := map[string]sequenceData{}
 | 
			
		||||
 | 
			
		||||
		schema := sess.Engine().Dialect().URI().Schema
 | 
			
		||||
		sess.Engine().SetSchema("")
 | 
			
		||||
		if err := sess.Table("information_schema.sequences").Cols("sequence_name").Where("sequence_name LIKE ? || '_%' AND sequence_catalog = ?", tableName, setting.Database.Name).Find(&originalSequences); err != nil {
 | 
			
		||||
			log.Error("Unable to rename %s to %s. Error: %v", tempTableName, tableName, err)
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
		sess.Engine().SetSchema(schema)
 | 
			
		||||
 | 
			
		||||
		for _, sequence := range originalSequences {
 | 
			
		||||
			sequenceData := sequenceData{}
 | 
			
		||||
			if _, err := sess.Table(sequence).Cols("last_value", "is_called").Get(&sequenceData); err != nil {
 | 
			
		||||
				log.Error("Unable to get last_value and is_called from %s. Error: %v", sequence, err)
 | 
			
		||||
				return err
 | 
			
		||||
			}
 | 
			
		||||
			sequenceMap[sequence] = sequenceData
 | 
			
		||||
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		// CASCADE causes postgres to drop all the constraints on the old table
 | 
			
		||||
		if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s` CASCADE", tableName)); err != nil {
 | 
			
		||||
			log.Error("Unable to drop old table %s. Error: %v", tableName, err)
 | 
			
		||||
@@ -524,7 +554,6 @@ func recreateTable(sess *xorm.Session, bean interface{}) error {
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		var indices []string
 | 
			
		||||
		schema := sess.Engine().Dialect().URI().Schema
 | 
			
		||||
		sess.Engine().SetSchema("")
 | 
			
		||||
		if err := sess.Table("pg_indexes").Cols("indexname").Where("tablename = ? ", tableName).Find(&indices); err != nil {
 | 
			
		||||
			log.Error("Unable to rename %s to %s. Error: %v", tempTableName, tableName, err)
 | 
			
		||||
@@ -540,6 +569,43 @@ func recreateTable(sess *xorm.Session, bean interface{}) error {
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		var sequences []string
 | 
			
		||||
		sess.Engine().SetSchema("")
 | 
			
		||||
		if err := sess.Table("information_schema.sequences").Cols("sequence_name").Where("sequence_name LIKE 'tmp_recreate__' || ? || '_%' AND sequence_catalog = ?", tableName, setting.Database.Name).Find(&sequences); err != nil {
 | 
			
		||||
			log.Error("Unable to rename %s to %s. Error: %v", tempTableName, tableName, err)
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
		sess.Engine().SetSchema(schema)
 | 
			
		||||
 | 
			
		||||
		for _, sequence := range sequences {
 | 
			
		||||
			newSequenceName := strings.Replace(sequence, "tmp_recreate__", "", 1)
 | 
			
		||||
			if _, err := sess.Exec(fmt.Sprintf("ALTER SEQUENCE `%s` RENAME TO `%s`", sequence, newSequenceName)); err != nil {
 | 
			
		||||
				log.Error("Unable to rename %s sequence to %s. Error: %v", sequence, newSequenceName, err)
 | 
			
		||||
				return err
 | 
			
		||||
			}
 | 
			
		||||
			val, ok := sequenceMap[newSequenceName]
 | 
			
		||||
			if newSequenceName == tableName+"_id_seq" {
 | 
			
		||||
				if ok && val.LastValue != 0 {
 | 
			
		||||
					if _, err := sess.Exec(fmt.Sprintf("SELECT setval('%s', %d, %t)", newSequenceName, val.LastValue, val.IsCalled)); err != nil {
 | 
			
		||||
						log.Error("Unable to reset %s to %d. Error: %v", newSequenceName, val, err)
 | 
			
		||||
						return err
 | 
			
		||||
					}
 | 
			
		||||
				} else {
 | 
			
		||||
					// We're going to try to guess this
 | 
			
		||||
					if _, err := sess.Exec(fmt.Sprintf("SELECT setval('%s', COALESCE((SELECT MAX(id)+1 FROM `%s`), 1), false)", newSequenceName, tableName)); err != nil {
 | 
			
		||||
						log.Error("Unable to reset %s. Error: %v", newSequenceName, err)
 | 
			
		||||
						return err
 | 
			
		||||
					}
 | 
			
		||||
				}
 | 
			
		||||
			} else if ok {
 | 
			
		||||
				if _, err := sess.Exec(fmt.Sprintf("SELECT setval('%s', %d, %t)", newSequenceName, val.LastValue, val.IsCalled)); err != nil {
 | 
			
		||||
					log.Error("Unable to reset %s to %d. Error: %v", newSequenceName, val, err)
 | 
			
		||||
					return err
 | 
			
		||||
				}
 | 
			
		||||
			}
 | 
			
		||||
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
	case setting.Database.UseMSSQL:
 | 
			
		||||
		// MSSQL will drop all the constraints on the old table
 | 
			
		||||
		if _, err := sess.Exec(fmt.Sprintf("DROP TABLE `%s`", tableName)); err != nil {
 | 
			
		||||
 
 | 
			
		||||
@@ -12,7 +12,7 @@ import (
 
 func addKeepActivityPrivateUserColumn(x *xorm.Engine) error {
 	type User struct {
-		KeepActivityPrivate bool
+		KeepActivityPrivate bool `xorm:"NOT NULL DEFAULT false"`
 	}
 
 	if err := x.Sync2(new(User)); err != nil {
 
 | 
			
		||||
@@ -15,12 +15,14 @@ import (
 | 
			
		||||
 | 
			
		||||
	"code.gitea.io/gitea/modules/setting"
 | 
			
		||||
 | 
			
		||||
	// Needed for the MySQL driver
 | 
			
		||||
	_ "github.com/go-sql-driver/mysql"
 | 
			
		||||
	"xorm.io/builder"
 | 
			
		||||
	"xorm.io/xorm"
 | 
			
		||||
	"xorm.io/xorm/names"
 | 
			
		||||
	"xorm.io/xorm/schemas"
 | 
			
		||||
 | 
			
		||||
	// Needed for the MySQL driver
 | 
			
		||||
	_ "github.com/go-sql-driver/mysql"
 | 
			
		||||
 | 
			
		||||
	// Needed for the Postgresql driver
 | 
			
		||||
	_ "github.com/lib/pq"
 | 
			
		||||
 | 
			
		||||
@@ -145,7 +147,16 @@ func getEngine() (*xorm.Engine, error) {
 		return nil, err
 	}
 
-	engine, err := xorm.NewEngine(setting.Database.Type, connStr)
+	var engine *xorm.Engine
+
+	if setting.Database.UsePostgreSQL && len(setting.Database.Schema) > 0 {
+		// OK whilst we sort out our schema issues - create a schema aware postgres
+		registerPostgresSchemaDriver()
+		engine, err = xorm.NewEngine("postgresschema", connStr)
+	} else {
+		engine, err = xorm.NewEngine(setting.Database.Type, connStr)
+	}
+
 	if err != nil {
 		return nil, err
 	}
@@ -155,16 +166,6 @@ func getEngine() (*xorm.Engine, error) {
 		engine.Dialect().SetParams(map[string]string{"DEFAULT_VARCHAR": "nvarchar"})
 	}
 	engine.SetSchema(setting.Database.Schema)
-	if setting.Database.UsePostgreSQL && len(setting.Database.Schema) > 0 {
-		// Add the schema to the search path
-		if _, err := engine.Exec(`SELECT set_config(
-			'search_path',
-			? || ',' || current_setting('search_path'),
-			false)`,
-			setting.Database.Schema); err != nil {
-			return nil, err
-		}
-	}
 	return engine, nil
 }

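
The schema-aware driver itself is the new models/sql_postgres_with_schema.go further down in this compare. A hypothetical smoke check, not part of the change, that every fresh connection already carries the configured schema at the front of search_path:

// checkSearchPath is an illustrative helper for verifying the driver wiring.
func checkSearchPath(e *xorm.Engine) error {
	res, err := e.QueryString("SHOW search_path")
	if err != nil {
		return err
	}
	// Expect the configured schema to appear first, e.g. "gitea, public".
	log.Info("search_path seen by a new connection: %v", res)
	return nil
}
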
@@ -313,6 +314,13 @@ func DumpDatabase(filePath string, dbType string) error {
 		tbs = append(tbs, t)
 	}
 
+	// temporary fix for v1.13.x (https://github.com/go-gitea/gitea/issues/14069)
+	if _, err := x.Where(builder.IsNull{"keep_activity_private"}).
+		Cols("keep_activity_private").
+		Update(User{KeepActivityPrivate: false}); err != nil {
+		return err
+	}
+
 	type Version struct {
 		ID      int64 `xorm:"pk autoincr"`
 		Version int64
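
This dump-time backfill pairs with the NOT NULL DEFAULT false tag added in the migration earlier in this compare. A one-line check that the combination leaves nothing behind could look like the following sketch; the helper is hypothetical, builder.IsNull and the column name come from the hunk above:

// nullKeepActivityPrivateRows counts rows the dump-time fix would still have to touch.
func nullKeepActivityPrivateRows() (int64, error) {
	return x.Where(builder.IsNull{"keep_activity_private"}).Count(new(User))
}
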
 
 | 
			
		||||
@@ -8,6 +8,7 @@ import (
 	"sort"
 
 	"code.gitea.io/gitea/modules/auth/oauth2"
+	"code.gitea.io/gitea/modules/log"
 )
 
 // OAuth2Provider describes the display values of a single OAuth2 provider
@@ -119,13 +120,28 @@ func InitOAuth2() error {
 	if err := oauth2.Init(x); err != nil {
 		return err
 	}
-	loginSources, _ := GetActiveOAuth2ProviderLoginSources()
+	return initOAuth2LoginSources()
+}
+
+// ResetOAuth2 clears existing OAuth2 providers and loads them from DB
+func ResetOAuth2() error {
+	oauth2.ClearProviders()
+	return initOAuth2LoginSources()
+}
+
+// initOAuth2LoginSources is used to load and register all active OAuth2 providers
+func initOAuth2LoginSources() error {
+	loginSources, _ := GetActiveOAuth2ProviderLoginSources()
 	for _, source := range loginSources {
 		oAuth2Config := source.OAuth2()
 		err := oauth2.RegisterProvider(source.Name, oAuth2Config.Provider, oAuth2Config.ClientID, oAuth2Config.ClientSecret, oAuth2Config.OpenIDConnectAutoDiscoveryURL, oAuth2Config.CustomURLMapping)
 		if err != nil {
-			return err
+			log.Critical("Unable to register source: %s due to Error: %v. This source will be disabled.", source.Name, err)
+			source.IsActived = false
+			if err = UpdateSource(source); err != nil {
+				log.Critical("Unable to update source %s to disable it. Error: %v", err)
+				return err
+			}
 		}
 	}
 	return nil
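
ResetOAuth2 gives admin-side code a way to rebuild the in-memory provider list after login sources change. A hedged sketch of such a caller; the function is illustrative, only ResetOAuth2 and UpdateSource come from the diff:

// applyOAuth2SourceChange persists an edited login source and reloads providers.
func applyOAuth2SourceChange(source *LoginSource) error {
	if err := UpdateSource(source); err != nil {
		return err
	}
	return ResetOAuth2()
}
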
 
 | 
			
		||||
@@ -54,7 +54,11 @@ func (r *Release) loadAttributes(e Engine) error {
 	if r.Publisher == nil {
 		r.Publisher, err = getUserByID(e, r.PublisherID)
 		if err != nil {
-			return err
+			if IsErrUserNotExist(err) {
+				r.Publisher = NewGhostUser()
+			} else {
+				return err
+			}
 		}
 	}
 	return getReleaseAttachments(e, r)
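
Falling back to the ghost user means release listings no longer need nil checks for deleted publishers. A sketch of the consuming side, assuming the exported LoadAttributes wrapper that loads via the default engine:

// publisherDisplayName never returns an empty name for a missing account:
// deleted publishers show up as the ghost user instead.
func publisherDisplayName(r *Release) (string, error) {
	if err := r.LoadAttributes(); err != nil {
		return "", err
	}
	return r.Publisher.Name, nil
}
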
 
 | 
			
		||||
@@ -426,6 +426,7 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool)
 | 
			
		||||
		HTMLURL:                   repo.HTMLURL(),
 | 
			
		||||
		SSHURL:                    cloneLink.SSH,
 | 
			
		||||
		CloneURL:                  cloneLink.HTTPS,
 | 
			
		||||
		OriginalURL:               repo.SanitizedOriginalURL(),
 | 
			
		||||
		Website:                   repo.Website,
 | 
			
		||||
		Stars:                     repo.NumStars,
 | 
			
		||||
		Forks:                     repo.NumForks,
 | 
			
		||||
@@ -1289,11 +1290,44 @@ func IncrementRepoForkNum(ctx DBContext, repoID int64) error {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// TransferOwnership transfers all corresponding setting from old user to new one.
 | 
			
		||||
func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error {
 | 
			
		||||
func TransferOwnership(doer *User, newOwnerName string, repo *Repository) (err error) {
 | 
			
		||||
	repoRenamed := false
 | 
			
		||||
	wikiRenamed := false
 | 
			
		||||
	oldOwnerName := doer.Name
 | 
			
		||||
 | 
			
		||||
	defer func() {
 | 
			
		||||
		if !repoRenamed && !wikiRenamed {
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		recoverErr := recover()
 | 
			
		||||
		if err == nil && recoverErr == nil {
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		if repoRenamed {
 | 
			
		||||
			if err := os.Rename(RepoPath(newOwnerName, repo.Name), RepoPath(oldOwnerName, repo.Name)); err != nil {
 | 
			
		||||
				log.Critical("Unable to move repository %s/%s directory from %s back to correct place %s: %v", oldOwnerName, repo.Name, RepoPath(newOwnerName, repo.Name), RepoPath(oldOwnerName, repo.Name), err)
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		if wikiRenamed {
 | 
			
		||||
			if err := os.Rename(WikiPath(newOwnerName, repo.Name), WikiPath(oldOwnerName, repo.Name)); err != nil {
 | 
			
		||||
				log.Critical("Unable to move wiki for repository %s/%s directory from %s back to correct place %s: %v", oldOwnerName, repo.Name, WikiPath(newOwnerName, repo.Name), WikiPath(oldOwnerName, repo.Name), err)
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		if recoverErr != nil {
 | 
			
		||||
			log.Error("Panic within TransferOwnership: %v\n%s", recoverErr, log.Stack(2))
 | 
			
		||||
			panic(recoverErr)
 | 
			
		||||
		}
 | 
			
		||||
	}()
 | 
			
		||||
 | 
			
		||||
	newOwner, err := GetUserByName(newOwnerName)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return fmt.Errorf("get new owner '%s': %v", newOwnerName, err)
 | 
			
		||||
	}
 | 
			
		||||
	newOwnerName = newOwner.Name // ensure capitalisation matches
 | 
			
		||||
 | 
			
		||||
	// Check if new owner has repository with same name.
 | 
			
		||||
	has, err := IsRepositoryExist(newOwner, repo.Name)
 | 
			
		||||
@@ -1310,6 +1344,7 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	oldOwner := repo.Owner
 | 
			
		||||
	oldOwnerName = oldOwner.Name
 | 
			
		||||
 | 
			
		||||
	// Note: we have to set value here to make sure recalculate accesses is based on
 | 
			
		||||
	// new owner.
 | 
			
		||||
@@ -1369,9 +1404,9 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// Update repository count.
 | 
			
		||||
	if _, err = sess.Exec("UPDATE `user` SET num_repos=num_repos+1 WHERE id=?", newOwner.ID); err != nil {
 | 
			
		||||
	if _, err := sess.Exec("UPDATE `user` SET num_repos=num_repos+1 WHERE id=?", newOwner.ID); err != nil {
 | 
			
		||||
		return fmt.Errorf("increase new owner repository count: %v", err)
 | 
			
		||||
	} else if _, err = sess.Exec("UPDATE `user` SET num_repos=num_repos-1 WHERE id=?", oldOwner.ID); err != nil {
 | 
			
		||||
	} else if _, err := sess.Exec("UPDATE `user` SET num_repos=num_repos-1 WHERE id=?", oldOwner.ID); err != nil {
 | 
			
		||||
		return fmt.Errorf("decrease old owner repository count: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
@@ -1381,7 +1416,7 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error
 | 
			
		||||
 | 
			
		||||
	// Remove watch for organization.
 | 
			
		||||
	if oldOwner.IsOrganization() {
 | 
			
		||||
		if err = watchRepo(sess, oldOwner.ID, repo.ID, false); err != nil {
 | 
			
		||||
		if err := watchRepo(sess, oldOwner.ID, repo.ID, false); err != nil {
 | 
			
		||||
			return fmt.Errorf("watchRepo [false]: %v", err)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
@@ -1393,16 +1428,18 @@ func TransferOwnership(doer *User, newOwnerName string, repo *Repository) error
 | 
			
		||||
		return fmt.Errorf("Failed to create dir %s: %v", dir, err)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if err = os.Rename(RepoPath(oldOwner.Name, repo.Name), RepoPath(newOwner.Name, repo.Name)); err != nil {
 | 
			
		||||
	if err := os.Rename(RepoPath(oldOwner.Name, repo.Name), RepoPath(newOwner.Name, repo.Name)); err != nil {
 | 
			
		||||
		return fmt.Errorf("rename repository directory: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
	repoRenamed = true
 | 
			
		||||
 | 
			
		||||
	// Rename remote wiki repository to new path and delete local copy.
 | 
			
		||||
	wikiPath := WikiPath(oldOwner.Name, repo.Name)
 | 
			
		||||
	if com.IsExist(wikiPath) {
 | 
			
		||||
		if err = os.Rename(wikiPath, WikiPath(newOwner.Name, repo.Name)); err != nil {
 | 
			
		||||
		if err := os.Rename(wikiPath, WikiPath(newOwner.Name, repo.Name)); err != nil {
 | 
			
		||||
			return fmt.Errorf("rename repository wiki: %v", err)
 | 
			
		||||
		}
 | 
			
		||||
		wikiRenamed = true
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// If there was previously a redirect at this location, remove it.
 | 
			
		||||
@@ -1599,26 +1636,27 @@ func UpdateRepositoryUnits(repo *Repository, units []RepoUnit, deleteUnitTypes [
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// DeleteRepository deletes a repository for a user or organization.
 | 
			
		||||
// make sure if you call this func to close open sessions (sqlite will otherwise get a deadlock)
 | 
			
		||||
func DeleteRepository(doer *User, uid, repoID int64) error {
 | 
			
		||||
	sess := x.NewSession()
 | 
			
		||||
	defer sess.Close()
 | 
			
		||||
	if err := sess.Begin(); err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// In case is a organization.
 | 
			
		||||
	org, err := GetUserByID(uid)
 | 
			
		||||
	org, err := getUserByID(sess, uid)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
	if org.IsOrganization() {
 | 
			
		||||
		if err = org.GetTeams(&SearchTeamOptions{}); err != nil {
 | 
			
		||||
		if err = org.getTeams(sess); err != nil {
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	sess := x.NewSession()
 | 
			
		||||
	defer sess.Close()
 | 
			
		||||
	if err = sess.Begin(); err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	repo := &Repository{ID: repoID, OwnerID: uid}
 | 
			
		||||
	has, err := sess.Get(repo)
 | 
			
		||||
	repo := &Repository{OwnerID: uid}
 | 
			
		||||
	has, err := sess.ID(repoID).Get(repo)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	} else if !has {
 | 
			
		||||
@@ -1691,6 +1729,10 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
 | 
			
		||||
		return fmt.Errorf("deleteBeans: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if err := deleteLabelsByRepoID(sess, repoID); err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// Delete Issues and related objects
 | 
			
		||||
	var attachmentPaths []string
 | 
			
		||||
	if attachmentPaths, err = deleteIssuesByRepoID(sess, repoID); err != nil {
 | 
			
		||||
@@ -1767,14 +1809,7 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if err = sess.Commit(); err != nil {
 | 
			
		||||
		sess.Close()
 | 
			
		||||
		if len(deployKeys) > 0 {
 | 
			
		||||
			// We need to rewrite the public keys because the commit failed
 | 
			
		||||
			if err2 := RewriteAllPublicKeys(); err2 != nil {
 | 
			
		||||
				return fmt.Errorf("Commit: %v SSH Keys: %v", err, err2)
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
		return fmt.Errorf("Commit: %v", err)
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	sess.Close()
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
models/sql_postgres_with_schema.go (new file, 75 lines)
@@ -0,0 +1,75 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
 | 
			
		||||
// Use of this source code is governed by a MIT-style
 | 
			
		||||
// license that can be found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
package models
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"database/sql"
 | 
			
		||||
	"database/sql/driver"
 | 
			
		||||
	"sync"
 | 
			
		||||
 | 
			
		||||
	"code.gitea.io/gitea/modules/setting"
 | 
			
		||||
 | 
			
		||||
	"github.com/lib/pq"
 | 
			
		||||
	"xorm.io/xorm/dialects"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
var registerOnce sync.Once
 | 
			
		||||
 | 
			
		||||
func registerPostgresSchemaDriver() {
 | 
			
		||||
	registerOnce.Do(func() {
 | 
			
		||||
		sql.Register("postgresschema", &postgresSchemaDriver{})
 | 
			
		||||
		dialects.RegisterDriver("postgresschema", dialects.QueryDriver("postgres"))
 | 
			
		||||
	})
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type postgresSchemaDriver struct {
 | 
			
		||||
	pq.Driver
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Open opens a new connection to the database. name is a connection string.
 | 
			
		||||
// This function opens the postgres connection in the default manner but immediately
 | 
			
		||||
// runs set_config to set the search_path appropriately
 | 
			
		||||
func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) {
 | 
			
		||||
	conn, err := d.Driver.Open(name)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return conn, err
 | 
			
		||||
	}
 | 
			
		||||
	schemaValue, _ := driver.String.ConvertValue(setting.Database.Schema)
 | 
			
		||||
 | 
			
		||||
	// golangci lint is incorrect here - there is no benefit to using driver.ExecerContext here
 | 
			
		||||
	// and in any case pq does not implement it
 | 
			
		||||
	if execer, ok := conn.(driver.Execer); ok { //nolint
 | 
			
		||||
		_, err := execer.Exec(`SELECT set_config(
 | 
			
		||||
			'search_path',
 | 
			
		||||
			$1 || ',' || current_setting('search_path'),
 | 
			
		||||
			false)`, []driver.Value{schemaValue}) //nolint
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			_ = conn.Close()
 | 
			
		||||
			return nil, err
 | 
			
		||||
		}
 | 
			
		||||
		return conn, nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	stmt, err := conn.Prepare(`SELECT set_config(
 | 
			
		||||
		'search_path',
 | 
			
		||||
		$1 || ',' || current_setting('search_path'),
 | 
			
		||||
		false)`)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		_ = conn.Close()
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
	defer stmt.Close()
 | 
			
		||||
 | 
			
		||||
	// driver.String.ConvertValue will never return err for string
 | 
			
		||||
 | 
			
		||||
	// golangci lint is incorrect here - there is no benefit to using stmt.ExecWithContext here
 | 
			
		||||
	_, err = stmt.Exec([]driver.Value{schemaValue}) //nolint
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		_ = conn.Close()
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return conn, nil
 | 
			
		||||
}
 | 
			
		||||
@@ -40,7 +40,6 @@ import (
 | 
			
		||||
	"golang.org/x/crypto/scrypt"
 | 
			
		||||
	"golang.org/x/crypto/ssh"
 | 
			
		||||
	"xorm.io/builder"
 | 
			
		||||
	"xorm.io/xorm"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
// UserType defines the user type
 | 
			
		||||
@@ -551,6 +550,7 @@ func (u *User) GetOwnedOrganizations() (err error) {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// GetOrganizations returns paginated organizations that user belongs to.
 | 
			
		||||
// TODO: does not respect All and show orgs you privately participate
 | 
			
		||||
func (u *User) GetOrganizations(opts *SearchOrganizationsOptions) error {
 | 
			
		||||
	sess := x.NewSession()
 | 
			
		||||
	defer sess.Close()
 | 
			
		||||
@@ -727,6 +727,7 @@ var (
 | 
			
		||||
		"assets",
 | 
			
		||||
		"attachments",
 | 
			
		||||
		"avatars",
 | 
			
		||||
		"captcha",
 | 
			
		||||
		"commits",
 | 
			
		||||
		"debug",
 | 
			
		||||
		"error",
 | 
			
		||||
@@ -922,6 +923,7 @@ func VerifyActiveEmailCode(code, email string) *EmailAddress {
 | 
			
		||||
 | 
			
		||||
// ChangeUserName changes all corresponding setting from old user name to new one.
 | 
			
		||||
func ChangeUserName(u *User, newUserName string) (err error) {
 | 
			
		||||
	oldUserName := u.Name
 | 
			
		||||
	if err = IsUsableUsername(newUserName); err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
@@ -939,16 +941,24 @@ func ChangeUserName(u *User, newUserName string) (err error) {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if _, err = sess.Exec("UPDATE `repository` SET owner_name=? WHERE owner_name=?", newUserName, u.Name); err != nil {
 | 
			
		||||
	if _, err = sess.Exec("UPDATE `repository` SET owner_name=? WHERE owner_name=?", newUserName, oldUserName); err != nil {
 | 
			
		||||
		return fmt.Errorf("Change repo owner name: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// Do not fail if directory does not exist
 | 
			
		||||
	if err = os.Rename(UserPath(u.Name), UserPath(newUserName)); err != nil && !os.IsNotExist(err) {
 | 
			
		||||
	if err = os.Rename(UserPath(oldUserName), UserPath(newUserName)); err != nil && !os.IsNotExist(err) {
 | 
			
		||||
		return fmt.Errorf("Rename user directory: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return sess.Commit()
 | 
			
		||||
	if err = sess.Commit(); err != nil {
 | 
			
		||||
		if err2 := os.Rename(UserPath(newUserName), UserPath(oldUserName)); err2 != nil && !os.IsNotExist(err2) {
 | 
			
		||||
			log.Critical("Unable to rollback directory change during failed username change from: %s to: %s. DB Error: %v. Filesystem Error: %v", oldUserName, newUserName, err, err2)
 | 
			
		||||
			return fmt.Errorf("failed to rollback directory change during failed username change from: %s to: %s. DB Error: %w. Filesystem Error: %v", oldUserName, newUserName, err, err2)
 | 
			
		||||
		}
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// checkDupEmail checks whether there are the same email with the user
 | 
			
		||||
@@ -1019,8 +1029,7 @@ func deleteBeans(e Engine, beans ...interface{}) (err error) {
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// FIXME: need some kind of mechanism to record failure. HINT: system notice
 | 
			
		||||
func deleteUser(e *xorm.Session, u *User) error {
 | 
			
		||||
func deleteUser(e Engine, u *User) error {
 | 
			
		||||
	// Note: A user owns any repository or belongs to any organization
 | 
			
		||||
	//	cannot perform delete operation.
 | 
			
		||||
 | 
			
		||||
@@ -1114,6 +1123,16 @@ func deleteUser(e *xorm.Session, u *User) error {
 | 
			
		||||
	// ***** END: PublicKey *****
 | 
			
		||||
 | 
			
		||||
	// ***** START: GPGPublicKey *****
 | 
			
		||||
	keys, err := listGPGKeys(e, u.ID, ListOptions{})
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return fmt.Errorf("ListGPGKeys: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
	// Delete GPGKeyImport(s).
 | 
			
		||||
	for _, key := range keys {
 | 
			
		||||
		if _, err = e.Delete(&GPGKeyImport{KeyID: key.KeyID}); err != nil {
 | 
			
		||||
			return fmt.Errorf("deleteGPGKeyImports: %v", err)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	if _, err = e.Delete(&GPGKey{OwnerID: u.ID}); err != nil {
 | 
			
		||||
		return fmt.Errorf("deleteGPGKeys: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
@@ -1134,18 +1153,21 @@ func deleteUser(e *xorm.Session, u *User) error {
 | 
			
		||||
		return fmt.Errorf("Delete: %v", err)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// FIXME: system notice
 | 
			
		||||
	// Note: There are something just cannot be roll back,
 | 
			
		||||
	//	so just keep error logs of those operations.
 | 
			
		||||
	path := UserPath(u.Name)
 | 
			
		||||
	if err := util.RemoveAll(path); err != nil {
 | 
			
		||||
		return fmt.Errorf("Failed to RemoveAll %s: %v", path, err)
 | 
			
		||||
	if err = util.RemoveAll(path); err != nil {
 | 
			
		||||
		err = fmt.Errorf("Failed to RemoveAll %s: %v", path, err)
 | 
			
		||||
		_ = createNotice(e, NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err))
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if len(u.Avatar) > 0 {
 | 
			
		||||
		avatarPath := u.CustomAvatarRelativePath()
 | 
			
		||||
		if err := storage.Avatars.Delete(avatarPath); err != nil {
 | 
			
		||||
			return fmt.Errorf("Failed to remove %s: %v", avatarPath, err)
 | 
			
		||||
		if err = storage.Avatars.Delete(avatarPath); err != nil {
 | 
			
		||||
			err = fmt.Errorf("Failed to remove %s: %v", avatarPath, err)
 | 
			
		||||
			_ = createNotice(e, NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err))
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
@@ -1601,20 +1623,34 @@ func deleteKeysMarkedForDeletion(keys []string) (bool, error) {
 | 
			
		||||
func addLdapSSHPublicKeys(usr *User, s *LoginSource, sshPublicKeys []string) bool {
 | 
			
		||||
	var sshKeysNeedUpdate bool
 | 
			
		||||
	for _, sshKey := range sshPublicKeys {
 | 
			
		||||
		_, _, _, _, err := ssh.ParseAuthorizedKey([]byte(sshKey))
 | 
			
		||||
		if err == nil {
 | 
			
		||||
			sshKeyName := fmt.Sprintf("%s-%s", s.Name, sshKey[0:40])
 | 
			
		||||
			if _, err := AddPublicKey(usr.ID, sshKeyName, sshKey, s.ID); err != nil {
 | 
			
		||||
		var err error
 | 
			
		||||
		found := false
 | 
			
		||||
		keys := []byte(sshKey)
 | 
			
		||||
	loop:
 | 
			
		||||
		for len(keys) > 0 && err == nil {
 | 
			
		||||
			var out ssh.PublicKey
 | 
			
		||||
			// We ignore options as they are not relevant to Gitea
 | 
			
		||||
			out, _, _, keys, err = ssh.ParseAuthorizedKey(keys)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
				break loop
 | 
			
		||||
			}
 | 
			
		||||
			found = true
 | 
			
		||||
			marshalled := string(ssh.MarshalAuthorizedKey(out))
 | 
			
		||||
			marshalled = marshalled[:len(marshalled)-1]
 | 
			
		||||
			sshKeyName := fmt.Sprintf("%s-%s", s.Name, ssh.FingerprintSHA256(out))
 | 
			
		||||
 | 
			
		||||
			if _, err := AddPublicKey(usr.ID, sshKeyName, marshalled, s.ID); err != nil {
 | 
			
		||||
				if IsErrKeyAlreadyExist(err) {
 | 
			
		||||
					log.Trace("addLdapSSHPublicKeys[%s]: LDAP Public SSH Key %s already exists for user", s.Name, usr.Name)
 | 
			
		||||
					log.Trace("addLdapSSHPublicKeys[%s]: LDAP Public SSH Key %s already exists for user", sshKeyName, usr.Name)
 | 
			
		||||
				} else {
 | 
			
		||||
					log.Error("addLdapSSHPublicKeys[%s]: Error adding LDAP Public SSH Key for user %s: %v", s.Name, usr.Name, err)
 | 
			
		||||
					log.Error("addLdapSSHPublicKeys[%s]: Error adding LDAP Public SSH Key for user %s: %v", sshKeyName, usr.Name, err)
 | 
			
		||||
				}
 | 
			
		||||
			} else {
 | 
			
		||||
				log.Trace("addLdapSSHPublicKeys[%s]: Added LDAP Public SSH Key for user %s", s.Name, usr.Name)
 | 
			
		||||
				log.Trace("addLdapSSHPublicKeys[%s]: Added LDAP Public SSH Key for user %s", sshKeyName, usr.Name)
 | 
			
		||||
				sshKeysNeedUpdate = true
 | 
			
		||||
			}
 | 
			
		||||
		} else {
 | 
			
		||||
		}
 | 
			
		||||
		if !found && err != nil {
 | 
			
		||||
			log.Warn("addLdapSSHPublicKeys[%s]: Skipping invalid LDAP Public SSH Key for user %s: %v", s.Name, usr.Name, sshKey)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 
 | 
			
		||||
@@ -40,10 +40,9 @@ func (u *User) generateRandomAvatar(e Engine) error {
 		return fmt.Errorf("RandomImage: %v", err)
 	}
 
-	if u.Avatar == "" {
-		u.Avatar = base.HashEmail(u.AvatarEmail)
-	}
+	u.Avatar = base.HashEmail(seed)
 
 	// Don't share the images so that we can delete them easily
 	if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error {
 		if err := png.Encode(w, img); err != nil {
 			log.Error("Encode: %v", err)
@@ -133,7 +132,7 @@ func (u *User) UploadAvatar(data []byte) error {
 	// Otherwise, if any of the users delete his avatar
 	// Other users will lose their avatars too.
 	u.Avatar = fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d-%x", u.ID, md5.Sum(data)))))
-	if err = updateUser(sess, u); err != nil {
+	if err = updateUserCols(sess, u, "use_custom_avatar", "avatar"); err != nil {
 		return fmt.Errorf("updateUser: %v", err)
 	}
 
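
Switching from updateUser to updateUserCols narrows the UPDATE to the avatar columns, so a possibly stale in-memory User cannot clobber unrelated fields. The same pattern outside this function would look like the following sketch; the helper name is illustrative:

// persistAvatarChange writes only the avatar-related columns for u.
func persistAvatarChange(u *User) error {
	sess := x.NewSession()
	defer sess.Close()
	if err := sess.Begin(); err != nil {
		return err
	}
	if err := updateUserCols(sess, u, "use_custom_avatar", "avatar"); err != nil {
		return err
	}
	return sess.Commit()
}
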
 
 | 
			
		||||
@@ -421,3 +421,71 @@ func TestGetMaileableUsersByIDs(t *testing.T) {
 | 
			
		||||
		assert.Equal(t, results[1].ID, 4)
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func TestAddLdapSSHPublicKeys(t *testing.T) {
 | 
			
		||||
	assert.NoError(t, PrepareTestDatabase())
 | 
			
		||||
 | 
			
		||||
	user := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User)
 | 
			
		||||
	s := &LoginSource{ID: 1}
 | 
			
		||||
 | 
			
		||||
	testCases := []struct {
 | 
			
		||||
		keyString   string
 | 
			
		||||
		number      int
 | 
			
		||||
		keyContents []string
 | 
			
		||||
	}{
 | 
			
		||||
		{
 | 
			
		||||
			keyString: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
 | 
			
		||||
			number:    1,
 | 
			
		||||
			keyContents: []string{
 | 
			
		||||
				"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
 | 
			
		||||
			},
 | 
			
		||||
		},
 | 
			
		||||
		{
 | 
			
		||||
			keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
 | 
			
		||||
ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
 | 
			
		||||
			number: 2,
 | 
			
		||||
			keyContents: []string{
 | 
			
		||||
				"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
 | 
			
		||||
				"ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
 | 
			
		||||
			},
 | 
			
		||||
		},
 | 
			
		||||
		{
 | 
			
		||||
			keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
 | 
			
		||||
# comment asmdna,ndp
 | 
			
		||||
ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
 | 
			
		||||
			number: 2,
 | 
			
		||||
			keyContents: []string{
 | 
			
		||||
				"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
 | 
			
		||||
				"ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
 | 
			
		||||
			},
 | 
			
		||||
		},
 | 
			
		||||
		{
 | 
			
		||||
			keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
 | 
			
		||||
382488320jasdj1lasmva/vasodifipi4193-fksma.cm
 | 
			
		||||
ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
 | 
			
		||||
			number: 2,
 | 
			
		||||
			keyContents: []string{
 | 
			
		||||
				"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
 | 
			
		||||
				"ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
 | 
			
		||||
			},
 | 
			
		||||
		},
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	for i, kase := range testCases {
 | 
			
		||||
		s.ID = int64(i) + 20
 | 
			
		||||
		addLdapSSHPublicKeys(user, s, []string{kase.keyString})
 | 
			
		||||
		keys, err := ListPublicLdapSSHKeys(user.ID, s.ID)
 | 
			
		||||
		assert.NoError(t, err)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			continue
 | 
			
		||||
		}
 | 
			
		||||
		assert.Equal(t, kase.number, len(keys))
 | 
			
		||||
 | 
			
		||||
		for _, key := range keys {
 | 
			
		||||
			assert.Contains(t, kase.keyContents, key.Content)
 | 
			
		||||
		}
 | 
			
		||||
		for _, key := range keys {
 | 
			
		||||
			DeletePublicKey(user, key.ID)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
modules/analyze/vendor.go (new file, 70 lines)
@@ -0,0 +1,70 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
 | 
			
		||||
// Use of this source code is governed by a MIT-style
 | 
			
		||||
// license that can be found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
package analyze
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"regexp"
 | 
			
		||||
	"sort"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/go-enry/go-enry/v2/data"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
var isVendorRegExp *regexp.Regexp
 | 
			
		||||
 | 
			
		||||
func init() {
 | 
			
		||||
	matchers := data.VendorMatchers
 | 
			
		||||
 | 
			
		||||
	caretStrings := make([]string, 0, 10)
 | 
			
		||||
	caretShareStrings := make([]string, 0, 10)
 | 
			
		||||
 | 
			
		||||
	matcherStrings := make([]string, 0, len(matchers))
 | 
			
		||||
	for _, matcher := range matchers {
 | 
			
		||||
		str := matcher.String()
 | 
			
		||||
		if str[0] == '^' {
 | 
			
		||||
			caretStrings = append(caretStrings, str[1:])
 | 
			
		||||
		} else if str[0:5] == "(^|/)" {
 | 
			
		||||
			caretShareStrings = append(caretShareStrings, str[5:])
 | 
			
		||||
		} else {
 | 
			
		||||
			matcherStrings = append(matcherStrings, str)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	sort.Strings(caretShareStrings)
 | 
			
		||||
	sort.Strings(caretStrings)
 | 
			
		||||
	sort.Strings(matcherStrings)
 | 
			
		||||
 | 
			
		||||
	sb := &strings.Builder{}
 | 
			
		||||
	sb.WriteString("(?:^(?:")
 | 
			
		||||
	sb.WriteString(caretStrings[0])
 | 
			
		||||
	for _, matcher := range caretStrings[1:] {
 | 
			
		||||
		sb.WriteString(")|(?:")
 | 
			
		||||
		sb.WriteString(matcher)
 | 
			
		||||
	}
 | 
			
		||||
	sb.WriteString("))")
 | 
			
		||||
	sb.WriteString("|")
 | 
			
		||||
	sb.WriteString("(?:(?:^|/)(?:")
 | 
			
		||||
	sb.WriteString(caretShareStrings[0])
 | 
			
		||||
	for _, matcher := range caretShareStrings[1:] {
 | 
			
		||||
		sb.WriteString(")|(?:")
 | 
			
		||||
		sb.WriteString(matcher)
 | 
			
		||||
	}
 | 
			
		||||
	sb.WriteString("))")
 | 
			
		||||
	sb.WriteString("|")
 | 
			
		||||
	sb.WriteString("(?:")
 | 
			
		||||
	sb.WriteString(matcherStrings[0])
 | 
			
		||||
	for _, matcher := range matcherStrings[1:] {
 | 
			
		||||
		sb.WriteString(")|(?:")
 | 
			
		||||
		sb.WriteString(matcher)
 | 
			
		||||
	}
 | 
			
		||||
	sb.WriteString(")")
 | 
			
		||||
	combined := sb.String()
 | 
			
		||||
	isVendorRegExp = regexp.MustCompile(combined)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// IsVendor returns whether or not path is a vendor path.
 | 
			
		||||
func IsVendor(path string) bool {
 | 
			
		||||
	return isVendorRegExp.MatchString(path)
 | 
			
		||||
}
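A quick sanity check of the combined matcher built in init() above. This is a minimal sketch that assumes it runs inside the Gitea module so the code.gitea.io/gitea/modules/analyze import resolves; the paths are borrowed from the TestIsVendor table below.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/analyze"
)

func main() {
	// Paths taken from the TestIsVendor table below; the trailing slash matters
	// for directory-style matchers such as cache/ and dist/.
	for _, p := range []string{"cache/", "dist/", "dist", "config.guess"} {
		fmt.Printf("IsVendor(%q) = %v\n", p, analyze.IsVendor(p))
	}
}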

modules/analyze/vendor_test.go (new file, 42 lines)
@@ -0,0 +1,42 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package analyze

import "testing"

func TestIsVendor(t *testing.T) {
	tests := []struct {
		path string
		want bool
	}{
		{"cache/", true},
		{"random/cache/", true},
		{"cache", false},
		{"dependencies/", true},
		{"Dependencies/", true},
		{"dependency/", false},
		{"dist/", true},
		{"dist", false},
		{"random/dist/", true},
		{"random/dist", false},
		{"deps/", true},
		{"configure", true},
		{"a/configure", true},
		{"config.guess", true},
		{"config.guess/", false},
		{".vscode/", true},
		{"doc/_build/", true},
		{"a/docs/_build/", true},
		{"a/dasdocs/_build-vsdoc.js", true},
		{"a/dasdocs/_build-vsdoc.j", false},
	}
	for _, tt := range tests {
		t.Run(tt.path, func(t *testing.T) {
			if got := IsVendor(tt.path); got != tt.want {
				t.Errorf("IsVendor() = %v, want %v", got, tt.want)
			}
		})
	}
}
@@ -118,6 +118,11 @@ func RemoveProvider(providerName string) {
	delete(goth.GetProviders(), providerName)
}

// ClearProviders clears all OAuth2 providers from the goth lib
func ClearProviders() {
	goth.ClearProviders()
}

// used to create different types of goth providers
func createProvider(providerName, providerType, clientID, clientSecret, openIDConnectAutoDiscoveryURL string, customURLMapping *CustomURLMapping) (goth.Provider, error) {
	callbackURL := setting.AppURL + "user/oauth2/" + url.PathEscape(providerName) + "/callback"

@@ -10,6 +10,7 @@ import (
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"errors"
	"fmt"
	"net/http"
	"net/url"
@@ -65,6 +66,11 @@ func BasicAuthDecode(encoded string) (string, string, error) {
	}

	auth := strings.SplitN(string(s), ":", 2)

	if len(auth) != 2 {
		return "", "", errors.New("invalid basic authentication")
	}

	return auth[0], auth[1], nil
}

@@ -46,6 +46,12 @@ func TestBasicAuthDecode(t *testing.T) {
	assert.NoError(t, err)
	assert.Equal(t, "foo", user)
	assert.Equal(t, "bar", pass)

	_, _, err = BasicAuthDecode("aW52YWxpZA==")
	assert.Error(t, err)

	_, _, err = BasicAuthDecode("invalid")
	assert.Error(t, err)
}
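A minimal sketch of calling the helper directly, assuming it lives in modules/base as in upstream Gitea; the second encoded string is the same one used in the test above.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/base"
)

func main() {
	// "Zm9vOmJhcg==" is base64("foo:bar"); the decoded value is split on the first colon.
	user, pass, err := base.BasicAuthDecode("Zm9vOmJhcg==")
	fmt.Println(user, pass, err)

	// "aW52YWxpZA==" is base64("invalid"): there is no colon, so the added length
	// check now returns an error instead of panicking on a one-element slice.
	_, _, err = base.BasicAuthDecode("aW52YWxpZA==")
	fmt.Println(err)
}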

func TestBasicAuthEncode(t *testing.T) {

 | 
			
		||||
@@ -83,18 +83,17 @@ func ToPullReviewCommentList(review *models.Review, doer *models.User) ([]*api.P
 | 
			
		||||
 | 
			
		||||
	apiComments := make([]*api.PullReviewComment, 0, len(review.CodeComments))
 | 
			
		||||
 | 
			
		||||
	auth := false
 | 
			
		||||
	if doer != nil {
 | 
			
		||||
		auth = doer.IsAdmin || doer.ID == review.ReviewerID
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	for _, lines := range review.CodeComments {
 | 
			
		||||
		for _, comments := range lines {
 | 
			
		||||
			for _, comment := range comments {
 | 
			
		||||
				auth := false
 | 
			
		||||
				if doer != nil {
 | 
			
		||||
					auth = doer.IsAdmin || doer.ID == comment.Poster.ID
 | 
			
		||||
				}
 | 
			
		||||
				apiComment := &api.PullReviewComment{
 | 
			
		||||
					ID:           comment.ID,
 | 
			
		||||
					Body:         comment.Content,
 | 
			
		||||
					Reviewer:     ToUser(review.Reviewer, doer != nil, auth),
 | 
			
		||||
					Reviewer:     ToUser(comment.Poster, doer != nil, auth),
 | 
			
		||||
					ReviewID:     review.ID,
 | 
			
		||||
					Created:      comment.CreatedUnix.AsTime(),
 | 
			
		||||
					Updated:      comment.UpdatedUnix.AsTime(),
 | 
			
		||||
 
 | 
			
		||||
@@ -13,6 +13,10 @@ import (
 | 
			
		||||
// ToUser convert models.User to api.User
 | 
			
		||||
// signed shall only be set if requester is logged in. authed shall only be set if user is site admin or user himself
 | 
			
		||||
func ToUser(user *models.User, signed, authed bool) *api.User {
 | 
			
		||||
	if user == nil {
 | 
			
		||||
		return nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	result := &api.User{
 | 
			
		||||
		ID:        user.ID,
 | 
			
		||||
		UserName:  user.Name,
 | 
			
		||||
 
 | 
			
		||||
@@ -30,6 +30,9 @@ var (
 | 
			
		||||
	// aliasMap provides a map of the alias to its emoji data.
 | 
			
		||||
	aliasMap map[string]int
 | 
			
		||||
 | 
			
		||||
	// emptyReplacer is the string replacer for emoji codes.
 | 
			
		||||
	emptyReplacer *strings.Replacer
 | 
			
		||||
 | 
			
		||||
	// codeReplacer is the string replacer for emoji codes.
 | 
			
		||||
	codeReplacer *strings.Replacer
 | 
			
		||||
 | 
			
		||||
@@ -49,6 +52,7 @@ func loadMap() {
 | 
			
		||||
 | 
			
		||||
		// process emoji codes and aliases
 | 
			
		||||
		codePairs := make([]string, 0)
 | 
			
		||||
		emptyPairs := make([]string, 0)
 | 
			
		||||
		aliasPairs := make([]string, 0)
 | 
			
		||||
 | 
			
		||||
		// sort from largest to small so we match combined emoji first
 | 
			
		||||
@@ -64,6 +68,7 @@ func loadMap() {
 | 
			
		||||
			// setup codes
 | 
			
		||||
			codeMap[e.Emoji] = i
 | 
			
		||||
			codePairs = append(codePairs, e.Emoji, ":"+e.Aliases[0]+":")
 | 
			
		||||
			emptyPairs = append(emptyPairs, e.Emoji, e.Emoji)
 | 
			
		||||
 | 
			
		||||
			// setup aliases
 | 
			
		||||
			for _, a := range e.Aliases {
 | 
			
		||||
@@ -77,6 +82,7 @@ func loadMap() {
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		// create replacers
 | 
			
		||||
		emptyReplacer = strings.NewReplacer(emptyPairs...)
 | 
			
		||||
		codeReplacer = strings.NewReplacer(codePairs...)
 | 
			
		||||
		aliasReplacer = strings.NewReplacer(aliasPairs...)
 | 
			
		||||
	})
 | 
			
		||||
@@ -127,38 +133,53 @@ func ReplaceAliases(s string) string {
 | 
			
		||||
	return aliasReplacer.Replace(s)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type rememberSecondWriteWriter struct {
 | 
			
		||||
	pos        int
 | 
			
		||||
	idx        int
 | 
			
		||||
	end        int
 | 
			
		||||
	writecount int
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (n *rememberSecondWriteWriter) Write(p []byte) (int, error) {
 | 
			
		||||
	n.writecount++
 | 
			
		||||
	if n.writecount == 2 {
 | 
			
		||||
		n.idx = n.pos
 | 
			
		||||
		n.end = n.pos + len(p)
 | 
			
		||||
	}
 | 
			
		||||
	n.pos += len(p)
 | 
			
		||||
	return len(p), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (n *rememberSecondWriteWriter) WriteString(s string) (int, error) {
 | 
			
		||||
	n.writecount++
 | 
			
		||||
	if n.writecount == 2 {
 | 
			
		||||
		n.idx = n.pos
 | 
			
		||||
		n.end = n.pos + len(s)
 | 
			
		||||
	}
 | 
			
		||||
	n.pos += len(s)
 | 
			
		||||
	return len(s), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// FindEmojiSubmatchIndex returns index pair of longest emoji in a string
 | 
			
		||||
func FindEmojiSubmatchIndex(s string) []int {
 | 
			
		||||
	loadMap()
 | 
			
		||||
	found := make(map[int]int)
 | 
			
		||||
	keys := make([]int, 0)
 | 
			
		||||
	secondWriteWriter := rememberSecondWriteWriter{}
 | 
			
		||||
 | 
			
		||||
	//see if there are any emoji in string before looking for position of specific ones
 | 
			
		||||
	//no performance difference when there is a match but 10x faster when there are not
 | 
			
		||||
	if s == ReplaceCodes(s) {
 | 
			
		||||
	// A faster and cleaner implementation would copy the trie construction from strings.NewReplacer, but
 | 
			
		||||
	// we can be lazy here.
 | 
			
		||||
	//
 | 
			
		||||
	// The implementation of strings.Replacer.WriteString is such that the first index of the emoji
 | 
			
		||||
	// submatch is simply the second thing that is written to WriteString in the writer.
 | 
			
		||||
	//
 | 
			
		||||
	// Therefore we can simply take the index of the second write as our first emoji
 | 
			
		||||
	//
 | 
			
		||||
	// FIXME: just copy the trie implementation from strings.NewReplacer
 | 
			
		||||
	_, _ = emptyReplacer.WriteString(&secondWriteWriter, s)
 | 
			
		||||
 | 
			
		||||
	// if we wrote less than twice then we never "replaced"
 | 
			
		||||
	if secondWriteWriter.writecount < 2 {
 | 
			
		||||
		return nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// get index of first emoji occurrence while also checking for longest combination
 | 
			
		||||
	for j := range GemojiData {
 | 
			
		||||
		i := strings.Index(s, GemojiData[j].Emoji)
 | 
			
		||||
		if i != -1 {
 | 
			
		||||
			if _, ok := found[i]; !ok {
 | 
			
		||||
				if len(keys) == 0 || i < keys[0] {
 | 
			
		||||
					found[i] = j
 | 
			
		||||
					keys = []int{i}
 | 
			
		||||
				}
 | 
			
		||||
				if i == 0 {
 | 
			
		||||
					break
 | 
			
		||||
				}
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if len(keys) > 0 {
 | 
			
		||||
		index := keys[0]
 | 
			
		||||
		return []int{index, index + len(GemojiData[found[index]].Emoji)}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return nil
 | 
			
		||||
	return []int{secondWriteWriter.idx, secondWriteWriter.end}
 | 
			
		||||
}
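A small usage sketch of the rewritten lookup (import path assumed to be code.gitea.io/gitea/modules/emoji); it reports the byte offsets of the first emoji in a string, or nil when there is none.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/emoji"
)

func main() {
	s := "released \U0001f389 today"
	if idx := emoji.FindEmojiSubmatchIndex(s); idx != nil {
		// idx[0] is the byte offset of the first emoji, idx[1] the offset just past it.
		fmt.Printf("first emoji at [%d,%d): %q\n", idx[0], idx[1], s[idx[0]:idx[1]])
	} else {
		fmt.Println("no emoji found")
	}
}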
 | 
			
		||||
 
 | 
			
		||||
@@ -8,6 +8,8 @@ package emoji
 | 
			
		||||
import (
 | 
			
		||||
	"reflect"
 | 
			
		||||
	"testing"
 | 
			
		||||
 | 
			
		||||
	"github.com/stretchr/testify/assert"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func TestDumpInfo(t *testing.T) {
 | 
			
		||||
@@ -65,3 +67,34 @@ func TestReplacers(t *testing.T) {
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func TestFindEmojiSubmatchIndex(t *testing.T) {
 | 
			
		||||
	type testcase struct {
 | 
			
		||||
		teststring string
 | 
			
		||||
		expected   []int
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	testcases := []testcase{
 | 
			
		||||
		{
 | 
			
		||||
			"\U0001f44d",
 | 
			
		||||
			[]int{0, len("\U0001f44d")},
 | 
			
		||||
		},
 | 
			
		||||
		{
 | 
			
		||||
			"\U0001f44d +1 \U0001f44d \U0001f37a",
 | 
			
		||||
			[]int{0, 4},
 | 
			
		||||
		},
 | 
			
		||||
		{
 | 
			
		||||
			" \U0001f44d",
 | 
			
		||||
			[]int{1, 1 + len("\U0001f44d")},
 | 
			
		||||
		},
 | 
			
		||||
		{
 | 
			
		||||
			string([]byte{'\u0001'}) + "\U0001f44d",
 | 
			
		||||
			[]int{1, 1 + len("\U0001f44d")},
 | 
			
		||||
		},
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	for _, kase := range testcases {
 | 
			
		||||
		actual := FindEmojiSubmatchIndex(kase.teststring)
 | 
			
		||||
		assert.Equal(t, kase.expected, actual)
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -153,6 +153,7 @@ func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.
 | 
			
		||||
		err := fn(ctx, cancel)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			cancel()
 | 
			
		||||
			_ = cmd.Wait()
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 
 | 
			
		||||
@@ -9,6 +9,7 @@ import (
 | 
			
		||||
	"bufio"
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"container/list"
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"image"
 | 
			
		||||
	"image/color"
 | 
			
		||||
@@ -17,6 +18,7 @@ import (
 | 
			
		||||
	_ "image/png"  // for processing png images
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"os/exec"
 | 
			
		||||
	"strconv"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
@@ -309,23 +311,33 @@ func (c *Commit) CommitsBefore() (*list.List, error) {
 | 
			
		||||
 | 
			
		||||
// HasPreviousCommit returns true if a given commitHash is contained in commit's parents
 | 
			
		||||
func (c *Commit) HasPreviousCommit(commitHash SHA1) (bool, error) {
 | 
			
		||||
	for i := 0; i < c.ParentCount(); i++ {
 | 
			
		||||
		commit, err := c.Parent(i)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return false, err
 | 
			
		||||
		}
 | 
			
		||||
		if commit.ID == commitHash {
 | 
			
		||||
			return true, nil
 | 
			
		||||
		}
 | 
			
		||||
		commitInParentCommit, err := commit.HasPreviousCommit(commitHash)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return false, err
 | 
			
		||||
		}
 | 
			
		||||
		if commitInParentCommit {
 | 
			
		||||
			return true, nil
 | 
			
		||||
		}
 | 
			
		||||
	this := c.ID.String()
 | 
			
		||||
	that := commitHash.String()
 | 
			
		||||
 | 
			
		||||
	if this == that {
 | 
			
		||||
		return false, nil
 | 
			
		||||
	}
 | 
			
		||||
	return false, nil
 | 
			
		||||
 | 
			
		||||
	if err := CheckGitVersionConstraint(">= 1.8.0"); err == nil {
 | 
			
		||||
		_, err := NewCommand("merge-base", "--is-ancestor", that, this).RunInDir(c.repo.Path)
 | 
			
		||||
		if err == nil {
 | 
			
		||||
			return true, nil
 | 
			
		||||
		}
 | 
			
		||||
		var exitError *exec.ExitError
 | 
			
		||||
		if errors.As(err, &exitError) {
 | 
			
		||||
			if exitError.ProcessState.ExitCode() == 1 && len(exitError.Stderr) == 0 {
 | 
			
		||||
				return false, nil
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
		return false, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	result, err := NewCommand("rev-list", "--ancestry-path", "-n1", that+".."+this, "--").RunInDir(c.repo.Path)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return false, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return len(strings.TrimSpace(result)) > 0, nil
 | 
			
		||||
}
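A usage sketch of the rewritten ancestry check. The repository path and the 40-character commit ID are placeholders for illustration only; on git >= 1.8.0 the call now delegates to `git merge-base --is-ancestor` instead of recursing through every parent commit.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	// Placeholder repository path and commit ID, purely for illustration.
	repo, err := git.OpenRepository("/path/to/repo.git")
	if err != nil {
		panic(err)
	}

	head, err := repo.GetCommit("HEAD")
	if err != nil {
		panic(err)
	}

	ancestor, err := git.NewIDFromString("d8a994ef243349f321568f9e36d5c3f444b99cae")
	if err != nil {
		panic(err)
	}

	// One process invocation answers the ancestry question for the whole history.
	isAncestor, err := head.HasPreviousCommit(ancestor)
	fmt.Println(isAncestor, err)
}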
 | 
			
		||||
 | 
			
		||||
// CommitsBeforeLimit returns num commits before current revision
 | 
			
		||||
 
 | 
			
		||||
@@ -47,7 +47,7 @@ func GetRawDiffForFile(repoPath, startCommit, endCommit string, diffType RawDiff
 | 
			
		||||
func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error {
 | 
			
		||||
	commit, err := repo.GetCommit(endCommit)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return fmt.Errorf("GetCommit: %v", err)
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
	fileArgs := make([]string, 0)
 | 
			
		||||
	if len(file) > 0 {
 | 
			
		||||
@@ -125,30 +125,39 @@ var hunkRegex = regexp.MustCompile(`^@@ -(?P<beginOld>[0-9]+)(,(?P<endOld>[0-9]+
 | 
			
		||||
 | 
			
		||||
const cmdDiffHead = "diff --git "
 | 
			
		||||
 | 
			
		||||
func isHeader(lof string) bool {
 | 
			
		||||
	return strings.HasPrefix(lof, cmdDiffHead) || strings.HasPrefix(lof, "---") || strings.HasPrefix(lof, "+++")
 | 
			
		||||
func isHeader(lof string, inHunk bool) bool {
 | 
			
		||||
	return strings.HasPrefix(lof, cmdDiffHead) || (!inHunk && (strings.HasPrefix(lof, "---") || strings.HasPrefix(lof, "+++")))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// CutDiffAroundLine cuts a diff of a file in way that only the given line + numberOfLine above it will be shown
 | 
			
		||||
// it also recalculates hunks and adds the appropriate headers to the new diff.
 | 
			
		||||
// Warning: Only one-file diffs are allowed.
 | 
			
		||||
func CutDiffAroundLine(originalDiff io.Reader, line int64, old bool, numbersOfLine int) string {
 | 
			
		||||
func CutDiffAroundLine(originalDiff io.Reader, line int64, old bool, numbersOfLine int) (string, error) {
 | 
			
		||||
	if line == 0 || numbersOfLine == 0 {
 | 
			
		||||
		// no line or num of lines => no diff
 | 
			
		||||
		return ""
 | 
			
		||||
		return "", nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	scanner := bufio.NewScanner(originalDiff)
 | 
			
		||||
	hunk := make([]string, 0)
 | 
			
		||||
 | 
			
		||||
	// begin is the start of the hunk containing searched line
 | 
			
		||||
	// end is the end of the hunk ...
 | 
			
		||||
	// currentLine is the line number on the side of the searched line (differentiated by old)
 | 
			
		||||
	// otherLine is the line number on the opposite side of the searched line (differentiated by old)
 | 
			
		||||
	var begin, end, currentLine, otherLine int64
 | 
			
		||||
	var headerLines int
 | 
			
		||||
 | 
			
		||||
	inHunk := false
 | 
			
		||||
 | 
			
		||||
	for scanner.Scan() {
 | 
			
		||||
		lof := scanner.Text()
 | 
			
		||||
		// Add header to enable parsing
 | 
			
		||||
		if isHeader(lof) {
 | 
			
		||||
 | 
			
		||||
		if isHeader(lof, inHunk) {
 | 
			
		||||
			if strings.HasPrefix(lof, cmdDiffHead) {
 | 
			
		||||
				inHunk = false
 | 
			
		||||
			}
 | 
			
		||||
			hunk = append(hunk, lof)
 | 
			
		||||
			headerLines++
 | 
			
		||||
		}
 | 
			
		||||
@@ -157,6 +166,7 @@ func CutDiffAroundLine(originalDiff io.Reader, line int64, old bool, numbersOfLi
 | 
			
		||||
		}
 | 
			
		||||
		// Detect "hunk" with contains commented lof
 | 
			
		||||
		if strings.HasPrefix(lof, "@@") {
 | 
			
		||||
			inHunk = true
 | 
			
		||||
			// Already got our hunk. End of hunk detected!
 | 
			
		||||
			if len(hunk) > headerLines {
 | 
			
		||||
				break
 | 
			
		||||
@@ -213,15 +223,19 @@ func CutDiffAroundLine(originalDiff io.Reader, line int64, old bool, numbersOfLi
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	err := scanner.Err()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return "", err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// No hunk found
 | 
			
		||||
	if currentLine == 0 {
 | 
			
		||||
		return ""
 | 
			
		||||
		return "", nil
 | 
			
		||||
	}
 | 
			
		||||
	// headerLines + hunkLine (1) = totalNonCodeLines
 | 
			
		||||
	if len(hunk)-headerLines-1 <= numbersOfLine {
 | 
			
		||||
		// No need to cut the hunk => return existing hunk
 | 
			
		||||
		return strings.Join(hunk, "\n")
 | 
			
		||||
		return strings.Join(hunk, "\n"), nil
 | 
			
		||||
	}
 | 
			
		||||
	var oldBegin, oldNumOfLines, newBegin, newNumOfLines int64
 | 
			
		||||
	if old {
 | 
			
		||||
@@ -256,5 +270,5 @@ func CutDiffAroundLine(originalDiff io.Reader, line int64, old bool, numbersOfLi
 | 
			
		||||
	// construct the new hunk header
 | 
			
		||||
	newHunk[headerLines] = fmt.Sprintf("@@ -%d,%d +%d,%d @@",
 | 
			
		||||
		oldBegin, oldNumOfLines, newBegin, newNumOfLines)
 | 
			
		||||
	return strings.Join(newHunk, "\n")
 | 
			
		||||
	return strings.Join(newHunk, "\n"), nil
 | 
			
		||||
}
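With the signature change, callers now receive an error alongside the cut hunk; a minimal sketch using a hypothetical one-file diff (the real call sites are updated in diff_test.go further down).

package main

import (
	"fmt"
	"strings"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	// Hypothetical single-file diff, only used to illustrate the new (string, error) return.
	diff := `diff --git a/a.txt b/a.txt
--- a/a.txt
+++ b/a.txt
@@ -1,3 +1,3 @@
 unchanged
-old line
+new line
 unchanged`

	hunk, err := git.CutDiffAroundLine(strings.NewReader(diff), 2, false, 1)
	if err != nil {
		panic(err)
	}
	fmt.Println(hunk)
}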
 | 
			
		||||
 
 | 
			
		||||
@@ -23,8 +23,28 @@ const exampleDiff = `diff --git a/README.md b/README.md
 | 
			
		||||
+ cut off
 | 
			
		||||
+ cut off`
 | 
			
		||||
 | 
			
		||||
const breakingDiff = `diff --git a/aaa.sql b/aaa.sql
 | 
			
		||||
index d8e4c92..19dc8ad 100644
 | 
			
		||||
--- a/aaa.sql
 | 
			
		||||
+++ b/aaa.sql
 | 
			
		||||
@@ -1,9 +1,10 @@
 | 
			
		||||
 --some comment
 | 
			
		||||
--- some comment 5
 | 
			
		||||
+--some coment 2
 | 
			
		||||
+-- some comment 3
 | 
			
		||||
 create or replace procedure test(p1 varchar2)
 | 
			
		||||
 is
 | 
			
		||||
 begin
 | 
			
		||||
---new comment
 | 
			
		||||
 dbms_output.put_line(p1);
 | 
			
		||||
+--some other comment
 | 
			
		||||
 end;
 | 
			
		||||
 /
 | 
			
		||||
`
 | 
			
		||||
 | 
			
		||||
func TestCutDiffAroundLine(t *testing.T) {
 | 
			
		||||
	result := CutDiffAroundLine(strings.NewReader(exampleDiff), 4, false, 3)
 | 
			
		||||
	result, err := CutDiffAroundLine(strings.NewReader(exampleDiff), 4, false, 3)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
	resultByLine := strings.Split(result, "\n")
 | 
			
		||||
	assert.Len(t, resultByLine, 7)
 | 
			
		||||
	// Check if headers got transferred
 | 
			
		||||
@@ -37,18 +57,50 @@ func TestCutDiffAroundLine(t *testing.T) {
 | 
			
		||||
	assert.Equal(t, "+ Build Status", resultByLine[4])
 | 
			
		||||
 | 
			
		||||
	// Must be same result as before since old line 3 == new line 5
 | 
			
		||||
	newResult := CutDiffAroundLine(strings.NewReader(exampleDiff), 3, true, 3)
 | 
			
		||||
	newResult, err := CutDiffAroundLine(strings.NewReader(exampleDiff), 3, true, 3)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
	assert.Equal(t, result, newResult, "Must be same result as before since old line 3 == new line 5")
 | 
			
		||||
 | 
			
		||||
	newResult = CutDiffAroundLine(strings.NewReader(exampleDiff), 6, false, 300)
 | 
			
		||||
	newResult, err = CutDiffAroundLine(strings.NewReader(exampleDiff), 6, false, 300)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
	assert.Equal(t, exampleDiff, newResult)
 | 
			
		||||
 | 
			
		||||
	emptyResult := CutDiffAroundLine(strings.NewReader(exampleDiff), 6, false, 0)
 | 
			
		||||
	emptyResult, err := CutDiffAroundLine(strings.NewReader(exampleDiff), 6, false, 0)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
	assert.Empty(t, emptyResult)
 | 
			
		||||
 | 
			
		||||
	// Line is out of scope
 | 
			
		||||
	emptyResult = CutDiffAroundLine(strings.NewReader(exampleDiff), 434, false, 0)
 | 
			
		||||
	emptyResult, err = CutDiffAroundLine(strings.NewReader(exampleDiff), 434, false, 0)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
	assert.Empty(t, emptyResult)
 | 
			
		||||
 | 
			
		||||
	// Handle minus diffs properly
 | 
			
		||||
	minusDiff, err := CutDiffAroundLine(strings.NewReader(breakingDiff), 2, false, 4)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
 | 
			
		||||
	expected := `diff --git a/aaa.sql b/aaa.sql
 | 
			
		||||
--- a/aaa.sql
 | 
			
		||||
+++ b/aaa.sql
 | 
			
		||||
@@ -1,9 +1,10 @@
 | 
			
		||||
 --some comment
 | 
			
		||||
--- some comment 5
 | 
			
		||||
+--some coment 2`
 | 
			
		||||
	assert.Equal(t, expected, minusDiff)
 | 
			
		||||
 | 
			
		||||
	// Handle minus diffs properly
 | 
			
		||||
	minusDiff, err = CutDiffAroundLine(strings.NewReader(breakingDiff), 3, false, 4)
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
 | 
			
		||||
	expected = `diff --git a/aaa.sql b/aaa.sql
 | 
			
		||||
--- a/aaa.sql
 | 
			
		||||
+++ b/aaa.sql
 | 
			
		||||
@@ -1,9 +1,10 @@
 | 
			
		||||
 --some comment
 | 
			
		||||
--- some comment 5
 | 
			
		||||
+--some coment 2
 | 
			
		||||
+-- some comment 3`
 | 
			
		||||
 | 
			
		||||
	assert.Equal(t, expected, minusDiff)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func BenchmarkCutDiffAroundLine(b *testing.B) {
 | 
			
		||||
@@ -69,7 +121,7 @@ func ExampleCutDiffAroundLine() {
 | 
			
		||||
 Docker Pulls
 | 
			
		||||
+ cut off
 | 
			
		||||
+ cut off`
 | 
			
		||||
	result := CutDiffAroundLine(strings.NewReader(diff), 4, false, 3)
 | 
			
		||||
	result, _ := CutDiffAroundLine(strings.NewReader(diff), 4, false, 3)
 | 
			
		||||
	println(result)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -32,6 +32,7 @@ var (
 | 
			
		||||
	GitExecutable = "git"
 | 
			
		||||
 | 
			
		||||
	// DefaultContext is the default context to run git commands in
 | 
			
		||||
	// will be overwritten by Init with HammerContext
 | 
			
		||||
	DefaultContext = context.Background()
 | 
			
		||||
 | 
			
		||||
	gitVersion *version.Version
 | 
			
		||||
 
 | 
			
		||||
@@ -8,6 +8,7 @@ package git
 | 
			
		||||
import (
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"container/list"
 | 
			
		||||
	"context"
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"os"
 | 
			
		||||
@@ -166,19 +167,24 @@ type CloneRepoOptions struct {
 | 
			
		||||
 | 
			
		||||
// Clone clones original repository to target path.
 | 
			
		||||
func Clone(from, to string, opts CloneRepoOptions) (err error) {
 | 
			
		||||
	return CloneWithContext(DefaultContext, from, to, opts)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// CloneWithContext clones original repository to target path.
 | 
			
		||||
func CloneWithContext(ctx context.Context, from, to string, opts CloneRepoOptions) (err error) {
 | 
			
		||||
	cargs := make([]string, len(GlobalCommandArgs))
 | 
			
		||||
	copy(cargs, GlobalCommandArgs)
 | 
			
		||||
	return CloneWithArgs(from, to, cargs, opts)
 | 
			
		||||
	return CloneWithArgs(ctx, from, to, cargs, opts)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// CloneWithArgs original repository to target path.
 | 
			
		||||
func CloneWithArgs(from, to string, args []string, opts CloneRepoOptions) (err error) {
 | 
			
		||||
func CloneWithArgs(ctx context.Context, from, to string, args []string, opts CloneRepoOptions) (err error) {
 | 
			
		||||
	toDir := path.Dir(to)
 | 
			
		||||
	if err = os.MkdirAll(toDir, os.ModePerm); err != nil {
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	cmd := NewCommandNoGlobals(args...).AddArguments("clone")
 | 
			
		||||
	cmd := NewCommandContextNoGlobals(ctx, args...).AddArguments("clone")
 | 
			
		||||
	if opts.Mirror {
 | 
			
		||||
		cmd.AddArguments("--mirror")
 | 
			
		||||
	}
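A sketch of the new context-aware entry point; the URL and target path are placeholders. The context is now threaded down to the spawned git process, so a clone can be cancelled or bounded by a deadline from the caller.

package main

import (
	"context"
	"time"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()

	// Placeholder source URL and destination path.
	opts := git.CloneRepoOptions{Mirror: true}
	if err := git.CloneWithContext(ctx, "https://example.com/src.git", "/tmp/dst.git", opts); err != nil {
		panic(err)
	}
}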
 | 
			
		||||
 
 | 
			
		||||
@@ -9,6 +9,8 @@ import (
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"container/list"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"io/ioutil"
 | 
			
		||||
	"strconv"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
@@ -129,19 +131,23 @@ func (repo *Repository) getCommit(id SHA1) (*Commit, error) {
 | 
			
		||||
 | 
			
		||||
// ConvertToSHA1 returns a Hash object from a potential ID string
 | 
			
		||||
func (repo *Repository) ConvertToSHA1(commitID string) (SHA1, error) {
 | 
			
		||||
	if len(commitID) != 40 {
 | 
			
		||||
		var err error
 | 
			
		||||
		actualCommitID, err := NewCommand("rev-parse", "--verify", commitID).RunInDir(repo.Path)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			if strings.Contains(err.Error(), "unknown revision or path") ||
 | 
			
		||||
				strings.Contains(err.Error(), "fatal: Needed a single revision") {
 | 
			
		||||
				return SHA1{}, ErrNotExist{commitID, ""}
 | 
			
		||||
			}
 | 
			
		||||
			return SHA1{}, err
 | 
			
		||||
	if len(commitID) == 40 {
 | 
			
		||||
		sha1, err := NewIDFromString(commitID)
 | 
			
		||||
		if err == nil {
 | 
			
		||||
			return sha1, nil
 | 
			
		||||
		}
 | 
			
		||||
		commitID = actualCommitID
 | 
			
		||||
	}
 | 
			
		||||
	return NewIDFromString(commitID)
 | 
			
		||||
 | 
			
		||||
	actualCommitID, err := NewCommand("rev-parse", "--verify", commitID).RunInDir(repo.Path)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		if strings.Contains(err.Error(), "unknown revision or path") ||
 | 
			
		||||
			strings.Contains(err.Error(), "fatal: Needed a single revision") {
 | 
			
		||||
			return SHA1{}, ErrNotExist{commitID, ""}
 | 
			
		||||
		}
 | 
			
		||||
		return SHA1{}, err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return NewIDFromString(actualCommitID)
 | 
			
		||||
}
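A sketch of the reordered lookup: a full 40-character ID is now parsed directly by NewIDFromString, and the function only shells out to `git rev-parse --verify` for abbreviated or symbolic names. The repository path and abbreviated ID are placeholders.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	repo, err := git.OpenRepository("/path/to/repo.git") // placeholder path
	if err != nil {
		panic(err)
	}

	// Abbreviated ID: resolved via `git rev-parse --verify` inside ConvertToSHA1.
	id, err := repo.ConvertToSHA1("d8a994ef")
	fmt.Println(id, err)
}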
 | 
			
		||||
 | 
			
		||||
// GetCommit returns commit object of by ID string.
 | 
			
		||||
@@ -323,8 +329,41 @@ func (repo *Repository) FileCommitsCount(revision, file string) (int64, error) {
 | 
			
		||||
 | 
			
		||||
// CommitsByFileAndRange returns the commits according to the given revision, file and page
 | 
			
		||||
func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (*list.List, error) {
 | 
			
		||||
	stdout, err := NewCommand("log", revision, "--follow", "--skip="+strconv.Itoa((page-1)*50),
 | 
			
		||||
		"--max-count="+strconv.Itoa(CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path)
 | 
			
		||||
	skip := (page - 1) * CommitsRangeSize
 | 
			
		||||
 | 
			
		||||
	stdoutReader, stdoutWriter := io.Pipe()
 | 
			
		||||
	defer func() {
 | 
			
		||||
		_ = stdoutReader.Close()
 | 
			
		||||
		_ = stdoutWriter.Close()
 | 
			
		||||
	}()
 | 
			
		||||
	go func() {
 | 
			
		||||
		stderr := strings.Builder{}
 | 
			
		||||
		err := NewCommand("log", revision, "--follow",
 | 
			
		||||
			"--max-count="+strconv.Itoa(CommitsRangeSize*page),
 | 
			
		||||
			prettyLogFormat, "--", file).
 | 
			
		||||
			RunInDirPipeline(repo.Path, stdoutWriter, &stderr)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			if stderr.Len() > 0 {
 | 
			
		||||
				err = fmt.Errorf("%v - %s", err, stderr.String())
 | 
			
		||||
			}
 | 
			
		||||
			_ = stdoutWriter.CloseWithError(err)
 | 
			
		||||
		} else {
 | 
			
		||||
			_ = stdoutWriter.Close()
 | 
			
		||||
		}
 | 
			
		||||
	}()
 | 
			
		||||
 | 
			
		||||
	if skip > 0 {
 | 
			
		||||
		_, err := io.CopyN(ioutil.Discard, stdoutReader, int64(skip*41))
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			if err == io.EOF {
 | 
			
		||||
				return list.New(), nil
 | 
			
		||||
			}
 | 
			
		||||
			_ = stdoutReader.CloseWithError(err)
 | 
			
		||||
			return nil, err
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	stdout, err := ioutil.ReadAll(stdoutReader)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return nil, err
 | 
			
		||||
	}
 | 
			
		||||
 
 | 
			
		||||
@@ -44,7 +44,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
 | 
			
		||||
 | 
			
		||||
	sizes := make(map[string]int64)
 | 
			
		||||
	err = tree.Files().ForEach(func(f *object.File) error {
 | 
			
		||||
		if f.Size == 0 || enry.IsVendor(f.Name) || enry.IsDotFile(f.Name) ||
 | 
			
		||||
		if f.Size == 0 || analyze.IsVendor(f.Name) || enry.IsDotFile(f.Name) ||
 | 
			
		||||
			enry.IsDocumentation(f.Name) || enry.IsConfiguration(f.Name) {
 | 
			
		||||
			return nil
 | 
			
		||||
		}
 | 
			
		||||
 
 | 
			
		||||
@@ -175,7 +175,7 @@ func NewBleveIndexer(indexDir string) (*BleveIndexer, bool, error) {
 | 
			
		||||
 | 
			
		||||
func (b *BleveIndexer) addUpdate(commitSha string, update fileUpdate, repo *models.Repository, batch rupture.FlushingBatch) error {
 | 
			
		||||
	// Ignore vendored files in code search
 | 
			
		||||
	if setting.Indexer.ExcludeVendored && enry.IsVendor(update.Filename) {
 | 
			
		||||
	if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
 | 
			
		||||
		return nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -170,7 +170,7 @@ func (b *ElasticSearchIndexer) init() (bool, error) {
 | 
			
		||||
 | 
			
		||||
func (b *ElasticSearchIndexer) addUpdate(sha string, update fileUpdate, repo *models.Repository) ([]elastic.BulkableRequest, error) {
 | 
			
		||||
	// Ignore vendored files in code search
 | 
			
		||||
	if setting.Indexer.ExcludeVendored && enry.IsVendor(update.Filename) {
 | 
			
		||||
	if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
 | 
			
		||||
		return nil, nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -9,6 +9,7 @@ import (
 | 
			
		||||
	"encoding/hex"
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"hash"
 | 
			
		||||
	"io"
 | 
			
		||||
	"os"
 | 
			
		||||
 | 
			
		||||
@@ -66,15 +67,20 @@ func (s *ContentStore) Get(meta *models.LFSMetaObject, fromByte int64) (io.ReadC
 | 
			
		||||
 | 
			
		||||
// Put takes a Meta object and an io.Reader and writes the content to the store.
 | 
			
		||||
func (s *ContentStore) Put(meta *models.LFSMetaObject, r io.Reader) error {
 | 
			
		||||
	hash := sha256.New()
 | 
			
		||||
	rd := io.TeeReader(r, hash)
 | 
			
		||||
	p := meta.RelativePath()
 | 
			
		||||
	written, err := s.Save(p, rd)
 | 
			
		||||
 | 
			
		||||
	// Wrap the provided reader with an inline hashing and size checker
 | 
			
		||||
	wrappedRd := newHashingReader(meta.Size, meta.Oid, r)
 | 
			
		||||
 | 
			
		||||
	// now pass the wrapped reader to Save - if there is a size mismatch or hash mismatch then
 | 
			
		||||
	// the errors returned by the newHashingReader should percolate up to here
 | 
			
		||||
	written, err := s.Save(p, wrappedRd, meta.Size)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		log.Error("Whilst putting LFS OID[%s]: Failed to copy to tmpPath: %s Error: %v", meta.Oid, p, err)
 | 
			
		||||
		return err
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// This shouldn't happen but it is sensible to test
 | 
			
		||||
	if written != meta.Size {
 | 
			
		||||
		if err := s.Delete(p); err != nil {
 | 
			
		||||
			log.Error("Cleaning the LFS OID[%s] failed: %v", meta.Oid, err)
 | 
			
		||||
@@ -82,14 +88,6 @@ func (s *ContentStore) Put(meta *models.LFSMetaObject, r io.Reader) error {
 | 
			
		||||
		return errSizeMismatch
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	shaStr := hex.EncodeToString(hash.Sum(nil))
 | 
			
		||||
	if shaStr != meta.Oid {
 | 
			
		||||
		if err := s.Delete(p); err != nil {
 | 
			
		||||
			log.Error("Cleaning the LFS OID[%s] failed: %v", meta.Oid, err)
 | 
			
		||||
		}
 | 
			
		||||
		return errHashMismatch
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@@ -118,3 +116,45 @@ func (s *ContentStore) Verify(meta *models.LFSMetaObject) (bool, error) {
 | 
			
		||||
 | 
			
		||||
	return true, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type hashingReader struct {
 | 
			
		||||
	internal     io.Reader
 | 
			
		||||
	currentSize  int64
 | 
			
		||||
	expectedSize int64
 | 
			
		||||
	hash         hash.Hash
 | 
			
		||||
	expectedHash string
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (r *hashingReader) Read(b []byte) (int, error) {
 | 
			
		||||
	n, err := r.internal.Read(b)
 | 
			
		||||
 | 
			
		||||
	if n > 0 {
 | 
			
		||||
		r.currentSize += int64(n)
 | 
			
		||||
		wn, werr := r.hash.Write(b[:n])
 | 
			
		||||
		if wn != n || werr != nil {
 | 
			
		||||
			return n, werr
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	if err != nil && err == io.EOF {
 | 
			
		||||
		if r.currentSize != r.expectedSize {
 | 
			
		||||
			return n, errSizeMismatch
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		shaStr := hex.EncodeToString(r.hash.Sum(nil))
 | 
			
		||||
		if shaStr != r.expectedHash {
 | 
			
		||||
			return n, errHashMismatch
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return n, err
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func newHashingReader(expectedSize int64, expectedHash string, reader io.Reader) *hashingReader {
 | 
			
		||||
	return &hashingReader{
 | 
			
		||||
		internal:     reader,
 | 
			
		||||
		expectedSize: expectedSize,
 | 
			
		||||
		expectedHash: expectedHash,
 | 
			
		||||
		hash:         sha256.New(),
 | 
			
		||||
	}
 | 
			
		||||
}
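The same streaming check can be sketched with only the standard library. This standalone type mirrors the unexported hashingReader above; it is not Gitea's API, just an illustration of the wrap-and-verify-at-EOF idea.

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"errors"
	"fmt"
	"hash"
	"io"
	"io/ioutil"
	"strings"
)

// checkedReader hashes and counts bytes as they stream past and turns io.EOF
// into an error when the size or SHA-256 digest does not match the expectation.
type checkedReader struct {
	r        io.Reader
	hash     hash.Hash
	read     int64
	wantSize int64
	wantOid  string
}

func (c *checkedReader) Read(p []byte) (int, error) {
	n, err := c.r.Read(p)
	if n > 0 {
		c.read += int64(n)
		_, _ = c.hash.Write(p[:n])
	}
	if err == io.EOF {
		if c.read != c.wantSize {
			return n, errors.New("size mismatch")
		}
		if hex.EncodeToString(c.hash.Sum(nil)) != c.wantOid {
			return n, errors.New("hash mismatch")
		}
	}
	return n, err
}

func main() {
	content := "hello lfs"
	sum := sha256.Sum256([]byte(content))
	cr := &checkedReader{
		r:        strings.NewReader(content),
		hash:     sha256.New(),
		wantSize: int64(len(content)),
		wantOid:  hex.EncodeToString(sum[:]),
	}
	n, err := io.Copy(ioutil.Discard, cr)
	fmt.Println(n, err) // both checks pass here, so err is nil
}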
 | 
			
		||||
 
 | 
			
		||||
@@ -43,7 +43,7 @@ var (
 | 
			
		||||
	// sha1CurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
 | 
			
		||||
	// Although SHA1 hashes are 40 chars long, the regex matches the hash from 7 to 40 chars in length
 | 
			
		||||
	// so that abbreviated hash links can be used as well. This matches git and GitHub usability.
 | 
			
		||||
	sha1CurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,40})(?:\s|$|\)|\]|\.(\s|$))`)
 | 
			
		||||
	sha1CurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,40})(?:\s|$|\)|\]|[.,](\s|$))`)
 | 
			
		||||
 | 
			
		||||
	// shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
 | 
			
		||||
	shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)
 | 
			
		||||
@@ -298,8 +298,8 @@ func RenderEmoji(
 | 
			
		||||
	return ctx.postProcess(rawHTML)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
var byteBodyTag = []byte("<body>")
 | 
			
		||||
var byteBodyTagClosing = []byte("</body>")
 | 
			
		||||
var tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
 | 
			
		||||
var nulCleaner = strings.NewReplacer("\000", "")
 | 
			
		||||
 | 
			
		||||
func (ctx *postProcessCtx) postProcess(rawHTML []byte) ([]byte, error) {
 | 
			
		||||
	if ctx.procs == nil {
 | 
			
		||||
@@ -307,13 +307,18 @@ func (ctx *postProcessCtx) postProcess(rawHTML []byte) ([]byte, error) {
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// give a generous extra 50 bytes
 | 
			
		||||
	res := make([]byte, 0, len(rawHTML)+50)
 | 
			
		||||
	res = append(res, byteBodyTag...)
 | 
			
		||||
	res = append(res, rawHTML...)
 | 
			
		||||
	res = append(res, byteBodyTagClosing...)
 | 
			
		||||
	res := bytes.NewBuffer(make([]byte, 0, len(rawHTML)+50))
 | 
			
		||||
	// prepend "<html><body>"
 | 
			
		||||
	_, _ = res.WriteString("<html><body>")
 | 
			
		||||
 | 
			
		||||
	// Strip out nuls - they're always invalid
 | 
			
		||||
	_, _ = res.Write(tagCleaner.ReplaceAll([]byte(nulCleaner.Replace(string(rawHTML))), []byte("<$1")))
 | 
			
		||||
 | 
			
		||||
	// close the tags
 | 
			
		||||
	_, _ = res.WriteString("</body></html>")
 | 
			
		||||
 | 
			
		||||
	// parse the HTML
 | 
			
		||||
	nodes, err := html.ParseFragment(bytes.NewReader(res), nil)
 | 
			
		||||
	nodes, err := html.ParseFragment(res, nil)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return nil, &postProcessError{"invalid HTML", err}
 | 
			
		||||
	}
 | 
			
		||||
@@ -322,24 +327,45 @@ func (ctx *postProcessCtx) postProcess(rawHTML []byte) ([]byte, error) {
 | 
			
		||||
		ctx.visitNode(node, true)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	newNodes := make([]*html.Node, 0, len(nodes))
 | 
			
		||||
 | 
			
		||||
	for _, node := range nodes {
 | 
			
		||||
		if node.Data == "html" {
 | 
			
		||||
			node = node.FirstChild
 | 
			
		||||
			for node != nil && node.Data != "body" {
 | 
			
		||||
				node = node.NextSibling
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
		if node == nil {
 | 
			
		||||
			continue
 | 
			
		||||
		}
 | 
			
		||||
		if node.Data == "body" {
 | 
			
		||||
			child := node.FirstChild
 | 
			
		||||
			for child != nil {
 | 
			
		||||
				newNodes = append(newNodes, child)
 | 
			
		||||
				child = child.NextSibling
 | 
			
		||||
			}
 | 
			
		||||
		} else {
 | 
			
		||||
			newNodes = append(newNodes, node)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	nodes = newNodes
 | 
			
		||||
 | 
			
		||||
	// Create buffer in which the data will be placed again. We know that the
 | 
			
		||||
	// length will be at least that of res; to spare a few alloc+copy, we
 | 
			
		||||
	// reuse res, resetting its length to 0.
 | 
			
		||||
	buf := bytes.NewBuffer(res[:0])
 | 
			
		||||
	res.Reset()
 | 
			
		||||
	// Render everything to buf.
 | 
			
		||||
	for _, node := range nodes {
 | 
			
		||||
		err = html.Render(buf, node)
 | 
			
		||||
		err = html.Render(res, node)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return nil, &postProcessError{"error rendering processed HTML", err}
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	// remove initial parts - because Render creates a whole HTML page.
 | 
			
		||||
	res = buf.Bytes()
 | 
			
		||||
	res = res[bytes.Index(res, byteBodyTag)+len(byteBodyTag) : bytes.LastIndex(res, byteBodyTagClosing)]
 | 
			
		||||
 | 
			
		||||
	// Everything done successfully, return parsed data.
 | 
			
		||||
	return res, nil
 | 
			
		||||
	return res.Bytes(), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (ctx *postProcessCtx) visitNode(node *html.Node, visitText bool) {
 | 
			
		||||
@@ -632,16 +658,18 @@ func shortLinkProcessorFull(ctx *postProcessCtx, node *html.Node, noLink bool) {
 | 
			
		||||
			// When parsing HTML, x/net/html will change all quotes which are
 | 
			
		||||
			// not used for syntax into UTF-8 quotes. So checking val[0] won't
 | 
			
		||||
			// be enough, since that only checks a single byte.
 | 
			
		||||
			if (strings.HasPrefix(val, "“") && strings.HasSuffix(val, "”")) ||
 | 
			
		||||
				(strings.HasPrefix(val, "‘") && strings.HasSuffix(val, "’")) {
 | 
			
		||||
				const lenQuote = len("‘")
 | 
			
		||||
				val = val[lenQuote : len(val)-lenQuote]
 | 
			
		||||
			} else if (strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"")) ||
 | 
			
		||||
				(strings.HasPrefix(val, "'") && strings.HasSuffix(val, "'")) {
 | 
			
		||||
				val = val[1 : len(val)-1]
 | 
			
		||||
			} else if strings.HasPrefix(val, "'") && strings.HasSuffix(val, "’") {
 | 
			
		||||
				const lenQuote = len("‘")
 | 
			
		||||
				val = val[1 : len(val)-lenQuote]
 | 
			
		||||
			if len(val) > 1 {
 | 
			
		||||
				if (strings.HasPrefix(val, "“") && strings.HasSuffix(val, "”")) ||
 | 
			
		||||
					(strings.HasPrefix(val, "‘") && strings.HasSuffix(val, "’")) {
 | 
			
		||||
					const lenQuote = len("‘")
 | 
			
		||||
					val = val[lenQuote : len(val)-lenQuote]
 | 
			
		||||
				} else if (strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"")) ||
 | 
			
		||||
					(strings.HasPrefix(val, "'") && strings.HasSuffix(val, "'")) {
 | 
			
		||||
					val = val[1 : len(val)-1]
 | 
			
		||||
				} else if strings.HasPrefix(val, "'") && strings.HasSuffix(val, "’") {
 | 
			
		||||
					const lenQuote = len("‘")
 | 
			
		||||
					val = val[1 : len(val)-lenQuote]
 | 
			
		||||
				}
 | 
			
		||||
			}
 | 
			
		||||
			props[key] = val
 | 
			
		||||
		}
 | 
			
		||||
 
 | 
			
		||||
@@ -46,6 +46,12 @@ func TestRender_Commits(t *testing.T) {
 | 
			
		||||
	test("/home/gitea/"+sha, "<p>/home/gitea/"+sha+"</p>")
 | 
			
		||||
	test("deadbeef", `<p>deadbeef</p>`)
 | 
			
		||||
	test("d27ace93", `<p>d27ace93</p>`)
 | 
			
		||||
	test(sha[:14]+".x", `<p>`+sha[:14]+`.x</p>`)
 | 
			
		||||
 | 
			
		||||
	expected14 := `<a href="` + commit[:len(commit)-(40-14)] + `" rel="nofollow"><code>` + sha[:10] + `</code></a>`
 | 
			
		||||
	test(sha[:14]+".", `<p>`+expected14+`.</p>`)
 | 
			
		||||
	test(sha[:14]+",", `<p>`+expected14+`,</p>`)
 | 
			
		||||
	test("["+sha[:14]+"]", `<p>[`+expected14+`]</p>`)
 | 
			
		||||
}
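The behaviour exercised by these new assertions comes from the sha1CurrentPattern change above. A standalone sketch of just the regular expression shows that a trailing "." or "," before a space or end of line no longer blocks the match; the pattern string is copied verbatim from the updated html.go.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Pattern copied from the updated sha1CurrentPattern in modules/markup/html.go.
	re := regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,40})(?:\s|$|\)|\]|[.,](\s|$))`)

	for _, s := range []string{
		"see d8a994ef243349,",
		"see d8a994ef243349.",
		"[d8a994ef243349]",
	} {
		fmt.Printf("%q -> %v\n", s, re.FindStringSubmatch(s))
	}
}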
 | 
			
		||||
 | 
			
		||||
func TestRender_CrossReferences(t *testing.T) {
 | 
			
		||||
@@ -142,7 +148,7 @@ func TestRender_links(t *testing.T) {
 | 
			
		||||
		`<p><a href="ftp://gitea.com/file.txt" rel="nofollow">ftp://gitea.com/file.txt</a></p>`)
 | 
			
		||||
	test(
 | 
			
		||||
		"magnet:?xt=urn:btih:5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download",
 | 
			
		||||
		`<p><a href="magnet:?dn=download&xt=urn%3Abtih%3A5dee65101db281ac9c46344cd6b175cdcadabcde" rel="nofollow">magnet:?xt=urn:btih:5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download</a></p>`)
 | 
			
		||||
		`<p><a href="magnet:?xt=urn%3Abtih%3A5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download" rel="nofollow">magnet:?xt=urn:btih:5dee65101db281ac9c46344cd6b175cdcadabcde&dn=download</a></p>`)
 | 
			
		||||
 | 
			
		||||
	// Test that should *not* be turned into URL
 | 
			
		||||
	test(
 | 
			
		||||
@@ -377,3 +383,28 @@ func TestRender_ShortLinks(t *testing.T) {
 | 
			
		||||
		`<p><a href="https://example.org" rel="nofollow">[[foobar]]</a></p>`,
 | 
			
		||||
		`<p><a href="https://example.org" rel="nofollow">[[foobar]]</a></p>`)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func Test_ParseClusterFuzz(t *testing.T) {
 | 
			
		||||
	setting.AppURL = AppURL
 | 
			
		||||
	setting.AppSubURL = AppSubURL
 | 
			
		||||
 | 
			
		||||
	var localMetas = map[string]string{
 | 
			
		||||
		"user": "go-gitea",
 | 
			
		||||
		"repo": "gitea",
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	data := "<A><maTH><tr><MN><bodY ÿ><temPlate></template><tH><tr></A><tH><d<bodY "
 | 
			
		||||
 | 
			
		||||
	val, err := PostProcess([]byte(data), "https://example.com", localMetas, false)
 | 
			
		||||
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
	assert.NotContains(t, string(val), "<html")
 | 
			
		||||
 | 
			
		||||
	data = "<!DOCTYPE html>\n<A><maTH><tr><MN><bodY ÿ><temPlate></template><tH><tr></A><tH><d<bodY "
 | 
			
		||||
 | 
			
		||||
	val, err = PostProcess([]byte(data), "https://example.com", localMetas, false)
 | 
			
		||||
 | 
			
		||||
	assert.NoError(t, err)
 | 
			
		||||
 | 
			
		||||
	assert.NotContains(t, string(val), "<html")
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -10,6 +10,7 @@ import (
 | 
			
		||||
	"regexp"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"code.gitea.io/gitea/modules/log"
 | 
			
		||||
	"code.gitea.io/gitea/modules/markup"
 | 
			
		||||
	"code.gitea.io/gitea/modules/markup/common"
 | 
			
		||||
	"code.gitea.io/gitea/modules/setting"
 | 
			
		||||
@@ -76,6 +77,12 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
 | 
			
		||||
					header.ID = util.BytesToReadOnlyString(id.([]byte))
 | 
			
		||||
				}
 | 
			
		||||
				toc = append(toc, header)
 | 
			
		||||
			} else {
 | 
			
		||||
				for _, attr := range v.Attributes() {
 | 
			
		||||
					if _, ok := attr.Value.([]byte); !ok {
 | 
			
		||||
						v.SetAttribute(attr.Name, []byte(fmt.Sprintf("%v", attr.Value)))
 | 
			
		||||
					}
 | 
			
		||||
				}
 | 
			
		||||
			}
 | 
			
		||||
		case *ast.Image:
 | 
			
		||||
			// Images need two things:
 | 
			
		||||
@@ -101,11 +108,41 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
 | 
			
		||||
			parent := n.Parent()
 | 
			
		||||
			// Create a link around image only if parent is not already a link
 | 
			
		||||
			if _, ok := parent.(*ast.Link); !ok && parent != nil {
 | 
			
		||||
				next := n.NextSibling()
 | 
			
		||||
 | 
			
		||||
				// Create a link wrapper
 | 
			
		||||
				wrap := ast.NewLink()
 | 
			
		||||
				wrap.Destination = link
 | 
			
		||||
				wrap.Title = v.Title
 | 
			
		||||
 | 
			
		||||
				// Duplicate the current image node
 | 
			
		||||
				image := ast.NewImage(ast.NewLink())
 | 
			
		||||
				image.Destination = link
 | 
			
		||||
				image.Title = v.Title
 | 
			
		||||
				for _, attr := range v.Attributes() {
 | 
			
		||||
					image.SetAttribute(attr.Name, attr.Value)
 | 
			
		||||
				}
 | 
			
		||||
				for child := v.FirstChild(); child != nil; {
 | 
			
		||||
					next := child.NextSibling()
 | 
			
		||||
					image.AppendChild(image, child)
 | 
			
		||||
					child = next
 | 
			
		||||
				}
 | 
			
		||||
 | 
			
		||||
				// Append our duplicate image to the wrapper link
 | 
			
		||||
				wrap.AppendChild(wrap, image)
 | 
			
		||||
 | 
			
		||||
				// Wire in the next sibling
 | 
			
		||||
				wrap.SetNextSibling(next)
 | 
			
		||||
 | 
			
		||||
				// Replace the current node with the wrapper link
 | 
			
		||||
				parent.ReplaceChild(parent, n, wrap)
 | 
			
		||||
				wrap.AppendChild(wrap, n)
 | 
			
		||||
 | 
			
		||||
				// But most importantly ensure the next sibling is still on the old image too
 | 
			
		||||
				v.SetNextSibling(next)
 | 
			
		||||
 | 
			
		||||
			} else {
 | 
			
		||||
				log.Debug("ast.Image: %s has parent: %v", link, parent)
 | 
			
		||||
 | 
			
		||||
			}
 | 
			
		||||
		case *ast.Link:
 | 
			
		||||
			// Links need their href to munged to be a real value
 | 
			
		||||
 
 | 
			
		||||
@@ -6,7 +6,8 @@
 | 
			
		||||
package markdown
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"strings"
 | 
			
		||||
	"sync"
 | 
			
		||||
 | 
			
		||||
@@ -18,7 +19,7 @@ import (
 | 
			
		||||
 | 
			
		||||
	chromahtml "github.com/alecthomas/chroma/formatters/html"
 | 
			
		||||
	"github.com/yuin/goldmark"
 | 
			
		||||
	"github.com/yuin/goldmark-highlighting"
 | 
			
		||||
	highlighting "github.com/yuin/goldmark-highlighting"
 | 
			
		||||
	meta "github.com/yuin/goldmark-meta"
 | 
			
		||||
	"github.com/yuin/goldmark/extension"
 | 
			
		||||
	"github.com/yuin/goldmark/parser"
 | 
			
		||||
@@ -34,6 +35,44 @@ var urlPrefixKey = parser.NewContextKey()
 | 
			
		||||
var isWikiKey = parser.NewContextKey()
 | 
			
		||||
var renderMetasKey = parser.NewContextKey()
 | 
			
		||||
 | 
			
		||||
type closesWithError interface {
 | 
			
		||||
	io.WriteCloser
 | 
			
		||||
	CloseWithError(err error) error
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type limitWriter struct {
 | 
			
		||||
	w     closesWithError
 | 
			
		||||
	sum   int64
 | 
			
		||||
	limit int64
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Write implements the standard Write interface:
 | 
			
		||||
func (l *limitWriter) Write(data []byte) (int, error) {
 | 
			
		||||
	leftToWrite := l.limit - l.sum
 | 
			
		||||
	if leftToWrite < int64(len(data)) {
 | 
			
		||||
		n, err := l.w.Write(data[:leftToWrite])
 | 
			
		||||
		l.sum += int64(n)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return n, err
 | 
			
		||||
		}
 | 
			
		||||
		_ = l.w.Close()
 | 
			
		||||
		return n, fmt.Errorf("Rendered content too large - truncating render")
 | 
			
		||||
	}
 | 
			
		||||
	n, err := l.w.Write(data)
 | 
			
		||||
	l.sum += int64(n)
 | 
			
		||||
	return n, err
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Close closes the writer
 | 
			
		||||
func (l *limitWriter) Close() error {
 | 
			
		||||
	return l.w.Close()
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// CloseWithError closes the writer
 | 
			
		||||
func (l *limitWriter) CloseWithError(err error) error {
 | 
			
		||||
	return l.w.CloseWithError(err)
 | 
			
		||||
}
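limitWriter relies on io.Pipe's CloseWithError to make the reading side of the render pipeline see the failure; here is a minimal stdlib-only sketch of that mechanism, independent of Gitea's types.

package main

import (
	"fmt"
	"io"
	"io/ioutil"
)

func main() {
	rd, wr := io.Pipe()

	go func() {
		_, _ = wr.Write([]byte("partial output"))
		// This is how the limit and panic cases above surface on the reader:
		// whoever reads from rd gets the data written so far plus this error.
		_ = wr.CloseWithError(fmt.Errorf("rendered content too large - truncating render"))
	}()

	out, err := ioutil.ReadAll(rd)
	fmt.Printf("read %q, err: %v\n", out, err)
}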
 | 
			
		||||
 | 
			
		||||
// NewGiteaParseContext creates a parser.Context with the gitea context set
 | 
			
		||||
func NewGiteaParseContext(urlPrefix string, metas map[string]string, isWiki bool) parser.Context {
 | 
			
		||||
	pc := parser.NewContext(parser.WithIDs(newPrefixedIDs()))
 | 
			
		||||
@@ -43,8 +82,8 @@ func NewGiteaParseContext(urlPrefix string, metas map[string]string, isWiki bool
 | 
			
		||||
	return pc
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// render renders Markdown to HTML without handling special links.
 | 
			
		||||
func render(body []byte, urlPrefix string, metas map[string]string, wikiMarkdown bool) []byte {
 | 
			
		||||
// actualRender renders Markdown to HTML without handling special links.
 | 
			
		||||
func actualRender(body []byte, urlPrefix string, metas map[string]string, wikiMarkdown bool) []byte {
 | 
			
		||||
	once.Do(func() {
 | 
			
		||||
		converter = goldmark.New(
 | 
			
		||||
			goldmark.WithExtensions(extension.Table,
 | 
			
		||||
@@ -119,12 +158,57 @@ func render(body []byte, urlPrefix string, metas map[string]string, wikiMarkdown
 | 
			
		||||
 | 
			
		||||
	})
 | 
			
		||||
 | 
			
		||||
	pc := NewGiteaParseContext(urlPrefix, metas, wikiMarkdown)
 | 
			
		||||
	var buf bytes.Buffer
 | 
			
		||||
	if err := converter.Convert(giteautil.NormalizeEOL(body), &buf, parser.WithContext(pc)); err != nil {
 | 
			
		||||
		log.Error("Unable to render: %v", err)
 | 
			
		||||
	rd, wr := io.Pipe()
 | 
			
		||||
	defer func() {
 | 
			
		||||
		_ = rd.Close()
 | 
			
		||||
		_ = wr.Close()
 | 
			
		||||
	}()
 | 
			
		||||
 | 
			
		||||
	lw := &limitWriter{
 | 
			
		||||
		w:     wr,
 | 
			
		||||
		limit: setting.UI.MaxDisplayFileSize * 3,
 | 
			
		||||
	}
 | 
			
		||||
	return markup.SanitizeReader(&buf).Bytes()
 | 
			
		||||
 | 
			
		||||
	// FIXME: should we include a timeout that closes the pipe to abort the parser and sanitizer if it takes too long?
 | 
			
		||||
	go func() {
 | 
			
		||||
		defer func() {
 | 
			
		||||
			err := recover()
 | 
			
		||||
			if err == nil {
 | 
			
		||||
				return
 | 
			
		||||
			}
 | 
			
		||||
 | 
			
		||||
			log.Warn("Unable to render markdown due to panic in goldmark: %v", err)
 | 
			
		||||
			if log.IsDebug() {
 | 
			
		||||
				log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2)))
 | 
			
		||||
			}
 | 
			
		||||
			_ = lw.CloseWithError(fmt.Errorf("%v", err))
 | 
			
		||||
		}()
 | 
			
		||||
 | 
			
		||||
		pc := NewGiteaParseContext(urlPrefix, metas, wikiMarkdown)
 | 
			
		||||
		if err := converter.Convert(giteautil.NormalizeEOL(body), lw, parser.WithContext(pc)); err != nil {
 | 
			
		||||
			log.Error("Unable to render: %v", err)
 | 
			
		||||
			_ = lw.CloseWithError(err)
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
		_ = lw.Close()
 | 
			
		||||
	}()
 | 
			
		||||
	return markup.SanitizeReader(rd).Bytes()
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func render(body []byte, urlPrefix string, metas map[string]string, wikiMarkdown bool) (ret []byte) {
 | 
			
		||||
	defer func() {
 | 
			
		||||
		err := recover()
 | 
			
		||||
		if err == nil {
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		log.Warn("Unable to render markdown due to panic in goldmark - will return sanitized raw bytes")
 | 
			
		||||
		if log.IsDebug() {
 | 
			
		||||
			log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2)))
 | 
			
		||||
		}
 | 
			
		||||
		ret = markup.SanitizeBytes(body)
 | 
			
		||||
	}()
 | 
			
		||||
	return actualRender(body, urlPrefix, metas, wikiMarkdown)
 | 
			
		||||
}
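An end-to-end sketch of the fallback path: RenderRaw goes through render(), so a goldmark panic now degrades to sanitized raw bytes rather than crashing the caller. This assumes it runs inside a Gitea process where markdown and sanitizer settings have already been initialized.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/markup/markdown"
)

func main() {
	body := []byte("*hello* <script>alert(1)</script>")
	// On the happy path this renders markdown; if goldmark panics, the recover()
	// in render() returns markup.SanitizeBytes(body) instead.
	out := markdown.RenderRaw(body, "", false)
	fmt.Println(string(out))
}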
 | 
			
		||||
 | 
			
		||||
var (
 | 
			
		||||
 
 | 
			
		||||
@@ -308,3 +308,34 @@ func TestRender_RenderParagraphs(t *testing.T) {
	test(t, "A\n\nB\nC\n", 2)
	test(t, "A\n\n\nB\nC\n", 2)
}

func TestMarkdownRenderRaw(t *testing.T) {
	testcases := [][]byte{
		{ // clusterfuzz_testcase_minimized_fuzz_markdown_render_raw_6267570554535936
			0x2a, 0x20, 0x2d, 0x0a, 0x09, 0x20, 0x60, 0x5b, 0x0a, 0x09, 0x20, 0x60,
			0x5b,
		},
		{ // clusterfuzz_testcase_minimized_fuzz_markdown_render_raw_6278827345051648
			0x2d, 0x20, 0x2d, 0x0d, 0x09, 0x60, 0x0d, 0x09, 0x60,
		},
		{ // clusterfuzz_testcase_minimized_fuzz_markdown_render_raw_6016973788020736[] = {
			0x7b, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x35, 0x7d, 0x0a, 0x3d,
		},
	}

	for _, testcase := range testcases {
		_ = RenderRaw(testcase, "", false)
	}
}

func TestRenderSiblingImages_Issue12925(t *testing.T) {
	testcase := `
![image1](/image1)
![image2](/image2)
`
	expected := `<p><a href="/image1" rel="nofollow"><img src="/image1" alt="image1"></a><br>
<a href="/image2" rel="nofollow"><img src="/image2" alt="image2"></a></p>
`
	res := string(RenderRaw([]byte(testcase), "", false))
	assert.Equal(t, expected, res)

}

@@ -46,7 +46,9 @@ func ReplaceSanitizer() {
	sanitizer.policy.AllowAttrs("checked", "disabled", "readonly").OnElements("input")

	// Custom URL-Schemes
	sanitizer.policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...)
	if len(setting.Markdown.CustomURLSchemes) > 0 {
		sanitizer.policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...)
	}

	// Allow keyword markup
	sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^` + keywordClass + `$`)).OnElements("span")

@@ -6,6 +6,8 @@
package markup

import (
	"html/template"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
@@ -50,3 +52,13 @@ func Test_Sanitizer(t *testing.T) {
		assert.Equal(t, testCases[i+1], string(SanitizeBytes([]byte(testCases[i]))))
	}
}

func TestSanitizeNonEscape(t *testing.T) {
	descStr := "<scrİpt><script>alert(document.domain)</script></scrİpt>"

	output := template.HTML(Sanitize(string(descStr)))
	if strings.Contains(string(output), "<script>") {
		t.Errorf("un-escaped <script> in output: %q", output)
	}

}

@@ -6,7 +6,6 @@
package migrations

import (
	"bytes"
	"context"
	"fmt"
	"io"
@@ -125,7 +124,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate
	}
	r.DefaultBranch = repo.DefaultBranch

	r, err = repository.MigrateRepositoryGitData(g.doer, owner, r, base.MigrateOptions{
	r, err = repository.MigrateRepositoryGitData(g.ctx, owner, r, base.MigrateOptions{
		RepoName:       g.repoName,
		Description:    repo.Description,
		OriginalURL:    repo.OriginalURL,
@@ -154,6 +153,15 @@ func (g *GiteaLocalUploader) Close() {

// CreateTopics creates topics
func (g *GiteaLocalUploader) CreateTopics(topics ...string) error {
	// ignore topics to long for the db
	c := 0
	for i := range topics {
		if len(topics[i]) <= 25 {
			topics[c] = topics[i]
			c++
		}
	}
	topics = topics[:c]
	return models.SaveTopics(g.repo.ID, topics...)
}

@@ -287,7 +295,8 @@ func (g *GiteaLocalUploader) CreateReleases(downloader base.Downloader, releases
					}
					rc = resp.Body
				}
				_, err = storage.Attachments.Save(attach.RelativePath(), rc)
				defer rc.Close()
				_, err = storage.Attachments.Save(attach.RelativePath(), rc, int64(*asset.Size))
				return err
			}()
			if err != nil {
@@ -802,13 +811,20 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error {
			}

			var patch string
			patchBuf := new(bytes.Buffer)
			if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, patchBuf); err != nil {
				// We should ignore the error since the commit maybe removed when force push to the pull request
				log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err)
			} else {
				patch = git.CutDiffAroundLine(patchBuf, int64((&models.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
			}
			reader, writer := io.Pipe()
			defer func() {
				_ = reader.Close()
				_ = writer.Close()
			}()
			go func() {
				if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, writer); err != nil {
					// We should ignore the error since the commit maybe removed when force push to the pull request
					log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err)
				}
				_ = writer.Close()
			}()

			patch, _ = git.CutDiffAroundLine(reader, int64((&models.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)

			var c = models.Comment{
				Type:        models.CommentTypeCode,

@@ -52,6 +52,13 @@ func isMigrateURLAllowed(remoteURL string) error {
		}
	}

	if u.Host == "" {
		if !setting.ImportLocalPaths {
			return &models.ErrMigrationNotAllowed{Host: "<LOCAL_FILESYSTEM>"}
		}
		return nil
	}

	if !setting.Migrations.AllowLocalNetworks {
		addrList, err := net.LookupIP(strings.Split(u.Host, ":")[0])
		if err != nil {

@@ -31,4 +31,16 @@ func TestMigrateWhiteBlocklist(t *testing.T) {

	err = isMigrateURLAllowed("https://github.com/go-gitea/gitea.git")
	assert.Error(t, err)

	old := setting.ImportLocalPaths
	setting.ImportLocalPaths = false

	err = isMigrateURLAllowed("/home/foo/bar/goo")
	assert.Error(t, err)

	setting.ImportLocalPaths = true
	err = isMigrateURLAllowed("/home/foo/bar/goo")
	assert.NoError(t, err)

	setting.ImportLocalPaths = old
}

@@ -29,7 +29,7 @@ func NewNotifier() base.Notifier {
	return &actionNotifier{}
}

func (a *actionNotifier) NotifyNewIssue(issue *models.Issue) {
func (a *actionNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
	if err := issue.LoadPoster(); err != nil {
		log.Error("issue.LoadPoster: %v", err)
		return
@@ -88,7 +88,7 @@ func (a *actionNotifier) NotifyIssueChangeStatus(doer *models.User, issue *model

// NotifyCreateIssueComment notifies comment on an issue to notifiers
func (a *actionNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
	act := &models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
@@ -120,7 +120,7 @@ func (a *actionNotifier) NotifyCreateIssueComment(doer *models.User, repo *model
	}
}

func (a *actionNotifier) NotifyNewPullRequest(pull *models.PullRequest) {
func (a *actionNotifier) NotifyNewPullRequest(pull *models.PullRequest, mentions []*models.User) {
	if err := pull.LoadIssue(); err != nil {
		log.Error("pull.LoadIssue: %v", err)
		return
@@ -203,7 +203,7 @@ func (a *actionNotifier) NotifyForkRepository(doer *models.User, oldRepo, repo *
	}
}

func (a *actionNotifier) NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment) {
func (a *actionNotifier) NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment, mentions []*models.User) {
	if err := review.LoadReviewer(); err != nil {
		log.Error("LoadReviewer '%d/%d': %v", review.ID, review.ReviewerID, err)
		return

@@ -20,7 +20,7 @@ type Notifier interface {
	NotifyRenameRepository(doer *models.User, repo *models.Repository, oldRepoName string)
	NotifyTransferRepository(doer *models.User, repo *models.Repository, oldOwnerName string)

	NotifyNewIssue(*models.Issue)
	NotifyNewIssue(issue *models.Issue, mentions []*models.User)
	NotifyIssueChangeStatus(*models.User, *models.Issue, *models.Comment, bool)
	NotifyIssueChangeMilestone(doer *models.User, issue *models.Issue, oldMilestoneID int64)
	NotifyIssueChangeAssignee(doer *models.User, issue *models.Issue, assignee *models.User, removed bool, comment *models.Comment)
@@ -32,15 +32,16 @@ type Notifier interface {
	NotifyIssueChangeLabels(doer *models.User, issue *models.Issue,
		addedLabels []*models.Label, removedLabels []*models.Label)

	NotifyNewPullRequest(*models.PullRequest)
	NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User)
	NotifyMergePullRequest(*models.PullRequest, *models.User)
	NotifyPullRequestSynchronized(doer *models.User, pr *models.PullRequest)
	NotifyPullRequestReview(*models.PullRequest, *models.Review, *models.Comment)
	NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment, mentions []*models.User)
	NotifyPullRequestCodeComment(pr *models.PullRequest, comment *models.Comment, mentions []*models.User)
	NotifyPullRequestChangeTargetBranch(doer *models.User, pr *models.PullRequest, oldBranch string)
	NotifyPullRequestPushCommits(doer *models.User, pr *models.PullRequest, comment *models.Comment)

	NotifyCreateIssueComment(*models.User, *models.Repository,
		*models.Issue, *models.Comment)
	NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
		issue *models.Issue, comment *models.Comment, mentions []*models.User)
	NotifyUpdateComment(*models.User, *models.Comment, string)
	NotifyDeleteComment(*models.User, *models.Comment)


@@ -23,11 +23,11 @@ func (*NullNotifier) Run() {

// NotifyCreateIssueComment places a place holder function
func (*NullNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
}

// NotifyNewIssue places a place holder function
func (*NullNotifier) NotifyNewIssue(issue *models.Issue) {
func (*NullNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
}

// NotifyIssueChangeStatus places a place holder function
@@ -35,11 +35,15 @@ func (*NullNotifier) NotifyIssueChangeStatus(doer *models.User, issue *models.Is
}

// NotifyNewPullRequest places a place holder function
func (*NullNotifier) NotifyNewPullRequest(pr *models.PullRequest) {
func (*NullNotifier) NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) {
}

// NotifyPullRequestReview places a place holder function
func (*NullNotifier) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, comment *models.Comment) {
func (*NullNotifier) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, comment *models.Comment, mentions []*models.User) {
}

// NotifyPullRequestCodeComment places a place holder function
func (*NullNotifier) NotifyPullRequestCodeComment(pr *models.PullRequest, comment *models.Comment, mentions []*models.User) {
}

// NotifyMergePullRequest places a place holder function

@@ -30,7 +30,7 @@ func NewNotifier() base.Notifier {
}

func (r *indexerNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
	if comment.Type == models.CommentTypeComment {
		if issue.Comments == nil {
			if err := issue.LoadDiscussComments(); err != nil {
@@ -45,11 +45,11 @@ func (r *indexerNotifier) NotifyCreateIssueComment(doer *models.User, repo *mode
	}
}

func (r *indexerNotifier) NotifyNewIssue(issue *models.Issue) {
func (r *indexerNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
	issue_indexer.UpdateIssueIndexer(issue)
}

func (r *indexerNotifier) NotifyNewPullRequest(pr *models.PullRequest) {
func (r *indexerNotifier) NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) {
	issue_indexer.UpdateIssueIndexer(pr.Issue)
}


@@ -27,7 +27,7 @@ func NewNotifier() base.Notifier {
}

func (m *mailNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
	var act models.ActionType
	if comment.Type == models.CommentTypeClose {
		act = models.ActionCloseIssue
@@ -41,13 +41,13 @@ func (m *mailNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.
		act = 0
	}

	if err := mailer.MailParticipantsComment(comment, act, issue); err != nil {
	if err := mailer.MailParticipantsComment(comment, act, issue, mentions); err != nil {
		log.Error("MailParticipantsComment: %v", err)
	}
}

func (m *mailNotifier) NotifyNewIssue(issue *models.Issue) {
	if err := mailer.MailParticipants(issue, issue.Poster, models.ActionCreateIssue); err != nil {
func (m *mailNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
	if err := mailer.MailParticipants(issue, issue.Poster, models.ActionCreateIssue, mentions); err != nil {
		log.Error("MailParticipants: %v", err)
	}
}
@@ -69,18 +69,18 @@ func (m *mailNotifier) NotifyIssueChangeStatus(doer *models.User, issue *models.
		}
	}

	if err := mailer.MailParticipants(issue, doer, actionType); err != nil {
	if err := mailer.MailParticipants(issue, doer, actionType, nil); err != nil {
		log.Error("MailParticipants: %v", err)
	}
}

func (m *mailNotifier) NotifyNewPullRequest(pr *models.PullRequest) {
	if err := mailer.MailParticipants(pr.Issue, pr.Issue.Poster, models.ActionCreatePullRequest); err != nil {
func (m *mailNotifier) NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) {
	if err := mailer.MailParticipants(pr.Issue, pr.Issue.Poster, models.ActionCreatePullRequest, mentions); err != nil {
		log.Error("MailParticipants: %v", err)
	}
}

func (m *mailNotifier) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, comment *models.Comment) {
func (m *mailNotifier) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, comment *models.Comment, mentions []*models.User) {
	var act models.ActionType
	if comment.Type == models.CommentTypeClose {
		act = models.ActionCloseIssue
@@ -89,11 +89,17 @@ func (m *mailNotifier) NotifyPullRequestReview(pr *models.PullRequest, r *models
	} else if comment.Type == models.CommentTypeComment {
		act = models.ActionCommentPull
	}
	if err := mailer.MailParticipantsComment(comment, act, pr.Issue); err != nil {
	if err := mailer.MailParticipantsComment(comment, act, pr.Issue, mentions); err != nil {
		log.Error("MailParticipantsComment: %v", err)
	}
}

func (m *mailNotifier) NotifyPullRequestCodeComment(pr *models.PullRequest, comment *models.Comment, mentions []*models.User) {
	if err := mailer.MailMentionsComment(pr, comment, mentions); err != nil {
		log.Error("MailMentionsComment: %v", err)
	}
}

func (m *mailNotifier) NotifyIssueChangeAssignee(doer *models.User, issue *models.Issue, assignee *models.User, removed bool, comment *models.Comment) {
	// mail only sent to added assignees and not self-assignee
	if !removed && doer.ID != assignee.ID && assignee.EmailNotifications() == models.EmailNotificationsEnabled {
@@ -115,7 +121,7 @@ func (m *mailNotifier) NotifyMergePullRequest(pr *models.PullRequest, doer *mode
		return
	}
	pr.Issue.Content = ""
	if err := mailer.MailParticipants(pr.Issue, doer, models.ActionMergePullRequest); err != nil {
	if err := mailer.MailParticipants(pr.Issue, doer, models.ActionMergePullRequest, nil); err != nil {
		log.Error("MailParticipants: %v", err)
	}
}
@@ -143,7 +149,7 @@ func (m *mailNotifier) NotifyPullRequestPushCommits(doer *models.User, pr *model
	}
	comment.Content = ""

	m.NotifyCreateIssueComment(doer, comment.Issue.Repo, comment.Issue, comment)
	m.NotifyCreateIssueComment(doer, comment.Issue.Repo, comment.Issue, comment, nil)
}

func (m *mailNotifier) NotifyNewRelease(rel *models.Release) {

@@ -39,16 +39,16 @@ func NewContext() {

// NotifyCreateIssueComment notifies issue comment related message to notifiers
func NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
	for _, notifier := range notifiers {
		notifier.NotifyCreateIssueComment(doer, repo, issue, comment)
		notifier.NotifyCreateIssueComment(doer, repo, issue, comment, mentions)
	}
}

// NotifyNewIssue notifies new issue to notifiers
func NotifyNewIssue(issue *models.Issue) {
func NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
	for _, notifier := range notifiers {
		notifier.NotifyNewIssue(issue)
		notifier.NotifyNewIssue(issue, mentions)
	}
}

@@ -67,9 +67,9 @@ func NotifyMergePullRequest(pr *models.PullRequest, doer *models.User) {
}

// NotifyNewPullRequest notifies new pull request to notifiers
func NotifyNewPullRequest(pr *models.PullRequest) {
func NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) {
	for _, notifier := range notifiers {
		notifier.NotifyNewPullRequest(pr)
		notifier.NotifyNewPullRequest(pr, mentions)
	}
}

@@ -81,9 +81,16 @@ func NotifyPullRequestSynchronized(doer *models.User, pr *models.PullRequest) {
}

// NotifyPullRequestReview notifies new pull request review
func NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment) {
func NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment, mentions []*models.User) {
	for _, notifier := range notifiers {
		notifier.NotifyPullRequestReview(pr, review, comment)
		notifier.NotifyPullRequestReview(pr, review, comment, mentions)
	}
}

// NotifyPullRequestCodeComment notifies new pull request code comment
func NotifyPullRequestCodeComment(pr *models.PullRequest, comment *models.Comment, mentions []*models.User) {
	for _, notifier := range notifiers {
		notifier.NotifyPullRequestCodeComment(pr, comment, mentions)
	}
}


@@ -51,7 +51,7 @@ func (ns *notificationService) Run() {
}

func (ns *notificationService) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
	var opts = issueNotificationOpts{
		IssueID:              issue.ID,
		NotificationAuthorID: doer.ID,
@@ -60,13 +60,31 @@ func (ns *notificationService) NotifyCreateIssueComment(doer *models.User, repo
		opts.CommentID = comment.ID
	}
	_ = ns.issueQueue.Push(opts)
	for _, mention := range mentions {
		var opts = issueNotificationOpts{
			IssueID:              issue.ID,
			NotificationAuthorID: doer.ID,
			ReceiverID:           mention.ID,
		}
		if comment != nil {
			opts.CommentID = comment.ID
		}
		_ = ns.issueQueue.Push(opts)
	}
}

func (ns *notificationService) NotifyNewIssue(issue *models.Issue) {
func (ns *notificationService) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
	_ = ns.issueQueue.Push(issueNotificationOpts{
		IssueID:              issue.ID,
		NotificationAuthorID: issue.Poster.ID,
	})
	for _, mention := range mentions {
		_ = ns.issueQueue.Push(issueNotificationOpts{
			IssueID:              issue.ID,
			NotificationAuthorID: issue.Poster.ID,
			ReceiverID:           mention.ID,
		})
	}
}

func (ns *notificationService) NotifyIssueChangeStatus(doer *models.User, issue *models.Issue, actionComment *models.Comment, isClosed bool) {
@@ -83,7 +101,7 @@ func (ns *notificationService) NotifyMergePullRequest(pr *models.PullRequest, do
	})
}

func (ns *notificationService) NotifyNewPullRequest(pr *models.PullRequest) {
func (ns *notificationService) NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) {
	if err := pr.LoadIssue(); err != nil {
		log.Error("Unable to load issue: %d for pr: %d: Error: %v", pr.IssueID, pr.ID, err)
		return
@@ -92,9 +110,16 @@ func (ns *notificationService) NotifyNewPullRequest(pr *models.PullRequest) {
		IssueID:              pr.Issue.ID,
		NotificationAuthorID: pr.Issue.PosterID,
	})
	for _, mention := range mentions {
		_ = ns.issueQueue.Push(issueNotificationOpts{
			IssueID:              pr.Issue.ID,
			NotificationAuthorID: pr.Issue.PosterID,
			ReceiverID:           mention.ID,
		})
	}
}

func (ns *notificationService) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, c *models.Comment) {
func (ns *notificationService) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, c *models.Comment, mentions []*models.User) {
	var opts = issueNotificationOpts{
		IssueID:              pr.Issue.ID,
		NotificationAuthorID: r.Reviewer.ID,
@@ -103,6 +128,28 @@ func (ns *notificationService) NotifyPullRequestReview(pr *models.PullRequest, r
		opts.CommentID = c.ID
	}
	_ = ns.issueQueue.Push(opts)
	for _, mention := range mentions {
		var opts = issueNotificationOpts{
			IssueID:              pr.Issue.ID,
			NotificationAuthorID: r.Reviewer.ID,
			ReceiverID:           mention.ID,
		}
		if c != nil {
			opts.CommentID = c.ID
		}
		_ = ns.issueQueue.Push(opts)
	}
}

func (ns *notificationService) NotifyPullRequestCodeComment(pr *models.PullRequest, c *models.Comment, mentions []*models.User) {
	for _, mention := range mentions {
		_ = ns.issueQueue.Push(issueNotificationOpts{
			IssueID:              pr.Issue.ID,
			NotificationAuthorID: c.Poster.ID,
			CommentID:            c.ID,
			ReceiverID:           mention.ID,
		})
	}
}

func (ns *notificationService) NotifyPullRequestPushCommits(doer *models.User, pr *models.PullRequest, comment *models.Comment) {

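A caller-side sketch of the pattern these signature changes enable: mention recipients are resolved once and handed to the notifier entry point, which then fans out one targeted notification per mentioned user. This is illustrative only; `resolveMentions` and the types below are hypothetical stand-ins, not Gitea APIs.

```go
package main

import "fmt"

type User struct {
	ID   int64
	Name string
}

type Issue struct {
	ID   int64
	Body string
}

// resolveMentions is a placeholder for whatever extracts @-mentions from the
// issue body and loads the corresponding users.
func resolveMentions(body string) []*User {
	return []*User{{ID: 2, Name: "alice"}}
}

// notifyNewIssue mimics the fan-out: one generic notification for watchers,
// plus one targeted notification per mentioned user.
func notifyNewIssue(issue *Issue, mentions []*User) {
	fmt.Printf("notify watchers of issue %d\n", issue.ID)
	for _, m := range mentions {
		fmt.Printf("notify mentioned user %s (%d)\n", m.Name, m.ID)
	}
}

func main() {
	issue := &Issue{ID: 1, Body: "ping @alice"}
	notifyNewIssue(issue, resolveMentions(issue.Body))
}
```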
@@ -249,7 +249,7 @@ func (m *webhookNotifier) NotifyIssueChangeStatus(doer *models.User, issue *mode
	}
}

func (m *webhookNotifier) NotifyNewIssue(issue *models.Issue) {
func (m *webhookNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
	if err := issue.LoadRepo(); err != nil {
		log.Error("issue.LoadRepo: %v", err)
		return
@@ -271,7 +271,7 @@ func (m *webhookNotifier) NotifyNewIssue(issue *models.Issue) {
	}
}

func (m *webhookNotifier) NotifyNewPullRequest(pull *models.PullRequest) {
func (m *webhookNotifier) NotifyNewPullRequest(pull *models.PullRequest, mentions []*models.User) {
	if err := pull.LoadIssue(); err != nil {
		log.Error("pull.LoadIssue: %v", err)
		return
@@ -387,7 +387,7 @@ func (m *webhookNotifier) NotifyUpdateComment(doer *models.User, c *models.Comme
}

func (m *webhookNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	issue *models.Issue, comment *models.Comment, mentions []*models.User) {
	mode, _ := models.AccessLevel(doer, repo)

	var err error
@@ -639,7 +639,7 @@ func (m *webhookNotifier) NotifyPullRequestChangeTargetBranch(doer *models.User,
	}
}

func (m *webhookNotifier) NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment) {
func (m *webhookNotifier) NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment, mentions []*models.User) {
	var reviewHookType models.HookEventType

	switch review.Type {

@@ -34,10 +34,12 @@ type Options struct {
// KnownPublicEntries list all direct children in the `public` directory
var KnownPublicEntries = []string{
	"css",
	"fonts",
	"img",
	"js",
	"serviceworker.js",
	"vendor",
	"favicon.ico",
}

// Custom implements the macaron static handler for serving custom assets.

@@ -149,6 +149,11 @@ func (q *PersistableChannelUniqueQueue) Has(data Data) (bool, error) {
	if err != nil || has {
		return has, err
	}
	q.lock.Lock()
	defer q.lock.Unlock()
	if q.internal == nil {
		return false, nil
	}
	return q.internal.(UniqueQueue).Has(data)
}


@@ -120,7 +120,6 @@ func UploadRepoFiles(repo *models.Repository, doer *models.User, opts *UploadRep
				return err
			}
			infos[i] = uploadInfo

		} else if objectHash, err = t.HashObject(file); err != nil {
			return err
		}
@@ -128,7 +127,6 @@ func UploadRepoFiles(repo *models.Repository, doer *models.User, opts *UploadRep
		// Add the object to the index
		if err := t.AddObjectToIndex("100644", objectHash, path.Join(opts.TreePath, uploadInfo.upload.Name)); err != nil {
			return err

		}
	}

@@ -165,28 +163,10 @@ func UploadRepoFiles(repo *models.Repository, doer *models.User, opts *UploadRep
	// OK now we can insert the data into the store - there's no way to clean up the store
	// once it's in there, it's in there.
	contentStore := &lfs.ContentStore{ObjectStorage: storage.LFS}
	for _, uploadInfo := range infos {
		if uploadInfo.lfsMetaObject == nil {
			continue
		}
		exist, err := contentStore.Exists(uploadInfo.lfsMetaObject)
		if err != nil {
	for _, info := range infos {
		if err := uploadToLFSContentStore(info, contentStore); err != nil {
			return cleanUpAfterFailure(&infos, t, err)
		}
		if !exist {
			file, err := os.Open(uploadInfo.upload.LocalPath())
			if err != nil {
				return cleanUpAfterFailure(&infos, t, err)
			}
			defer file.Close()
			// FIXME: Put regenerates the hash and copies the file over.
			// I guess this strictly ensures the soundness of the store but this is inefficient.
			if err := contentStore.Put(uploadInfo.lfsMetaObject, file); err != nil {
				// OK Now we need to cleanup
				// Can't clean up the store, once uploaded there they're there.
				return cleanUpAfterFailure(&infos, t, err)
			}
		}
	}

	// Then push this tree to NewBranch
@@ -196,3 +176,29 @@ func UploadRepoFiles(repo *models.Repository, doer *models.User, opts *UploadRep

	return models.DeleteUploads(uploads...)
}

func uploadToLFSContentStore(info uploadInfo, contentStore *lfs.ContentStore) error {
	if info.lfsMetaObject == nil {
		return nil
	}
	exist, err := contentStore.Exists(info.lfsMetaObject)
	if err != nil {
		return err
	}
	if !exist {
		file, err := os.Open(info.upload.LocalPath())
		if err != nil {
			return err
		}

		defer file.Close()
		// FIXME: Put regenerates the hash and copies the file over.
		// I guess this strictly ensures the soundness of the store but this is inefficient.
		if err := contentStore.Put(info.lfsMetaObject, file); err != nil {
			// OK Now we need to cleanup
			// Can't clean up the store, once uploaded there they're there.
			return err
		}
	}
	return nil
}

@@ -29,6 +29,13 @@ func CreateRepository(doer, u *models.User, opts models.CreateRepoOptions) (*mod
		opts.DefaultBranch = setting.Repository.DefaultBranch
	}

	// Check if label template exist
	if len(opts.IssueLabels) > 0 {
		if _, err := models.GetLabelTemplateFile(opts.IssueLabels); err != nil {
			return nil, err
		}
	}

	repo := &models.Repository{
		OwnerID:                         u.ID,
		Owner:                           u,
@@ -47,6 +54,8 @@ func CreateRepository(doer, u *models.User, opts models.CreateRepoOptions) (*mod
		TrustModel:                      opts.TrustModel,
	}

	var rollbackRepo *models.Repository

	if err := models.WithTx(func(ctx models.DBContext) error {
		if err := models.CreateRepository(ctx, doer, u, repo, false); err != nil {
			return err
@@ -85,9 +94,8 @@ func CreateRepository(doer, u *models.User, opts models.CreateRepoOptions) (*mod
		// Initialize Issue Labels if selected
		if len(opts.IssueLabels) > 0 {
			if err := models.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil {
				if errDelete := models.DeleteRepository(doer, u.ID, repo.ID); errDelete != nil {
					log.Error("Rollback deleteRepository: %v", errDelete)
				}
				rollbackRepo = repo
				rollbackRepo.OwnerID = u.ID
				return fmt.Errorf("InitializeLabels: %v", err)
			}
		}
@@ -96,13 +104,18 @@ func CreateRepository(doer, u *models.User, opts models.CreateRepoOptions) (*mod
			SetDescription(fmt.Sprintf("CreateRepository(git update-server-info): %s", repoPath)).
			RunInDir(repoPath); err != nil {
			log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
			if errDelete := models.DeleteRepository(doer, u.ID, repo.ID); errDelete != nil {
				log.Error("Rollback deleteRepository: %v", errDelete)
			}
			rollbackRepo = repo
			rollbackRepo.OwnerID = u.ID
			return fmt.Errorf("CreateRepository(git update-server-info): %v", err)
		}
		return nil
	}); err != nil {
		if rollbackRepo != nil {
			if errDelete := models.DeleteRepository(doer, rollbackRepo.OwnerID, rollbackRepo.ID); errDelete != nil {
				log.Error("Rollback deleteRepository: %v", errDelete)
			}
		}

		return nil, err
	}


@@ -5,6 +5,7 @@
package repository

import (
	"context"
	"fmt"
	"path"
	"strings"
@@ -41,7 +42,7 @@ func WikiRemoteURL(remote string) string {
}

// MigrateRepositoryGitData starts migrating git related data after created migrating repository
func MigrateRepositoryGitData(doer, u *models.User, repo *models.Repository, opts migration.MigrateOptions) (*models.Repository, error) {
func MigrateRepositoryGitData(ctx context.Context, u *models.User, repo *models.Repository, opts migration.MigrateOptions) (*models.Repository, error) {
	repoPath := models.RepoPath(u.Name, opts.RepoName)

	if u.IsOrganization() {
@@ -61,7 +62,7 @@ func MigrateRepositoryGitData(doer, u *models.User, repo *models.Repository, opt
		return repo, fmt.Errorf("Failed to remove %s: %v", repoPath, err)
	}

	if err = git.Clone(opts.CloneAddr, repoPath, git.CloneRepoOptions{
	if err = git.CloneWithContext(ctx, opts.CloneAddr, repoPath, git.CloneRepoOptions{
		Mirror:  true,
		Quiet:   true,
		Timeout: migrateTimeout,
@@ -77,7 +78,7 @@ func MigrateRepositoryGitData(doer, u *models.User, repo *models.Repository, opt
			return repo, fmt.Errorf("Failed to remove %s: %v", wikiPath, err)
		}

		if err = git.Clone(wikiRemotePath, wikiPath, git.CloneRepoOptions{
		if err = git.CloneWithContext(ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{
			Mirror:  true,
			Quiet:   true,
			Timeout: migrateTimeout,

@@ -62,6 +62,11 @@ func InitDBConfig() {
	sec := Cfg.Section("database")
	Database.Type = sec.Key("DB_TYPE").String()
	defaultCharset := "utf8"
	Database.UseMySQL = false
	Database.UseSQLite3 = false
	Database.UsePostgreSQL = false
	Database.UseMSSQL = false

	switch Database.Type {
	case "sqlite3":
		Database.UseSQLite3 = true

@@ -771,7 +771,7 @@ func NewContext() {
	ImportLocalPaths = sec.Key("IMPORT_LOCAL_PATHS").MustBool(false)
	DisableGitHooks = sec.Key("DISABLE_GIT_HOOKS").MustBool(true)
	OnlyAllowPushIfGiteaEnvironmentSet = sec.Key("ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET").MustBool(true)
	PasswordHashAlgo = sec.Key("PASSWORD_HASH_ALGO").MustString("argon2")
	PasswordHashAlgo = sec.Key("PASSWORD_HASH_ALGO").MustString("pbkdf2")
	CSRFCookieHTTPOnly = sec.Key("CSRF_COOKIE_HTTP_ONLY").MustBool(true)
	PasswordCheckPwn = sec.Key("PASSWORD_CHECK_PWN").MustBool(false)


@@ -31,22 +31,10 @@ func (s *Storage) MapTo(v interface{}) error {
	return nil
}

func getStorage(name, typ string, overrides ...*ini.Section) Storage {
func getStorage(name, typ string, targetSec *ini.Section) Storage {
	const sectionName = "storage"
	sec := Cfg.Section(sectionName)

	if len(overrides) == 0 {
		overrides = []*ini.Section{
			Cfg.Section(sectionName + "." + typ),
			Cfg.Section(sectionName + "." + name),
		}
	}

	var storage Storage

	storage.Type = sec.Key("STORAGE_TYPE").MustString(typ)
	storage.ServeDirect = sec.Key("SERVE_DIRECT").MustBool(false)

	// Global Defaults
	sec.Key("MINIO_ENDPOINT").MustString("localhost:9000")
	sec.Key("MINIO_ACCESS_KEY_ID").MustString("")
@@ -55,17 +43,37 @@ func getStorage(name, typ string, overrides ...*ini.Section) Storage {
	sec.Key("MINIO_LOCATION").MustString("us-east-1")
	sec.Key("MINIO_USE_SSL").MustBool(false)

	storage.Section = sec
	var storage Storage
	storage.Section = targetSec
	storage.Type = typ

	overrides := make([]*ini.Section, 0, 3)
	nameSec, err := Cfg.GetSection(sectionName + "." + name)
	if err == nil {
		overrides = append(overrides, nameSec)
	}

	typeSec, err := Cfg.GetSection(sectionName + "." + typ)
	if err == nil {
		overrides = append(overrides, typeSec)
		nextType := typeSec.Key("STORAGE_TYPE").String()
		if len(nextType) > 0 {
			storage.Type = nextType // Support custom STORAGE_TYPE
		}
	}
	overrides = append(overrides, sec)

	for _, override := range overrides {
		for _, key := range storage.Section.Keys() {
			if !override.HasKey(key.Name()) {
				_, _ = override.NewKey(key.Name(), key.Value())
		for _, key := range override.Keys() {
			if !targetSec.HasKey(key.Name()) {
				_, _ = targetSec.NewKey(key.Name(), key.Value())
			}
		}
		storage.ServeDirect = override.Key("SERVE_DIRECT").MustBool(false)
		storage.Section = override
		if len(storage.Type) == 0 {
			storage.Type = override.Key("STORAGE_TYPE").String()
		}
	}
	storage.ServeDirect = storage.Section.Key("SERVE_DIRECT").MustBool(false)

	// Specific defaults
	storage.Path = storage.Section.Key("PATH").MustString(filepath.Join(AppDataPath, name))

modules/setting/storage_test.go (new file, 197 lines)
@@ -0,0 +1,197 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package setting

import (
	"testing"

	"github.com/stretchr/testify/assert"
	ini "gopkg.in/ini.v1"
)

func Test_getStorageCustomType(t *testing.T) {
	iniStr := `
[attachment]
STORAGE_TYPE = my_minio
MINIO_BUCKET = gitea-attachment

[storage.my_minio]
STORAGE_TYPE = minio
MINIO_ENDPOINT = my_minio:9000
`
	Cfg, _ = ini.Load([]byte(iniStr))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "minio", storage.Type)
	assert.EqualValues(t, "my_minio:9000", storage.Section.Key("MINIO_ENDPOINT").String())
	assert.EqualValues(t, "gitea-attachment", storage.Section.Key("MINIO_BUCKET").String())
}

func Test_getStorageNameSectionOverridesTypeSection(t *testing.T) {
	iniStr := `
[attachment]
STORAGE_TYPE = minio

[storage.attachments]
MINIO_BUCKET = gitea-attachment

[storage.minio]
MINIO_BUCKET = gitea
`
	Cfg, _ = ini.Load([]byte(iniStr))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "minio", storage.Type)
	assert.EqualValues(t, "gitea-attachment", storage.Section.Key("MINIO_BUCKET").String())
}

func Test_getStorageTypeSectionOverridesStorageSection(t *testing.T) {
	iniStr := `
[attachment]
STORAGE_TYPE = minio

[storage.minio]
MINIO_BUCKET = gitea-minio

[storage]
MINIO_BUCKET = gitea
`
	Cfg, _ = ini.Load([]byte(iniStr))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "minio", storage.Type)
	assert.EqualValues(t, "gitea-minio", storage.Section.Key("MINIO_BUCKET").String())
}

func Test_getStorageSpecificOverridesStorage(t *testing.T) {
	iniStr := `
[attachment]
STORAGE_TYPE = minio
MINIO_BUCKET = gitea-attachment

[storage.attachments]
MINIO_BUCKET = gitea

[storage]
STORAGE_TYPE = local
`
	Cfg, _ = ini.Load([]byte(iniStr))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "minio", storage.Type)
	assert.EqualValues(t, "gitea-attachment", storage.Section.Key("MINIO_BUCKET").String())
}

func Test_getStorageGetDefaults(t *testing.T) {
	Cfg, _ = ini.Load([]byte(""))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "gitea", storage.Section.Key("MINIO_BUCKET").String())
}

func Test_getStorageMultipleName(t *testing.T) {
	iniStr := `
[lfs]
MINIO_BUCKET = gitea-lfs

[attachment]
MINIO_BUCKET = gitea-attachment

[storage]
MINIO_BUCKET = gitea-storage
`
	Cfg, _ = ini.Load([]byte(iniStr))

	{
		sec := Cfg.Section("attachment")
		storageType := sec.Key("STORAGE_TYPE").MustString("")
		storage := getStorage("attachments", storageType, sec)

		assert.EqualValues(t, "gitea-attachment", storage.Section.Key("MINIO_BUCKET").String())
	}
	{
		sec := Cfg.Section("lfs")
		storageType := sec.Key("STORAGE_TYPE").MustString("")
		storage := getStorage("lfs", storageType, sec)

		assert.EqualValues(t, "gitea-lfs", storage.Section.Key("MINIO_BUCKET").String())
	}
	{
		sec := Cfg.Section("avatar")
		storageType := sec.Key("STORAGE_TYPE").MustString("")
		storage := getStorage("avatars", storageType, sec)

		assert.EqualValues(t, "gitea-storage", storage.Section.Key("MINIO_BUCKET").String())
	}
}

func Test_getStorageUseOtherNameAsType(t *testing.T) {
	iniStr := `
[attachment]
STORAGE_TYPE = lfs

[storage.lfs]
MINIO_BUCKET = gitea-storage
`
	Cfg, _ = ini.Load([]byte(iniStr))

	{
		sec := Cfg.Section("attachment")
		storageType := sec.Key("STORAGE_TYPE").MustString("")
		storage := getStorage("attachments", storageType, sec)

		assert.EqualValues(t, "gitea-storage", storage.Section.Key("MINIO_BUCKET").String())
	}
	{
		sec := Cfg.Section("lfs")
		storageType := sec.Key("STORAGE_TYPE").MustString("")
		storage := getStorage("lfs", storageType, sec)

		assert.EqualValues(t, "gitea-storage", storage.Section.Key("MINIO_BUCKET").String())
	}
}

func Test_getStorageInheritStorageType(t *testing.T) {
	iniStr := `
[storage]
STORAGE_TYPE = minio
`
	Cfg, _ = ini.Load([]byte(iniStr))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "minio", storage.Type)
}

func Test_getStorageInheritNameSectionType(t *testing.T) {
	iniStr := `
[storage.attachments]
STORAGE_TYPE = minio
`
	Cfg, _ = ini.Load([]byte(iniStr))

	sec := Cfg.Section("attachment")
	storageType := sec.Key("STORAGE_TYPE").MustString("")
	storage := getStorage("attachments", storageType, sec)

	assert.EqualValues(t, "minio", storage.Type)
}
@@ -196,13 +196,17 @@ func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {

// Listen starts a SSH server listens on given port.
func Listen(host string, port int, ciphers []string, keyExchanges []string, macs []string) {
	// TODO: Handle ciphers, keyExchanges, and macs

	srv := ssh.Server{
		Addr:             fmt.Sprintf("%s:%d", host, port),
		PublicKeyHandler: publicKeyHandler,
		Handler:          sessionHandler,

		ServerConfigCallback: func(ctx ssh.Context) *gossh.ServerConfig {
			config := &gossh.ServerConfig{}
			config.KeyExchanges = keyExchanges
			config.MACs = macs
			config.Ciphers = ciphers
			return config
		},
		// We need to explicitly disable the PtyCallback so text displays
		// properly.
		PtyCallback: func(ctx ssh.Context, pty ssh.Pty) bool {

@@ -7,6 +7,7 @@ package storage

import (
	"context"
	"io"
	"io/ioutil"
	"net/url"
	"os"
	"path/filepath"
@@ -24,13 +25,15 @@ const LocalStorageType Type = "local"

// LocalStorageConfig represents the configuration for a local storage
type LocalStorageConfig struct {
	Path string `ini:"PATH"`
	Path          string `ini:"PATH"`
	TemporaryPath string `ini:"TEMPORARY_PATH"`
}

// LocalStorage represents a local files storage
type LocalStorage struct {
	ctx context.Context
	dir string
	ctx    context.Context
	dir    string
	tmpdir string
}

// NewLocalStorage returns a local files
@@ -45,9 +48,14 @@ func NewLocalStorage(ctx context.Context, cfg interface{}) (ObjectStorage, error
		return nil, err
	}

	if config.TemporaryPath == "" {
		config.TemporaryPath = config.Path + "/tmp"
	}

	return &LocalStorage{
		ctx: ctx,
		dir: config.Path,
		ctx:    ctx,
		dir:    config.Path,
		tmpdir: config.TemporaryPath,
	}, nil
}

@@ -57,23 +65,43 @@ func (l *LocalStorage) Open(path string) (Object, error) {
}

// Save a file
func (l *LocalStorage) Save(path string, r io.Reader) (int64, error) {
func (l *LocalStorage) Save(path string, r io.Reader, size int64) (int64, error) {
	p := filepath.Join(l.dir, path)
	if err := os.MkdirAll(filepath.Dir(p), os.ModePerm); err != nil {
		return 0, err
	}

	// always override
	if err := util.Remove(p); err != nil {
	// Create a temporary file to save to
	if err := os.MkdirAll(l.tmpdir, os.ModePerm); err != nil {
		return 0, err
	}

	f, err := os.Create(p)
	tmp, err := ioutil.TempFile(l.tmpdir, "upload-*")
	if err != nil {
		return 0, err
	}
	defer f.Close()
	return io.Copy(f, r)
	tmpRemoved := false
	defer func() {
		if !tmpRemoved {
			_ = util.Remove(tmp.Name())
		}
	}()

	n, err := io.Copy(tmp, r)
	if err != nil {
		return 0, err
	}

	if err := tmp.Close(); err != nil {
		return 0, err
	}

	if err := os.Rename(tmp.Name(), p); err != nil {
		return 0, err
	}

	tmpRemoved = true

	return n, nil
}

// Stat returns the info of the file

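A minimal, generic sketch of the same write-to-temp-then-rename idea used by `LocalStorage.Save` above, so readers never observe a half-written file; it is not the Gitea implementation, and it uses `os.CreateTemp` where the diff uses `ioutil.TempFile`. The rename is atomic on POSIX filesystems when the temporary directory and the destination are on the same volume.

```go
package main

import (
	"io"
	"os"
	"path/filepath"
	"strings"
)

// saveAtomically copies r to path via a temporary file in tmpDir and then
// renames it into place, so the destination is either absent or complete.
func saveAtomically(tmpDir, path string, r io.Reader) (int64, error) {
	if err := os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil {
		return 0, err
	}
	if err := os.MkdirAll(tmpDir, os.ModePerm); err != nil {
		return 0, err
	}
	tmp, err := os.CreateTemp(tmpDir, "upload-*")
	if err != nil {
		return 0, err
	}
	defer os.Remove(tmp.Name()) // no-op after a successful rename

	n, err := io.Copy(tmp, r)
	if err != nil {
		tmp.Close()
		return 0, err
	}
	if err := tmp.Close(); err != nil {
		return 0, err
	}
	if err := os.Rename(tmp.Name(), path); err != nil {
		return 0, err
	}
	return n, nil
}

func main() {
	dst := filepath.Join(os.TempDir(), "demo", "file.txt")
	_, _ = saveAtomically(os.TempDir(), dst, strings.NewReader("hello"))
}
```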
@@ -129,13 +129,13 @@ func (m *MinioStorage) Open(path string) (Object, error) {
}

// Save save a file to minio
func (m *MinioStorage) Save(path string, r io.Reader) (int64, error) {
func (m *MinioStorage) Save(path string, r io.Reader, size int64) (int64, error) {
	uploadInfo, err := m.client.PutObject(
		m.ctx,
		m.bucket,
		m.buildMinioPath(path),
		r,
		-1,
		size,
		minio.PutObjectOptions{ContentType: "application/octet-stream"},
	)
	if err != nil {

@@ -65,7 +65,8 @@ type Object interface {
// ObjectStorage represents an object storage to handle a bucket and files
type ObjectStorage interface {
	Open(path string) (Object, error)
	Save(path string, r io.Reader) (int64, error)
	// Save store a object, if size is unknown set -1
	Save(path string, r io.Reader, size int64) (int64, error)
	Stat(path string) (os.FileInfo, error)
	Delete(path string) error
	URL(path, name string) (*url.URL, error)
@@ -80,7 +81,13 @@ func Copy(dstStorage ObjectStorage, dstPath string, srcStorage ObjectStorage, sr
	}
	defer f.Close()

	return dstStorage.Save(dstPath, f)
	size := int64(-1)
	fsinfo, err := f.Stat()
	if err == nil {
		size = fsinfo.Size()
	}

	return dstStorage.Save(dstPath, f, size)
}

// SaveFrom saves data to the ObjectStorage with path p from the callback
@@ -94,7 +101,7 @@ func SaveFrom(objStorage ObjectStorage, p string, callback func(w io.Writer) err
		}
	}()

	_, err := objStorage.Save(p, pr)
	_, err := objStorage.Save(p, pr, -1)
	return err
}

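Every ObjectStorage implementation and every caller now has to pass a size to Save: a real length when it is cheap to obtain (Copy takes it from Stat), and -1 when only a stream is available (as SaveFrom does). A caller-side sketch of the new contract, assuming the usual modules/storage import; the file names and paths here are illustrative, not from the patch:

package example

import (
	"io"
	"os"

	"code.gitea.io/gitea/modules/storage"
)

// uploadExamples sketches the new three-argument Save contract.
func uploadExamples(store storage.ObjectStorage) error {
	// Size known up front: take it from Stat, as storage.Copy now does.
	f, err := os.Open("avatar.png")
	if err != nil {
		return err
	}
	defer f.Close()
	fi, err := f.Stat()
	if err != nil {
		return err
	}
	if _, err := store.Save("avatars/1.png", f, fi.Size()); err != nil {
		return err
	}

	// Size unknown (streaming): pass -1, exactly as SaveFrom now does.
	pr, pw := io.Pipe()
	go func() {
		_, _ = pw.Write([]byte("generated on the fly"))
		_ = pw.Close()
	}()
	_, err = store.Save("generated/data.txt", pr, -1)
	return err
}
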
@@ -105,7 +105,7 @@ type CreateRepoOption struct {
	Description string `json:"description" binding:"MaxSize(255)"`
	// Whether the repository is private
	Private bool `json:"private"`
	// Issue Label set to use
	// Label-Set to use
	IssueLabels string `json:"issue_labels"`
	// Whether the repository should be auto-intialized?
	AutoInit bool `json:"auto_init"`

@@ -5,6 +5,7 @@
package task

import (
	"context"
	"errors"
	"fmt"
	"strings"
@@ -15,6 +16,7 @@ import (
	"code.gitea.io/gitea/modules/migrations"
	migration "code.gitea.io/gitea/modules/migrations/base"
	"code.gitea.io/gitea/modules/notification"
	"code.gitea.io/gitea/modules/process"
	"code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/timeutil"
	"code.gitea.io/gitea/modules/util"
@@ -82,11 +84,6 @@ func runMigrateTask(t *models.Task) (err error) {
	if err = t.LoadOwner(); err != nil {
		return
	}
	t.StartTime = timeutil.TimeStampNow()
	t.Status = structs.TaskStatusRunning
	if err = t.UpdateCols("start_time", "status"); err != nil {
		return
	}

	var opts *migration.MigrateOptions
	opts, err = t.MigrateConfig()
@@ -96,7 +93,20 @@ func runMigrateTask(t *models.Task) (err error) {

	opts.MigrateToRepoID = t.RepoID
	var repo *models.Repository
	repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), t.Doer, t.Owner.Name, *opts)

	ctx, cancel := context.WithCancel(graceful.GetManager().ShutdownContext())
	defer cancel()
	pm := process.GetManager()
	pid := pm.Add(fmt.Sprintf("MigrateTask: %s/%s", t.Owner.Name, opts.RepoName), cancel)
	defer pm.Remove(pid)

	t.StartTime = timeutil.TimeStampNow()
	t.Status = structs.TaskStatusRunning
	if err = t.UpdateCols("start_time", "status"); err != nil {
		return
	}

	repo, err = migrations.MigrateRepository(ctx, t.Doer, t.Owner.Name, *opts)
	if err == nil {
		log.Trace("Repository migrated [%d]: %s/%s", repo.ID, t.Owner.Name, repo.Name)
		return

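runMigrateTask now runs the migration on its own cancellable context derived from the graceful shutdown context rather than the hammer context, and hands the cancel function to the process manager, so a long or unwanted migration is visible in the process list and can be aborted; the task is only marked as running once that wiring is in place. A self-contained sketch of the register-cancel-with-a-manager pattern (the tiny manager type below is an illustrative stand-in, not Gitea's process.Manager):

package main

import (
	"context"
	"fmt"
	"sync"
	"time"
)

// manager is a toy stand-in that tracks cancel funcs per process id.
type manager struct {
	mu    sync.Mutex
	next  int64
	procs map[int64]context.CancelFunc
}

func (m *manager) Add(desc string, cancel context.CancelFunc) int64 {
	m.mu.Lock()
	defer m.mu.Unlock()
	m.next++
	m.procs[m.next] = cancel
	fmt.Println("registered:", desc)
	return m.next
}

func (m *manager) Remove(pid int64) { m.mu.Lock(); delete(m.procs, pid); m.mu.Unlock() }

// Cancel lets an operator abort a tracked task by pid.
func (m *manager) Cancel(pid int64) {
	m.mu.Lock()
	if c, ok := m.procs[pid]; ok {
		c()
	}
	m.mu.Unlock()
}

func main() {
	pm := &manager{procs: map[int64]context.CancelFunc{}}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	pid := pm.Add("MigrateTask: org/repo", cancel)
	defer pm.Remove(pid)

	go func() { time.Sleep(50 * time.Millisecond); pm.Cancel(pid) }() // operator aborts

	<-ctx.Done() // the long-running migration would watch ctx and stop here
	fmt.Println("migration stopped:", ctx.Err())
}
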
@@ -689,6 +689,11 @@ func ActionIcon(opType models.ActionType) string {
// ActionContent2Commits converts action content to push commits
func ActionContent2Commits(act Actioner) *repository.PushCommits {
	push := repository.NewPushCommits()

	if act == nil || act.GetContent() == "" {
		return push
	}

	if err := json.Unmarshal([]byte(act.GetContent()), push); err != nil {
		log.Error("json.Unmarshal:\n%s\nERROR: %v", act.GetContent(), err)
	}

@@ -7,6 +7,7 @@ package timeutil
import (
	"fmt"
	"html/template"
	"math"
	"strings"
	"time"

@@ -25,7 +26,11 @@ const (
	Year   = 12 * Month
)

func computeTimeDiff(diff int64, lang string) (int64, string) {
func round(s float64) int64 {
	return int64(math.Round(s))
}

func computeTimeDiffFloor(diff int64, lang string) (int64, string) {
	diffStr := ""
	switch {
	case diff <= 0:
@@ -83,6 +88,94 @@ func computeTimeDiff(diff int64, lang string) (int64, string) {
	return diff, diffStr
}

func computeTimeDiff(diff int64, lang string) (int64, string) {
	diffStr := ""
	switch {
	case diff <= 0:
		diff = 0
		diffStr = i18n.Tr(lang, "tool.now")
	case diff < 2:
		diff = 0
		diffStr = i18n.Tr(lang, "tool.1s")
	case diff < 1*Minute:
		diffStr = i18n.Tr(lang, "tool.seconds", diff)
		diff = 0

	case diff < Minute+Minute/2:
		diff -= 1 * Minute
		diffStr = i18n.Tr(lang, "tool.1m")
	case diff < 1*Hour:
		minutes := round(float64(diff) / Minute)
		if minutes > 1 {
			diffStr = i18n.Tr(lang, "tool.minutes", minutes)
		} else {
			diffStr = i18n.Tr(lang, "tool.1m")
		}
		diff -= diff / Minute * Minute

	case diff < Hour+Hour/2:
		diff -= 1 * Hour
		diffStr = i18n.Tr(lang, "tool.1h")
	case diff < 1*Day:
		hours := round(float64(diff) / Hour)
		if hours > 1 {
			diffStr = i18n.Tr(lang, "tool.hours", hours)
		} else {
			diffStr = i18n.Tr(lang, "tool.1h")
		}
		diff -= diff / Hour * Hour

	case diff < Day+Day/2:
		diff -= 1 * Day
		diffStr = i18n.Tr(lang, "tool.1d")
	case diff < 1*Week:
		days := round(float64(diff) / Day)
		if days > 1 {
			diffStr = i18n.Tr(lang, "tool.days", days)
		} else {
			diffStr = i18n.Tr(lang, "tool.1d")
		}
		diff -= diff / Day * Day

	case diff < Week+Week/2:
		diff -= 1 * Week
		diffStr = i18n.Tr(lang, "tool.1w")
	case diff < 1*Month:
		weeks := round(float64(diff) / Week)
		if weeks > 1 {
			diffStr = i18n.Tr(lang, "tool.weeks", weeks)
		} else {
			diffStr = i18n.Tr(lang, "tool.1w")
		}
		diff -= diff / Week * Week

	case diff < 1*Month+Month/2:
		diff -= 1 * Month
		diffStr = i18n.Tr(lang, "tool.1mon")
	case diff < 1*Year:
		months := round(float64(diff) / Month)
		if months > 1 {
			diffStr = i18n.Tr(lang, "tool.months", months)
		} else {
			diffStr = i18n.Tr(lang, "tool.1mon")
		}
		diff -= diff / Month * Month

	case diff < Year+Year/2:
		diff -= 1 * Year
		diffStr = i18n.Tr(lang, "tool.1y")
	default:
		years := round(float64(diff) / Year)
		if years > 1 {
			diffStr = i18n.Tr(lang, "tool.years", years)
		} else {
			diffStr = i18n.Tr(lang, "tool.1y")
		}
		diff -= (diff / Year) * Year
	}
	return diff, diffStr
}

// MinutesToFriendly returns a user friendly string with number of minutes
// converted to hours and minutes.
func MinutesToFriendly(minutes int, lang string) string {
@@ -111,7 +204,7 @@ func timeSincePro(then, now time.Time, lang string) string {
			break
		}

		diff, diffStr = computeTimeDiff(diff, lang)
		diff, diffStr = computeTimeDiffFloor(diff, lang)
		timeStr += ", " + diffStr
	}
	return strings.TrimPrefix(timeStr, ", ")

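The old truncating logic survives as computeTimeDiffFloor (still used by timeSincePro, which accumulates exact year/month/day components), while computeTimeDiff now rounds to the nearest unit, so for example 90 seconds reads "2 minutes" instead of "1 minute". A minimal standalone illustration of the floor-versus-round difference (the helper names are made up; only the rounding rule is taken from the patch):

package main

import (
	"fmt"
	"math"
)

const Minute int64 = 60

// floorMinutes mirrors the old truncating behaviour.
func floorMinutes(diff int64) int64 { return diff / Minute }

// roundMinutes mirrors the new behaviour: round to the nearest minute.
func roundMinutes(diff int64) int64 { return int64(math.Round(float64(diff) / float64(Minute))) }

func main() {
	for _, diff := range []int64{89, 90, 119} {
		fmt.Printf("%4ds  floor=%d  round=%d\n", diff, floorMinutes(diff), roundMinutes(diff))
	}
	// Output:
	//   89s  floor=1  round=1
	//   90s  floor=1  round=2
	//  119s  floor=1  round=2
}
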
@@ -5,6 +5,7 @@
package timeutil

import (
	"fmt"
	"os"
	"testing"
	"time"
@@ -47,27 +48,39 @@ func TestTimeSince(t *testing.T) {

	// test that each diff in `diffs` yields the expected string
	test := func(expected string, diffs ...time.Duration) {
		for _, diff := range diffs {
			actual := timeSince(BaseDate, BaseDate.Add(diff), "en")
			assert.Equal(t, i18n.Tr("en", "tool.ago", expected), actual)
			actual = timeSince(BaseDate.Add(diff), BaseDate, "en")
			assert.Equal(t, i18n.Tr("en", "tool.from_now", expected), actual)
		}
		t.Run(expected, func(t *testing.T) {
			for _, diff := range diffs {
				actual := timeSince(BaseDate, BaseDate.Add(diff), "en")
				assert.Equal(t, i18n.Tr("en", "tool.ago", expected), actual)
				actual = timeSince(BaseDate.Add(diff), BaseDate, "en")
				assert.Equal(t, i18n.Tr("en", "tool.from_now", expected), actual)
			}
		})
	}
	test("1 second", time.Second, time.Second+50*time.Millisecond)
	test("2 seconds", 2*time.Second, 2*time.Second+50*time.Millisecond)
	test("1 minute", time.Minute, time.Minute+30*time.Second)
	test("2 minutes", 2*time.Minute, 2*time.Minute+30*time.Second)
	test("1 hour", time.Hour, time.Hour+30*time.Minute)
	test("2 hours", 2*time.Hour, 2*time.Hour+30*time.Minute)
	test("1 day", DayDur, DayDur+12*time.Hour)
	test("2 days", 2*DayDur, 2*DayDur+12*time.Hour)
	test("1 minute", time.Minute, time.Minute+29*time.Second)
	test("2 minutes", 2*time.Minute, time.Minute+30*time.Second)
	test("2 minutes", 2*time.Minute, 2*time.Minute+29*time.Second)
	test("1 hour", time.Hour, time.Hour+29*time.Minute)
	test("2 hours", 2*time.Hour, time.Hour+30*time.Minute)
	test("2 hours", 2*time.Hour, 2*time.Hour+29*time.Minute)
	test("3 hours", 3*time.Hour, 2*time.Hour+30*time.Minute)
	test("1 day", DayDur, DayDur+11*time.Hour)
	test("2 days", 2*DayDur, DayDur+12*time.Hour)
	test("2 days", 2*DayDur, 2*DayDur+11*time.Hour)
	test("3 days", 3*DayDur, 2*DayDur+12*time.Hour)
	test("1 week", WeekDur, WeekDur+3*DayDur)
	test("2 weeks", 2*WeekDur, WeekDur+4*DayDur)
	test("2 weeks", 2*WeekDur, 2*WeekDur+3*DayDur)
	test("1 month", MonthDur, MonthDur+15*DayDur)
	test("2 months", 2*MonthDur, 2*MonthDur+15*DayDur)
	test("1 year", YearDur, YearDur+6*MonthDur)
	test("2 years", 2*YearDur, 2*YearDur+6*MonthDur)
	test("3 weeks", 3*WeekDur, 2*WeekDur+4*DayDur)
	test("1 month", MonthDur, MonthDur+14*DayDur)
	test("2 months", 2*MonthDur, MonthDur+15*DayDur)
	test("2 months", 2*MonthDur, 2*MonthDur+14*DayDur)
	test("1 year", YearDur, YearDur+5*MonthDur)
	test("2 years", 2*YearDur, YearDur+6*MonthDur)
	test("2 years", 2*YearDur, 2*YearDur+5*MonthDur)
	test("3 years", 3*YearDur, 2*YearDur+6*MonthDur)
}

func TestTimeSincePro(t *testing.T) {
@@ -114,11 +127,11 @@ func TestHtmlTimeSince(t *testing.T) {
	}
	test("1 second", time.Second)
	test("3 minutes", 3*time.Minute+5*time.Second)
	test("1 day", DayDur+18*time.Hour)
	test("1 week", WeekDur+6*DayDur)
	test("3 months", 3*MonthDur+3*WeekDur)
	test("1 day", DayDur+11*time.Hour)
	test("1 week", WeekDur+3*DayDur)
	test("3 months", 3*MonthDur+2*WeekDur)
	test("2 years", 2*YearDur)
	test("3 years", 3*YearDur+11*MonthDur+4*WeekDur)
	test("3 years", 2*YearDur+11*MonthDur+4*WeekDur)
}

func TestComputeTimeDiff(t *testing.T) {
@@ -126,26 +139,35 @@ func TestComputeTimeDiff(t *testing.T) {
	// computeTimeDiff(base + offset) == (offset, str)
	test := func(base int64, str string, offsets ...int64) {
		for _, offset := range offsets {
			diff, diffStr := computeTimeDiff(base+offset, "en")
			assert.Equal(t, offset, diff)
			assert.Equal(t, str, diffStr)
			t.Run(fmt.Sprintf("%s:%d", str, offset), func(t *testing.T) {
				diff, diffStr := computeTimeDiff(base+offset, "en")
				assert.Equal(t, offset, diff)
				assert.Equal(t, str, diffStr)
			})
		}
	}
	test(0, "now", 0)
	test(1, "1 second", 0)
	test(2, "2 seconds", 0)
	test(Minute, "1 minute", 0, 1, 30, Minute-1)
	test(2*Minute, "2 minutes", 0, Minute-1)
	test(Hour, "1 hour", 0, 1, Hour-1)
	test(5*Hour, "5 hours", 0, Hour-1)
	test(Day, "1 day", 0, 1, Day-1)
	test(5*Day, "5 days", 0, Day-1)
	test(Week, "1 week", 0, 1, Week-1)
	test(3*Week, "3 weeks", 0, 4*Day+25000)
	test(Month, "1 month", 0, 1, Month-1)
	test(10*Month, "10 months", 0, Month-1)
	test(Year, "1 year", 0, Year-1)
	test(3*Year, "3 years", 0, Year-1)
	test(Minute, "1 minute", 0, 1, 29)
	test(Minute, "2 minutes", 30, Minute-1)
	test(2*Minute, "2 minutes", 0, 29)
	test(2*Minute, "3 minutes", 30, Minute-1)
	test(Hour, "1 hour", 0, 1, 29*Minute)
	test(Hour, "2 hours", 30*Minute, Hour-1)
	test(5*Hour, "5 hours", 0, 29*Minute)
	test(Day, "1 day", 0, 1, 11*Hour)
	test(Day, "2 days", 12*Hour, Day-1)
	test(5*Day, "5 days", 0, 11*Hour)
	test(Week, "1 week", 0, 1, 3*Day)
	test(Week, "2 weeks", 4*Day, Week-1)
	test(3*Week, "3 weeks", 0, 3*Day)
	test(Month, "1 month", 0, 1)
	test(Month, "2 months", 16*Day, Month-1)
	test(10*Month, "10 months", 0, 13*Day)
	test(Year, "1 year", 0, 179*Day)
	test(Year, "2 years", 180*Day, Year-1)
	test(3*Year, "3 years", 0, 179*Day)
}

func TestMinutesToFriendly(t *testing.T) {

@@ -17,11 +17,24 @@ import (
type (
	// FeishuPayload represents
	FeishuPayload struct {
		Title string `json:"title"`
		Text  string `json:"text"`
		MsgType string `json:"msg_type"` // text / post / image / share_chat / interactive
		Content struct {
			Text string `json:"text"`
		} `json:"content"`
	}
)

func newFeishuTextPayload(text string) *FeishuPayload {
	return &FeishuPayload{
		MsgType: "text",
		Content: struct {
			Text string `json:"text"`
		}{
			Text: text,
		},
	}
}

// SetSecret sets the Feishu secret
func (f *FeishuPayload) SetSecret(_ string) {}

@@ -42,34 +55,25 @@ var (
func (f *FeishuPayload) Create(p *api.CreatePayload) (api.Payloader, error) {
	// created tag/branch
	refName := git.RefEndName(p.Ref)
	title := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)
	text := fmt.Sprintf("[%s] %s %s created", p.Repo.FullName, p.RefType, refName)

	return &FeishuPayload{
		Text:  title,
		Title: title,
	}, nil
	return newFeishuTextPayload(text), nil
}

// Delete implements PayloadConvertor Delete method
func (f *FeishuPayload) Delete(p *api.DeletePayload) (api.Payloader, error) {
	// created tag/branch
	refName := git.RefEndName(p.Ref)
	title := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)
	text := fmt.Sprintf("[%s] %s %s deleted", p.Repo.FullName, p.RefType, refName)

	return &FeishuPayload{
		Text:  title,
		Title: title,
	}, nil
	return newFeishuTextPayload(text), nil
}

// Fork implements PayloadConvertor Fork method
func (f *FeishuPayload) Fork(p *api.ForkPayload) (api.Payloader, error) {
	title := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)
	text := fmt.Sprintf("%s is forked to %s", p.Forkee.FullName, p.Repo.FullName)

	return &FeishuPayload{
		Text:  title,
		Title: title,
	}, nil
	return newFeishuTextPayload(text), nil
}

// Push implements PayloadConvertor Push method
@@ -79,9 +83,7 @@ func (f *FeishuPayload) Push(p *api.PushPayload) (api.Payloader, error) {
		commitDesc string
	)

	title := fmt.Sprintf("[%s:%s] %s", p.Repo.FullName, branchName, commitDesc)

	var text string
	var text = fmt.Sprintf("[%s:%s] %s\n", p.Repo.FullName, branchName, commitDesc)
	// for each commit, generate attachment text
	for i, commit := range p.Commits {
		var authorName string
@@ -96,40 +98,28 @@ func (f *FeishuPayload) Push(p *api.PushPayload) (api.Payloader, error) {
		}
	}

	return &FeishuPayload{
		Text:  text,
		Title: title,
	}, nil
	return newFeishuTextPayload(text), nil
}

// Issue implements PayloadConvertor Issue method
func (f *FeishuPayload) Issue(p *api.IssuePayload) (api.Payloader, error) {
	text, issueTitle, attachmentText, _ := getIssuesPayloadInfo(p, noneLinkFormatter, true)

	return &FeishuPayload{
		Text:  text + "\r\n\r\n" + attachmentText,
		Title: issueTitle,
	}, nil
	return newFeishuTextPayload(issueTitle + "\r\n" + text + "\r\n\r\n" + attachmentText), nil
}

// IssueComment implements PayloadConvertor IssueComment method
func (f *FeishuPayload) IssueComment(p *api.IssueCommentPayload) (api.Payloader, error) {
	text, issueTitle, _ := getIssueCommentPayloadInfo(p, noneLinkFormatter, true)

	return &FeishuPayload{
		Text:  text + "\r\n\r\n" + p.Comment.Body,
		Title: issueTitle,
	}, nil
	return newFeishuTextPayload(issueTitle + "\r\n" + text + "\r\n\r\n" + p.Comment.Body), nil
}

// PullRequest implements PayloadConvertor PullRequest method
func (f *FeishuPayload) PullRequest(p *api.PullRequestPayload) (api.Payloader, error) {
	text, issueTitle, attachmentText, _ := getPullRequestPayloadInfo(p, noneLinkFormatter, true)

	return &FeishuPayload{
		Text:  text + "\r\n\r\n" + attachmentText,
		Title: issueTitle,
	}, nil
	return newFeishuTextPayload(issueTitle + "\r\n" + text + "\r\n\r\n" + attachmentText), nil
}

// Review implements PayloadConvertor Review method
@@ -147,28 +137,19 @@ func (f *FeishuPayload) Review(p *api.PullRequestPayload, event models.HookEvent

	}

	return &FeishuPayload{
		Text:  title + "\r\n\r\n" + text,
		Title: title,
	}, nil
	return newFeishuTextPayload(title + "\r\n\r\n" + text), nil
}

// Repository implements PayloadConvertor Repository method
func (f *FeishuPayload) Repository(p *api.RepositoryPayload) (api.Payloader, error) {
	var title string
	var text string
	switch p.Action {
	case api.HookRepoCreated:
		title = fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
		return &FeishuPayload{
			Text:  title,
			Title: title,
		}, nil
		text = fmt.Sprintf("[%s] Repository created", p.Repository.FullName)
		return newFeishuTextPayload(text), nil
	case api.HookRepoDeleted:
		title = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
		return &FeishuPayload{
			Title: title,
			Text:  title,
		}, nil
		text = fmt.Sprintf("[%s] Repository deleted", p.Repository.FullName)
		return newFeishuTextPayload(text), nil
	}

	return nil, nil
@@ -178,10 +159,7 @@ func (f *FeishuPayload) Repository(p *api.RepositoryPayload) (api.Payloader, err
func (f *FeishuPayload) Release(p *api.ReleasePayload) (api.Payloader, error) {
	text, _ := getReleasePayloadInfo(p, noneLinkFormatter, true)

	return &FeishuPayload{
		Text:  text,
		Title: text,
	}, nil
	return newFeishuTextPayload(text), nil
}

// GetFeishuPayload converts a ding talk webhook into a FeishuPayload

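All the convertors now funnel through newFeishuTextPayload, replacing the old flat title/text object with the msg_type plus nested content envelope that Feishu (Lark) text messages use, as the comment on the new struct suggests. A small sketch of the JSON this produces (the struct below is a local stand-in for FeishuPayload and the message text is invented):

package main

import (
	"encoding/json"
	"fmt"
)

// localPayload mirrors the new FeishuPayload shape for illustration only.
type localPayload struct {
	MsgType string `json:"msg_type"`
	Content struct {
		Text string `json:"text"`
	} `json:"content"`
}

func main() {
	p := localPayload{MsgType: "text"}
	p.Content.Text = "[org/repo] branch main created"

	out, _ := json.Marshal(p)
	fmt.Println(string(out))
	// {"msg_type":"text","content":{"text":"[org/repo] branch main created"}}
}
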
@@ -5,6 +5,8 @@
package admin

import (
	"net/url"
	"strconv"
	"strings"

	"code.gitea.io/gitea/models"
@@ -71,6 +73,8 @@ func UnadoptedRepos(ctx *context.Context) {
		opts.Page = 1
	}

	ctx.Data["CurrentPage"] = opts.Page

	doSearch := ctx.QueryBool("search")

	ctx.Data["search"] = doSearch
@@ -79,6 +83,7 @@ func UnadoptedRepos(ctx *context.Context) {
	if !doSearch {
		pager := context.NewPagination(0, opts.PageSize, opts.Page, 5)
		pager.SetDefaultParams(ctx)
		pager.AddParam(ctx, "search", "search")
		ctx.Data["Page"] = pager
		ctx.HTML(200, tplUnadoptedRepos)
		return
@@ -92,6 +97,7 @@ func UnadoptedRepos(ctx *context.Context) {
	ctx.Data["Dirs"] = repoNames
	pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5)
	pager.SetDefaultParams(ctx)
	pager.AddParam(ctx, "search", "search")
	ctx.Data["Page"] = pager
	ctx.HTML(200, tplUnadoptedRepos)
}
@@ -100,6 +106,9 @@ func UnadoptedRepos(ctx *context.Context) {
func AdoptOrDeleteRepository(ctx *context.Context) {
	dir := ctx.Query("id")
	action := ctx.Query("action")
	page := ctx.QueryInt("page")
	q := ctx.Query("q")

	dirSplit := strings.SplitN(dir, "/", 2)
	if len(dirSplit) != 2 {
		ctx.Redirect(setting.AppSubURL + "/admin/repos")
@@ -141,5 +150,5 @@ func AdoptOrDeleteRepository(ctx *context.Context) {
		}
		ctx.Flash.Success(ctx.Tr("repo.delete_preexisting_success", dir))
	}
	ctx.Redirect(setting.AppSubURL + "/admin/repos/unadopted")
	ctx.Redirect(setting.AppSubURL + "/admin/repos/unadopted?search=true&q=" + url.QueryEscape(q) + "&page=" + strconv.Itoa(page))
}

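After adopting or deleting a directory, the admin is now redirected back to the unadopted-repositories listing with the original search, query and page preserved, instead of being dropped back on the first page. A tiny standalone sketch of how that redirect URL is assembled with net/url and strconv (the values below are made up):

package main

import (
	"fmt"
	"net/url"
	"strconv"
)

func main() {
	// Illustrative values; the handler reads these from the request.
	appSubURL := ""
	q := "team/repo name"
	page := 3

	redirect := appSubURL + "/admin/repos/unadopted?search=true&q=" +
		url.QueryEscape(q) + "&page=" + strconv.Itoa(page)
	fmt.Println(redirect)
	// /admin/repos/unadopted?search=true&q=team%2Frepo+name&page=3
}
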
@@ -17,19 +17,28 @@ import (
	"code.gitea.io/gitea/routers/api/v1/utils"
)

func listUserOrgs(ctx *context.APIContext, u *models.User, all bool) {
	if err := u.GetOrganizations(&models.SearchOrganizationsOptions{
		ListOptions: utils.GetListOptions(ctx),
		All:         all,
	}); err != nil {
		ctx.Error(http.StatusInternalServerError, "GetOrganizations", err)
func listUserOrgs(ctx *context.APIContext, u *models.User) {

	listOptions := utils.GetListOptions(ctx)
	showPrivate := ctx.IsSigned && (ctx.User.IsAdmin || ctx.User.ID == u.ID)

	orgs, err := models.GetOrgsByUserID(u.ID, showPrivate)
	if err != nil {
		ctx.Error(http.StatusInternalServerError, "GetOrgsByUserID", err)
		return
	}
	maxResults := len(orgs)

	apiOrgs := make([]*api.Organization, len(u.Orgs))
	for i := range u.Orgs {
		apiOrgs[i] = convert.ToOrganization(u.Orgs[i])
	orgs = utils.PaginateUserSlice(orgs, listOptions.Page, listOptions.PageSize)

	apiOrgs := make([]*api.Organization, len(orgs))
	for i := range orgs {
		apiOrgs[i] = convert.ToOrganization(orgs[i])
	}

	ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
	ctx.Header().Set("X-Total-Count", fmt.Sprintf("%d", maxResults))
	ctx.Header().Set("Access-Control-Expose-Headers", "X-Total-Count, Link")
	ctx.JSON(http.StatusOK, &apiOrgs)
}

@@ -53,7 +62,7 @@ func ListMyOrgs(ctx *context.APIContext) {
	//   "200":
	//     "$ref": "#/responses/OrganizationList"

	listUserOrgs(ctx, ctx.User, true)
	listUserOrgs(ctx, ctx.User)
}

// ListUserOrgs list user's orgs
@@ -85,7 +94,7 @@ func ListUserOrgs(ctx *context.APIContext) {
	if ctx.Written() {
		return
	}
	listUserOrgs(ctx, u, ctx.User != nil && (ctx.User.IsAdmin || ctx.User.ID == u.ID))
	listUserOrgs(ctx, u)
}

// GetAll return list of all public organizations

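listUserOrgs now decides visibility itself (admins and the user being listed see private organizations), loads the full visible set with GetOrgsByUserID, reports its length via X-Total-Count and the Link header, and only then cuts the slice down to the requested page. A rough, self-contained sketch of that slice-windowing step; this is an assumption about what utils.PaginateUserSlice does, not its actual code:

package main

import "fmt"

// paginate is an illustrative stand-in for utils.PaginateUserSlice: it clamps
// a 1-based page/pageSize window onto a slice that was loaded in full.
func paginate(items []string, page, pageSize int) []string {
	if page < 1 {
		page = 1
	}
	if pageSize <= 0 {
		return items
	}
	start := (page - 1) * pageSize
	if start >= len(items) {
		return nil
	}
	end := start + pageSize
	if end > len(items) {
		end = len(items)
	}
	return items[start:end]
}

func main() {
	orgs := []string{"org1", "org2", "org3", "org4", "org5"}
	fmt.Println(paginate(orgs, 2, 2)) // [org3 org4]
	// X-Total-Count would still report 5, the length before slicing.
}
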
@@ -293,7 +293,6 @@ func CreatePullRequest(ctx *context.APIContext, form api.CreatePullRequestOption
	var (
		repo        = ctx.Repo.Repository
		labelIDs    []int64
		assigneeID  int64
		milestoneID int64
	)

@@ -354,7 +353,7 @@ func CreatePullRequest(ctx *context.APIContext, form api.CreatePullRequestOption
	}

	if form.Milestone > 0 {
		milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, milestoneID)
		milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, form.Milestone)
		if err != nil {
			if models.IsErrMilestoneNotExist(err) {
				ctx.NotFound()
@@ -378,7 +377,6 @@ func CreatePullRequest(ctx *context.APIContext, form api.CreatePullRequestOption
		PosterID:     ctx.User.ID,
		Poster:       ctx.User,
		MilestoneID:  milestoneID,
		AssigneeID:   assigneeID,
		IsPull:       true,
		Content:      form.Body,
		DeadlineUnix: deadlineUnix,

@@ -539,6 +539,10 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err
	if opts.Private != nil {
		// Visibility of forked repository is forced sync with base repository.
		if repo.IsFork {
			if err := repo.GetBaseRepo(); err != nil {
				ctx.Error(http.StatusInternalServerError, "Unable to load base repository", err)
				return err
			}
			*opts.Private = repo.BaseRepo.IsPrivate
		}

Some files were not shown because too many files have changed in this diff.