Mirror of https://github.com/go-gitea/gitea.git (synced 2025-11-10 15:32:55 +09:00)

Compare commits (24 commits)
| SHA1 |
|---|
| c7d8181a70 |
| 548ae3eb98 |
| 2c383d812d |
| ef12b8de80 |
| dd1ba34ee5 |
| 1fbdf96c34 |
| 5159055278 |
| 06da10b9a1 |
| 175ebc6f88 |
| 3aecea2e6e |
| cae8c63517 |
| 8ace5c1161 |
| a87b813955 |
| 3baeec745c |
| befb6bea22 |
| 79f0b1a50b |
| 79a3d277e5 |
| eb748ff79e |
| c5770195d9 |
| a20ccec369 |
| 9c2b7a196e |
| 1e278b15c2 |
| fde6ff6a75 |
| 51f4f8c393 |
CHANGELOG.md (28 changed lines)
@@ -4,6 +4,34 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).

## [1.15.6](https://github.com/go-gitea/gitea/releases/tag/v1.15.6) - 2021-10-28

* BUGFIXES
  * Prevent panic in serv.go with Deploy Keys (#17434) (#17435)
  * Fix CSV render error (#17406) (#17431)
  * Read expected buffer size (#17409) (#17430)
  * Ensure that restricted users can access repos for which they are members (#17460) (#17464)
  * Make commit-statuses popup show correctly (#17447) (#17466)
* TESTING
  * Add integration tests for private.NoServCommand and private.ServCommand (#17456) (#17463)

## [1.15.5](https://github.com/go-gitea/gitea/releases/tag/v1.15.5) - 2021-10-21

* SECURITY
  * Upgrade Bluemonday to v1.0.16 (#17372) (#17374)
  * Ensure correct SSH permissions check for private and restricted users (#17370) (#17373)
* BUGFIXES
  * Prevent NPE in CSV diff rendering when column removed (#17018) (#17377)
  * Offer rsa-sha2-512 and rsa-sha2-256 algorithms in internal SSH (#17281) (#17376)
  * Don't panic if we fail to parse U2FRegistration data (#17304) (#17371)
  * Ensure popup text is aligned left (backport for 1.15) (#17343)
  * Ensure that git daemon export ok is created for mirrors (#17243) (#17306)
  * Disable core.protectNTFS (#17300) (#17302)
  * Use pointer for wrappedConn methods (#17295) (#17296)
  * AutoRegistration is supposed to be working with disabled registration (backport) (#17292)
  * Handle duplicate keys on GPG key ring (#17242) (#17284)
  * Fix SVG side by side comparison link (#17375) (#17391)

## [1.15.4](https://github.com/go-gitea/gitea/releases/tag/v1.15.4) - 2021-10-08

* BUGFIXES
  * Raw file API: don't try to interpret 40char filenames as commit SHA (#17185) (#17272)
@@ -576,6 +576,8 @@ PATH =
;;
;; (Go-Git only) Don't cache objects greater than this in memory. (Set to 0 to disable.)
;LARGE_OBJECT_THRESHOLD = 1048576
;; Set to true to forcibly set core.protectNTFS=false
;DISABLE_CORE_PROTECT_NTFS=false

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -839,6 +839,7 @@ NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take ef
- `VERBOSE_PUSH`: **true**: Print status information about pushes as they are being processed.
- `VERBOSE_PUSH_DELAY`: **5s**: Only print verbose information if push takes longer than this delay.
- `LARGE_OBJECT_THRESHOLD`: **1048576**: (Go-Git only) Don't cache objects greater than this in memory. (Set to 0 to disable.)
- `DISABLE_CORE_PROTECT_NTFS`: **false**: Set to true to forcibly set `core.protectNTFS` to false.

## Git - Timeout settings (`git.timeout`)

- `DEFAULT`: **360**: Git operations default timeout seconds.
- `MIGRATE`: **600**: Migrate external repositories timeout seconds.
go.mod (4 changed lines)
@@ -80,7 +80,7 @@ require (
	github.com/mattn/go-runewidth v0.0.13 // indirect
	github.com/mattn/go-sqlite3 v1.14.8
	github.com/mholt/archiver/v3 v3.5.0
	github.com/microcosm-cc/bluemonday v1.0.15
	github.com/microcosm-cc/bluemonday v1.0.16
	github.com/miekg/dns v1.1.43 // indirect
	github.com/minio/md5-simd v1.1.2 // indirect
	github.com/minio/minio-go/v7 v7.0.12

@@ -125,7 +125,7 @@ require (
	go.uber.org/multierr v1.7.0 // indirect
	go.uber.org/zap v1.18.1 // indirect
	golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e
	golang.org/x/net v0.0.0-20210614182718-04defd469f4e
	golang.org/x/net v0.0.0-20211020060615-d418f374d309
	golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914
	golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c
	golang.org/x/text v0.3.6
go.sum (7 changed lines)
@@ -868,8 +868,8 @@ github.com/mholt/acmez v0.1.3 h1:J7MmNIk4Qf9b8mAGqAh4XkNeowv3f1zW816yf4zt7Qk=
github.com/mholt/acmez v0.1.3/go.mod h1:8qnn8QA/Ewx8E3ZSsmscqsIjhhpxuy9vqdgbX2ceceM=
github.com/mholt/archiver/v3 v3.5.0 h1:nE8gZIrw66cu4osS/U7UW7YDuGMHssxKutU8IfWxwWE=
github.com/mholt/archiver/v3 v3.5.0/go.mod h1:qqTTPUK/HZPFgFQ/TJ3BzvTpF/dPtFVJXdQbCmeMxwc=
github.com/microcosm-cc/bluemonday v1.0.15 h1:J4uN+qPng9rvkBZBoBb8YGR+ijuklIMpSOZZLjYpbeY=
github.com/microcosm-cc/bluemonday v1.0.15/go.mod h1:ZLvAzeakRwrGnzQEvstVzVt3ZpqOF2+sdFr0Om+ce30=
github.com/microcosm-cc/bluemonday v1.0.16 h1:kHmAq2t7WPWLjiGvzKa5o3HzSfahUKiOq7fAPUiMNIc=
github.com/microcosm-cc/bluemonday v1.0.16/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.42/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4=
github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg=

@@ -1364,8 +1364,9 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k=
golang.org/x/net v0.0.0-20210331060903-cb1fcc7394e5/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211020060615-d418f374d309 h1:A0lJIi+hcTR6aajJH4YqKWwohY4aW9RO7oRMcdv+HKI=
golang.org/x/net v0.0.0-20211020060615-d418f374d309/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
integrations/api_private_serv_test.go (new file, 154 lines)
@@ -0,0 +1,154 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package integrations

import (
	"context"
	"net/url"
	"testing"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/private"
	"github.com/stretchr/testify/assert"
)

func TestAPIPrivateNoServ(t *testing.T) {
	onGiteaRun(t, func(*testing.T, *url.URL) {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()
		key, user, err := private.ServNoCommand(ctx, 1)
		assert.NoError(t, err)
		assert.Equal(t, int64(2), user.ID)
		assert.Equal(t, "user2", user.Name)
		assert.Equal(t, int64(1), key.ID)
		assert.Equal(t, "user2@localhost", key.Name)

		deployKey, err := models.AddDeployKey(1, "test-deploy", "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBGXEEzWmm1dxb+57RoK5KVCL0w2eNv9cqJX2AGGVlkFsVDhOXHzsadS3LTK4VlEbbrDMJdoti9yM8vclA8IeRacAAAAEc3NoOg== nocomment", false)
		assert.NoError(t, err)

		key, user, err = private.ServNoCommand(ctx, deployKey.KeyID)
		assert.NoError(t, err)
		assert.Empty(t, user)
		assert.Equal(t, deployKey.KeyID, key.ID)
		assert.Equal(t, "test-deploy", key.Name)
	})
}

func TestAPIPrivateServ(t *testing.T) {
	onGiteaRun(t, func(*testing.T, *url.URL) {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()

		// Can push to a repo we own
		results, err := private.ServCommand(ctx, 1, "user2", "repo1", models.AccessModeWrite, "git-upload-pack", "")
		assert.NoError(t, err)
		assert.False(t, results.IsWiki)
		assert.False(t, results.IsDeployKey)
		assert.Equal(t, int64(1), results.KeyID)
		assert.Equal(t, "user2@localhost", results.KeyName)
		assert.Equal(t, "user2", results.UserName)
		assert.Equal(t, int64(2), results.UserID)
		assert.Equal(t, "user2", results.OwnerName)
		assert.Equal(t, "repo1", results.RepoName)
		assert.Equal(t, int64(1), results.RepoID)

		// Cannot push to a private repo we're not associated with
		results, err = private.ServCommand(ctx, 1, "user15", "big_test_private_1", models.AccessModeWrite, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Cannot pull from a private repo we're not associated with
		results, err = private.ServCommand(ctx, 1, "user15", "big_test_private_1", models.AccessModeRead, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Can pull from a public repo we're not associated with
		results, err = private.ServCommand(ctx, 1, "user15", "big_test_public_1", models.AccessModeRead, "git-upload-pack", "")
		assert.NoError(t, err)
		assert.False(t, results.IsWiki)
		assert.False(t, results.IsDeployKey)
		assert.Equal(t, int64(1), results.KeyID)
		assert.Equal(t, "user2@localhost", results.KeyName)
		assert.Equal(t, "user2", results.UserName)
		assert.Equal(t, int64(2), results.UserID)
		assert.Equal(t, "user15", results.OwnerName)
		assert.Equal(t, "big_test_public_1", results.RepoName)
		assert.Equal(t, int64(17), results.RepoID)

		// Cannot push to a public repo we're not associated with
		results, err = private.ServCommand(ctx, 1, "user15", "big_test_public_1", models.AccessModeWrite, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Add reading deploy key
		deployKey, err := models.AddDeployKey(19, "test-deploy", "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBGXEEzWmm1dxb+57RoK5KVCL0w2eNv9cqJX2AGGVlkFsVDhOXHzsadS3LTK4VlEbbrDMJdoti9yM8vclA8IeRacAAAAEc3NoOg== nocomment", true)
		assert.NoError(t, err)

		// Can pull from repo we're a deploy key for
		results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_1", models.AccessModeRead, "git-upload-pack", "")
		assert.NoError(t, err)
		assert.False(t, results.IsWiki)
		assert.True(t, results.IsDeployKey)
		assert.Equal(t, deployKey.KeyID, results.KeyID)
		assert.Equal(t, "test-deploy", results.KeyName)
		assert.Equal(t, "user15", results.UserName)
		assert.Equal(t, int64(15), results.UserID)
		assert.Equal(t, "user15", results.OwnerName)
		assert.Equal(t, "big_test_private_1", results.RepoName)
		assert.Equal(t, int64(19), results.RepoID)

		// Cannot push to a private repo with reading key
		results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_1", models.AccessModeWrite, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Cannot pull from a private repo we're not associated with
		results, err = private.ServCommand(ctx, deployKey.ID, "user15", "big_test_private_2", models.AccessModeRead, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Cannot pull from a public repo we're not associated with
		results, err = private.ServCommand(ctx, deployKey.ID, "user15", "big_test_public_1", models.AccessModeRead, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Add writing deploy key
		deployKey, err = models.AddDeployKey(20, "test-deploy", "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBGXEEzWmm1dxb+57RoK5KVCL0w2eNv9cqJX2AGGVlkFsVDhOXHzsadS3LTK4VlEbbrDMJdoti9yM8vclA8IeRacAAAAEc3NoOg== nocomment", false)
		assert.NoError(t, err)

		// Cannot push to a private repo with reading key
		results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_1", models.AccessModeWrite, "git-upload-pack", "")
		assert.Error(t, err)
		assert.Empty(t, results)

		// Can pull from repo we're a writing deploy key for
		results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_2", models.AccessModeRead, "git-upload-pack", "")
		assert.NoError(t, err)
		assert.False(t, results.IsWiki)
		assert.True(t, results.IsDeployKey)
		assert.Equal(t, deployKey.KeyID, results.KeyID)
		assert.Equal(t, "test-deploy", results.KeyName)
		assert.Equal(t, "user15", results.UserName)
		assert.Equal(t, int64(15), results.UserID)
		assert.Equal(t, "user15", results.OwnerName)
		assert.Equal(t, "big_test_private_2", results.RepoName)
		assert.Equal(t, int64(20), results.RepoID)

		// Can push to repo we're a writing deploy key for
		results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_2", models.AccessModeWrite, "git-upload-pack", "")
		assert.NoError(t, err)
		assert.False(t, results.IsWiki)
		assert.True(t, results.IsDeployKey)
		assert.Equal(t, deployKey.KeyID, results.KeyID)
		assert.Equal(t, "test-deploy", results.KeyName)
		assert.Equal(t, "user15", results.UserName)
		assert.Equal(t, int64(15), results.UserID)
		assert.Equal(t, "user15", results.OwnerName)
		assert.Equal(t, "big_test_private_2", results.RepoName)
		assert.Equal(t, int64(20), results.RepoID)

	})

}
@@ -8,8 +8,10 @@ import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
"code.gitea.io/gitea/modules/timeutil"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
@@ -20,6 +22,10 @@ func TestUserHeatmap(t *testing.T) {
|
||||
normalUsername := "user2"
|
||||
session := loginUser(t, adminUsername)
|
||||
|
||||
var fakeNow = time.Date(2011, 10, 20, 0, 0, 0, 0, time.Local)
|
||||
timeutil.Set(fakeNow)
|
||||
defer timeutil.Unset()
|
||||
|
||||
urlStr := fmt.Sprintf("/api/v1/users/%s/heatmap", normalUsername)
|
||||
req := NewRequest(t, "GET", urlStr)
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
@@ -5,10 +5,12 @@
|
||||
package integrations
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
@@ -110,3 +112,64 @@ func TestPrivateOrg(t *testing.T) {
|
||||
req = NewRequest(t, "GET", "/privated_org/private_repo_on_private_org")
|
||||
session.MakeRequest(t, req, http.StatusOK)
|
||||
}
|
||||
|
||||
func TestOrgRestrictedUser(t *testing.T) {
|
||||
defer prepareTestEnv(t)()
|
||||
|
||||
// privated_org is a private org who has id 23
|
||||
orgName := "privated_org"
|
||||
|
||||
// public_repo_on_private_org is a public repo on privated_org
|
||||
repoName := "public_repo_on_private_org"
|
||||
|
||||
// user29 is a restricted user who is not a member of the organization
|
||||
restrictedUser := "user29"
|
||||
|
||||
// #17003 reports a bug whereby adding a restricted user to a read-only team doesn't work
|
||||
|
||||
// assert restrictedUser cannot see the org or the public repo
|
||||
restrictedSession := loginUser(t, restrictedUser)
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("/%s", orgName))
|
||||
restrictedSession.MakeRequest(t, req, http.StatusNotFound)
|
||||
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s", orgName, repoName))
|
||||
restrictedSession.MakeRequest(t, req, http.StatusNotFound)
|
||||
|
||||
// Therefore create a read-only team
|
||||
adminSession := loginUser(t, "user1")
|
||||
token := getTokenForLoggedInUser(t, adminSession)
|
||||
|
||||
teamToCreate := &api.CreateTeamOption{
|
||||
Name: "codereader",
|
||||
Description: "Code Reader",
|
||||
IncludesAllRepositories: true,
|
||||
Permission: "read",
|
||||
Units: []string{"repo.code"},
|
||||
}
|
||||
|
||||
req = NewRequestWithJSON(t, "POST",
|
||||
fmt.Sprintf("/api/v1/orgs/%s/teams?token=%s", orgName, token), teamToCreate)
|
||||
|
||||
var apiTeam api.Team
|
||||
|
||||
resp := adminSession.MakeRequest(t, req, http.StatusCreated)
|
||||
DecodeJSON(t, resp, &apiTeam)
|
||||
checkTeamResponse(t, &apiTeam, teamToCreate.Name, teamToCreate.Description, teamToCreate.IncludesAllRepositories,
|
||||
teamToCreate.Permission, teamToCreate.Units)
|
||||
checkTeamBean(t, apiTeam.ID, teamToCreate.Name, teamToCreate.Description, teamToCreate.IncludesAllRepositories,
|
||||
teamToCreate.Permission, teamToCreate.Units)
|
||||
//teamID := apiTeam.ID
|
||||
|
||||
// Now we need to add the restricted user to the team
|
||||
req = NewRequest(t, "PUT",
|
||||
fmt.Sprintf("/api/v1/teams/%d/members/%s?token=%s", apiTeam.ID, restrictedUser, token))
|
||||
_ = adminSession.MakeRequest(t, req, http.StatusNoContent)
|
||||
|
||||
// Now we need to check if the restrictedUser can access the repo
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s", orgName))
|
||||
restrictedSession.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s", orgName, repoName))
|
||||
restrictedSession.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
}
|
||||
|
||||
@@ -568,7 +568,7 @@
|
||||
-
|
||||
id: 40
|
||||
owner_id: 23
|
||||
owner_name: limited_org
|
||||
owner_name: privated_org
|
||||
lower_name: public_repo_on_private_org
|
||||
name: public_repo_on_private_org
|
||||
is_private: false
|
||||
@@ -581,7 +581,7 @@
|
||||
-
|
||||
id: 41
|
||||
owner_id: 23
|
||||
owner_name: limited_org
|
||||
owner_name: privated_org
|
||||
lower_name: private_repo_on_private_org
|
||||
name: private_repo_on_private_org
|
||||
is_private: true
|
||||
|
||||
@@ -99,6 +99,46 @@ func AddGPGKey(ownerID int64, content, token, signature string) ([]*GPGKey, erro
|
||||
verified = true
|
||||
}
|
||||
|
||||
if len(ekeys) > 1 {
|
||||
id2key := map[string]*openpgp.Entity{}
|
||||
newEKeys := make([]*openpgp.Entity, 0, len(ekeys))
|
||||
for _, ekey := range ekeys {
|
||||
id := ekey.PrimaryKey.KeyIdString()
|
||||
if original, has := id2key[id]; has {
|
||||
// Coalesce this with the other one
|
||||
for _, subkey := range ekey.Subkeys {
|
||||
if subkey.PublicKey == nil {
|
||||
continue
|
||||
}
|
||||
found := false
|
||||
|
||||
for _, originalSubkey := range original.Subkeys {
|
||||
if originalSubkey.PublicKey == nil {
|
||||
continue
|
||||
}
|
||||
if originalSubkey.PublicKey.KeyId == subkey.PublicKey.KeyId {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
original.Subkeys = append(original.Subkeys, subkey)
|
||||
}
|
||||
}
|
||||
for name, identity := range ekey.Identities {
|
||||
if _, has := original.Identities[name]; has {
|
||||
continue
|
||||
}
|
||||
original.Identities[name] = identity
|
||||
}
|
||||
continue
|
||||
}
|
||||
id2key[id] = ekey
|
||||
newEKeys = append(newEKeys, ekey)
|
||||
}
|
||||
ekeys = newEKeys
|
||||
}
|
||||
|
||||
for _, ekey := range ekeys {
|
||||
// Key ID cannot be duplicated.
|
||||
has, err := sess.Where("key_id=?", ekey.PrimaryKey.KeyIdString()).
|
||||
|
||||
@@ -1152,16 +1152,6 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, overwriteO
|
||||
return fmt.Errorf("recalculateAccesses: %v", err)
|
||||
}
|
||||
|
||||
if u.Visibility == api.VisibleTypePublic && !repo.IsPrivate {
|
||||
// Create/Remove git-daemon-export-ok for git-daemon...
|
||||
daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
|
||||
if f, err := os.Create(daemonExportFile); err != nil {
|
||||
log.Error("Failed to create %s: %v", daemonExportFile, err)
|
||||
} else {
|
||||
f.Close()
|
||||
}
|
||||
}
|
||||
|
||||
if setting.Service.AutoWatchNewRepos {
|
||||
if err = watchRepo(ctx.e, doer.ID, repo.ID, true); err != nil {
|
||||
return fmt.Errorf("watchRepo: %v", err)
|
||||
@@ -1175,6 +1165,46 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, overwriteO
|
||||
return nil
|
||||
}
|
||||
|
||||
// CheckDaemonExportOK creates/removes git-daemon-export-ok for git-daemon...
|
||||
func (repo *Repository) CheckDaemonExportOK() error {
|
||||
return repo.checkDaemonExportOK(x)
|
||||
}
|
||||
|
||||
// CheckDaemonExportOKCtx creates/removes git-daemon-export-ok for git-daemon...
|
||||
func (repo *Repository) CheckDaemonExportOKCtx(ctx DBContext) error {
|
||||
return repo.checkDaemonExportOK(ctx.e)
|
||||
}
|
||||
|
||||
func (repo *Repository) checkDaemonExportOK(e Engine) error {
|
||||
if err := repo.getOwner(e); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Create/Remove git-daemon-export-ok for git-daemon...
|
||||
daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
|
||||
|
||||
isExist, err := util.IsExist(daemonExportFile)
|
||||
if err != nil {
|
||||
log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
|
||||
return err
|
||||
}
|
||||
|
||||
isPublic := !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePublic
|
||||
if !isPublic && isExist {
|
||||
if err = util.Remove(daemonExportFile); err != nil {
|
||||
log.Error("Failed to remove %s: %v", daemonExportFile, err)
|
||||
}
|
||||
} else if isPublic && !isExist {
|
||||
if f, err := os.Create(daemonExportFile); err != nil {
|
||||
log.Error("Failed to create %s: %v", daemonExportFile, err)
|
||||
} else {
|
||||
f.Close()
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func countRepositories(userID int64, private bool) int64 {
|
||||
sess := x.Where("id > 0")
|
||||
|
||||
@@ -1324,24 +1354,9 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e
|
||||
}
|
||||
|
||||
// Create/Remove git-daemon-export-ok for git-daemon...
|
||||
daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
|
||||
isExist, err := util.IsExist(daemonExportFile)
|
||||
isPublic := !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePublic
|
||||
if err != nil {
|
||||
log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
|
||||
if err := repo.checkDaemonExportOK(e); err != nil {
|
||||
return err
|
||||
}
|
||||
if !isPublic && isExist {
|
||||
if err = util.Remove(daemonExportFile); err != nil {
|
||||
log.Error("Failed to remove %s: %v", daemonExportFile, err)
|
||||
}
|
||||
} else if isPublic && !isExist {
|
||||
if f, err := os.Create(daemonExportFile); err != nil {
|
||||
log.Error("Failed to create %s: %v", daemonExportFile, err)
|
||||
} else {
|
||||
f.Close()
|
||||
}
|
||||
}
|
||||
|
||||
forkRepos, err := getRepositoriesByForkID(e, repo.ID)
|
||||
if err != nil {
|
||||
|
||||
@@ -52,7 +52,7 @@ func (list U2FRegistrationList) ToRegistrations() []u2f.Registration {
	for _, reg := range list {
		r, err := reg.Parse()
		if err != nil {
			log.Fatal("parsing u2f registration: %v", err)
			log.Error("parsing u2f registration: %v", err)
			continue
		}
		regs = append(regs, *r)
@@ -5,6 +5,7 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -27,6 +28,7 @@ func TestGetU2FRegistrationsByUID(t *testing.T) {
|
||||
assert.NoError(t, PrepareTestDatabase())
|
||||
|
||||
res, err := GetU2FRegistrationsByUID(1)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, res, 1)
|
||||
assert.Equal(t, "U2F Key", res[0].Name)
|
||||
@@ -71,3 +73,27 @@ func TestDeleteRegistration(t *testing.T) {
|
||||
assert.NoError(t, DeleteRegistration(reg))
|
||||
AssertNotExistsBean(t, &U2FRegistration{ID: 1})
|
||||
}
|
||||
|
||||
const validU2FRegistrationResponseHex = "0504b174bc49c7ca254b70d2e5c207cee9cf174820ebd77ea3c65508c26da51b657c1cc6b952f8621697936482da0a6d3d3826a59095daf6cd7c03e2e60385d2f6d9402a552dfdb7477ed65fd84133f86196010b2215b57da75d315b7b9e8fe2e3925a6019551bab61d16591659cbaf00b4950f7abfe6660e2e006f76868b772d70c253082013c3081e4a003020102020a47901280001155957352300a06082a8648ce3d0403023017311530130603550403130c476e756262792050696c6f74301e170d3132303831343138323933325a170d3133303831343138323933325a3031312f302d0603550403132650696c6f74476e756262792d302e342e312d34373930313238303030313135353935373335323059301306072a8648ce3d020106082a8648ce3d030107034200048d617e65c9508e64bcc5673ac82a6799da3c1446682c258c463fffdf58dfd2fa3e6c378b53d795c4a4dffb4199edd7862f23abaf0203b4b8911ba0569994e101300a06082a8648ce3d0403020347003044022060cdb6061e9c22262d1aac1d96d8c70829b2366531dda268832cb836bcd30dfa0220631b1459f09e6330055722c8d89b7f48883b9089b88d60d1d9795902b30410df304502201471899bcc3987e62e8202c9b39c33c19033f7340352dba80fcab017db9230e402210082677d673d891933ade6f617e5dbde2e247e70423fd5ad7804a6d3d3961ef871"
|
||||
|
||||
func TestToRegistrations_SkipInvalidItemsWithoutCrashing(t *testing.T) {
|
||||
regKeyRaw, _ := hex.DecodeString(validU2FRegistrationResponseHex)
|
||||
regs := U2FRegistrationList{
|
||||
&U2FRegistration{ID: 1},
|
||||
&U2FRegistration{ID: 2, Name: "U2F Key", UserID: 2, Counter: 0, Raw: regKeyRaw, CreatedUnix: 946684800, UpdatedUnix: 946684800},
|
||||
}
|
||||
|
||||
actual := regs.ToRegistrations()
|
||||
assert.Len(t, actual, 1)
|
||||
}
|
||||
|
||||
func TestToRegistrations(t *testing.T) {
|
||||
regKeyRaw, _ := hex.DecodeString(validU2FRegistrationResponseHex)
|
||||
regs := U2FRegistrationList{
|
||||
&U2FRegistration{ID: 1, Name: "U2F Key", UserID: 1, Counter: 0, Raw: regKeyRaw, CreatedUnix: 946684800, UpdatedUnix: 946684800},
|
||||
&U2FRegistration{ID: 2, Name: "U2F Key", UserID: 2, Counter: 0, Raw: regKeyRaw, CreatedUnix: 946684800, UpdatedUnix: 946684800},
|
||||
}
|
||||
|
||||
actual := regs.ToRegistrations()
|
||||
assert.Len(t, actual, 2)
|
||||
}
|
||||
|
||||
@@ -7,6 +7,9 @@ package models
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/timeutil"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -37,6 +40,10 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
|
||||
// Prepare
|
||||
assert.NoError(t, PrepareTestDatabase())
|
||||
|
||||
// Mock time
|
||||
timeutil.Set(time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
defer timeutil.Unset()
|
||||
|
||||
for i, tc := range testCases {
|
||||
user := AssertExistsAndLoadBean(t, &User{ID: tc.userID}).(*User)
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
|
||||
"github.com/gogs/chardet"
|
||||
"golang.org/x/net/html/charset"
|
||||
@@ -26,9 +27,9 @@ var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}
|
||||
// ToUTF8WithFallbackReader detects the encoding of content and converts to UTF-8 reader if possible
|
||||
func ToUTF8WithFallbackReader(rd io.Reader) io.Reader {
|
||||
var buf = make([]byte, 2048)
|
||||
n, err := rd.Read(buf)
|
||||
n, err := util.ReadAtMost(rd, buf)
|
||||
if err != nil {
|
||||
return rd
|
||||
return io.MultiReader(bytes.NewReader(RemoveBOMIfPresent(buf[:n])), rd)
|
||||
}
|
||||
|
||||
charsetLabel, err := DetectEncoding(buf[:n])
|
||||
|
||||
@@ -345,7 +345,7 @@ func repoAssignment(ctx *Context, repo *models.Repository) {
	}

	// Check access.
	if ctx.Repo.Permission.AccessMode == models.AccessModeNone {
	if !ctx.Repo.Permission.HasAccess() {
		if ctx.Query("go-get") == "1" {
			EarlyResponseForGoGetMeta(ctx)
			return
@@ -28,32 +28,24 @@ func CreateReader(input io.Reader, delimiter rune) *stdcsv.Reader {
|
||||
}
|
||||
|
||||
// CreateReaderAndGuessDelimiter tries to guess the field delimiter from the content and creates a csv.Reader.
|
||||
// Reads at most 10k bytes.
|
||||
func CreateReaderAndGuessDelimiter(rd io.Reader) (*stdcsv.Reader, error) {
|
||||
var data = make([]byte, 1e4)
|
||||
size, err := rd.Read(data)
|
||||
size, err := util.ReadAtMost(rd, data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
delimiter := guessDelimiter(data[:size])
|
||||
|
||||
var newInput io.Reader
|
||||
if size < 1e4 {
|
||||
newInput = bytes.NewReader(data[:size])
|
||||
} else {
|
||||
newInput = io.MultiReader(bytes.NewReader(data), rd)
|
||||
}
|
||||
|
||||
return CreateReader(newInput, delimiter), nil
|
||||
return CreateReader(
|
||||
io.MultiReader(bytes.NewReader(data[:size]), rd),
|
||||
guessDelimiter(data[:size]),
|
||||
), nil
|
||||
}
|
||||
|
||||
// guessDelimiter scores the input CSV data against delimiters, and returns the best match.
|
||||
// Reads at most 10k bytes & 10 lines.
|
||||
func guessDelimiter(data []byte) rune {
|
||||
maxLines := 10
|
||||
maxBytes := util.Min(len(data), 1e4)
|
||||
text := string(data[:maxBytes])
|
||||
text = quoteRegexp.ReplaceAllLiteralString(text, "")
|
||||
text := quoteRegexp.ReplaceAllLiteralString(string(data), "")
|
||||
lines := strings.SplitN(text, "\n", maxLines+1)
|
||||
lines = lines[:util.Min(maxLines, len(lines))]
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"io/ioutil"
|
||||
|
||||
"code.gitea.io/gitea/modules/typesniffer"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
// This file contains common functions between the gogit and !gogit variants for git Blobs
|
||||
@@ -29,7 +30,7 @@ func (b *Blob) GetBlobContent() (string, error) {
|
||||
}
|
||||
defer dataRc.Close()
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := dataRc.Read(buf)
|
||||
n, _ := util.ReadAtMost(dataRc, buf)
|
||||
buf = buf[:n]
|
||||
return string(buf), nil
|
||||
}
|
||||
|
||||
@@ -188,6 +188,12 @@ func Init(ctx context.Context) error {
			return err
		}
	}
	if setting.Git.DisableCoreProtectNTFS {
		if err := checkAndSetConfig("core.protectntfs", "false", true); err != nil {
			return err
		}
		GlobalCommandArgs = append(GlobalCommandArgs, "-c", "core.protectntfs=false")
	}
	return nil
}
@@ -229,7 +229,7 @@ func (wl *wrappedListener) Accept() (net.Conn, error) {
|
||||
|
||||
closed := int32(0)
|
||||
|
||||
c = wrappedConn{
|
||||
c = &wrappedConn{
|
||||
Conn: c,
|
||||
server: wl.server,
|
||||
closed: &closed,
|
||||
@@ -264,7 +264,7 @@ type wrappedConn struct {
|
||||
perWritePerKbTimeout time.Duration
|
||||
}
|
||||
|
||||
func (w wrappedConn) Write(p []byte) (n int, err error) {
|
||||
func (w *wrappedConn) Write(p []byte) (n int, err error) {
|
||||
if w.perWriteTimeout > 0 {
|
||||
minTimeout := time.Duration(len(p)/1024) * w.perWritePerKbTimeout
|
||||
minDeadline := time.Now().Add(minTimeout).Add(w.perWriteTimeout)
|
||||
@@ -278,7 +278,7 @@ func (w wrappedConn) Write(p []byte) (n int, err error) {
|
||||
return w.Conn.Write(p)
|
||||
}
|
||||
|
||||
func (w wrappedConn) Close() error {
|
||||
func (w *wrappedConn) Close() error {
|
||||
if atomic.CompareAndSwapInt32(w.closed, 0, 1) {
|
||||
defer func() {
|
||||
if err := recover(); err != nil {
|
||||
|
||||
@@ -92,7 +92,7 @@ func isLinkStr(link string) bool {
func getIssueFullPattern() *regexp.Regexp {
	if issueFullPattern == nil {
		issueFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) +
			`\w+/\w+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#]\S+.(\S+)?)?\b`)
			`\w+/\w+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#](\S+)?)?\b`)
	}
	return issueFullPattern
}
@@ -265,6 +265,10 @@ func TestRender_FullIssueURLs(t *testing.T) {
|
||||
`<a href="http://localhost:3000/person/repo/issues/4#issuecomment-1234" class="ref-issue">person/repo#4</a>`)
|
||||
test("http://localhost:3000/gogits/gogs/issues/4",
|
||||
`<a href="http://localhost:3000/gogits/gogs/issues/4" class="ref-issue">#4</a>`)
|
||||
test("http://localhost:3000/gogits/gogs/issues/4 test",
|
||||
`<a href="http://localhost:3000/gogits/gogs/issues/4" class="ref-issue">#4</a> test`)
|
||||
test("http://localhost:3000/gogits/gogs/issues/4?a=1&b=2#comment-123 test",
|
||||
`<a href="http://localhost:3000/gogits/gogs/issues/4?a=1&b=2#comment-123" class="ref-issue">#4</a> test`)
|
||||
}
|
||||
|
||||
func TestRegExp_sha1CurrentPattern(t *testing.T) {
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
|
||||
stdcharset "golang.org/x/net/html/charset"
|
||||
"golang.org/x/text/transform"
|
||||
@@ -61,7 +62,7 @@ func detectEncodingAndBOM(entry *git.TreeEntry, repo *models.Repository) (string
|
||||
}
|
||||
defer reader.Close()
|
||||
buf := make([]byte, 1024)
|
||||
n, err := reader.Read(buf)
|
||||
n, err := util.ReadAtMost(reader, buf)
|
||||
if err != nil {
|
||||
// return default
|
||||
return "UTF-8", false
|
||||
@@ -84,7 +85,7 @@ func detectEncodingAndBOM(entry *git.TreeEntry, repo *models.Repository) (string
|
||||
}
|
||||
defer dataRc.Close()
|
||||
buf = make([]byte, 1024)
|
||||
n, err = dataRc.Read(buf)
|
||||
n, err = util.ReadAtMost(dataRc, buf)
|
||||
if err != nil {
|
||||
// return default
|
||||
return "UTF-8", false
|
||||
|
||||
@@ -66,6 +66,9 @@ func AdoptRepository(doer, u *models.User, opts models.CreateRepoOptions) (*mode
|
||||
if err := adoptRepository(ctx, repoPath, doer, repo, opts); err != nil {
|
||||
return fmt.Errorf("createDelegateHooks: %v", err)
|
||||
}
|
||||
if err := repo.CheckDaemonExportOKCtx(ctx); err != nil {
|
||||
return fmt.Errorf("checkDaemonExportOK: %v", err)
|
||||
}
|
||||
|
||||
// Initialize Issue Labels if selected
|
||||
if len(opts.IssueLabels) > 0 {
|
||||
|
||||
@@ -103,6 +103,10 @@ func CreateRepository(doer, u *models.User, opts models.CreateRepoOptions) (*mod
|
||||
}
|
||||
}
|
||||
|
||||
if err := repo.CheckDaemonExportOKCtx(ctx); err != nil {
|
||||
return fmt.Errorf("checkDaemonExportOK: %v", err)
|
||||
}
|
||||
|
||||
if stdout, err := git.NewCommand("update-server-info").
|
||||
SetDescription(fmt.Sprintf("CreateRepository(git update-server-info): %s", repoPath)).
|
||||
RunInDir(repoPath); err != nil {
|
||||
|
||||
@@ -95,14 +95,17 @@ func ForkRepository(doer, owner *models.User, oldRepo *models.Repository, name,
|
||||
needsRollback = true
|
||||
|
||||
repoPath := models.RepoPath(owner.Name, repo.Name)
|
||||
if stdout, err := git.NewCommand(
|
||||
"clone", "--bare", oldRepoPath, repoPath).
|
||||
if stdout, err := git.NewCommand("clone", "--bare", oldRepoPath, repoPath).
|
||||
SetDescription(fmt.Sprintf("ForkRepository(git clone): %s to %s", oldRepo.FullName(), repo.FullName())).
|
||||
RunInDirTimeout(10*time.Minute, ""); err != nil {
|
||||
log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, oldRepo, stdout, err)
|
||||
return fmt.Errorf("git clone: %v", err)
|
||||
}
|
||||
|
||||
if err := repo.CheckDaemonExportOKCtx(ctx); err != nil {
|
||||
return fmt.Errorf("checkDaemonExportOK: %v", err)
|
||||
}
|
||||
|
||||
if stdout, err := git.NewCommand("update-server-info").
|
||||
SetDescription(fmt.Sprintf("ForkRepository(git update-server-info): %s", repo.FullName())).
|
||||
RunInDir(repoPath); err != nil {
|
||||
|
||||
@@ -275,5 +275,16 @@ func GenerateRepository(ctx models.DBContext, doer, owner *models.User, template
|
||||
return generateRepo, err
|
||||
}
|
||||
|
||||
if err = generateRepo.CheckDaemonExportOKCtx(ctx); err != nil {
|
||||
return generateRepo, fmt.Errorf("checkDaemonExportOK: %v", err)
|
||||
}
|
||||
|
||||
if stdout, err := git.NewCommand("update-server-info").
|
||||
SetDescription(fmt.Sprintf("GenerateRepository(git update-server-info): %s", repoPath)).
|
||||
RunInDir(repoPath); err != nil {
|
||||
log.Error("GenerateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", generateRepo, stdout, err)
|
||||
return generateRepo, fmt.Errorf("error in GenerateRepository(git update-server-info): %v", err)
|
||||
}
|
||||
|
||||
return generateRepo, nil
|
||||
}
|
||||
|
||||
@@ -95,6 +95,21 @@ func MigrateRepositoryGitData(ctx context.Context, u *models.User, repo *models.
|
||||
}
|
||||
}
|
||||
|
||||
if repo.OwnerID == u.ID {
|
||||
repo.Owner = u
|
||||
}
|
||||
|
||||
if err = repo.CheckDaemonExportOK(); err != nil {
|
||||
return repo, fmt.Errorf("checkDaemonExportOK: %v", err)
|
||||
}
|
||||
|
||||
if stdout, err := git.NewCommandContext(ctx, "update-server-info").
|
||||
SetDescription(fmt.Sprintf("MigrateRepositoryGitData(git update-server-info): %s", repoPath)).
|
||||
RunInDir(repoPath); err != nil {
|
||||
log.Error("MigrateRepositoryGitData(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
|
||||
return repo, fmt.Errorf("error in MigrateRepositoryGitData(git update-server-info): %v", err)
|
||||
}
|
||||
|
||||
gitRepo, err := git.OpenRepository(repoPath)
|
||||
if err != nil {
|
||||
return repo, fmt.Errorf("OpenRepository: %v", err)
|
||||
|
||||
@@ -26,6 +26,7 @@ var (
		EnableAutoGitWireProtocol bool
		PullRequestPushMessage    bool
		LargeObjectThreshold      int64
		DisableCoreProtectNTFS    bool
		Timeout                   struct {
			Default int
			Migrate int
@@ -316,10 +316,66 @@ func Listen(host string, port int, ciphers []string, keyExchanges []string, macs
|
||||
}
|
||||
}
|
||||
|
||||
// Workaround slightly broken behaviour in x/crypto/ssh/handshake.go:458-463
|
||||
//
|
||||
// Fundamentally the issue here is that HostKeyAlgos make the incorrect assumption
|
||||
// that the PublicKey().Type() matches the signature algorithm.
|
||||
//
|
||||
// Therefore we need to add duplicates for the RSA with different signing algorithms.
|
||||
signers := make([]ssh.Signer, 0, len(srv.HostSigners))
|
||||
for _, signer := range srv.HostSigners {
|
||||
if signer.PublicKey().Type() == "ssh-rsa" {
|
||||
signers = append(signers,
|
||||
&wrapSigner{
|
||||
Signer: signer,
|
||||
algorithm: gossh.SigAlgoRSASHA2512,
|
||||
},
|
||||
&wrapSigner{
|
||||
Signer: signer,
|
||||
algorithm: gossh.SigAlgoRSASHA2256,
|
||||
},
|
||||
)
|
||||
}
|
||||
signers = append(signers, signer)
|
||||
}
|
||||
srv.HostSigners = signers
|
||||
|
||||
go listen(&srv)
|
||||
|
||||
}
|
||||
|
||||
// wrapSigner wraps a signer and overrides its public key type with the provided algorithm
|
||||
type wrapSigner struct {
|
||||
ssh.Signer
|
||||
algorithm string
|
||||
}
|
||||
|
||||
// PublicKey returns an associated PublicKey instance.
|
||||
func (s *wrapSigner) PublicKey() gossh.PublicKey {
|
||||
return &wrapPublicKey{
|
||||
PublicKey: s.Signer.PublicKey(),
|
||||
algorithm: s.algorithm,
|
||||
}
|
||||
}
|
||||
|
||||
// Sign returns raw signature for the given data. This method
|
||||
// will apply the hash specified for the keytype to the data using
|
||||
// the algorithm assigned for this key
|
||||
func (s *wrapSigner) Sign(rand io.Reader, data []byte) (*gossh.Signature, error) {
|
||||
return s.Signer.(gossh.AlgorithmSigner).SignWithAlgorithm(rand, data, s.algorithm)
|
||||
}
|
||||
|
||||
// wrapPublicKey wraps a PublicKey and overrides its type
|
||||
type wrapPublicKey struct {
|
||||
gossh.PublicKey
|
||||
algorithm string
|
||||
}
|
||||
|
||||
// Type returns the algorithm
|
||||
func (k *wrapPublicKey) Type() string {
|
||||
return k.algorithm
|
||||
}
|
||||
|
||||
// GenKeyPair make a pair of public and private keys for SSH access.
|
||||
// Public key is encoded in the format for inclusion in an OpenSSH authorized_keys file.
|
||||
// Private Key generated is PEM encoded
|
||||
|
||||
@@ -13,8 +13,24 @@ import (
// TimeStamp defines a timestamp
type TimeStamp int64

// mock is NOT concurrency-safe!!
var mock time.Time

// Set sets the time to a mocked time.Time
func Set(now time.Time) {
	mock = now
}

// Unset will unset the mocked time.Time
func Unset() {
	mock = time.Time{}
}

// TimeStampNow returns now int64
func TimeStampNow() TimeStamp {
	if !mock.IsZero() {
		return TimeStamp(mock.Unix())
	}
	return TimeStamp(time.Now().Unix())
}
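The mocked clock added above is what the heatmap tests in this comparison rely on: a test pins timeutil to a fixed instant, and every `TimeStampNow()` call returns that instant until `Unset()` restores the real clock. A minimal sketch of the pattern (the test below is illustrative, not part of the diff):

```go
package timeutil_test

import (
	"testing"
	"time"

	"code.gitea.io/gitea/modules/timeutil"
	"github.com/stretchr/testify/assert"
)

// TestMockedNow is a hypothetical test showing the Set/Unset pattern.
func TestMockedNow(t *testing.T) {
	fakeNow := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
	timeutil.Set(fakeNow)  // every TimeStampNow() now reports fakeNow
	defer timeutil.Unset() // restore the real clock when the test finishes

	assert.EqualValues(t, fakeNow.Unix(), timeutil.TimeStampNow())
}
```

Because the mock variable is package-level and, as the source comment warns, not concurrency-safe, tests that use it should not run in parallel.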
@@ -10,6 +10,8 @@ import (
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
// Use at most this many bytes to determine Content Type.
|
||||
@@ -86,8 +88,8 @@ func DetectContentType(data []byte) SniffedType {
|
||||
// DetectContentTypeFromReader guesses the content type contained in the reader.
|
||||
func DetectContentTypeFromReader(r io.Reader) (SniffedType, error) {
|
||||
buf := make([]byte, sniffLen)
|
||||
n, err := r.Read(buf)
|
||||
if err != nil && err != io.EOF {
|
||||
n, err := util.ReadAtMost(r, buf)
|
||||
if err != nil {
|
||||
return SniffedType{}, fmt.Errorf("DetectContentTypeFromReader io error: %w", err)
|
||||
}
|
||||
buf = buf[:n]
|
||||
|
||||
modules/util/io.go (new file, 20 lines)
@@ -0,0 +1,20 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package util

import (
	"io"
)

// ReadAtMost reads at most len(buf) bytes from r into buf.
// It returns the number of bytes copied. n is only less than len(buf) if r provides fewer bytes.
// If EOF occurs while reading, err will be nil.
func ReadAtMost(r io.Reader, buf []byte) (n int, err error) {
	n, err = io.ReadFull(r, buf)
	if err == io.EOF || err == io.ErrUnexpectedEOF {
		err = nil
	}
	return
}
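Many of the hunks in this comparison replace a bare `reader.Read(buf)` with `util.ReadAtMost`, because a single `Read` call may legally return fewer bytes than requested even when more data is available, and `io.ReadFull` on its own reports a short read as an error. A minimal sketch of the helper's behaviour (the sample input is illustrative):

```go
package main

import (
	"bytes"
	"fmt"

	"code.gitea.io/gitea/modules/util"
)

func main() {
	buf := make([]byte, 1024)

	// The input is shorter than the buffer: io.ReadFull alone would return
	// io.ErrUnexpectedEOF here, but ReadAtMost swallows EOF and simply
	// reports how many bytes it managed to read.
	n, err := util.ReadAtMost(bytes.NewReader([]byte("#!/bin/sh")), buf)
	fmt.Println(n, err, string(buf[:n])) // 9 <nil> #!/bin/sh
}
```

This is why several callers below can drop their special-case handling of `io.EOF` after the switch.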
@@ -115,7 +115,7 @@ func ListReleases(ctx *context.APIContext) {

	opts := models.FindReleasesOptions{
		ListOptions:   listOptions,
		IncludeDrafts: ctx.Repo.AccessMode >= models.AccessModeWrite,
		IncludeDrafts: ctx.Repo.AccessMode >= models.AccessModeWrite || ctx.Repo.UnitAccessMode(models.UnitTypeReleases) >= models.AccessModeWrite,
		IncludeTags:   false,
		IsDraft:       ctx.QueryOptionalBool("draft"),
		IsPreRelease:  ctx.QueryOptionalBool("pre-release"),
@@ -18,6 +18,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/typesniffer"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
// ServeBlob download a git.Blob
|
||||
@@ -42,8 +43,8 @@ func ServeBlob(ctx *context.Context, blob *git.Blob) error {
|
||||
// ServeData download file from io.Reader
|
||||
func ServeData(ctx *context.Context, name string, size int64, reader io.Reader) error {
|
||||
buf := make([]byte, 1024)
|
||||
n, err := reader.Read(buf)
|
||||
if err != nil && err != io.EOF {
|
||||
n, err := util.ReadAtMost(reader, buf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n >= 0 {
|
||||
|
||||
@@ -278,7 +278,12 @@ func ServCommand(ctx *context.PrivateContext) {
|
||||
}
|
||||
|
||||
// Permissions checking:
|
||||
if repoExist && (mode > models.AccessModeRead || repo.IsPrivate || setting.Service.RequireSignInView) {
|
||||
if repoExist &&
|
||||
(mode > models.AccessModeRead ||
|
||||
repo.IsPrivate ||
|
||||
owner.Visibility.IsPrivate() ||
|
||||
(user != nil && user.IsRestricted) || // user will be nil if the key is a deploykey
|
||||
setting.Service.RequireSignInView) {
|
||||
if key.Type == models.KeyTypeDeploy {
|
||||
if deployKey.Mode < mode {
|
||||
ctx.JSON(http.StatusUnauthorized, private.ErrServCommand{
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/storage"
|
||||
"code.gitea.io/gitea/modules/upload"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/routers/common"
|
||||
)
|
||||
|
||||
@@ -43,10 +44,8 @@ func uploadAttachment(ctx *context.Context, allowedTypes string) {
|
||||
defer file.Close()
|
||||
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := file.Read(buf)
|
||||
if n > 0 {
|
||||
buf = buf[:n]
|
||||
}
|
||||
n, _ := util.ReadAtMost(file, buf)
|
||||
buf = buf[:n]
|
||||
|
||||
err = upload.Verify(buf, header.Filename, allowedTypes)
|
||||
if err != nil {
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"html"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"path/filepath"
|
||||
@@ -104,30 +105,36 @@ func setCsvCompareContext(ctx *context.Context) {
|
||||
|
||||
errTooLarge := errors.New(ctx.Locale.Tr("repo.error.csv.too_large"))
|
||||
|
||||
csvReaderFromCommit := func(c *git.Commit) (*csv.Reader, error) {
|
||||
csvReaderFromCommit := func(c *git.Commit) (*csv.Reader, io.Closer, error) {
|
||||
blob, err := c.GetBlobByPath(diffFile.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < blob.Size() {
|
||||
return nil, errTooLarge
|
||||
return nil, nil, errTooLarge
|
||||
}
|
||||
|
||||
reader, err := blob.DataAsync()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
defer reader.Close()
|
||||
|
||||
return csv_module.CreateReaderAndGuessDelimiter(charset.ToUTF8WithFallbackReader(reader))
|
||||
csvReader, err := csv_module.CreateReaderAndGuessDelimiter(charset.ToUTF8WithFallbackReader(reader))
|
||||
return csvReader, reader, err
|
||||
}
|
||||
|
||||
baseReader, err := csvReaderFromCommit(baseCommit)
|
||||
baseReader, baseBlobCloser, err := csvReaderFromCommit(baseCommit)
|
||||
if baseBlobCloser != nil {
|
||||
defer baseBlobCloser.Close()
|
||||
}
|
||||
if err == errTooLarge {
|
||||
return CsvDiffResult{nil, err.Error()}
|
||||
}
|
||||
headReader, err := csvReaderFromCommit(headCommit)
|
||||
headReader, headBlobCloser, err := csvReaderFromCommit(headCommit)
|
||||
if headBlobCloser != nil {
|
||||
defer headBlobCloser.Close()
|
||||
}
|
||||
if err == errTooLarge {
|
||||
return CsvDiffResult{nil, err.Error()}
|
||||
}
|
||||
|
||||
@@ -114,7 +114,7 @@ func editFile(ctx *context.Context, isNewFile bool) {
|
||||
ctx.Data["FileName"] = blob.Name()
|
||||
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := dataRc.Read(buf)
|
||||
n, _ := util.ReadAtMost(dataRc, buf)
|
||||
buf = buf[:n]
|
||||
|
||||
// Only some file types are editable online as text.
|
||||
@@ -747,7 +747,7 @@ func UploadFileToServer(ctx *context.Context) {
|
||||
defer file.Close()
|
||||
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := file.Read(buf)
|
||||
n, _ := util.ReadAtMost(file, buf)
|
||||
if n > 0 {
|
||||
buf = buf[:n]
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/storage"
|
||||
"code.gitea.io/gitea/modules/typesniffer"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -272,7 +273,7 @@ func LFSFileGet(ctx *context.Context) {
|
||||
}
|
||||
defer dataRc.Close()
|
||||
buf := make([]byte, 1024)
|
||||
n, err := dataRc.Read(buf)
|
||||
n, err := util.ReadAtMost(dataRc, buf)
|
||||
if err != nil {
|
||||
ctx.ServerError("Data", err)
|
||||
return
|
||||
@@ -297,10 +298,10 @@ func LFSFileGet(ctx *context.Context) {
|
||||
break
|
||||
}
|
||||
|
||||
buf := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc))
|
||||
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc))
|
||||
|
||||
// Building code view blocks with line number on server side.
|
||||
fileContent, _ := ioutil.ReadAll(buf)
|
||||
fileContent, _ := ioutil.ReadAll(rd)
|
||||
|
||||
var output bytes.Buffer
|
||||
lines := strings.Split(string(fileContent), "\n")
|
||||
|
||||
@@ -31,6 +31,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/typesniffer"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -264,7 +265,7 @@ func renderDirectory(ctx *context.Context, treeLink string) {
|
||||
defer dataRc.Close()
|
||||
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := dataRc.Read(buf)
|
||||
n, _ := util.ReadAtMost(dataRc, buf)
|
||||
buf = buf[:n]
|
||||
|
||||
st := typesniffer.DetectContentType(buf)
|
||||
@@ -299,7 +300,7 @@ func renderDirectory(ctx *context.Context, treeLink string) {
|
||||
defer dataRc.Close()
|
||||
|
||||
buf = make([]byte, 1024)
|
||||
n, err = dataRc.Read(buf)
|
||||
n, err = util.ReadAtMost(dataRc, buf)
|
||||
if err != nil {
|
||||
ctx.ServerError("Data", err)
|
||||
return
|
||||
@@ -413,7 +414,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st
|
||||
ctx.Data["RawFileLink"] = rawLink + "/" + ctx.Repo.TreePath
|
||||
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := dataRc.Read(buf)
|
||||
n, _ := util.ReadAtMost(dataRc, buf)
|
||||
buf = buf[:n]
|
||||
|
||||
st := typesniffer.DetectContentType(buf)
|
||||
@@ -445,10 +446,8 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st
|
||||
defer dataRc.Close()
|
||||
|
||||
buf = make([]byte, 1024)
|
||||
n, err = dataRc.Read(buf)
|
||||
// Error EOF don't mean there is an error, it just means we read to
|
||||
// the end
|
||||
if err != nil && err != io.EOF {
|
||||
n, err = util.ReadAtMost(dataRc, buf)
|
||||
if err != nil {
|
||||
ctx.ServerError("Data", err)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -451,7 +451,7 @@ func U2FSign(ctx *context.Context) {
|
||||
for _, reg := range regs {
|
||||
r, err := reg.Parse()
|
||||
if err != nil {
|
||||
log.Fatal("parsing u2f registration: %v", err)
|
||||
log.Error("parsing u2f registration: %v", err)
|
||||
continue
|
||||
}
|
||||
newCounter, authErr := r.Authenticate(*signResp, *challenge, reg.Counter)
|
||||
@@ -617,7 +617,7 @@ func SignInOAuthCallback(ctx *context.Context) {
|
||||
}
|
||||
|
||||
if u == nil {
|
||||
if !(setting.Service.DisableRegistration || setting.Service.AllowOnlyInternalRegistration) && setting.OAuth2Client.EnableAutoRegistration {
|
||||
if !setting.Service.AllowOnlyInternalRegistration && setting.OAuth2Client.EnableAutoRegistration {
|
||||
// create new user with details from oauth2 provider
|
||||
var missingFields []string
|
||||
if gothUser.UserID == "" {
|
||||
|
||||
@@ -21,10 +21,12 @@ type TableDiffCellType uint8
|
||||
|
||||
// TableDiffCellType possible values.
|
||||
const (
|
||||
TableDiffCellEqual TableDiffCellType = iota + 1
|
||||
TableDiffCellUnchanged TableDiffCellType = iota + 1
|
||||
TableDiffCellChanged
|
||||
TableDiffCellAdd
|
||||
TableDiffCellDel
|
||||
TableDiffCellMovedUnchanged
|
||||
TableDiffCellMovedChanged
|
||||
)
|
||||
|
||||
// TableDiffCell represents a cell of a TableDiffRow
|
||||
@@ -53,6 +55,9 @@ type csvReader struct {
|
||||
eof bool
|
||||
}
|
||||
|
||||
// ErrorUndefinedCell is for when a row, column coordinates do not exist in the CSV
|
||||
var ErrorUndefinedCell = errors.New("undefined cell")
|
||||
|
||||
// createCsvReader creates a csvReader and fills the buffer
|
||||
func createCsvReader(reader *csv.Reader, bufferRowCount int) (*csvReader, error) {
|
||||
csv := &csvReader{reader: reader}
|
||||
@@ -70,7 +75,7 @@ func createCsvReader(reader *csv.Reader, bufferRowCount int) (*csvReader, error)
|
||||
|
||||
// GetRow gets a row from the buffer if present or advances the reader to the requested row. On the end of the file only nil gets returned.
|
||||
func (csv *csvReader) GetRow(row int) ([]string, error) {
|
||||
if row < len(csv.buffer) {
|
||||
if row < len(csv.buffer) && row >= 0 {
|
||||
return csv.buffer[row], nil
|
||||
}
|
||||
if csv.eof {
|
||||
@@ -131,7 +136,11 @@ func createCsvDiffSingle(reader *csv.Reader, celltype TableDiffCellType) ([]*Tab
|
||||
}
|
||||
cells := make([]*TableDiffCell, len(row))
|
||||
for j := 0; j < len(row); j++ {
|
||||
cells[j] = &TableDiffCell{LeftCell: row[j], Type: celltype}
|
||||
if celltype == TableDiffCellDel {
|
||||
cells[j] = &TableDiffCell{LeftCell: row[j], Type: celltype}
|
||||
} else {
|
||||
cells[j] = &TableDiffCell{RightCell: row[j], Type: celltype}
|
||||
}
|
||||
}
|
||||
rows = append(rows, &TableDiffRow{RowIdx: i, Cells: cells})
|
||||
i++
|
||||
@@ -141,185 +150,267 @@ func createCsvDiffSingle(reader *csv.Reader, celltype TableDiffCellType) ([]*Tab
|
||||
}
|
||||
|
||||
func createCsvDiff(diffFile *DiffFile, baseReader *csv.Reader, headReader *csv.Reader) ([]*TableDiffSection, error) {
|
||||
a, err := createCsvReader(baseReader, maxRowsToInspect)
|
||||
// Given the baseReader and headReader, we are going to create CSV Reader for each, baseCSVReader and b respectively
|
||||
baseCSVReader, err := createCsvReader(baseReader, maxRowsToInspect)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
headCSVReader, err := createCsvReader(headReader, maxRowsToInspect)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b, err := createCsvReader(headReader, maxRowsToInspect)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
// Initializing the mappings of base to head (a2bColMap) and head to base (b2aColMap) columns
|
||||
a2bColMap, b2aColMap := getColumnMapping(baseCSVReader, headCSVReader)
|
||||
|
||||
// Determines how many cols there will be in the diff table, which includes deleted columns from base and added columns to base
|
||||
numDiffTableCols := len(a2bColMap) + countUnmappedColumns(b2aColMap)
|
||||
if len(a2bColMap) < len(b2aColMap) {
|
||||
numDiffTableCols = len(b2aColMap) + countUnmappedColumns(a2bColMap)
|
||||
}
|
||||
|
||||
a2b, b2a := getColumnMapping(a, b)
|
||||
|
||||
columns := len(a2b) + countUnmappedColumns(b2a)
|
||||
if len(a2b) < len(b2a) {
|
||||
columns = len(b2a) + countUnmappedColumns(a2b)
|
||||
}
|
||||
|
||||
createDiffRow := func(aline int, bline int) (*TableDiffRow, error) {
|
||||
cells := make([]*TableDiffCell, columns)
|
||||
|
||||
if aline == 0 || bline == 0 {
|
||||
var (
|
||||
row []string
|
||||
celltype TableDiffCellType
|
||||
err error
|
||||
)
|
||||
if bline == 0 {
|
||||
row, err = a.GetRow(aline - 1)
|
||||
celltype = TableDiffCellDel
|
||||
} else {
|
||||
row, err = b.GetRow(bline - 1)
|
||||
celltype = TableDiffCellAdd
|
||||
}
|
||||
// createDiffTableRow takes the row # of the `a` line and `b` line of a diff (starting from 1), 0 if the line doesn't exist (undefined)
|
||||
// in the base or head respectively.
|
||||
// Returns a TableDiffRow which has the row index
|
||||
createDiffTableRow := func(aLineNum int, bLineNum int) (*TableDiffRow, error) {
|
||||
// diffTableCells is a row of the diff table. It will have cells for added, deleted, changed, and unchanged content, thus either
|
||||
// the same size as the head table or bigger
|
||||
diffTableCells := make([]*TableDiffCell, numDiffTableCols)
|
||||
var bRow *[]string
|
||||
if bLineNum > 0 {
|
||||
row, err := headCSVReader.GetRow(bLineNum - 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if row == nil {
|
||||
return nil, nil
|
||||
bRow = &row
|
||||
}
|
||||
var aRow *[]string
|
||||
if aLineNum > 0 {
|
||||
row, err := baseCSVReader.GetRow(aLineNum - 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for i := 0; i < len(row); i++ {
|
||||
cells[i] = &TableDiffCell{LeftCell: row[i], Type: celltype}
|
||||
}
|
||||
return &TableDiffRow{RowIdx: bline, Cells: cells}, nil
|
||||
aRow = &row
|
||||
}
|
||||
|
||||
arow, err := a.GetRow(aline - 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
brow, err := b.GetRow(bline - 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(arow) == 0 && len(brow) == 0 {
|
||||
if aRow == nil && bRow == nil {
|
||||
// No content
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
for i := 0; i < len(a2b); i++ {
|
||||
acell, _ := getCell(arow, i)
|
||||
if a2b[i] == unmappedColumn {
|
||||
cells[i] = &TableDiffCell{LeftCell: acell, Type: TableDiffCellDel}
|
||||
} else {
|
||||
bcell, _ := getCell(brow, a2b[i])
|
||||
aIndex := 0 // tracks where we are in the a2bColMap
bIndex := 0 // tracks where we are in the b2aColMap
colsAdded := 0 // incremented whenever we found a column was added
colsDeleted := 0 // incremented whenever a column was deleted

celltype := TableDiffCellChanged
if acell == bcell {
celltype = TableDiffCellEqual
// We loop until both the aIndex and bIndex are past the end of their col maps, at which point we are done
for aIndex < len(a2bColMap) || bIndex < len(b2aColMap) {
// Starting from where aIndex is currently pointing, we see if the map is -1 (deleted) and if it is, create a column to note that, increment, and look at the next aIndex
for aIndex < len(a2bColMap) && a2bColMap[aIndex] == -1 && (bIndex >= len(b2aColMap) || aIndex <= bIndex) {
||||
var aCell string
|
||||
if aRow != nil {
|
||||
if cell, err := getCell(*aRow, aIndex); err != nil {
|
||||
if err != ErrorUndefinedCell {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
aCell = cell
|
||||
}
|
||||
}
|
||||
diffTableCells[bIndex+colsDeleted] = &TableDiffCell{LeftCell: aCell, Type: TableDiffCellDel}
|
||||
aIndex++
|
||||
colsDeleted++
|
||||
}
|
||||
|
||||
// aIndex is now pointing to a column that also exists in b, or is at the end of a2bColMap. If the former,
|
||||
// we can just increment aIndex until it points to a -1 column or one greater than the current bIndex
|
||||
for aIndex < len(a2bColMap) && a2bColMap[aIndex] != -1 {
|
||||
aIndex++
|
||||
}
|
||||
|
||||
// Starting from where bIndex is currently pointing, we see if the map is -1 (added) and if it is, create a column to note that, increment, and look at the next bIndex
for bIndex < len(b2aColMap) && b2aColMap[bIndex] == -1 && (aIndex >= len(a2bColMap) || bIndex < aIndex) {
||||
var bCell string
|
||||
cellType := TableDiffCellAdd
|
||||
if bRow != nil {
|
||||
if cell, err := getCell(*bRow, bIndex); err != nil {
|
||||
if err != ErrorUndefinedCell {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
bCell = cell
|
||||
}
|
||||
} else {
|
||||
cellType = TableDiffCellDel
|
||||
}
|
||||
diffTableCells[bIndex+colsDeleted] = &TableDiffCell{RightCell: bCell, Type: cellType}
|
||||
bIndex++
|
||||
colsAdded++
|
||||
}
|
||||
|
||||
// bIndex is now pointing to a column that also exists in a, or is at the end of b2aColMap. If the former,
// we get the a col and b col values (if they exist), figure out if they are the same or not, and if the column moved, and add it to the diff table
for bIndex < len(b2aColMap) && b2aColMap[bIndex] != -1 && (aIndex >= len(a2bColMap) || bIndex < aIndex) {
||||
var diffTableCell TableDiffCell
|
||||
|
||||
var aCell *string
|
||||
// get the aCell value if the aRow exists
|
||||
if aRow != nil {
|
||||
if cell, err := getCell(*aRow, b2aColMap[bIndex]); err != nil {
|
||||
if err != ErrorUndefinedCell {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
aCell = &cell
|
||||
diffTableCell.LeftCell = cell
|
||||
}
|
||||
} else {
|
||||
diffTableCell.Type = TableDiffCellAdd
|
||||
}
|
||||
|
||||
cells[i] = &TableDiffCell{LeftCell: acell, RightCell: bcell, Type: celltype}
|
||||
}
|
||||
}
|
||||
for i := 0; i < len(b2a); i++ {
|
||||
if b2a[i] == unmappedColumn {
|
||||
bcell, _ := getCell(brow, i)
|
||||
cells[i] = &TableDiffCell{LeftCell: bcell, Type: TableDiffCellAdd}
|
||||
var bCell *string
|
||||
// get the bCell value if the bRow exists
|
||||
if bRow != nil {
|
||||
if cell, err := getCell(*bRow, bIndex); err != nil {
|
||||
if err != ErrorUndefinedCell {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
bCell = &cell
|
||||
diffTableCell.RightCell = cell
|
||||
}
|
||||
} else {
|
||||
diffTableCell.Type = TableDiffCellDel
|
||||
}
|
||||
|
||||
// if both a and b have a row that exists, compare the value and determine if the row has moved
|
||||
if aCell != nil && bCell != nil {
|
||||
moved := ((bIndex + colsDeleted) != (b2aColMap[bIndex] + colsAdded))
|
||||
if *aCell != *bCell {
|
||||
if moved {
|
||||
diffTableCell.Type = TableDiffCellMovedChanged
|
||||
} else {
|
||||
diffTableCell.Type = TableDiffCellChanged
|
||||
}
|
||||
} else {
|
||||
if moved {
|
||||
diffTableCell.Type = TableDiffCellMovedUnchanged
|
||||
} else {
|
||||
diffTableCell.Type = TableDiffCellUnchanged
|
||||
}
|
||||
diffTableCell.LeftCell = ""
|
||||
}
|
||||
}
|
||||
|
||||
// Add the diff column to the diff row
|
||||
diffTableCells[bIndex+colsDeleted] = &diffTableCell
|
||||
bIndex++
|
||||
}
|
||||
}
|
||||
|
||||
return &TableDiffRow{RowIdx: bline, Cells: cells}, nil
|
||||
return &TableDiffRow{RowIdx: bLineNum, Cells: diffTableCells}, nil
|
||||
}
|
||||
|
||||
var sections []*TableDiffSection
|
||||
// diffTableSections are TableDiffSections which represent the diffTableSections we get when doing a diff, each will be its own table in the view
|
||||
var diffTableSections []*TableDiffSection
|
||||
|
||||
for i, section := range diffFile.Sections {
|
||||
var rows []*TableDiffRow
|
||||
// Each section has multiple diffTableRows
|
||||
var diffTableRows []*TableDiffRow
|
||||
lines := tryMergeLines(section.Lines)
|
||||
// Loop through the merged lines to get each row of the CSV diff table for this section
|
||||
for j, line := range lines {
|
||||
if i == 0 && j == 0 && (line[0] != 1 || line[1] != 1) {
|
||||
diffRow, err := createDiffRow(1, 1)
|
||||
diffTableRow, err := createDiffTableRow(1, 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if diffRow != nil {
|
||||
rows = append(rows, diffRow)
|
||||
if diffTableRow != nil {
|
||||
diffTableRows = append(diffTableRows, diffTableRow)
|
||||
}
|
||||
}
|
||||
diffRow, err := createDiffRow(line[0], line[1])
|
||||
diffTableRow, err := createDiffTableRow(line[0], line[1])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if diffRow != nil {
|
||||
rows = append(rows, diffRow)
|
||||
if diffTableRow != nil {
|
||||
diffTableRows = append(diffTableRows, diffTableRow)
|
||||
}
|
||||
}
|
||||
|
||||
if len(rows) > 0 {
|
||||
sections = append(sections, &TableDiffSection{Rows: rows})
|
||||
if len(diffTableRows) > 0 {
|
||||
diffTableSections = append(diffTableSections, &TableDiffSection{Rows: diffTableRows})
|
||||
}
|
||||
}
|
||||
|
||||
return sections, nil
|
||||
return diffTableSections, nil
|
||||
}
|
||||
|
||||
// getColumnMapping creates a mapping of columns between a and b
|
||||
func getColumnMapping(a *csvReader, b *csvReader) ([]int, []int) {
|
||||
arow, _ := a.GetRow(0)
|
||||
brow, _ := b.GetRow(0)
|
||||
func getColumnMapping(baseCSVReader *csvReader, headCSVReader *csvReader) ([]int, []int) {
|
||||
baseRow, _ := baseCSVReader.GetRow(0)
|
||||
headRow, _ := headCSVReader.GetRow(0)
|
||||
|
||||
a2b := []int{}
|
||||
b2a := []int{}
|
||||
base2HeadColMap := []int{}
|
||||
head2BaseColMap := []int{}
|
||||
|
||||
if arow != nil {
|
||||
a2b = make([]int, len(arow))
|
||||
if baseRow != nil {
|
||||
base2HeadColMap = make([]int, len(baseRow))
|
||||
}
|
||||
if brow != nil {
|
||||
b2a = make([]int, len(brow))
|
||||
if headRow != nil {
|
||||
head2BaseColMap = make([]int, len(headRow))
|
||||
}
|
||||
|
||||
for i := 0; i < len(b2a); i++ {
|
||||
b2a[i] = unmappedColumn
|
||||
// Initializes all head2base mappings to be unmappedColumn (-1)
|
||||
for i := 0; i < len(head2BaseColMap); i++ {
|
||||
head2BaseColMap[i] = unmappedColumn
|
||||
}
|
||||
|
||||
bcol := 0
|
||||
for i := 0; i < len(a2b); i++ {
|
||||
a2b[i] = unmappedColumn
|
||||
|
||||
acell, ea := getCell(arow, i)
|
||||
if ea == nil {
|
||||
for j := bcol; j < len(b2a); j++ {
|
||||
bcell, eb := getCell(brow, j)
|
||||
if eb == nil && acell == bcell {
|
||||
a2b[i] = j
|
||||
b2a[j] = i
|
||||
bcol = j + 1
|
||||
break
|
||||
// Loop through the baseRow and see if there is a match in the head row
for i := 0; i < len(baseRow); i++ {
||||
base2HeadColMap[i] = unmappedColumn
|
||||
baseCell, err := getCell(baseRow, i)
|
||||
if err == nil {
|
||||
for j := 0; j < len(headRow); j++ {
|
||||
if head2BaseColMap[j] == -1 {
|
||||
headCell, err := getCell(headRow, j)
|
||||
if err == nil && baseCell == headCell {
|
||||
base2HeadColMap[i] = j
|
||||
head2BaseColMap[j] = i
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tryMapColumnsByContent(a, a2b, b, b2a)
|
||||
tryMapColumnsByContent(b, b2a, a, a2b)
|
||||
tryMapColumnsByContent(baseCSVReader, base2HeadColMap, headCSVReader, head2BaseColMap)
|
||||
tryMapColumnsByContent(headCSVReader, head2BaseColMap, baseCSVReader, base2HeadColMap)
|
||||
|
||||
return a2b, b2a
|
||||
return base2HeadColMap, head2BaseColMap
|
||||
}
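As a concrete illustration of what getColumnMapping produces, take the headers from "case 7" in the test file further down (first column deleted, a new column appended). The map values follow from the header-matching loop above, and the resulting row shape is the one the test asserts:

```go
// Base header: col1,col2,col3   Head header: col2,col3,col4   (unmappedColumn == -1)
//
// base2HeadColMap == []int{-1, 0, 1} // col1 has no head column; col2 -> 0; col3 -> 1
// head2BaseColMap == []int{1, 2, -1} // col2 <- 1; col3 <- 2; col4 has no base column
//
// Both maps have length 3 and head2BaseColMap contains one -1, so
// numDiffTableCols = 3 + countUnmappedColumns(head2BaseColMap) = 4,
// and createDiffTableRow emits four cells per row:
//   [Del, Unchanged, Unchanged, Add]  — exactly the cells asserted for case 7.
```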
|
||||
|
||||
// tryMapColumnsByContent tries to map missing columns by the content of the first lines.
|
||||
func tryMapColumnsByContent(a *csvReader, a2b []int, b *csvReader, b2a []int) {
|
||||
start := 0
|
||||
for i := 0; i < len(a2b); i++ {
|
||||
if a2b[i] == unmappedColumn {
|
||||
if b2a[start] == unmappedColumn {
|
||||
rows := util.Min(maxRowsToInspect, util.Max(0, util.Min(len(a.buffer), len(b.buffer))-1))
|
||||
func tryMapColumnsByContent(baseCSVReader *csvReader, base2HeadColMap []int, headCSVReader *csvReader, head2BaseColMap []int) {
|
||||
for i := 0; i < len(base2HeadColMap); i++ {
|
||||
headStart := 0
|
||||
for base2HeadColMap[i] == unmappedColumn && headStart < len(head2BaseColMap) {
|
||||
if head2BaseColMap[headStart] == unmappedColumn {
|
||||
rows := util.Min(maxRowsToInspect, util.Max(0, util.Min(len(baseCSVReader.buffer), len(headCSVReader.buffer))-1))
|
||||
same := 0
|
||||
for j := 1; j <= rows; j++ {
|
||||
acell, ea := getCell(a.buffer[j], i)
|
||||
bcell, eb := getCell(b.buffer[j], start+1)
|
||||
if ea == nil && eb == nil && acell == bcell {
|
||||
baseCell, baseErr := getCell(baseCSVReader.buffer[j], i)
|
||||
headCell, headErr := getCell(headCSVReader.buffer[j], headStart)
|
||||
if baseErr == nil && headErr == nil && baseCell == headCell {
|
||||
same++
|
||||
}
|
||||
}
|
||||
if (float32(same) / float32(rows)) > minRatioToMatch {
|
||||
a2b[i] = start + 1
|
||||
b2a[start+1] = i
|
||||
base2HeadColMap[i] = headStart
|
||||
head2BaseColMap[headStart] = i
|
||||
}
|
||||
}
|
||||
headStart++
|
||||
}
|
||||
start = a2b[i]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -328,7 +419,7 @@ func getCell(row []string, column int) (string, error) {
if column < len(row) {
return row[column], nil
}
return "", errors.New("Undefined column")
return "", ErrorUndefinedCell
}
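The switch from an ad-hoc errors.New at the call site to a package-level sentinel (its declaration is not shown in this excerpt; roughly the line below) is what lets createDiffTableRow above treat an out-of-range cell as simply empty while still propagating real errors through the `err != ErrorUndefinedCell` checks.

```go
// Assumed declaration of the sentinel used above; the exact message may differ.
var ErrorUndefinedCell = errors.New("undefined cell")
```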

// countUnmappedColumns returns the count of unmapped columns.

@@ -19,9 +19,9 @@ func TestCSVDiff(t *testing.T) {
diff string
base string
head string
cells [][2]TableDiffCellType
cells [][]TableDiffCellType
}{
||||
// case 0
|
||||
// case 0 - initial commit of a csv
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
@@ -29,11 +29,14 @@ func TestCSVDiff(t *testing.T) {
|
||||
@@ -0,0 +1,2 @@
|
||||
+col1,col2
|
||||
+a,a`,
|
||||
base: "",
|
||||
head: "col1,col2\na,a",
|
||||
cells: [][2]TableDiffCellType{{TableDiffCellAdd, TableDiffCellAdd}, {TableDiffCellAdd, TableDiffCellAdd}},
|
||||
base: "",
|
||||
head: `col1,col2
|
||||
a,a`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellAdd, TableDiffCellAdd},
|
||||
{TableDiffCellAdd, TableDiffCellAdd}},
|
||||
},
|
||||
// case 1
|
||||
// case 1 - adding 1 row at end
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
@@ -43,11 +46,17 @@ func TestCSVDiff(t *testing.T) {
|
||||
-a,a
|
||||
+a,a
|
||||
+b,b`,
|
||||
base: "col1,col2\na,a",
|
||||
head: "col1,col2\na,a\nb,b",
|
||||
cells: [][2]TableDiffCellType{{TableDiffCellEqual, TableDiffCellEqual}, {TableDiffCellEqual, TableDiffCellEqual}, {TableDiffCellAdd, TableDiffCellAdd}},
|
||||
base: `col1,col2
|
||||
a,a`,
|
||||
head: `col1,col2
|
||||
a,a
|
||||
b,b`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellUnchanged, TableDiffCellUnchanged}, {TableDiffCellUnchanged, TableDiffCellUnchanged},
|
||||
{TableDiffCellAdd, TableDiffCellAdd},
|
||||
},
|
||||
},
|
||||
// case 2
|
||||
// case 2 - row deleted
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
@@ -56,11 +65,17 @@ func TestCSVDiff(t *testing.T) {
|
||||
col1,col2
|
||||
-a,a
|
||||
b,b`,
|
||||
base: "col1,col2\na,a\nb,b",
|
||||
head: "col1,col2\nb,b",
|
||||
cells: [][2]TableDiffCellType{{TableDiffCellEqual, TableDiffCellEqual}, {TableDiffCellDel, TableDiffCellDel}, {TableDiffCellEqual, TableDiffCellEqual}},
|
||||
base: `col1,col2
|
||||
a,a
|
||||
b,b`,
|
||||
head: `col1,col2
|
||||
b,b`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellUnchanged, TableDiffCellUnchanged}, {TableDiffCellDel, TableDiffCellDel},
|
||||
{TableDiffCellUnchanged, TableDiffCellUnchanged},
|
||||
},
|
||||
},
|
||||
// case 3
|
||||
// case 3 - row changed
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
@@ -69,11 +84,16 @@ func TestCSVDiff(t *testing.T) {
|
||||
col1,col2
|
||||
-b,b
|
||||
+b,c`,
|
||||
base: "col1,col2\nb,b",
|
||||
head: "col1,col2\nb,c",
|
||||
cells: [][2]TableDiffCellType{{TableDiffCellEqual, TableDiffCellEqual}, {TableDiffCellEqual, TableDiffCellChanged}},
|
||||
base: `col1,col2
|
||||
b,b`,
|
||||
head: `col1,col2
|
||||
b,c`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellUnchanged, TableDiffCellUnchanged},
|
||||
{TableDiffCellUnchanged, TableDiffCellChanged},
|
||||
},
|
||||
},
|
||||
// case 4
|
||||
// case 4 - all deleted
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
@@ -81,9 +101,88 @@ func TestCSVDiff(t *testing.T) {
|
||||
@@ -1,2 +0,0 @@
|
||||
-col1,col2
|
||||
-b,c`,
|
||||
base: "col1,col2\nb,c",
|
||||
head: "",
|
||||
cells: [][2]TableDiffCellType{{TableDiffCellDel, TableDiffCellDel}, {TableDiffCellDel, TableDiffCellDel}},
|
||||
base: `col1,col2
|
||||
b,c`,
|
||||
head: "",
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellDel, TableDiffCellDel},
|
||||
{TableDiffCellDel, TableDiffCellDel},
|
||||
},
|
||||
},
|
||||
// case 5 - renames first column
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
+++ b/unittest.csv
|
||||
@@ -1,3 +1,3 @@
|
||||
-col1,col2,col3
|
||||
+cola,col2,col3
|
||||
a,b,c`,
|
||||
base: `col1,col2,col3
|
||||
a,b,c`,
|
||||
head: `cola,col2,col3
|
||||
a,b,c`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellDel, TableDiffCellAdd, TableDiffCellUnchanged, TableDiffCellUnchanged},
|
||||
{TableDiffCellDel, TableDiffCellAdd, TableDiffCellUnchanged, TableDiffCellUnchanged},
|
||||
},
|
||||
},
|
||||
// case 6 - inserts a column after first, deletes last column
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
+++ b/unittest.csv
|
||||
@@ -1,2 +1,2 @@
|
||||
-col1,col2,col3
|
||||
-a,b,c
|
||||
+col1,col1a,col2
|
||||
+a,d,b`,
|
||||
base: `col1,col2,col3
|
||||
a,b,c`,
|
||||
head: `col1,col1a,col2
|
||||
a,d,b`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellUnchanged, TableDiffCellAdd, TableDiffCellDel, TableDiffCellMovedUnchanged},
|
||||
{TableDiffCellUnchanged, TableDiffCellAdd, TableDiffCellDel, TableDiffCellMovedUnchanged},
|
||||
},
|
||||
},
|
||||
// case 7 - deletes first column, inserts column after last
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
+++ b/unittest.csv
|
||||
@@ -1,2 +1,2 @@
|
||||
-col1,col2,col3
|
||||
-a,b,c
|
||||
+col2,col3,col4
|
||||
+b,c,d`,
|
||||
base: `col1,col2,col3
|
||||
a,b,c`,
|
||||
head: `col2,col3,col4
|
||||
b,c,d`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellDel, TableDiffCellUnchanged, TableDiffCellUnchanged, TableDiffCellAdd},
|
||||
{TableDiffCellDel, TableDiffCellUnchanged, TableDiffCellUnchanged, TableDiffCellAdd},
|
||||
},
|
||||
},
|
||||
// case 8 - two columns deleted, 2 added
|
||||
{
|
||||
diff: `diff --git a/unittest.csv b/unittest.csv
|
||||
--- a/unittest.csv
|
||||
+++ b/unittest.csv
|
||||
@@ -1,2 +1,2 @@
|
||||
-col1,col2,col
|
||||
-a,b,c
|
||||
+col3,col4,col5
|
||||
+c,d,e`,
|
||||
base: `col1,col2,col3
|
||||
a,b,c`,
|
||||
head: `col3,col4,col5
|
||||
c,d,e`,
|
||||
cells: [][]TableDiffCellType{
|
||||
{TableDiffCellDel, TableDiffCellMovedUnchanged, TableDiffCellDel, TableDiffCellAdd, TableDiffCellAdd},
|
||||
{TableDiffCellDel, TableDiffCellMovedUnchanged, TableDiffCellDel, TableDiffCellAdd, TableDiffCellAdd},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -116,7 +215,7 @@ func TestCSVDiff(t *testing.T) {
assert.Len(t, section.Rows, len(c.cells), "case %d: should be %d rows", n, len(c.cells))

for i, row := range section.Rows {
assert.Len(t, row.Cells, 2, "case %d: row %d should have two cells", n, i)
assert.Len(t, row.Cells, len(c.cells[i]), "case %d: row %d should have %d cells", n, i, len(c.cells[i]))
for j, cell := range row.Cells {
assert.Equal(t, c.cells[i][j], cell.Type, "case %d: row %d cell %d should be equal", n, i, j)
}

@@ -12,12 +12,18 @@
{{if and (eq $i 0) (eq $j 0)}}
<th class="line-num">{{.RowIdx}}</th>
{{range $j, $cell := $row.Cells}}
{{if eq $cell.Type 2}}
{{if not $cell}}
<th></th>
{{else if eq $cell.Type 2}}
<th class="modified"><span class="removed-code">{{.LeftCell}}</span> <span class="added-code">{{.RightCell}}</span></th>
{{else if eq $cell.Type 3}}
<th class="added"><span class="added-code">{{.LeftCell}}</span></th>
<th class="added"><span class="added-code">{{.RightCell}}</span></th>
{{else if eq $cell.Type 4}}
<th class="removed"><span class="removed-code">{{.LeftCell}}</span></th>
{{else if eq $cell.Type 5}}
<th class="moved">{{.RightCell}}</th>
{{else if eq $cell.Type 6}}
<th class="moved"><span class="removed-code">{{.LeftCell}}</span> <span class="added-code">{{.RightCell}}</span></th>
{{else}}
<th>{{.RightCell}}</th>
{{end}}
@@ -25,12 +31,18 @@
{{else}}
<td class="line-num">{{if .RowIdx}}{{.RowIdx}}{{end}}</td>
{{range $j, $cell := $row.Cells}}
{{if eq $cell.Type 2}}
{{if not $cell}}
<td></td>
{{else if eq $cell.Type 2}}
<td class="modified"><span class="removed-code">{{.LeftCell}}</span> <span class="added-code">{{.RightCell}}</span></td>
{{else if eq $cell.Type 3}}
<td class="added"><span class="added-code">{{.LeftCell}}</span></td>
<td class="added"><span class="added-code">{{.RightCell}}</span></td>
{{else if eq $cell.Type 4}}
<td class="removed"><span class="removed-code">{{.LeftCell}}</span></td>
{{else if eq $cell.Type 5}}
<td class="moved">{{.RightCell}}</td>
{{else if eq $cell.Type 6}}
<td class="moved"><span class="removed-code">{{.LeftCell}}</span> <span class="added-code">{{.RightCell}}</span></td>
{{else}}
<td>{{.RightCell}}</td>
{{end}}
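The numeric comparisons in these two template hunks only make sense together with the cell-type constants. Assuming the ordering sketched near the top of this diff, the branches amount to the following mapping (a hypothetical helper, not code from the patch):

```go
// Hypothetical helper mirroring the template branches above.
func cellClass(t TableDiffCellType) string {
	switch t {
	case TableDiffCellChanged: // Type 2 -> old value struck through, new value highlighted
		return "modified"
	case TableDiffCellAdd: // Type 3
		return "added"
	case TableDiffCellDel: // Type 4
		return "removed"
	case TableDiffCellMovedUnchanged, TableDiffCellMovedChanged: // Types 5 and 6
		return "moved"
	default: // unchanged cells get no extra class and just print RightCell
		return ""
	}
}
```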
@@ -6,15 +6,15 @@
|
||||
<div class="image-diff" data-path-before="{{$imagePathOld}}" data-path-after="{{$imagePathNew}}">
|
||||
<div class="ui secondary pointing tabular top attached borderless menu stackable new-menu">
|
||||
<div class="new-menu-inner">
|
||||
<a class="item active" data-tab="diff-side-by-side">{{.root.i18n.Tr "repo.diff.image.side_by_side"}}</a>
|
||||
<a class="item active" data-tab="diff-side-by-side-{{ .file.Index }}">{{.root.i18n.Tr "repo.diff.image.side_by_side"}}</a>
|
||||
{{if and .blobBase .blobHead}}
|
||||
<a class="item" data-tab="diff-swipe">{{.root.i18n.Tr "repo.diff.image.swipe"}}</a>
|
||||
<a class="item" data-tab="diff-overlay">{{.root.i18n.Tr "repo.diff.image.overlay"}}</a>
|
||||
<a class="item" data-tab="diff-swipe-{{ .file.Index }}">{{.root.i18n.Tr "repo.diff.image.swipe"}}</a>
|
||||
<a class="item" data-tab="diff-overlay-{{ .file.Index }}">{{.root.i18n.Tr "repo.diff.image.overlay"}}</a>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
<div class="hide">
|
||||
<div class="ui bottom attached tab image-diff-container active" data-tab="diff-side-by-side">
|
||||
<div class="ui bottom attached tab image-diff-container active" data-tab="diff-side-by-side-{{ .file.Index }}">
|
||||
<div class="diff-side-by-side">
|
||||
{{if .blobBase }}
|
||||
<span class="side">
|
||||
@@ -49,7 +49,7 @@
|
||||
</div>
|
||||
</div>
|
||||
{{if and .blobBase .blobHead}}
|
||||
<div class="ui bottom attached tab image-diff-container" data-tab="diff-swipe">
|
||||
<div class="ui bottom attached tab image-diff-container" data-tab="diff-swipe-{{ .file.Index }}">
|
||||
<div class="diff-swipe">
|
||||
<div class="swipe-frame">
|
||||
<span class="before-container"><img class="image-before" /></span>
|
||||
@@ -63,7 +63,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="ui bottom attached tab image-diff-container" data-tab="diff-overlay">
|
||||
<div class="ui bottom attached tab image-diff-container" data-tab="diff-overlay-{{ .file.Index }}">
|
||||
<div class="diff-overlay">
|
||||
<div class="overlay-frame">
|
||||
<div class="ui centered">
|
||||
|
||||
4 vendor/github.com/microcosm-cc/bluemonday/.editorconfig generated vendored Normal file
@@ -0,0 +1,4 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
end_of_line = lf
|
||||
1 vendor/github.com/microcosm-cc/bluemonday/.gitattributes generated vendored Normal file
@@ -0,0 +1 @@
|
||||
* text=auto eol=lf
|
||||
3 vendor/github.com/microcosm-cc/bluemonday/CREDITS.md generated vendored
@@ -4,4 +4,5 @@
|
||||
1. Andrew Krasichkov @buglloc https://github.com/buglloc
|
||||
1. Mike Samuel mikesamuel@gmail.com
|
||||
1. Dmitri Shuralyov shurcooL@gmail.com
|
||||
1. https://github.com/opennota
|
||||
1. opennota https://github.com/opennota https://gitlab.com/opennota
|
||||
1. Tom Anthony https://www.tomanthony.co.uk/
|
||||
8 vendor/github.com/microcosm-cc/bluemonday/Makefile generated vendored
@@ -3,6 +3,7 @@
|
||||
# all: Builds the code locally after testing
|
||||
#
|
||||
# fmt: Formats the source files
|
||||
# fmt-check: Check if the source files are formated
|
||||
# build: Builds the code locally
|
||||
# vet: Vets the code
|
||||
# lint: Runs lint over the code (you do not need to fix everything)
|
||||
@@ -11,6 +12,8 @@
|
||||
#
|
||||
# install: Builds, tests and installs the code locally
|
||||
|
||||
GOFILES_NOVENDOR = $(shell find . -type f -name '*.go' -not -path "./vendor/*" -not -path "./.git/*")
|
||||
|
||||
.PHONY: all fmt build vet lint test cover install
|
||||
|
||||
# The first target is always the default action if `make` is called without
|
||||
@@ -19,7 +22,10 @@
|
||||
all: fmt vet test install
|
||||
|
||||
fmt:
|
||||
@gofmt -s -w ./$*
|
||||
@gofmt -s -w ${GOFILES_NOVENDOR}
|
||||
|
||||
fmt-check:
|
||||
@([ -z "$(shell gofmt -d $(GOFILES_NOVENDOR) | head)" ]) || (echo "Source is unformatted"; exit 1)
|
||||
|
||||
build:
|
||||
@go build
|
||||
|
||||
6 vendor/github.com/microcosm-cc/bluemonday/README.md generated vendored
@@ -180,7 +180,7 @@ p.AllowElementsMatching(regex.MustCompile(`^my-element-`))
|
||||
|
||||
Or add elements as a virtue of adding an attribute:
|
||||
```go
|
||||
// Not the recommended pattern, see the recommendation on using .Matching() below
|
||||
// Note the recommended pattern, see the recommendation on using .Matching() below
|
||||
p.AllowAttrs("nowrap").OnElements("td", "th")
|
||||
```
|
||||
|
||||
@@ -222,7 +222,7 @@ p.AllowElements("fieldset", "select", "option")
|
||||
|
||||
Although it's possible to handle inline CSS using `AllowAttrs` with a `Matching` rule, writing a single monolithic regular expression to safely process all inline CSS which you wish to allow is not a trivial task. Instead of attempting to do so, you can allow the `style` attribute on whichever element(s) you desire and use style policies to control and sanitize inline styles.
|
||||
|
||||
It is suggested that you use `Matching` (with a suitable regular expression)
|
||||
It is strongly recommended that you use `Matching` (with a suitable regular expression)
|
||||
`MatchingEnum`, or `MatchingHandler` to ensure each style matches your needs,
|
||||
but default handlers are supplied for most widely used styles.
|
||||
|
||||
@@ -379,6 +379,8 @@ Both examples exhibit the same issue, they declare attributes but do not then sp
|
||||
|
||||
We are not yet including any tools to help allow and sanitize CSS. Which means that unless you wish to do the heavy lifting in a single regular expression (inadvisable), **you should not allow the "style" attribute anywhere**.
|
||||
|
||||
In the same theme, both `<script>` and `<style>` are considered harmful. These elements (and their content) will not be rendered by default, and require you to explicitly set `p.AllowUnsafe(true)`. You should be aware that allowing these elements defeats the purpose of using a HTML sanitizer as you would be explicitly allowing either JavaScript (and any plainly written XSS) and CSS (which can modify a DOM to insert JS), and additionally but limitations in this library mean it is not aware of whether HTML is validly structured and that can allow these elements to bypass some of the safety mechanisms built into the [WhatWG HTML parser standard](https://html.spec.whatwg.org/multipage/parsing.html#parsing-main-inselect).
|
||||
|
||||
It is not the job of bluemonday to fix your bad HTML, it is merely the job of bluemonday to prevent malicious HTML getting through. If you have mismatched HTML elements, or non-conforming nesting of elements, those will remain. But if you have well-structured HTML bluemonday will not break it.
|
||||
|
||||
## TODO
|
||||
|
||||
1 vendor/github.com/microcosm-cc/bluemonday/go.mod generated vendored
@@ -3,7 +3,6 @@ module github.com/microcosm-cc/bluemonday
|
||||
go 1.16
|
||||
|
||||
require (
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d
|
||||
github.com/aymerick/douceur v0.2.0
|
||||
github.com/gorilla/css v1.0.0 // indirect
|
||||
golang.org/x/net v0.0.0-20210614182718-04defd469f4e
|
||||
|
||||
7 vendor/github.com/microcosm-cc/bluemonday/go.sum generated vendored
@@ -1,17 +1,10 @@
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ=
|
||||
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
|
||||
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
||||
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
||||
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
|
||||
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
|
||||
golang.org/x/net v0.0.0-20210421230115-4e50805a0758 h1:aEpZnXcAmXkd6AvLb2OPt+EN1Zu/8Ne3pCqPjja5PXY=
|
||||
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
|
||||
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b h1:k+E048sYJHyVnsr1GDrRZWQ32D2C7lWs9JRc0bel53A=
|
||||
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
|
||||
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
|
||||
30 vendor/github.com/microcosm-cc/bluemonday/policy.go generated vendored
@@ -134,6 +134,19 @@ type Policy struct {
|
||||
setOfElementsMatchingAllowedWithoutAttrs []*regexp.Regexp
|
||||
|
||||
setOfElementsToSkipContent map[string]struct{}
|
||||
|
||||
// Permits fundamentally unsafe elements.
|
||||
//
|
||||
// If false (default) then elements such as `style` and `script` will not be
|
||||
// permitted even if declared in a policy. These elements when combined with
|
||||
// untrusted input cannot be safely handled by bluemonday at this point in
|
||||
// time.
|
||||
//
|
||||
// If true then `style` and `script` would be permitted by bluemonday if a
|
||||
// policy declares them. However this is not recommended under any circumstance
|
||||
// and can lead to XSS being rendered thus defeating the purpose of using a
|
||||
// HTML sanitizer.
|
||||
allowUnsafe bool
|
||||
}
|
||||
|
||||
type attrPolicy struct {
|
||||
@@ -714,6 +727,23 @@ func (p *Policy) AllowElementsContent(names ...string) *Policy {
|
||||
return p
|
||||
}
|
||||
|
||||
// AllowUnsafe permits fundamentally unsafe elements.
|
||||
//
|
||||
// If false (default) then elements such as `style` and `script` will not be
|
||||
// permitted even if declared in a policy. These elements when combined with
|
||||
// untrusted input cannot be safely handled by bluemonday at this point in
|
||||
// time.
|
||||
//
|
||||
// If true then `style` and `script` would be permitted by bluemonday if a
|
||||
// policy declares them. However this is not recommended under any circumstance
|
||||
// and can lead to XSS being rendered thus defeating the purpose of using a
|
||||
// HTML sanitizer.
|
||||
func (p *Policy) AllowUnsafe(allowUnsafe bool) *Policy {
|
||||
p.init()
|
||||
p.allowUnsafe = allowUnsafe
|
||||
return p
|
||||
}
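A short usage sketch of the new API added above (hypothetical caller code, not part of the vendored diff): even a policy that allows the element keeps stripping it unless unsafe mode is switched on explicitly.

```go
// Hypothetical usage of Policy.AllowUnsafe introduced in bluemonday v1.0.16.
p := bluemonday.NewPolicy()
p.AllowElements("script")
// Without AllowUnsafe(true) the <script> element (and its content) is still dropped.
p.AllowUnsafe(true)
out := p.Sanitize(`<script>alert("hi")</script>`)
_ = out
```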
|
||||
|
||||
// addDefaultElementsWithoutAttrs adds the HTML elements that we know are valid
|
||||
// without any attributes to an internal map.
|
||||
// i.e. we know that <table> is valid, but <bdo> isn't valid as the "dir" attr
|
||||
|
||||
57 vendor/github.com/microcosm-cc/bluemonday/sanitize.go generated vendored
@@ -130,7 +130,7 @@ func escapeUrlComponent(w stringWriterWriter, val string) error {
|
||||
return err
|
||||
}
|
||||
|
||||
// Query represents a single part of the query string, a query param
|
||||
// Query represents a single part of the query string, a query param
|
||||
type Query struct {
|
||||
Key string
|
||||
Value string
|
||||
@@ -293,6 +293,17 @@ func (p *Policy) sanitize(r io.Reader, w io.Writer) error {
|
||||
|
||||
mostRecentlyStartedToken = normaliseElementName(token.Data)
|
||||
|
||||
switch normaliseElementName(token.Data) {
|
||||
case `script`:
|
||||
if !p.allowUnsafe {
|
||||
continue
|
||||
}
|
||||
case `style`:
|
||||
if !p.allowUnsafe {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
aps, ok := p.elsAndAttrs[token.Data]
|
||||
if !ok {
|
||||
aa, matched := p.matchRegex(token.Data)
|
||||
@@ -341,6 +352,17 @@ func (p *Policy) sanitize(r io.Reader, w io.Writer) error {
|
||||
mostRecentlyStartedToken = ""
|
||||
}
|
||||
|
||||
switch normaliseElementName(token.Data) {
|
||||
case `script`:
|
||||
if !p.allowUnsafe {
|
||||
continue
|
||||
}
|
||||
case `style`:
|
||||
if !p.allowUnsafe {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if skipClosingTag && closingTagToSkipStack[len(closingTagToSkipStack)-1] == token.Data {
|
||||
closingTagToSkipStack = closingTagToSkipStack[:len(closingTagToSkipStack)-1]
|
||||
if len(closingTagToSkipStack) == 0 {
|
||||
@@ -386,6 +408,17 @@ func (p *Policy) sanitize(r io.Reader, w io.Writer) error {
|
||||
|
||||
case html.SelfClosingTagToken:
|
||||
|
||||
switch normaliseElementName(token.Data) {
|
||||
case `script`:
|
||||
if !p.allowUnsafe {
|
||||
continue
|
||||
}
|
||||
case `style`:
|
||||
if !p.allowUnsafe {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
aps, ok := p.elsAndAttrs[token.Data]
|
||||
if !ok {
|
||||
aa, matched := p.matchRegex(token.Data)
|
||||
@@ -425,14 +458,22 @@ func (p *Policy) sanitize(r io.Reader, w io.Writer) error {
|
||||
case `script`:
|
||||
// not encouraged, but if a policy allows JavaScript we
|
||||
// should not HTML escape it as that would break the output
|
||||
if _, err := buff.WriteString(token.Data); err != nil {
|
||||
return err
|
||||
//
|
||||
// requires p.AllowUnsafe()
|
||||
if p.allowUnsafe {
|
||||
if _, err := buff.WriteString(token.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case "style":
|
||||
// not encouraged, but if a policy allows CSS styles we
|
||||
// should not HTML escape it as that would break the output
|
||||
if _, err := buff.WriteString(token.Data); err != nil {
|
||||
return err
|
||||
//
|
||||
// requires p.AllowUnsafe()
|
||||
if p.allowUnsafe {
|
||||
if _, err := buff.WriteString(token.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
default:
|
||||
// HTML escape the text
|
||||
@@ -524,11 +565,11 @@ attrsLoop:
|
||||
for _, ap := range apl {
|
||||
if ap.regexp != nil {
|
||||
if ap.regexp.MatchString(htmlAttr.Val) {
|
||||
htmlAttr.Val = escapeAttribute(htmlAttr.Val)
|
||||
htmlAttr.Val = escapeAttribute(htmlAttr.Val)
|
||||
cleanAttrs = append(cleanAttrs, htmlAttr)
|
||||
}
|
||||
} else {
|
||||
htmlAttr.Val = escapeAttribute(htmlAttr.Val)
|
||||
htmlAttr.Val = escapeAttribute(htmlAttr.Val)
|
||||
cleanAttrs = append(cleanAttrs, htmlAttr)
|
||||
}
|
||||
}
|
||||
@@ -1058,4 +1099,4 @@ func escapeAttribute(val string) string {
|
||||
val = strings.Replace(val, string([]rune{'\u00A0'}), ` `, -1)
|
||||
val = strings.Replace(val, `"`, `"`, -1)
|
||||
return val
|
||||
}
|
||||
}
|
||||
|
||||
1 vendor/github.com/microcosm-cc/bluemonday/stringwriterwriter_go1.12.go generated vendored
@@ -1,3 +1,4 @@
|
||||
//go:build go1.12
|
||||
// +build go1.12
|
||||
|
||||
package bluemonday
|
||||
|
||||
1 vendor/github.com/microcosm-cc/bluemonday/stringwriterwriter_ltgo1.12.go generated vendored
@@ -1,3 +1,4 @@
|
||||
//go:build go1.1 && !go1.12
|
||||
// +build go1.1,!go1.12
|
||||
|
||||
package bluemonday
|
||||
|
||||
26 vendor/golang.org/x/net/internal/socket/complete_dontwait.go generated vendored Normal file
@@ -0,0 +1,26 @@
|
||||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
|
||||
// +build darwin dragonfly freebsd linux netbsd openbsd solaris
|
||||
|
||||
package socket
|
||||
|
||||
import (
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// ioComplete checks the flags and result of a syscall, to be used as return
|
||||
// value in a syscall.RawConn.Read or Write callback.
|
||||
func ioComplete(flags int, operr error) bool {
|
||||
if flags&syscall.MSG_DONTWAIT != 0 {
|
||||
// Caller explicitly said don't wait, so always return immediately.
|
||||
return true
|
||||
}
|
||||
if operr == syscall.EAGAIN || operr == syscall.EWOULDBLOCK {
|
||||
// No data available, block for I/O and try again.
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
22 vendor/golang.org/x/net/internal/socket/complete_nodontwait.go generated vendored Normal file
@@ -0,0 +1,22 @@
|
||||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build aix || windows || zos
|
||||
// +build aix windows zos
|
||||
|
||||
package socket
|
||||
|
||||
import (
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// ioComplete checks the flags and result of a syscall, to be used as return
|
||||
// value in a syscall.RawConn.Read or Write callback.
|
||||
func ioComplete(flags int, operr error) bool {
|
||||
if operr == syscall.EAGAIN || operr == syscall.EWOULDBLOCK {
|
||||
// No data available, block for I/O and try again.
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
11 vendor/golang.org/x/net/internal/socket/rawconn_mmsg.go generated vendored
@@ -10,7 +10,6 @@ package socket
|
||||
import (
|
||||
"net"
|
||||
"os"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
func (c *Conn) recvMsgs(ms []Message, flags int) (int, error) {
|
||||
@@ -28,10 +27,7 @@ func (c *Conn) recvMsgs(ms []Message, flags int) (int, error) {
|
||||
var n int
|
||||
fn := func(s uintptr) bool {
|
||||
n, operr = recvmmsg(s, hs, flags)
|
||||
if operr == syscall.EAGAIN {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return ioComplete(flags, operr)
|
||||
}
|
||||
if err := c.c.Read(fn); err != nil {
|
||||
return n, err
|
||||
@@ -60,10 +56,7 @@ func (c *Conn) sendMsgs(ms []Message, flags int) (int, error) {
|
||||
var n int
|
||||
fn := func(s uintptr) bool {
|
||||
n, operr = sendmmsg(s, hs, flags)
|
||||
if operr == syscall.EAGAIN {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return ioComplete(flags, operr)
|
||||
}
|
||||
if err := c.c.Write(fn); err != nil {
|
||||
return n, err
|
||||
|
||||
11 vendor/golang.org/x/net/internal/socket/rawconn_msg.go generated vendored
@@ -9,7 +9,6 @@ package socket
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
func (c *Conn) recvMsg(m *Message, flags int) error {
|
||||
@@ -25,10 +24,7 @@ func (c *Conn) recvMsg(m *Message, flags int) error {
|
||||
var n int
|
||||
fn := func(s uintptr) bool {
|
||||
n, operr = recvmsg(s, &h, flags)
|
||||
if operr == syscall.EAGAIN || operr == syscall.EWOULDBLOCK {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return ioComplete(flags, operr)
|
||||
}
|
||||
if err := c.c.Read(fn); err != nil {
|
||||
return err
|
||||
@@ -64,10 +60,7 @@ func (c *Conn) sendMsg(m *Message, flags int) error {
|
||||
var n int
|
||||
fn := func(s uintptr) bool {
|
||||
n, operr = sendmsg(s, &h, flags)
|
||||
if operr == syscall.EAGAIN || operr == syscall.EWOULDBLOCK {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return ioComplete(flags, operr)
|
||||
}
|
||||
if err := c.c.Write(fn); err != nil {
|
||||
return err
|
||||
|
||||
4 vendor/modules.txt vendored
@@ -614,7 +614,7 @@ github.com/mholt/acmez/acme
|
||||
# github.com/mholt/archiver/v3 v3.5.0
|
||||
## explicit
|
||||
github.com/mholt/archiver/v3
|
||||
# github.com/microcosm-cc/bluemonday v1.0.15
|
||||
# github.com/microcosm-cc/bluemonday v1.0.16
|
||||
## explicit
|
||||
github.com/microcosm-cc/bluemonday
|
||||
github.com/microcosm-cc/bluemonday/css
|
||||
@@ -886,7 +886,7 @@ golang.org/x/crypto/ssh/knownhosts
|
||||
# golang.org/x/mod v0.4.2
|
||||
golang.org/x/mod/module
|
||||
golang.org/x/mod/semver
|
||||
# golang.org/x/net v0.0.0-20210614182718-04defd469f4e
|
||||
# golang.org/x/net v0.0.0-20211020060615-d418f374d309
|
||||
## explicit
|
||||
golang.org/x/net/bpf
|
||||
golang.org/x/net/context
|
||||
|
||||
@@ -788,10 +788,13 @@ async function initRepository() {

// Commit statuses
$('.commit-statuses-trigger').each(function () {
const positionRight = $('.repository.file.list').length > 0 || $('.repository.diff').length > 0;
const popupPosition = positionRight ? 'right center' : 'left center';
$(this)
.popup({
on: 'click',
position: ($('.repository.file.list').length > 0 ? 'right center' : 'left center'),
lastResort: popupPosition, // prevent error message "Popup does not fit within the boundaries of the viewport"
position: popupPosition,
});
});

@@ -76,8 +76,10 @@
|
||||
--color-diff-removed-word-bg: #fdb8c0;
|
||||
--color-diff-added-word-bg: #acf2bd;
|
||||
--color-diff-removed-row-bg: #ffeef0;
|
||||
--color-diff-moved-row-bg: #f1f8d1;
|
||||
--color-diff-added-row-bg: #e6ffed;
|
||||
--color-diff-removed-row-border: #f1c0c0;
|
||||
--color-diff-moved-row-border: #d0e27f;
|
||||
--color-diff-added-row-border: #e6ffed;
|
||||
--color-diff-inactive: #f2f2f2;
|
||||
/* target-based colors */
|
||||
|
||||
@@ -60,6 +60,10 @@
|
||||
.overflow-visible {
|
||||
overflow: visible;
|
||||
}
|
||||
/* fix alignment of PR popup in branches table */
|
||||
table .ui.popup {
|
||||
text-align: left;
|
||||
}
|
||||
}
|
||||
|
||||
.ui.user.list {
|
||||
|
||||
@@ -1,6 +1,19 @@
|
||||
.repository {
|
||||
.commit-statuses .list > .item {
|
||||
line-height: 2;
|
||||
.popup.commit-statuses {
|
||||
// we had better limit the max size of the popup, and add scroll bars if the content size is too large.
|
||||
// otherwise some part of the popup will be hidden by viewport boundary
|
||||
max-height: 45vh;
|
||||
max-width: 60vw;
|
||||
overflow: auto;
|
||||
padding: 0;
|
||||
|
||||
.list {
|
||||
padding: .8em; // to make the scrollbar align to the border, we move the padding from outer `.popup` to this inside `.list`
|
||||
|
||||
> .item {
|
||||
line-height: 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.repo-header {
|
||||
@@ -1500,6 +1513,12 @@
|
||||
background-color: var(--color-diff-removed-row-bg) !important;
|
||||
}
|
||||
|
||||
td.moved,
|
||||
th.moved,
|
||||
tr.moved {
|
||||
background-color: var(--color-diff-moved-row-bg) !important;
|
||||
}
|
||||
|
||||
tbody.section {
|
||||
border-top: 2px solid var(--color-secondary);
|
||||
}
|
||||
|
||||
@@ -54,7 +54,6 @@
|
||||
}
|
||||
|
||||
.swipe-bar {
|
||||
z-index: 100;
|
||||
position: absolute;
|
||||
height: 100%;
|
||||
top: 0;
|
||||
|
||||
@@ -158,9 +158,18 @@
|
||||
|
||||
.task-list-item {
|
||||
list-style-type: none;
|
||||
position: relative;
|
||||
line-height: 1.5rem;
|
||||
min-height: 1.5rem; // to render a checkbox list without content `- [ ]`, we need this min-height to make sure the <li> can be visible
|
||||
|
||||
input[type="checkbox"] {
|
||||
margin: 0 6px .25em -1.6em;
|
||||
position: absolute;
|
||||
top: .25em;
|
||||
left: -1.6em;
|
||||
}
|
||||
|
||||
p {
|
||||
line-height: 1.5rem;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -71,8 +71,10 @@
|
||||
--color-diff-removed-word-bg: #6f3333;
|
||||
--color-diff-added-word-bg: #3c653c;
|
||||
--color-diff-removed-row-bg: #3c2626;
|
||||
--color-diff-moved-row-bg: #818044;
|
||||
--color-diff-added-row-bg: #283e2d;
|
||||
--color-diff-removed-row-border: #634343;
|
||||
--color-diff-moved-row-border: #bcca6f;
|
||||
--color-diff-added-row-border: #314a37;
|
||||
--color-diff-inactive: #353846;
|
||||
/* target-based colors */
|
||||
|
||||