Compare commits


1 commit

Author: Jimmy Praet
SHA1: 8df3d6575a
Message: Change @every 24h default schedules to @midnight (#16431)
Date: 2021-07-15 11:55:48 -04:00
36 changed files with 119 additions and 497 deletions

View File

@@ -404,7 +404,7 @@ steps:
- name: update
pull: default
-image: alpine:3.13
+image: alpine:3.14
commands:
- ./build/update-locales.sh

View File

@@ -4,19 +4,6 @@ This changelog goes through all the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.io).
-## [1.15.0-rc2](https://github.com/go-gitea/gitea/releases/tag/v1.15.0-rc2) - 2021-07-22
-* BUGFIXES
-  * Restore creation of git-daemon-export-ok files (#16508) (#16514)
-  * Fix data race in bleve indexer (#16474) (#16509)
-  * Restore CORS on git smart http protocol (#16496) (#16506)
-  * Fix race in log (#16490) (#16505)
-  * Fix prepareWikiFileName to respect existing unescaped files (#16487) (#16498)
-  * Make cancel from CatFileBatch and CatFileBatchCheck wait for the command to end (#16479) (#16480)
-  * Update notification table with only latest data (#16445) (#16469)
-  * Revert to use alpine 3.13 to fix multiple seccomp related issues with Docker <20 (#16451) (#16452)
-  * Fix crash following ldap authentication update (#16447) (#16448)
## [1.15.0-rc1](https://github.com/go-gitea/gitea/releases/tag/v1.15.0-rc1) - 2021-07-15
* BREAKING
@@ -229,18 +216,6 @@ been added to each release, please refer to the [blog](https://blog.gitea.io).
* Remove utf8 option from installation page (#16126)
* Use Wants= over Requires= in systemd file (#15897)
-## [1.14.5](https://github.com/go-gitea/gitea/releases/tag/v1.14.5) - 2021-07-16
-* SECURITY
-  * Hide mirror passwords on repo settings page (#16022) (#16355)
-  * Update bluemonday to v1.0.15 (#16379) (#16380)
-* BUGFIXES
-  * Retry rename on lock induced failures (#16435) (#16439)
-  * Validate issue index before querying DB (#16406) (#16410)
-  * Fix crash following ldap authentication update (#16447) (#16449)
-* ENHANCEMENTS
-  * Redirect on bad CSRF instead of presenting bad page (#14937) (#16378)
## [1.14.4](https://github.com/go-gitea/gitea/releases/tag/v1.14.4) - 2021-07-06
* BUGFIXES

View File

@@ -1,7 +1,7 @@
###################################
#Build stage
-FROM golang:1.16-alpine3.13 AS build-env
+FROM golang:1.16-alpine3.14 AS build-env
ARG GOPROXY
ENV GOPROXY ${GOPROXY:-direct}
@@ -25,7 +25,7 @@ RUN if [ -n "${GITEA_VERSION}" ]; then git checkout "${GITEA_VERSION}"; fi \
# Begin env-to-ini build
RUN go build contrib/environment-to-ini/environment-to-ini.go
-FROM alpine:3.13
+FROM alpine:3.14
LABEL maintainer="maintainers@gitea.io"
EXPOSE 22 3000

View File

@@ -1,7 +1,7 @@
###################################
#Build stage
-FROM golang:1.16-alpine3.13 AS build-env
+FROM golang:1.16-alpine3.14 AS build-env
ARG GOPROXY
ENV GOPROXY ${GOPROXY:-direct}
@@ -25,7 +25,7 @@ RUN if [ -n "${GITEA_VERSION}" ]; then git checkout "${GITEA_VERSION}"; fi \
# Begin env-to-ini build
RUN go build contrib/environment-to-ini/environment-to-ini.go
-FROM alpine:3.13
+FROM alpine:3.14
LABEL maintainer="maintainers@gitea.io"
EXPOSE 2222 3000

View File

@@ -1671,7 +1671,7 @@ PATH =
;; Notice if not success
;NO_SUCCESS_NOTICE = false
;; Time interval for job to run
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;; Archives created more than OLDER_THAN ago are subject to deletion
;OLDER_THAN = 24h
@@ -1697,7 +1697,7 @@ PATH =
;[cron.repo_health_check]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;; Enable running Repository health check task periodically.
;ENABLED = true
;; Run Repository health check task when Gitea starts.
@@ -1722,7 +1722,7 @@ PATH =
;RUN_AT_START = true
;; Notice if not success
;NO_SUCCESS_NOTICE = false
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -1736,7 +1736,7 @@ PATH =
;; Notice if not success
;NO_SUCCESS_NOTICE = false
;; Interval as a duration between each synchronization. (default every 24h)
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -1751,7 +1751,7 @@ PATH =
;; Notice if not success
;NO_SUCCESS_NOTICE = false
;; Interval as a duration between each synchronization (default every 24h)
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;; Create new users, update existing user data and disable users that are not in external source anymore (default)
;; or only create new users if UPDATE_EXISTING is set to false
;UPDATE_EXISTING = true
@@ -1769,7 +1769,7 @@ PATH =
;; Notice if not success
;NO_SUCCESS_NOTICE = false
;; Interval as a duration between each synchronization (default every 24h)
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;; deleted branches than OLDER_THAN ago are subject to deletion
;OLDER_THAN = 24h
@@ -1785,7 +1785,7 @@ PATH =
;; Whether to always run at start up time (if ENABLED)
;RUN_AT_START = false
;; Time interval for job to run
-;SCHEDULE = @every 24h
+;SCHEDULE = @midnight
;; OlderThan or PerWebhook. How the records are removed, either by age (i.e. how long ago hook_task record was delivered) or by the number to keep per webhook (i.e. keep most recent x deliveries per webhook).
;CLEANUP_TYPE = OlderThan
;; If CLEANUP_TYPE is set to OlderThan, then any delivered hook_task records older than this expression will be deleted.
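The substance of this commit in one line: `@every 24h` is an interval measured from process start, so each restart shifts the job to a new wall-clock time, whereas `@midnight` is the predefined descriptor for `0 0 * * *` and always fires at 00:00. A minimal sketch of the difference, using `robfig/cron/v3` purely for illustration (Gitea bundles its own cron library, but the descriptor grammar is the same):

```go
package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	// Pretend the Gitea process started at 09:30 UTC.
	start := time.Date(2021, 7, 15, 9, 30, 0, 0, time.UTC)
	for _, spec := range []string{"@every 24h", "@midnight"} {
		sched, err := cron.ParseStandard(spec)
		if err != nil {
			panic(err)
		}
		// "@every 24h" fires a full day after startup (09:30 again);
		// "@midnight" fires at the next 00:00 regardless of start time.
		fmt.Printf("%-11s -> next run %s\n", spec, sched.Next(start).Format(time.RFC3339))
	}
}
```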

View File

@@ -32,7 +32,7 @@ You absolutely must not place a general ToS or privacy statement that implies th
Create or append to `/path/to/custom/templates/custom/extra_links_footer.tmpl`:
```go
<a class="item" href="{{AppSubUrl}}/assets/privacy.html">Privacy Policy</a>
<a class="item" href="{{AppSubUrl}}/privacy.html">Privacy Policy</a>
```
Restart Gitea to see the changes.
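A small sketch of how that fragment is rendered, with `AppSubUrl` stubbed through a `FuncMap` (the `/gitea` value is only an example, and the `/assets` form from the left-hand side of the hunk is used): Gitea exposes `AppSubUrl` to custom templates, which keeps the link correct when the instance is served under a sub-path.

```go
package main

import (
	"html/template"
	"os"
)

func main() {
	footer := `<a class="item" href="{{AppSubUrl}}/assets/privacy.html">Privacy Policy</a>`
	tmpl := template.Must(template.New("footer").
		Funcs(template.FuncMap{"AppSubUrl": func() string { return "/gitea" }}).
		Parse(footer))
	// Prints: <a class="item" href="/gitea/assets/privacy.html">Privacy Policy</a>
	_ = tmpl.Execute(os.Stdout, nil)
}
```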

View File

@@ -740,7 +740,7 @@ NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take ef
- `ENABLED`: **true**: Enable service.
- `RUN_AT_START`: **true**: Run tasks at start up time (if ENABLED).
-- `SCHEDULE`: **@every 24h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`.
+- `SCHEDULE`: **@midnight**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`.
- `OLDER_THAN`: **24h**: Archives created more than `OLDER_THAN` ago are subject to deletion, e.g. `12h`.
#### Cron - Update Mirrors (`cron.update_mirrors`)
@@ -750,31 +750,31 @@ NB: You must have `DISABLE_ROUTER_LOG` set to `false` for this option to take ef
#### Cron - Repository Health Check (`cron.repo_health_check`)
-- `SCHEDULE`: **@every 24h**: Cron syntax for scheduling repository health check.
+- `SCHEDULE`: **@midnight**: Cron syntax for scheduling repository health check.
- `TIMEOUT`: **60s**: Time duration syntax for health check execution timeout.
- `ARGS`: **\<empty\>**: Arguments for command `git fsck`, e.g. `--unreachable --tags`. See more on http://git-scm.com/docs/git-fsck
#### Cron - Repository Statistics Check (`cron.check_repo_stats`)
- `RUN_AT_START`: **true**: Run repository statistics check at start time.
-- `SCHEDULE`: **@every 24h**: Cron syntax for scheduling repository statistics check.
+- `SCHEDULE`: **@midnight**: Cron syntax for scheduling repository statistics check.
### Cron - Cleanup hook_task Table (`cron.cleanup_hook_task_table`)
- `ENABLED`: **true**: Enable cleanup hook_task job.
- `RUN_AT_START`: **false**: Run cleanup hook_task at start time (if ENABLED).
-- `SCHEDULE`: **@every 24h**: Cron syntax for cleaning hook_task table.
+- `SCHEDULE`: **@midnight**: Cron syntax for cleaning hook_task table.
- `CLEANUP_TYPE` **OlderThan** OlderThan or PerWebhook Method to cleanup hook_task, either by age (i.e. how long ago hook_task record was delivered) or by the number to keep per webhook (i.e. keep most recent x deliveries per webhook).
- `OLDER_THAN`: **168h**: If CLEANUP_TYPE is set to OlderThan, then any delivered hook_task records older than this expression will be deleted.
- `NUMBER_TO_KEEP`: **10**: If CLEANUP_TYPE is set to PerWebhook, this is number of hook_task records to keep for a webhook (i.e. keep the most recent x deliveries).
#### Cron - Update Migration Poster ID (`cron.update_migration_poster_id`)
-- `SCHEDULE`: **@every 24h** : Interval as a duration between each synchronization, it will always attempt synchronization when the instance starts.
+- `SCHEDULE`: **@midnight** : Interval as a duration between each synchronization, it will always attempt synchronization when the instance starts.
#### Cron - Sync External Users (`cron.sync_external_users`)
-- `SCHEDULE`: **@every 24h** : Interval as a duration between each synchronization, it will always attempt synchronization when the instance starts.
+- `SCHEDULE`: **@midnight** : Interval as a duration between each synchronization, it will always attempt synchronization when the instance starts.
- `UPDATE_EXISTING`: **true**: Create new users, update existing user data and disable users that are not in external source anymore (default) or only create new users if UPDATE_EXISTING is set to false.
### Extended cron tasks (not enabled by default)

View File

@@ -257,18 +257,18 @@ test01.xls: application/vnd.ms-excel; charset=binary
### Cron - Repository Health Check (`cron.repo_health_check`)
-- `SCHEDULE`: Cron syntax for the repository health check, e.g. `@every 24h`.
+- `SCHEDULE`: Cron syntax for the repository health check, e.g. `@midnight`.
- `TIMEOUT`: Timeout for the repository health check, e.g. `60s`.
- `ARGS`: Arguments for the `git fsck` command, e.g. `--unreachable --tags`
### Cron - Repository Statistics Check (`cron.check_repo_stats`)
- `RUN_AT_START`: Whether to run the repository statistics check automatically at startup.
-- `SCHEDULE`: Cron syntax for the repository statistics check, e.g. `@every 24h`.
+- `SCHEDULE`: Cron syntax for the repository statistics check, e.g. `@midnight`.
### Cron - Update Migration Poster ID (`cron.update_migration_poster_id`)
-- `SCHEDULE`: **@every 24h** : Interval between each synchronization. This task always runs automatically at startup.
+- `SCHEDULE`: **@midnight** : Interval between each synchronization. This task always runs automatically at startup.
## Git (`git`)

View File

@@ -102,7 +102,7 @@ For instance, let's say you are in Germany and must add the famously legally-req
just place it under your "$GITEA_CUSTOM/public/" directory (for instance `$GITEA_CUSTOM/public/impressum.html`) and put a link to it in either `$GITEA_CUSTOM/templates/custom/extra_links.tmpl` or `$GITEA_CUSTOM/templates/custom/extra_links_footer.tmpl`.
To match the current style, the link should have the class name "item", and you can use `{{AppSubUrl}}` to get the base URL:
`<a class="item" href="{{AppSubUrl}}/assets/impressum.html">Impressum</a>`
`<a class="item" href="{{AppSubUrl}}/impressum.html">Impressum</a>`
For more information, see [Adding Legal Pages](https://docs.gitea.io/en-us/adding-legal-pages).
@@ -174,13 +174,13 @@ You can display STL file directly in Gitea by adding:
if ($('.view-raw>a[href$=".stl" i]').length) {
$("body").append(
'<link href="/assets/Madeleine.js/src/css/Madeleine.css" rel="stylesheet">'
'<link href="/Madeleine.js/src/css/Madeleine.css" rel="stylesheet">'
);
Promise.all([
lS("/assets/Madeleine.js/src/lib/stats.js"),
lS("/assets/Madeleine.js/src/lib/detector.js"),
lS("/assets/Madeleine.js/src/lib/three.min.js"),
lS("/assets/Madeleine.js/src/Madeleine.js"),
lS("/Madeleine.js/src/lib/stats.js"),
lS("/Madeleine.js/src/lib/detector.js"),
lS("/Madeleine.js/src/lib/three.min.js"),
lS("/Madeleine.js/src/Madeleine.js"),
]).then(function () {
$(".view-raw")
.attr("id", "view-raw")
@@ -188,7 +188,7 @@ You can display STL file directly in Gitea by adding:
new Madeleine({
target: "view-raw",
data: $('.view-raw>a[href$=".stl" i]').attr("href"),
path: "/assets/Madeleine.js/src",
path: "/Madeleine.js/src",
});
$('.view-raw>a[href$=".stl"]').remove();
});

View File

@@ -61,7 +61,7 @@ Gitea reads custom configuration files from the `custom` directory to override configuration, templates
the "custom/public/" directory (for instance `custom/public/impressum.html`) and link to it from `custom/templates/custom/extra_links.tmpl`.
The link should use a class named "item" to match the current style, and you can use `{{AppSubUrl}}` to get the base URL:
-`<a class="item" href="{{AppSubUrl}}/assets/impressum.html">Impressum</a>`
+`<a class="item" href="{{AppSubUrl}}/impressum.html">Impressum</a>`
Likewise, you can add tabs to `extra_tabs.tmpl` in the same way. Their styling needs to match the style of
the other tabs already in `templates/repo/header.tmpl`.

View File

@@ -164,5 +164,5 @@ And so you could write some CSS:
Add your stylesheet to your custom directory e.g `custom/public/css/my-style-XXXXX.css` and import it using a custom header file `custom/templates/custom/header.tmpl`:
```html
<link type="text/css" href="{{AppSubUrl}}/assets/css/my-style-XXXXX.css" />
<link type="text/css" href="{{AppSubUrl}}/css/my-style-XXXXX.css" />
```

View File

@@ -144,60 +144,6 @@ func TestLDAPUserSignin(t *testing.T) {
assert.Equal(t, u.Email, htmlDoc.Find(`label[for="email"]`).Siblings().First().Text())
}
-func TestLDAPAuthChange(t *testing.T) {
-defer prepareTestEnv(t)()
-addAuthSourceLDAP(t, "")
-session := loginUser(t, "user1")
-req := NewRequest(t, "GET", "/admin/auths")
-resp := session.MakeRequest(t, req, http.StatusOK)
-doc := NewHTMLParser(t, resp.Body)
-href, exists := doc.Find("table.table td a").Attr("href")
-if !exists {
-assert.True(t, exists, "No authentication source found")
-return
-}
-req = NewRequest(t, "GET", href)
-resp = session.MakeRequest(t, req, http.StatusOK)
-doc = NewHTMLParser(t, resp.Body)
-csrf := doc.GetCSRF()
-host, _ := doc.Find(`input[name="host"]`).Attr("value")
-assert.Equal(t, host, getLDAPServerHost())
-binddn, _ := doc.Find(`input[name="bind_dn"]`).Attr("value")
-assert.Equal(t, binddn, "uid=gitea,ou=service,dc=planetexpress,dc=com")
-req = NewRequestWithValues(t, "POST", href, map[string]string{
-"_csrf": csrf,
-"type": "2",
-"name": "ldap",
-"host": getLDAPServerHost(),
-"port": "389",
-"bind_dn": "uid=gitea,ou=service,dc=planetexpress,dc=com",
-"bind_password": "password",
-"user_base": "ou=people,dc=planetexpress,dc=com",
-"filter": "(&(objectClass=inetOrgPerson)(memberOf=cn=git,ou=people,dc=planetexpress,dc=com)(uid=%s))",
-"admin_filter": "(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)",
-"restricted_filter": "(uid=leela)",
-"attribute_username": "uid",
-"attribute_name": "givenName",
-"attribute_surname": "sn",
-"attribute_mail": "mail",
-"attribute_ssh_public_key": "",
-"is_sync_enabled": "on",
-"is_active": "on",
-})
-session.MakeRequest(t, req, http.StatusFound)
-req = NewRequest(t, "GET", href)
-resp = session.MakeRequest(t, req, http.StatusOK)
-doc = NewHTMLParser(t, resp.Body)
-host, _ = doc.Find(`input[name="host"]`).Attr("value")
-assert.Equal(t, host, getLDAPServerHost())
-binddn, _ = doc.Find(`input[name="bind_dn"]`).Attr("value")
-assert.Equal(t, binddn, "uid=gitea,ou=service,dc=planetexpress,dc=com")
-}
func TestLDAPUserSync(t *testing.T) {
if skipLDAPTests() {
t.Skip()

View File

@@ -1 +1 @@
-0dca5bd9b5d7ef937710e056f575e86c0184ba85
+423313fbd38093bb10d0c8387db9105409c6f196

View File

@@ -7,7 +7,6 @@ package models
import (
"crypto/tls"
"encoding/binary"
"errors"
"fmt"
"net/smtp"
@@ -71,32 +70,13 @@ var (
_ convert.Conversion = &SSPIConfig{}
)
-// jsonUnmarshalHandleDoubleEncode - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's
-// possible that a Blob may be double encoded or gain an unwanted prefix of 0xff 0xfe.
-func jsonUnmarshalHandleDoubleEncode(bs []byte, v interface{}) error {
+// jsonUnmarshalIgnoreErroneousBOM - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's
+// possible that a Blob may gain an unwanted prefix of 0xff 0xfe.
+func jsonUnmarshalIgnoreErroneousBOM(bs []byte, v interface{}) error {
json := jsoniter.ConfigCompatibleWithStandardLibrary
-err := json.Unmarshal(bs, v)
-if err != nil {
-ok := true
-rs := []byte{}
-temp := make([]byte, 2)
-for _, rn := range string(bs) {
-if rn > 0xffff {
-ok = false
-break
-}
-binary.LittleEndian.PutUint16(temp, uint16(rn))
-rs = append(rs, temp...)
-}
-if ok {
-if rs[0] == 0xff && rs[1] == 0xfe {
-rs = rs[2:]
-}
-err = json.Unmarshal(rs, v)
-}
-}
+err := json.Unmarshal(bs, &v)
if err != nil && len(bs) > 2 && bs[0] == 0xff && bs[1] == 0xfe {
-err = json.Unmarshal(bs[2:], v)
+err = json.Unmarshal(bs[2:], &v)
}
return err
}
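For reference, the right-hand helper in isolation: try a plain unmarshal, and if it fails on a blob that starts with the spurious UTF-16 little-endian byte order mark (`0xff 0xfe`), strip the mark and retry. A runnable sketch using the standard library's `encoding/json` instead of jsoniter:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func jsonUnmarshalIgnoreErroneousBOM(bs []byte, v interface{}) error {
	err := json.Unmarshal(bs, v)
	if err != nil && len(bs) > 2 && bs[0] == 0xff && bs[1] == 0xfe {
		// The blob gained a UTF-16 LE byte order mark; drop it and retry.
		err = json.Unmarshal(bs[2:], v)
	}
	return err
}

func main() {
	blob := append([]byte{0xff, 0xfe}, []byte(`{"Host":"ldap.example.com"}`)...)
	var cfg struct{ Host string }
	if err := jsonUnmarshalIgnoreErroneousBOM(blob, &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Host) // ldap.example.com
}
```

The left-hand helper removed above goes further: it also recovers blobs that xorm double-encoded as UTF-16 code units, which is why it needs `encoding/binary`.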
@@ -108,7 +88,7 @@ type LDAPConfig struct {
// FromDB fills up a LDAPConfig from serialized format.
func (cfg *LDAPConfig) FromDB(bs []byte) error {
-err := jsonUnmarshalHandleDoubleEncode(bs, &cfg)
+err := jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
if err != nil {
return err
}
@@ -149,7 +129,7 @@ type SMTPConfig struct {
// FromDB fills up an SMTPConfig from serialized format.
func (cfg *SMTPConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
}
// ToDB exports an SMTPConfig to a serialized format.
@@ -166,7 +146,7 @@ type PAMConfig struct {
// FromDB fills up a PAMConfig from serialized format.
func (cfg *PAMConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
}
// ToDB exports a PAMConfig to a serialized format.
@@ -187,7 +167,7 @@ type OAuth2Config struct {
// FromDB fills up an OAuth2Config from serialized format.
func (cfg *OAuth2Config) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
}
// ToDB exports an SMTPConfig to a serialized format.
@@ -207,7 +187,7 @@ type SSPIConfig struct {
// FromDB fills up an SSPIConfig from serialized format.
func (cfg *SSPIConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, cfg)
}
// ToDB exports an SSPIConfig to a serialized format.

View File

@@ -1152,16 +1152,6 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, overwriteO
return fmt.Errorf("recalculateAccesses: %v", err)
}
-if u.Visibility == api.VisibleTypePublic && !repo.IsPrivate {
-// Create/Remove git-daemon-export-ok for git-daemon...
-daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
-if f, err := os.Create(daemonExportFile); err != nil {
-log.Error("Failed to create %s: %v", daemonExportFile, err)
-} else {
-f.Close()
-}
-}
if setting.Service.AutoWatchNewRepos {
if err = watchRepo(ctx.e, doer.ID, repo.ID, true); err != nil {
return fmt.Errorf("watchRepo: %v", err)
@@ -1320,16 +1310,15 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e
// Create/Remove git-daemon-export-ok for git-daemon...
daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
isExist, err := util.IsExist(daemonExportFile)
-isPublic := !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePublic
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
return err
}
-if !isPublic && isExist {
+if repo.IsPrivate && isExist {
if err = util.Remove(daemonExportFile); err != nil {
log.Error("Failed to remove %s: %v", daemonExportFile, err)
}
-} else if isPublic && !isExist {
+} else if !repo.IsPrivate && !isExist {
if f, err := os.Create(daemonExportFile); err != nil {
log.Error("Failed to create %s: %v", daemonExportFile, err)
} else {
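Both hunks turn on the same git convention: `git daemon` only serves a repository that contains a marker file named `git-daemon-export-ok` (unless started with `--export-all`), so the marker must track the repository's effective visibility. A hypothetical standalone helper capturing the pattern, not Gitea's actual code:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// syncDaemonExport creates or removes the git-daemon-export-ok marker so
// it matches the repository's visibility.
func syncDaemonExport(repoPath string, public bool) error {
	marker := filepath.Join(repoPath, "git-daemon-export-ok")
	_, err := os.Stat(marker)
	exists := err == nil // a sketch: treat any stat error as "absent"
	switch {
	case public && !exists:
		f, err := os.Create(marker) // expose the repository to git-daemon
		if err != nil {
			return err
		}
		return f.Close()
	case !public && exists:
		return os.Remove(marker) // hide it again
	}
	return nil
}

func main() {
	dir, _ := os.MkdirTemp("", "repo")
	defer os.RemoveAll(dir)
	fmt.Println(syncDaemonExport(dir, true)) // <nil>; the marker now exists
}
```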

View File

@@ -28,7 +28,7 @@ type UnitConfig struct{}
// FromDB fills up a UnitConfig from serialized format.
func (cfg *UnitConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
}
// ToDB exports a UnitConfig to a serialized format.
@@ -44,7 +44,7 @@ type ExternalWikiConfig struct {
// FromDB fills up a ExternalWikiConfig from serialized format.
func (cfg *ExternalWikiConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
}
// ToDB exports a ExternalWikiConfig to a serialized format.
@@ -62,7 +62,7 @@ type ExternalTrackerConfig struct {
// FromDB fills up a ExternalTrackerConfig from serialized format.
func (cfg *ExternalTrackerConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
}
// ToDB exports a ExternalTrackerConfig to a serialized format.
@@ -80,7 +80,7 @@ type IssuesConfig struct {
// FromDB fills up a IssuesConfig from serialized format.
func (cfg *IssuesConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
}
// ToDB exports a IssuesConfig to a serialized format.
@@ -104,7 +104,7 @@ type PullRequestsConfig struct {
// FromDB fills up a PullRequestsConfig from serialized format.
func (cfg *PullRequestsConfig) FromDB(bs []byte) error {
-return jsonUnmarshalHandleDoubleEncode(bs, &cfg)
+return jsonUnmarshalIgnoreErroneousBOM(bs, &cfg)
}
// ToDB exports a PullRequestsConfig to a serialized format.

View File

@@ -36,7 +36,7 @@ func registerRepoHealthCheck() {
BaseConfig: BaseConfig{
Enabled: true,
RunAtStart: false,
Schedule: "@every 24h",
Schedule: "@midnight",
},
Timeout: 60 * time.Second,
Args: []string{},
@@ -50,7 +50,7 @@ func registerCheckRepoStats() {
RegisterTaskFatal("check_repo_stats", &BaseConfig{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
Schedule: "@midnight",
}, func(ctx context.Context, _ *models.User, _ Config) error {
return models.CheckRepoStats(ctx)
})
@@ -61,7 +61,7 @@ func registerArchiveCleanup() {
BaseConfig: BaseConfig{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
Schedule: "@midnight",
},
OlderThan: 24 * time.Hour,
}, func(ctx context.Context, _ *models.User, config Config) error {
@@ -75,7 +75,7 @@ func registerSyncExternalUsers() {
BaseConfig: BaseConfig{
Enabled: true,
RunAtStart: false,
Schedule: "@every 24h",
Schedule: "@midnight",
},
UpdateExisting: true,
}, func(ctx context.Context, _ *models.User, config Config) error {
@@ -89,7 +89,7 @@ func registerDeletedBranchesCleanup() {
BaseConfig: BaseConfig{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
Schedule: "@midnight",
},
OlderThan: 24 * time.Hour,
}, func(ctx context.Context, _ *models.User, config Config) error {
@@ -103,7 +103,7 @@ func registerUpdateMigrationPosterID() {
RegisterTaskFatal("update_migration_poster_id", &BaseConfig{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
Schedule: "@midnight",
}, func(ctx context.Context, _ *models.User, _ Config) error {
return migrations.UpdateMigrationPosterID(ctx)
})
@@ -114,7 +114,7 @@ func registerCleanupHookTaskTable() {
BaseConfig: BaseConfig{
Enabled: true,
RunAtStart: false,
Schedule: "@every 24h",
Schedule: "@midnight",
},
CleanupType: "OlderThan",
OlderThan: 168 * time.Hour,

View File

@@ -6,9 +6,7 @@ package doctor
import (
"fmt"
"os"
"os/exec"
"path"
"strings"
"code.gitea.io/gitea/models"
@@ -16,9 +14,6 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
lru "github.com/hashicorp/golang-lru"
"xorm.io/builder"
)
@@ -80,7 +75,6 @@ func checkUserStarNum(logger log.Logger, autofix bool) error {
func checkEnablePushOptions(logger log.Logger, autofix bool) error {
numRepos := 0
numNeedUpdate := 0
if err := iterateRepositories(func(repo *models.Repository) error {
numRepos++
r, err := git.OpenRepository(repo.RepoPath())
@@ -120,66 +114,6 @@ func checkEnablePushOptions(logger log.Logger, autofix bool) error {
return nil
}
-func checkDaemonExport(logger log.Logger, autofix bool) error {
-numRepos := 0
-numNeedUpdate := 0
-cache, err := lru.New(512)
-if err != nil {
-logger.Critical("Unable to create cache: %v", err)
-return err
-}
-if err := iterateRepositories(func(repo *models.Repository) error {
-numRepos++
-if owner, has := cache.Get(repo.OwnerID); has {
-repo.Owner = owner.(*models.User)
-} else {
-if err := repo.GetOwner(); err != nil {
-return err
-}
-cache.Add(repo.OwnerID, repo.Owner)
-}
-// Create/Remove git-daemon-export-ok for git-daemon...
-daemonExportFile := path.Join(repo.RepoPath(), `git-daemon-export-ok`)
-isExist, err := util.IsExist(daemonExportFile)
-if err != nil {
-log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
-return err
-}
-isPublic := !repo.IsPrivate && repo.Owner.Visibility == structs.VisibleTypePublic
-if isPublic != isExist {
-numNeedUpdate++
-if autofix {
-if !isPublic && isExist {
-if err = util.Remove(daemonExportFile); err != nil {
-log.Error("Failed to remove %s: %v", daemonExportFile, err)
-}
-} else if isPublic && !isExist {
-if f, err := os.Create(daemonExportFile); err != nil {
-log.Error("Failed to create %s: %v", daemonExportFile, err)
-} else {
-f.Close()
-}
-}
-}
-}
-return nil
-}); err != nil {
-logger.Critical("Unable to checkDaemonExport: %v", err)
-return err
-}
-if autofix {
-logger.Info("Updated git-daemon-export-ok files for %d of %d repositories.", numNeedUpdate, numRepos)
-} else {
-logger.Info("Checked %d repositories, %d need updates.", numRepos, numNeedUpdate)
-}
-return nil
-}
func init() {
Register(&Check{
Title: "Check if SCRIPT_TYPE is available",
@@ -209,11 +143,4 @@ func init() {
Run: checkEnablePushOptions,
Priority: 7,
})
-Register(&Check{
-Title: "Check git-daemon-export-ok files",
-Name: "check-git-daemon-export-ok",
-IsDefault: false,
-Run: checkDaemonExport,
-Priority: 8,
-})
}

View File

@@ -7,7 +7,6 @@ package git
import (
"bufio"
"bytes"
"context"
"io"
"math"
"strconv"
@@ -29,20 +28,16 @@ type WriteCloserError interface {
func CatFileBatchCheck(repoPath string) (WriteCloserError, *bufio.Reader, func()) {
batchStdinReader, batchStdinWriter := io.Pipe()
batchStdoutReader, batchStdoutWriter := io.Pipe()
-ctx, ctxCancel := context.WithCancel(DefaultContext)
-closed := make(chan struct{})
cancel := func() {
_ = batchStdinReader.Close()
_ = batchStdinWriter.Close()
_ = batchStdoutReader.Close()
_ = batchStdoutWriter.Close()
-ctxCancel()
-<-closed
}
go func() {
stderr := strings.Builder{}
err := NewCommandContext(ctx, "cat-file", "--batch-check").RunInDirFullPipeline(repoPath, batchStdoutWriter, &stderr, batchStdinReader)
err := NewCommand("cat-file", "--batch-check").RunInDirFullPipeline(repoPath, batchStdoutWriter, &stderr, batchStdinReader)
if err != nil {
_ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String()))
_ = batchStdinReader.CloseWithError(ConcatenateError(err, (&stderr).String()))
@@ -50,7 +45,6 @@ func CatFileBatchCheck(repoPath string) (WriteCloserError, *bufio.Reader, func()
_ = batchStdoutWriter.Close()
_ = batchStdinReader.Close()
}
-close(closed)
}()
// For simplicity's sake we'll use a buffered reader to read from the cat-file --batch-check
@@ -65,20 +59,16 @@ func CatFileBatch(repoPath string) (WriteCloserError, *bufio.Reader, func()) {
// so let's create a batch stdin and stdout
batchStdinReader, batchStdinWriter := io.Pipe()
batchStdoutReader, batchStdoutWriter := nio.Pipe(buffer.New(32 * 1024))
-ctx, ctxCancel := context.WithCancel(DefaultContext)
-closed := make(chan struct{})
cancel := func() {
_ = batchStdinReader.Close()
_ = batchStdinWriter.Close()
_ = batchStdoutReader.Close()
_ = batchStdoutWriter.Close()
-ctxCancel()
-<-closed
}
go func() {
stderr := strings.Builder{}
err := NewCommandContext(ctx, "cat-file", "--batch").RunInDirFullPipeline(repoPath, batchStdoutWriter, &stderr, batchStdinReader)
err := NewCommand("cat-file", "--batch").RunInDirFullPipeline(repoPath, batchStdoutWriter, &stderr, batchStdinReader)
if err != nil {
_ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String()))
_ = batchStdinReader.CloseWithError(ConcatenateError(err, (&stderr).String()))
@@ -86,7 +76,6 @@ func CatFileBatch(repoPath string) (WriteCloserError, *bufio.Reader, func()) {
_ = batchStdoutWriter.Close()
_ = batchStdinReader.Close()
}
-close(closed)
}()
// For simplicity's sake we'll use a buffered reader to read from the cat-file --batch
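The right-hand side drops the context and `closed`-channel plumbing and relies on closing the pipes alone to stop the subprocess; the left-hand side is the later fix (#16479/#16480) that makes `cancel` wait for the command to end. A simplified, self-contained sketch of the underlying pattern, using `os/exec` directly rather than Gitea's command wrappers:

```go
package main

import (
	"bufio"
	"fmt"
	"io"
	"os/exec"
	"strings"
)

// catFileBatchCheck feeds `git cat-file --batch-check` through pipes;
// cancel tears everything down by closing them, so the command exits on
// EOF/EPIPE without any context.
func catFileBatchCheck(repoPath string) (io.WriteCloser, *bufio.Reader, func()) {
	stdinR, stdinW := io.Pipe()
	stdoutR, stdoutW := io.Pipe()
	cancel := func() {
		_ = stdinR.Close()
		_ = stdinW.Close()
		_ = stdoutR.Close()
		_ = stdoutW.Close()
	}
	go func() {
		cmd := exec.Command("git", "-C", repoPath, "cat-file", "--batch-check")
		cmd.Stdin = stdinR
		cmd.Stdout = stdoutW
		var stderr strings.Builder
		cmd.Stderr = &stderr
		if err := cmd.Run(); err != nil {
			_ = stdoutW.CloseWithError(fmt.Errorf("%v - %s", err, stderr.String()))
			_ = stdinR.CloseWithError(fmt.Errorf("%v - %s", err, stderr.String()))
			return
		}
		_ = stdoutW.Close()
		_ = stdinR.Close()
	}()
	return stdinW, bufio.NewReader(stdoutR), cancel
}

func main() {
	w, r, cancel := catFileBatchCheck(".")
	defer cancel()
	fmt.Fprintln(w, "HEAD")
	if line, err := r.ReadString('\n'); err == nil {
		fmt.Print(line) // e.g. "<sha> commit <size>" when run inside a git repo
	}
}
```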

View File

@@ -6,7 +6,6 @@
package git
import (
"bytes"
"strings"
)
@@ -46,23 +45,3 @@ func (t *Tree) SubTree(rpath string) (*Tree, error) {
}
return g, nil
}
-// LsTree checks if the given filenames are in the tree
-func (repo *Repository) LsTree(ref string, filenames ...string) ([]string, error) {
-cmd := NewCommand("ls-tree", "-z", "--name-only", "--", ref)
-for _, arg := range filenames {
-if arg != "" {
-cmd.AddArguments(arg)
-}
-}
-res, err := cmd.RunInDirBytes(repo.Path)
-if err != nil {
-return nil, err
-}
-filelist := make([]string, 0, len(filenames))
-for _, line := range bytes.Split(res, []byte{'\000'}) {
-filelist = append(filelist, string(line))
-}
-return filelist, err
-}

View File

@@ -1,59 +0,0 @@
-// Copyright 2021 The Gitea Authors. All rights reserved.
-// Use of this source code is governed by a MIT-style
-// license that can be found in the LICENSE file.
-package bleve
-import (
-"github.com/blevesearch/bleve/v2"
-)
-// FlushingBatch is a batch of operations that automatically flushes to the
-// underlying index once it reaches a certain size.
-type FlushingBatch struct {
-maxBatchSize int
-batch *bleve.Batch
-index bleve.Index
-}
-// NewFlushingBatch creates a new flushing batch for the specified index. Once
-// the number of operations in the batch reaches the specified limit, the batch
-// automatically flushes its operations to the index.
-func NewFlushingBatch(index bleve.Index, maxBatchSize int) *FlushingBatch {
-return &FlushingBatch{
-maxBatchSize: maxBatchSize,
-batch: index.NewBatch(),
-index: index,
-}
-}
-// Index add a new index to batch
-func (b *FlushingBatch) Index(id string, data interface{}) error {
-if err := b.batch.Index(id, data); err != nil {
-return err
-}
-return b.flushIfFull()
-}
-// Delete add a delete index to batch
-func (b *FlushingBatch) Delete(id string) error {
-b.batch.Delete(id)
-return b.flushIfFull()
-}
-func (b *FlushingBatch) flushIfFull() error {
-if b.batch.Size() < b.maxBatchSize {
-return nil
-}
-return b.Flush()
-}
-// Flush submit the batch and create a new one
-func (b *FlushingBatch) Flush() error {
-err := b.index.Batch(b.batch)
-if err != nil {
-return err
-}
-b.batch = b.index.NewBatch()
-return nil
-}
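On the right-hand side the callers switch to the identically shaped `FlushingBatch` from the external rupture package, so usage stays the same either way. A small usage sketch against the type above (assuming it is in scope, with an in-memory bleve index):

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve/v2"
)

func main() {
	index, err := bleve.NewMemOnly(bleve.NewIndexMapping())
	if err != nil {
		panic(err)
	}
	batch := NewFlushingBatch(index, 2) // auto-flush after every 2 operations
	for i := 0; i < 5; i++ {
		if err := batch.Index(fmt.Sprint(i), map[string]string{"body": "hello"}); err != nil {
			panic(err)
		}
	}
	if err := batch.Flush(); err != nil { // flush the final partial batch
		panic(err)
	}
	count, _ := index.DocCount()
	fmt.Println("indexed:", count) // indexed: 5
}
```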

View File

@@ -18,7 +18,6 @@ import (
"code.gitea.io/gitea/modules/analyze"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
gitea_bleve "code.gitea.io/gitea/modules/indexer/bleve"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
@@ -177,8 +176,7 @@ func NewBleveIndexer(indexDir string) (*BleveIndexer, bool, error) {
return indexer, created, err
}
-func (b *BleveIndexer) addUpdate(batchWriter git.WriteCloserError, batchReader *bufio.Reader, commitSha string,
-update fileUpdate, repo *models.Repository, batch *gitea_bleve.FlushingBatch) error {
+func (b *BleveIndexer) addUpdate(batchWriter git.WriteCloserError, batchReader *bufio.Reader, commitSha string, update fileUpdate, repo *models.Repository, batch rupture.FlushingBatch) error {
// Ignore vendored files in code search
if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
return nil
@@ -231,7 +229,7 @@ func (b *BleveIndexer) addUpdate(batchWriter git.WriteCloserError, batchReader *
})
}
-func (b *BleveIndexer) addDelete(filename string, repo *models.Repository, batch *gitea_bleve.FlushingBatch) error {
+func (b *BleveIndexer) addDelete(filename string, repo *models.Repository, batch rupture.FlushingBatch) error {
id := filenameIndexerID(repo.ID, filename)
return batch.Delete(id)
}
@@ -269,7 +267,7 @@ func (b *BleveIndexer) Close() {
// Index indexes the data
func (b *BleveIndexer) Index(repo *models.Repository, sha string, changes *repoChanges) error {
-batch := gitea_bleve.NewFlushingBatch(b.indexer, maxBatchSize)
+batch := rupture.NewFlushingBatch(b.indexer, maxBatchSize)
if len(changes.Updates) > 0 {
batchWriter, batchReader, cancel := git.CatFileBatch(repo.RepoPath())
@@ -298,7 +296,7 @@ func (b *BleveIndexer) Delete(repoID int64) error {
if err != nil {
return err
}
-batch := gitea_bleve.NewFlushingBatch(b.indexer, maxBatchSize)
+batch := rupture.NewFlushingBatch(b.indexer, maxBatchSize)
for _, hit := range result.Hits {
if err = batch.Delete(hit.ID); err != nil {
return err

View File

@@ -9,10 +9,8 @@ import (
"os"
"strconv"
gitea_bleve "code.gitea.io/gitea/modules/indexer/bleve"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
"github.com/blevesearch/bleve/v2"
"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
@@ -199,7 +197,7 @@ func (b *BleveIndexer) Close() {
// Index will save the index data
func (b *BleveIndexer) Index(issues []*IndexerData) error {
-batch := gitea_bleve.NewFlushingBatch(b.indexer, maxBatchSize)
+batch := rupture.NewFlushingBatch(b.indexer, maxBatchSize)
for _, issue := range issues {
if err := batch.Index(indexerID(issue.ID), struct {
RepoID int64
@@ -220,7 +218,7 @@ func (b *BleveIndexer) Index(issues []*IndexerData) error {
// Delete deletes indexes by ids
func (b *BleveIndexer) Delete(ids ...int64) error {
-batch := gitea_bleve.NewFlushingBatch(b.indexer, maxBatchSize)
+batch := rupture.NewFlushingBatch(b.indexer, maxBatchSize)
for _, id := range ids {
if err := batch.Delete(indexerID(id)); err != nil {
return err

View File

@@ -143,7 +143,7 @@ type MultiChannelledLog struct {
name string
bufferLength int64
queue chan *Event
-rwmutex sync.RWMutex
+mutex sync.Mutex
loggers map[string]EventLogger
flush chan bool
close chan bool
@@ -173,10 +173,10 @@ func NewMultiChannelledLog(name string, bufferLength int64) *MultiChannelledLog
// AddLogger adds a logger to this MultiChannelledLog
func (m *MultiChannelledLog) AddLogger(logger EventLogger) error {
-m.rwmutex.Lock()
+m.mutex.Lock()
name := logger.GetName()
if _, has := m.loggers[name]; has {
-m.rwmutex.Unlock()
+m.mutex.Unlock()
return ErrDuplicateName{name}
}
m.loggers[name] = logger
@@ -186,7 +186,7 @@ func (m *MultiChannelledLog) AddLogger(logger EventLogger) error {
if logger.GetStacktraceLevel() < m.stacktraceLevel {
m.stacktraceLevel = logger.GetStacktraceLevel()
}
-m.rwmutex.Unlock()
+m.mutex.Unlock()
go m.Start()
return nil
}
@@ -195,15 +195,15 @@ func (m *MultiChannelledLog) AddLogger(logger EventLogger) error {
// NB: If you delete the last sublogger this logger will simply drop
// log events
func (m *MultiChannelledLog) DelLogger(name string) bool {
-m.rwmutex.Lock()
+m.mutex.Lock()
logger, has := m.loggers[name]
if !has {
-m.rwmutex.Unlock()
+m.mutex.Unlock()
return false
}
delete(m.loggers, name)
m.internalResetLevel()
-m.rwmutex.Unlock()
+m.mutex.Unlock()
logger.Flush()
logger.Close()
return true
@@ -211,15 +211,15 @@ func (m *MultiChannelledLog) DelLogger(name string) bool {
// GetEventLogger returns a sub logger from this MultiChannelledLog
func (m *MultiChannelledLog) GetEventLogger(name string) EventLogger {
-m.rwmutex.RLock()
-defer m.rwmutex.RUnlock()
+m.mutex.Lock()
+defer m.mutex.Unlock()
return m.loggers[name]
}
// GetEventLoggerNames returns a list of names
func (m *MultiChannelledLog) GetEventLoggerNames() []string {
-m.rwmutex.RLock()
-defer m.rwmutex.RUnlock()
+m.mutex.Lock()
+defer m.mutex.Unlock()
var keys []string
for k := range m.loggers {
keys = append(keys, k)
@@ -228,12 +228,12 @@ func (m *MultiChannelledLog) GetEventLoggerNames() []string {
}
func (m *MultiChannelledLog) closeLoggers() {
-m.rwmutex.Lock()
+m.mutex.Lock()
for _, logger := range m.loggers {
logger.Flush()
logger.Close()
}
-m.rwmutex.Unlock()
+m.mutex.Unlock()
m.closed <- true
}
@@ -249,8 +249,8 @@ func (m *MultiChannelledLog) Resume() {
// ReleaseReopen causes this logger to tell its subloggers to release and reopen
func (m *MultiChannelledLog) ReleaseReopen() error {
-m.rwmutex.Lock()
-defer m.rwmutex.Unlock()
+m.mutex.Lock()
+defer m.mutex.Unlock()
var accumulatedErr error
for _, logger := range m.loggers {
if err := logger.ReleaseReopen(); err != nil {
@@ -266,13 +266,13 @@ func (m *MultiChannelledLog) ReleaseReopen() error {
// Start processing the MultiChannelledLog
func (m *MultiChannelledLog) Start() {
-m.rwmutex.Lock()
+m.mutex.Lock()
if m.started {
-m.rwmutex.Unlock()
+m.mutex.Unlock()
return
}
m.started = true
-m.rwmutex.Unlock()
+m.mutex.Unlock()
paused := false
for {
if paused {
@@ -286,11 +286,11 @@ func (m *MultiChannelledLog) Start() {
m.closeLoggers()
return
}
-m.rwmutex.RLock()
+m.mutex.Lock()
for _, logger := range m.loggers {
logger.Flush()
}
-m.rwmutex.RUnlock()
+m.mutex.Unlock()
case <-m.close:
m.closeLoggers()
return
@@ -307,24 +307,24 @@ func (m *MultiChannelledLog) Start() {
m.closeLoggers()
return
}
-m.rwmutex.RLock()
+m.mutex.Lock()
for _, logger := range m.loggers {
err := logger.LogEvent(event)
if err != nil {
fmt.Println(err)
}
}
-m.rwmutex.RUnlock()
+m.mutex.Unlock()
case _, ok := <-m.flush:
if !ok {
m.closeLoggers()
return
}
-m.rwmutex.RLock()
+m.mutex.Lock()
for _, logger := range m.loggers {
logger.Flush()
}
-m.rwmutex.RUnlock()
+m.mutex.Unlock()
case <-m.close:
m.closeLoggers()
return
@@ -359,15 +359,11 @@ func (m *MultiChannelledLog) Flush() {
// GetLevel gets the level of this MultiChannelledLog
func (m *MultiChannelledLog) GetLevel() Level {
-m.rwmutex.RLock()
-defer m.rwmutex.RUnlock()
return m.level
}
// GetStacktraceLevel gets the level of this MultiChannelledLog
func (m *MultiChannelledLog) GetStacktraceLevel() Level {
-m.rwmutex.RLock()
-defer m.rwmutex.RUnlock()
return m.stacktraceLevel
}
@@ -388,8 +384,8 @@ func (m *MultiChannelledLog) internalResetLevel() Level {
// ResetLevel will reset the level of this MultiChannelledLog
func (m *MultiChannelledLog) ResetLevel() Level {
-m.rwmutex.Lock()
-defer m.rwmutex.Unlock()
+m.mutex.Lock()
+defer m.mutex.Unlock()
return m.internalResetLevel()
}
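The left-hand side is the data-race fix (#16490/#16505): every read of shared logger state goes through `RLock`, every mutation through `Lock`, while the right-hand `GetLevel`/`GetStacktraceLevel` read their fields with no lock at all, which is exactly what the race detector flags. The pattern in miniature:

```go
package main

import (
	"fmt"
	"sync"
)

type leveledLog struct {
	rwmutex sync.RWMutex
	level   int
}

// GetLevel takes a shared read lock; many readers may hold it at once.
func (l *leveledLog) GetLevel() int {
	l.rwmutex.RLock()
	defer l.rwmutex.RUnlock()
	return l.level
}

// SetLevel takes the exclusive write lock.
func (l *leveledLog) SetLevel(level int) {
	l.rwmutex.Lock()
	defer l.rwmutex.Unlock()
	l.level = level
}

func main() {
	log := &leveledLog{}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func(n int) {
			defer wg.Done()
			log.SetLevel(n)
			_ = log.GetLevel() // safe under `go run -race`
		}(i)
	}
	wg.Wait()
	fmt.Println("final level:", log.GetLevel())
}
```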

View File

@@ -15,6 +15,7 @@ import (
func TestPersistableChannelQueue(t *testing.T) {
handleChan := make(chan *testData)
handle := func(data ...Data) {
+assert.True(t, len(data) == 2)
for _, datum := range data {
testDatum := datum.(*testData)
handleChan <- testDatum

View File

@@ -6,13 +6,10 @@ package util
import (
"os"
"runtime"
"syscall"
"time"
)
-const windowsSharingViolationError syscall.Errno = 32
// Remove removes the named file or (empty) directory with at most 5 attempts.
func Remove(name string) error {
var err error
@@ -28,12 +25,6 @@ func Remove(name string) error {
continue
}
-if unwrapped == windowsSharingViolationError && runtime.GOOS == "windows" {
-// try again
-<-time.After(100 * time.Millisecond)
-continue
-}
if unwrapped == syscall.ENOENT {
// it's already gone
return nil
@@ -57,12 +48,6 @@ func RemoveAll(name string) error {
continue
}
-if unwrapped == windowsSharingViolationError && runtime.GOOS == "windows" {
-// try again
-<-time.After(100 * time.Millisecond)
-continue
-}
if unwrapped == syscall.ENOENT {
// it's already gone
return nil
@@ -79,19 +64,13 @@ func Rename(oldpath, newpath string) error {
if err == nil {
break
}
-unwrapped := err.(*os.LinkError).Err
+unwrapped := err.(*os.PathError).Err
if unwrapped == syscall.EBUSY || unwrapped == syscall.ENOTEMPTY || unwrapped == syscall.EPERM || unwrapped == syscall.EMFILE || unwrapped == syscall.ENFILE {
// try again
<-time.After(100 * time.Millisecond)
continue
}
-if unwrapped == windowsSharingViolationError && runtime.GOOS == "windows" {
-// try again
-<-time.After(100 * time.Millisecond)
-continue
-}
if i == 0 && os.IsNotExist(err) {
return err
}
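Two details in this hunk: `os.Rename` wraps failures in `*os.LinkError`, so the right-hand side's `*os.PathError` assertion panics on the first retryable failure, and the left-hand side (#16435/#16439) additionally treats the Windows sharing-violation errno (32) as retryable. A condensed sketch of the corrected loop:

```go
package main

import (
	"fmt"
	"os"
	"syscall"
	"time"
)

func renameWithRetry(oldpath, newpath string) error {
	var err error
	for i := 0; i < 5; i++ {
		err = os.Rename(oldpath, newpath)
		if err == nil {
			return nil
		}
		linkErr, ok := err.(*os.LinkError) // os.Rename reports *os.LinkError
		if !ok {
			return err
		}
		switch linkErr.Err {
		case syscall.EBUSY, syscall.ENOTEMPTY, syscall.EPERM, syscall.EMFILE, syscall.ENFILE:
			time.Sleep(100 * time.Millisecond) // transient; try again
			continue
		}
		return err
	}
	return err
}

func main() {
	fmt.Println(renameWithRetry("does-not-exist", "still-does-not-exist"))
}
```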

View File

@@ -269,26 +269,6 @@ func (r *Route) Get(pattern string, h ...interface{}) {
r.R.Get(r.getPattern(pattern), Wrap(middlewares...))
}
-// Options delegate options method
-func (r *Route) Options(pattern string, h ...interface{}) {
-var middlewares = r.getMiddlewares(h)
-r.R.Options(r.getPattern(pattern), Wrap(middlewares...))
-}
-// GetOptions delegate get and options method
-func (r *Route) GetOptions(pattern string, h ...interface{}) {
-var middlewares = r.getMiddlewares(h)
-r.R.Get(r.getPattern(pattern), Wrap(middlewares...))
-r.R.Options(r.getPattern(pattern), Wrap(middlewares...))
-}
-// PostOptions delegate post and options method
-func (r *Route) PostOptions(pattern string, h ...interface{}) {
-var middlewares = r.getMiddlewares(h)
-r.R.Post(r.getPattern(pattern), Wrap(middlewares...))
-r.R.Options(r.getPattern(pattern), Wrap(middlewares...))
-}
// Head delegate head method
func (r *Route) Head(pattern string, h ...interface{}) {
var middlewares = r.getMiddlewares(h)
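These helpers exist so the Git smart-HTTP endpoints (see `web.go` below) can answer CORS preflight requests instead of falling through to 405; the left-hand side is the restoration from #16496/#16506. A minimal sketch of the same idea written directly against chi, which Gitea's `Route` wraps (the handler body is illustrative only):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-chi/chi/v5"
)

func main() {
	r := chi.NewRouter()
	handler := func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Access-Control-Allow-Origin", "*")
		if req.Method == http.MethodOptions {
			w.WriteHeader(http.StatusNoContent) // answer the preflight
			return
		}
		fmt.Fprintln(w, "refs data")
	}
	// What GetOptions does: the same chain registered for GET and OPTIONS.
	r.Get("/info/refs", handler)
	r.Options("/info/refs", handler)
	_ = http.ListenAndServe(":3000", r)
}
```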

View File

@@ -81,7 +81,7 @@ func TestWiki(t *testing.T) {
Wiki(ctx)
assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, "Home", ctx.Data["Title"])
assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name", "Unescaped File"}, ctx.Data["Pages"])
assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name"}, ctx.Data["Pages"])
}
func TestWikiPages(t *testing.T) {
@@ -91,7 +91,7 @@ func TestWikiPages(t *testing.T) {
test.LoadRepo(t, ctx, 1)
WikiPages(ctx)
assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name", "Unescaped File"}, ctx.Data["Pages"])
assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name"}, ctx.Data["Pages"])
}
func TestNewWiki(t *testing.T) {

View File

@@ -50,7 +50,6 @@ func Notifications(c *context.Context) {
return
}
if c.QueryBool("div-only") {
c.Data["SequenceNumber"] = c.Query("sequence-number")
c.HTML(http.StatusOK, tplNotificationDiv)
return
}
@@ -176,7 +175,6 @@ func NotificationStatusPost(c *context.Context) {
return
}
c.Data["Link"] = setting.AppURL + "notifications"
c.Data["SequenceNumber"] = c.Req.PostFormValue("sequence-number")
c.HTML(http.StatusOK, tplNotificationDiv)
}

View File

@@ -1006,17 +1006,17 @@ func RegisterRoutes(m *web.Route) {
}, ignSignInAndCsrf, lfsServerEnabled)
m.Group("", func() {
m.PostOptions("/git-upload-pack", repo.ServiceUploadPack)
m.PostOptions("/git-receive-pack", repo.ServiceReceivePack)
m.GetOptions("/info/refs", repo.GetInfoRefs)
m.GetOptions("/HEAD", repo.GetTextFile("HEAD"))
m.GetOptions("/objects/info/alternates", repo.GetTextFile("objects/info/alternates"))
m.GetOptions("/objects/info/http-alternates", repo.GetTextFile("objects/info/http-alternates"))
m.GetOptions("/objects/info/packs", repo.GetInfoPacks)
m.GetOptions("/objects/info/{file:[^/]*}", repo.GetTextFile(""))
m.GetOptions("/objects/{head:[0-9a-f]{2}}/{hash:[0-9a-f]{38}}", repo.GetLooseObject)
m.GetOptions("/objects/pack/pack-{file:[0-9a-f]{40}}.pack", repo.GetPackFile)
m.GetOptions("/objects/pack/pack-{file:[0-9a-f]{40}}.idx", repo.GetIdxFile)
m.Post("/git-upload-pack", repo.ServiceUploadPack)
m.Post("/git-receive-pack", repo.ServiceReceivePack)
m.Get("/info/refs", repo.GetInfoRefs)
m.Get("/HEAD", repo.GetTextFile("HEAD"))
m.Get("/objects/info/alternates", repo.GetTextFile("objects/info/alternates"))
m.Get("/objects/info/http-alternates", repo.GetTextFile("objects/info/http-alternates"))
m.Get("/objects/info/packs", repo.GetInfoPacks)
m.Get("/objects/info/{file:[^/]*}", repo.GetTextFile(""))
m.Get("/objects/{head:[0-9a-f]{2}}/{hash:[0-9a-f]{38}}", repo.GetLooseObject)
m.Get("/objects/pack/pack-{file:[0-9a-f]{40}}.pack", repo.GetPackFile)
m.Get("/objects/pack/pack-{file:[0-9a-f]{40}}.idx", repo.GetIdxFile)
}, ignSignInAndCsrf)
m.Head("/tasks/trigger", repo.TriggerTask)

View File

@@ -88,7 +88,7 @@ func prepareWikiFileName(gitRepo *git.Repository, wikiName string) (bool, string
escaped := NameToFilename(wikiName)
// Look for both files
-filesInIndex, err := gitRepo.LsTree("master", unescaped, escaped)
+filesInIndex, err := gitRepo.LsFiles(unescaped, escaped)
if err != nil {
log.Error("%v", err)
return false, escaped, err
@@ -308,9 +308,14 @@ func DeleteWikiPage(doer *models.User, repo *models.Repository, wikiName string)
return fmt.Errorf("Unable to read HEAD tree to index in: %s %v", basePath, err)
}
-found, wikiPath, err := prepareWikiFileName(gitRepo, wikiName)
-if err != nil {
-return err
-}
+wikiPath := NameToFilename(wikiName)
+filesInIndex, err := gitRepo.LsFiles(wikiPath)
+found := false
+for _, file := range filesInIndex {
+if file == wikiPath {
+found = true
+break
+}
+}
if found {
err := gitRepo.RemoveFilesFromIndex(wikiPath)
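`LsFiles` consults the index rather than a commit, which is what `prepareWikiFileName` needs while deciding between the unescaped and escaped names (#16487/#16498). A hedged sketch of the equivalent plumbing call (`git ls-files -z`), not Gitea's actual implementation:

```go
package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

// lsFiles returns the given paths that are present in the repository index,
// NUL-separated to survive unescaped file names with spaces.
func lsFiles(repoPath string, filenames ...string) ([]string, error) {
	args := append([]string{"-C", repoPath, "ls-files", "-z", "--"}, filenames...)
	out, err := exec.Command("git", args...).Output()
	if err != nil {
		return nil, err
	}
	var files []string
	for _, line := range bytes.Split(out, []byte{'\000'}) {
		if len(line) > 0 {
			files = append(files, string(line))
		}
	}
	return files, nil
}

func main() {
	files, err := lsFiles(".", "Home.md", "Unescaped File.md")
	fmt.Println(files, err)
}
```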

View File

@@ -210,54 +210,3 @@ func TestRepository_DeleteWikiPage(t *testing.T) {
_, err = masterTree.GetTreeEntryByPath(wikiPath)
assert.Error(t, err)
}
-func TestPrepareWikiFileName(t *testing.T) {
-models.PrepareTestEnv(t)
-repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
-gitRepo, err := git.OpenRepository(repo.WikiPath())
-defer gitRepo.Close()
-assert.NoError(t, err)
-tests := []struct {
-name string
-arg string
-existence bool
-wikiPath string
-wantErr bool
-}{{
-name: "add suffix",
-arg: "Home",
-existence: true,
-wikiPath: "Home.md",
-wantErr: false,
-}, {
-name: "test special chars",
-arg: "home of and & or wiki page!",
-existence: false,
-wikiPath: "home-of-and-%26-or-wiki-page%21.md",
-wantErr: false,
-}, {
-name: "fount unescaped cases",
-arg: "Unescaped File",
-existence: true,
-wikiPath: "Unescaped File.md",
-wantErr: false,
-}}
-for _, tt := range tests {
-t.Run(tt.name, func(t *testing.T) {
-existence, newWikiPath, err := prepareWikiFileName(gitRepo, tt.arg)
-if (err != nil) != tt.wantErr {
-assert.NoError(t, err)
-return
-}
-if existence != tt.existence {
-if existence {
-t.Errorf("expect to find no escaped file but we detect one")
-} else {
-t.Errorf("expect to find an escaped file but we could not detect one")
-}
-}
-assert.Equal(t, tt.wikiPath, newWikiPath)
-})
-}
-}

View File

@@ -1,4 +1,4 @@
<div class="page-content user notification" id="notification_div" data-params="{{.Page.GetParams}}" data-sequence-number="{{.SequenceNumber}}">
<div class="page-content user notification" id="notification_div" data-params="{{.Page.GetParams}}">
<div class="ui container">
<h1 class="ui dividing header">{{.i18n.Tr "notification.notifications"}}</h1>
<div class="ui top attached tabular menu">

View File

@@ -1,7 +1,5 @@
const {AppSubUrl, csrf, NotificationSettings} = window.config;
-let notificationSequenceNumber = 0;
export function initNotificationsTable() {
$('#notification_table .button').on('click', async function () {
const data = await updateNotification(
@@ -12,10 +10,8 @@ export function initNotificationsTable() {
$(this).data('notification-id'),
);
-if ($(data).data('sequence-number') === notificationSequenceNumber) {
-$('#notification_div').replaceWith(data);
-initNotificationsTable();
-}
+$('#notification_div').replaceWith(data);
+initNotificationsTable();
await updateNotificationCount();
return false;
@@ -143,13 +139,10 @@ async function updateNotificationTable() {
url: `${AppSubUrl}/notifications?${notificationDiv.data('params')}`,
data: {
'div-only': true,
-'sequence-number': ++notificationSequenceNumber,
}
});
-if ($(data).data('sequence-number') === notificationSequenceNumber) {
-notificationDiv.replaceWith(data);
-initNotificationsTable();
-}
+notificationDiv.replaceWith(data);
+initNotificationsTable();
}
}
@@ -189,7 +182,6 @@ async function updateNotification(url, status, page, q, notificationID) {
page,
q,
noredirect: true,
-'sequence-number': ++notificationSequenceNumber,
},
});
}
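The removed JavaScript (left-hand side, from #16445/#16469) implements a last-write-wins guard: each refresh request carries an incrementing sequence number, the server echoes it back in the rendered fragment, and the client only swaps the DOM when the echoed number still matches the newest one issued, so a slow older response can never overwrite a newer one. The same guard sketched in Go, to match the other examples here:

```go
package main

import (
	"fmt"
	"sync/atomic"
)

var notificationSequenceNumber int64

// requestUpdate issues a new request and returns its sequence number.
func requestUpdate() int64 { return atomic.AddInt64(&notificationSequenceNumber, 1) }

// applyIfCurrent applies a response only if it belongs to the latest request.
func applyIfCurrent(responseSeq int64, apply func()) {
	if responseSeq == atomic.LoadInt64(&notificationSequenceNumber) {
		apply()
	}
}

func main() {
	first := requestUpdate()  // seq 1
	second := requestUpdate() // seq 2, issued before the first reply arrived
	applyIfCurrent(first, func() { fmt.Println("stale response applied") })   // dropped
	applyIfCurrent(second, func() { fmt.Println("latest response applied") }) // runs
}
```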