Compare commits

...

182 Commits

Author SHA1 Message Date
thelamer
8c28cb7a40 make the env proxy confs their own isolated folder to include 2025-09-04 15:28:25 -04:00
thelamer
5942cc2253 initial env var ingestion for rev proxy configs 2025-09-02 15:16:05 -04:00
LinuxServer-CI
fb4ba0deb0 Bot Updating Package Versions
Some checks failed
Mark stale issues and pull requests / stale (push) Has been cancelled
2025-08-30 03:33:52 +00:00
LinuxServer-CI
7d8332e624 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-08-23 03:39:14 +00:00
LinuxServer-CI
d9dbcd0756 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-08-16 03:49:49 +00:00
LinuxServer-CI
8381b03a05 Bot Updating Package Versions
Some checks failed
Mark stale issues and pull requests / stale (push) Has been cancelled
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-08-09 03:54:46 +00:00
LinuxServer-CI
a1efcf3cd4 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-08-05 19:29:36 +00:00
LinuxServer-CI
834de14952 Bot Updating Package Versions 2025-08-05 19:25:33 +00:00
LinuxServer-CI
8353859972 Bot Updating Package Versions 2025-08-02 04:00:44 +00:00
LinuxServer-CI
ca399a7fa2 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-07-26 04:01:49 +00:00
LinuxServer-CI
1905b3c920 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-07-22 17:06:33 +00:00
LinuxServer-CI
c9efb531b0 Bot Updating Templated Files 2025-07-22 17:02:00 +00:00
Roxedus
26d05580ef Merge pull request #576 from linuxserver/typo-fix 2025-07-22 18:59:55 +02:00
driz
a2a7292e39 fix typo 2025-07-22 11:43:45 -04:00
LinuxServer-CI
5316c58910 Bot Updating Package Versions 2025-07-20 15:09:08 +00:00
LinuxServer-CI
fa860e1349 Bot Updating Templated Files 2025-07-20 15:04:44 +00:00
LinuxServer-CI
72f60b132b Bot Updating Templated Files 2025-07-20 15:03:11 +00:00
Adam
24cf84fd61 Merge pull request #573 from linuxserver/3.22 2025-07-20 16:01:37 +01:00
LinuxServer-CI
d4ceeb2f67 Bot Updating Package Versions 2025-07-19 03:56:42 +00:00
thespad
1282274a1a Wording 2025-07-18 20:57:53 +01:00
thespad
b05df6cf2a Add UDP buffer note 2025-07-18 20:57:01 +01:00
thespad
b96738cdf2 Add header note in readme 2025-07-18 20:32:49 +01:00
thespad
2d6a54a526 Comment out QUIC listeners and update readme 2025-07-18 20:26:10 +01:00
LinuxServer-CI
bb78c0f50e Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-07-12 03:57:44 +00:00
thespad
56ff1d5e19 Rebase to 3.22 2025-07-09 18:46:39 +01:00
LinuxServer-CI
7f9835b43f Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-07-05 03:53:44 +00:00
LinuxServer-CI
f3ac0dd394 Bot Updating Templated Files 2025-07-05 03:49:27 +00:00
LinuxServer-CI
0168126729 Bot Updating Templated Files 2025-07-05 03:48:01 +00:00
LinuxServer-CI
0e55f7b67e Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-06-28 03:48:42 +00:00
LinuxServer-CI
b52e35e494 Bot Updating Package Versions
Some checks failed
Mark stale issues and pull requests / stale (push) Has been cancelled
External Trigger Scheduler / external-trigger-scheduler (push) Has been cancelled
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-06-21 03:48:40 +00:00
LinuxServer-CI
ef2a5f2077 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-06-16 17:39:01 +00:00
Adam
0c910b9a7b Merge pull request #570 from jlssmt/add-tinyauth 2025-06-16 18:34:21 +01:00
LinuxServer-CI
9ab0f727d0 Bot Updating Package Versions 2025-06-14 03:47:18 +00:00
LinuxServer-CI
adcdf5d748 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-06-12 20:30:24 +00:00
LinuxServer-CI
7a38630c0b Bot Updating Package Versions 2025-06-12 20:26:05 +00:00
LinuxServer-CI
6b6e7b74b5 Bot Updating Package Versions 2025-06-11 01:48:03 +00:00
LinuxServer-CI
3b6d0484b9 Bot Updating Templated Files 2025-06-11 01:43:55 +00:00
jlssmt
0d952bcee1 add tinyauth 2025-06-08 17:47:30 +02:00
LinuxServer-CI
35deb8f654 Bot Updating Package Versions 2025-06-07 03:47:21 +00:00
LinuxServer-CI
2ec9bacf0c Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-05-31 03:44:37 +00:00
LinuxServer-CI
bcbad63147 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-05-24 03:40:40 +00:00
LinuxServer-CI
962c2322eb Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-05-17 03:43:00 +00:00
LinuxServer-CI
dd8fd8ad05 Bot Updating Templated Files 2025-05-17 03:39:19 +00:00
LinuxServer-CI
b818ae1f58 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-05-10 03:39:03 +00:00
LinuxServer-CI
43466fe490 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-05-08 21:11:05 +00:00
LinuxServer-CI
3781360d72 Bot Updating Templated Files 2025-05-08 21:05:17 +00:00
Eric Nemchik
a01e4aca17 Merge pull request #564 from linuxserver/max-log-backups 2025-05-08 16:03:57 -05:00
Eric Nemchik
b87c9d2886 Update readme-vars.yml 2025-05-05 15:25:41 -05:00
Eric Nemchik
08aa9cc07b Disable Certbot's built in log rotation 2025-05-05 15:24:38 -05:00
LinuxServer-CI
23e05f1f7a Bot Updating Package Versions 2025-05-03 03:40:16 +00:00
LinuxServer-CI
f80d14bf8c Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-04-26 03:36:46 +00:00
LinuxServer-CI
7e7e22753c Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-04-19 03:33:00 +00:00
LinuxServer-CI
9f76c031fe Bot Updating Package Versions 2025-04-12 03:34:26 +00:00
LinuxServer-CI
2b2ccf9e9a Bot Updating Package Versions 2025-04-11 03:24:16 +00:00
Eric Nemchik
54ed99d81a Merge pull request #554 from linuxserver/remove-old-authelia
chore: ⚰️ remove authelia 4.37 and below comments
2025-04-10 22:18:39 -05:00
LinuxServer-CI
a3f72898ff Bot Updating Package Versions 2025-04-08 01:29:27 +00:00
Eric Nemchik
8b8d33a81a Merge branch 'master' into remove-old-authelia 2025-04-05 15:43:54 -05:00
LinuxServer-CI
82ba5dd791 Bot Updating Package Versions 2025-04-05 03:34:00 +00:00
LinuxServer-CI
e7c815c27f Bot Updating Package Versions 2025-03-29 03:32:47 +00:00
Eric Nemchik
563ae7e9c5 chore: authentik/authelia consistency
Signed-off-by: GitHub <noreply@github.com>
2025-03-25 21:30:10 +00:00
Eric Nemchik
8caf2a1841 feat: 💩 proxy pass the full authelia auth request subpath
Signed-off-by: GitHub <noreply@github.com>
2025-03-25 21:28:54 +00:00
Eric Nemchik
15a3bc9d2c chore: ⚰️ remove authelia 4.37 and below comments
Signed-off-by: GitHub <noreply@github.com>
2025-03-25 19:50:00 +00:00
LinuxServer-CI
1567416bfb Bot Updating Package Versions 2025-03-22 03:34:48 +00:00
LinuxServer-CI
f909c85857 Bot Updating Package Versions 2025-03-17 13:53:02 +00:00
LinuxServer-CI
2992a09e32 Bot Updating Package Versions 2025-03-15 03:30:55 +00:00
LinuxServer-CI
5a8b8010ee Bot Updating Package Versions 2025-03-11 17:20:25 +00:00
LinuxServer-CI
586eaa3b4c Bot Updating Package Versions 2025-03-08 03:24:16 +00:00
LinuxServer-CI
2528e2f027 Bot Updating Package Versions 2025-03-01 03:33:12 +00:00
LinuxServer-CI
4632ecb91a Bot Updating Package Versions 2025-02-26 08:57:28 +00:00
Adam
615ccbc589 Merge pull request #548 from linuxserver/invalid-dns-creds 2025-02-26 08:51:57 +00:00
thespad
199d0a6707 Check for broken dns credentials value in cli.ini and remove 2025-02-25 20:24:11 +00:00
LinuxServer-CI
f8171d73ce Bot Updating Package Versions 2025-02-22 03:27:55 +00:00
LinuxServer-CI
503578a870 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-02-15 03:25:45 +00:00
LinuxServer-CI
b4978e40c5 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-02-11 22:21:06 +00:00
LinuxServer-CI
ed765dbdc1 Bot Updating Templated Files 2025-02-11 22:17:30 +00:00
LinuxServer-CI
6fcd946c0a Bot Updating Package Versions 2025-02-08 03:26:43 +00:00
LinuxServer-CI
c1d1a87a0c Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-02-01 03:32:56 +00:00
LinuxServer-CI
990c95b7d9 Bot Updating Templated Files 2025-02-01 03:24:53 +00:00
LinuxServer-CI
d83dc89c84 Bot Updating Templated Files 2025-02-01 03:23:39 +00:00
LinuxServer-CI
7046e938e0 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-01-25 03:21:11 +00:00
LinuxServer-CI
27e2e83f03 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-01-23 07:42:30 +00:00
quietsy
f11dbcea78 Merge pull request #540 from linuxserver/add-project-categories
Add categories to readme-vars.yml
2025-01-23 09:34:40 +02:00
quietsy
07e9ada724 Add categories to readme-vars.yml 2025-01-22 22:44:19 +02:00
Adam
ae72916deb Merge pull request #538 from linuxserver/auto-reload-readme
Update auto reload wording
2025-01-19 19:04:33 +00:00
thespad
06b385d25c Update auto reload wording 2025-01-19 18:30:03 +00:00
thespad
8753119d54 Update wording to fix stupid GH markdown parser 2025-01-19 18:21:28 +00:00
LinuxServer-CI
1f2cc4ade5 Bot Updating Package Versions 2025-01-19 18:13:52 +00:00
LinuxServer-CI
fc0986b0be Bot Updating Templated Files 2025-01-19 18:07:41 +00:00
LinuxServer-CI
564fbd271a Bot Updating Templated Files 2025-01-19 18:05:23 +00:00
Adam
bffc4c9236 Merge pull request #537 from linuxserver/auto-reload 2025-01-19 18:03:54 +00:00
thespad
14cab18c36 Spelling 2025-01-19 17:56:10 +00:00
thespad
c0adf4fd0a Update log message 2025-01-19 17:36:15 +00:00
thespad
2160126f96 Use case-insensitive include just in case (no pun) 2025-01-19 17:34:21 +00:00
thespad
d81e33b63b Anchor to avoid samples 2025-01-19 17:33:22 +00:00
thespad
21b5a79e06 Switch to include, document watchlist functionality 2025-01-19 17:30:11 +00:00
thespad
02ed03a455 Add auto-reload 2025-01-19 17:03:14 +00:00
thespad
515fdf45d8 Skip logrotate.status file in log chmod 2025-01-19 16:52:45 +00:00
LinuxServer-CI
5a5d0ebaec Bot Updating Package Versions 2025-01-18 03:24:35 +00:00
LinuxServer-CI
37deacf13a Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-01-11 03:29:15 +00:00
LinuxServer-CI
16d5763dcc Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-01-10 17:43:18 +00:00
LinuxServer-CI
e12d7e642c Bot Updating Templated Files 2025-01-10 17:33:54 +00:00
driz
0cddb6d6b7 Merge pull request #534 from linuxserver/man-fail2ban
Add working link for fail2ban-client manpage
2025-01-10 12:32:14 -05:00
thespad
ff8cf3bfa5 Add working link for fail2ban-client manpage 2025-01-10 17:16:22 +00:00
LinuxServer-CI
db05a6b72b Bot Updating Package Versions 2025-01-08 01:24:02 +00:00
LinuxServer-CI
410fa0515e Bot Updating Package Versions 2025-01-07 10:29:27 +00:00
Adam
e1ece8ac1c Merge pull request #532 from linuxserver/more-3.21-migrations 2025-01-07 10:25:12 +00:00
thespad
d33df2224b Try and warn about confs looking at /etc for certs 2025-01-07 10:15:49 +00:00
thespad
3b98b3ae65 Simplify 2025-01-07 10:15:27 +00:00
LinuxServer-CI
af6a3a2163 Bot Updating Package Versions 2025-01-06 18:11:57 +00:00
Adam
7a8a360746 Merge pull request #530 from linuxserver/3.21-migrations 2025-01-06 18:05:32 +00:00
thespad
f467b9539b Include space in replacement path 2025-01-06 17:51:07 +00:00
thespad
3aae7b50d9 Migrate existing renewal confs with old paths 2025-01-06 16:54:53 +00:00
LinuxServer-CI
98e22cb66d Bot Updating Package Versions 2025-01-04 03:26:59 +00:00
LinuxServer-CI
0a9c7ff821 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2025-01-03 16:00:52 +00:00
LinuxServer-CI
6dd89c8232 Bot Updating Templated Files 2025-01-03 15:56:57 +00:00
Adam
d376c95088 Merge pull request #526 from linuxserver/3.21 2025-01-03 15:54:39 +00:00
LinuxServer-CI
9a63c22e77 Bot Updating Package Versions 2025-01-03 15:29:34 +00:00
thespad
29bd5fe1b7 Remove logrotate chmod as the base image handles it 2024-12-31 18:18:47 +00:00
LinuxServer-CI
2e005369f1 Bot Updating Package Versions 2024-12-28 03:23:46 +00:00
LinuxServer-CI
d9a92bd940 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2024-12-21 03:28:51 +00:00
LinuxServer-CI
892cf960a9 Bot Updating Templated Files 2024-12-21 03:24:57 +00:00
LinuxServer-CI
aaa6ae77b5 Bot Updating Templated Files 2024-12-21 03:22:04 +00:00
thespad
c489e2c07f Can't rm if it's an RO filesystem 2024-12-17 21:27:37 +00:00
thespad
7f4aabeef7 Don't need to create that folder 2024-12-17 21:27:26 +00:00
thespad
03f8285212 Remove proxy cache path 2024-12-17 21:27:15 +00:00
thespad
589b80e492 Support disabling f2b 2024-12-17 20:34:13 +00:00
thespad
2dc24f90c7 Use live base image 2024-12-17 20:20:53 +00:00
thespad
e56ade75fb Rebase to 3.21 2024-12-17 20:06:05 +00:00
LinuxServer-CI
584ca6732c Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2024-12-14 03:33:45 +00:00
LinuxServer-CI
4e109fb858 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2024-12-07 03:39:50 +00:00
LinuxServer-CI
4788f2b855 Bot Updating Templated Files 2024-12-07 03:35:51 +00:00
LinuxServer-CI
397106ec30 Bot Updating Templated Files 2024-12-07 03:34:27 +00:00
LinuxServer-CI
ab9d0b8037 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2024-12-01 23:16:13 +00:00
LinuxServer-CI
19e9b1158d Bot Updating Templated Files 2024-12-01 23:12:25 +00:00
LinuxServer-CI
0a87bdaba8 Bot Updating Templated Files 2024-12-01 23:11:04 +00:00
LinuxServer-CI
2f2d7033b1 Bot Updating Templated Files 2024-12-01 23:09:57 +00:00
Adam
cbc7b3de09 Merge pull request #525 from linuxserver/update-readme 2024-12-01 23:07:45 +00:00
thespad
73806b2032 Wrong pairing 2024-12-01 22:49:32 +00:00
thespad
f3c87c3935 Use .net 2024-12-01 22:07:32 +00:00
thespad
20a134924f Add cap description, use example.com/org 2024-12-01 22:04:24 +00:00
LinuxServer-CI
9971d2f50b Bot Updating Package Versions 2024-11-30 03:32:59 +00:00
LinuxServer-CI
4e1f959980 Bot Updating Package Versions
Some checks failed
Package Trigger Scheduler / package-trigger-scheduler (push) Has been cancelled
2024-11-17 17:38:27 +00:00
Adam
f94e685a65 Merge pull request #523 from linuxserver/qnap-init-fix 2024-11-17 17:34:27 +00:00
thespad
bfeeaaaa73 Workaround for qnap systems with chmod errors 2024-11-17 14:16:46 +00:00
Adam
4437f6f8ba Merge pull request #521 from linuxserver/tidy-init 2024-11-16 16:35:20 +00:00
thespad
ed7c58a4c3 Tidy up init process 2024-11-16 15:46:17 +00:00
LinuxServer-CI
ca3830de35 Bot Updating Package Versions 2024-11-16 03:31:57 +00:00
LinuxServer-CI
e932493428 Bot Updating Package Versions 2024-11-14 19:20:37 +00:00
LinuxServer-CI
cd77a9cd2a Bot Updating Templated Files 2024-11-14 19:16:52 +00:00
LinuxServer-CI
7e9db0db80 Bot Updating Templated Files 2024-11-14 19:15:29 +00:00
LinuxServer-CI
ffecc6ee8b Bot Updating Package Versions 2024-11-09 03:26:43 +00:00
LinuxServer-CI
2b18659591 Bot Updating Package Versions 2024-11-07 23:40:09 +00:00
LinuxServer-CI
54e3eeb6e8 Bot Updating Package Versions 2024-11-05 20:24:34 +00:00
LinuxServer-CI
7066b4c1ea Bot Updating Package Versions 2024-11-02 03:25:35 +00:00
LinuxServer-CI
66ea2cbad6 Bot Updating Package Versions 2024-10-26 03:26:25 +00:00
LinuxServer-CI
139a27f1bf Bot Updating Package Versions 2024-10-22 16:20:56 +00:00
LinuxServer-CI
d107e3cbef Bot Updating Templated Files 2024-10-22 16:15:43 +00:00
Eric Nemchik
08e91b3dc3 Merge pull request #483 from panosangel/dynu
Update Dynu code and .ini file
2024-10-22 11:13:55 -05:00
Eric Nemchik
8decebad67 Merge branch 'master' into dynu 2024-10-21 13:50:19 -05:00
Panos Angel
7b828b92e8 Readme vars / Rename 'dynudns' to 'dynu' to match active plugin name 2024-10-21 13:34:35 +03:00
LinuxServer-CI
db6fbc2731 Bot Updating Package Versions 2024-10-19 03:29:06 +00:00
LinuxServer-CI
aaee5b4737 Bot Updating Package Versions 2024-10-18 13:47:28 +00:00
LinuxServer-CI
30165272ef Bot Updating Package Versions 2024-10-12 03:25:28 +00:00
LinuxServer-CI
1c052fdd0d Bot Updating Package Versions 2024-10-05 03:27:36 +00:00
LinuxServer-CI
b569c84976 Bot Updating Templated Files 2024-10-05 03:23:29 +00:00
LinuxServer-CI
c14b42f85d Bot Updating Package Versions 2024-09-28 03:30:10 +00:00
LinuxServer-CI
60b6827133 Bot Updating Templated Files 2024-09-28 03:26:43 +00:00
LinuxServer-CI
cef4d471e1 Bot Updating Templated Files 2024-09-28 03:25:24 +00:00
LinuxServer-CI
eaafc4393b Bot Updating Templated Files 2024-09-28 03:23:56 +00:00
LinuxServer-CI
94c72584a7 Bot Updating Package Versions 2024-09-21 03:21:56 +00:00
LinuxServer-CI
59d1c8a724 Bot Updating Package Versions 2024-09-14 03:24:17 +00:00
LinuxServer-CI
ed0c949267 Bot Updating Package Versions 2024-09-07 03:22:04 +00:00
LinuxServer-CI
5027f6f7b3 Bot Updating Package Versions 2024-09-02 20:56:29 +00:00
aptalca
502d10303c Merge pull request #503 from linuxserver/zerossl-revoke
fix zerossl cert revocation
2024-09-02 16:53:07 -04:00
LinuxServer-CI
05bccb95ab Bot Updating Package Versions 2024-08-31 03:22:26 +00:00
aptalca
00afe35e21 fix zerossl cert revocation 2024-08-30 13:15:22 -04:00
LinuxServer-CI
e1340c6c9e Bot Updating Package Versions 2024-08-24 03:27:44 +00:00
LinuxServer-CI
96998a1002 Bot Updating Templated Files 2024-08-24 03:22:15 +00:00
LinuxServer-CI
4fb557dcda Bot Updating Templated Files 2024-08-24 03:20:37 +00:00
LinuxServer-CI
ea13c5a885 Bot Updating Package Versions 2024-08-17 03:20:39 +00:00
Panos Angel
05b586d6df dns-conf / Rename example dynu-credentials.ini to dynu.ini 2024-05-23 23:05:05 +03:00
Panos Angel
d6d1432ff8 README / Change DNSPLUGIN option from 'dynudns' to 'dynu' 2024-05-23 23:04:19 +03:00
72 changed files with 1994 additions and 948 deletions

0
.editorconfig Executable file → Normal file
View File

View File

@@ -6,7 +6,7 @@
* Read, and fill the Pull Request template * Read, and fill the Pull Request template
* If this is a fix for a typo (in code, documentation, or the README) please file an issue and let us sort it out. We do not need a PR * If this is a fix for a typo (in code, documentation, or the README) please file an issue and let us sort it out. We do not need a PR
* If the PR is addressing an existing issue include, closes #\<issue number>, in the body of the PR commit message * If the PR is addressing an existing issue include, closes #\<issue number>, in the body of the PR commit message
* If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://discord.gg/YWrKVTn) * If you want to discuss changes, you can also bring it up in [#dev-talk](https://discordapp.com/channels/354974912613449730/757585807061155840) in our [Discord server](https://linuxserver.io/discord)
## Common files ## Common files
@@ -105,10 +105,10 @@ docker build \
-t linuxserver/swag:latest . -t linuxserver/swag:latest .
``` ```
The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static` The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static`
```bash ```bash
docker run --rm --privileged multiarch/qemu-user-static:register --reset docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset
``` ```
Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`. Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.

0
.github/FUNDING.yml vendored Executable file → Normal file
View File

2
.github/ISSUE_TEMPLATE/config.yml vendored Executable file → Normal file
View File

@@ -1,7 +1,7 @@
blank_issues_enabled: false blank_issues_enabled: false
contact_links: contact_links:
- name: Discord chat support - name: Discord chat support
url: https://discord.gg/YWrKVTn url: https://linuxserver.io/discord
about: Realtime support / chat with the community and the team. about: Realtime support / chat with the community and the team.
- name: Discourse discussion forum - name: Discourse discussion forum

0
.github/ISSUE_TEMPLATE/issue.bug.yml vendored Executable file → Normal file
View File

0
.github/ISSUE_TEMPLATE/issue.feature.yml vendored Executable file → Normal file
View File

3
.github/workflows/call_issue_pr_tracker.yml vendored Executable file → Normal file
View File

@@ -8,6 +8,9 @@ on:
pull_request_review: pull_request_review:
types: [submitted,edited,dismissed] types: [submitted,edited,dismissed]
permissions:
contents: read
jobs: jobs:
manage-project: manage-project:
permissions: permissions:

3
.github/workflows/call_issues_cron.yml vendored Executable file → Normal file
View File

@@ -4,6 +4,9 @@ on:
- cron: '35 15 * * *' - cron: '35 15 * * *'
workflow_dispatch: workflow_dispatch:
permissions:
contents: read
jobs: jobs:
stale: stale:
permissions: permissions:

View File

@@ -3,6 +3,9 @@ name: External Trigger Main
on: on:
workflow_dispatch: workflow_dispatch:
permissions:
contents: read
jobs: jobs:
external-trigger-master: external-trigger-master:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -11,18 +14,31 @@ jobs:
- name: External Trigger - name: External Trigger
if: github.ref == 'refs/heads/master' if: github.ref == 'refs/heads/master'
env:
SKIP_EXTERNAL_TRIGGER: ${{ vars.SKIP_EXTERNAL_TRIGGER }}
run: | run: |
if [ -n "${{ secrets.PAUSE_EXTERNAL_TRIGGER_SWAG_MASTER }}" ]; then printf "# External trigger for docker-swag\n\n" >> $GITHUB_STEP_SUMMARY
echo "**** Github secret PAUSE_EXTERNAL_TRIGGER_SWAG_MASTER is set; skipping trigger. ****" if grep -q "^swag_master_" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
echo "Github secret \`PAUSE_EXTERNAL_TRIGGER_SWAG_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` contains \`swag_master_\`; will skip trigger if version matches." >> $GITHUB_STEP_SUMMARY
elif grep -q "^swag_master" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` contains \`swag_master\`; skipping trigger." >> $GITHUB_STEP_SUMMARY
exit 0 exit 0
fi fi
echo "**** External trigger running off of master branch. To disable this trigger, set a Github secret named \"PAUSE_EXTERNAL_TRIGGER_SWAG_MASTER\". ****" echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
echo "External trigger running off of master branch. To disable this trigger, set a Github secret named \`PAUSE_EXTERNAL_TRIGGER_SWAG_MASTER\`" >> $GITHUB_STEP_SUMMARY echo "> External trigger running off of master branch. To disable this trigger, add \`swag_master\` into the Github organizational variable \`SKIP_EXTERNAL_TRIGGER\`." >> $GITHUB_STEP_SUMMARY
echo "**** Retrieving external version ****" printf "\n## Retrieving external version\n\n" >> $GITHUB_STEP_SUMMARY
EXT_RELEASE=$(curl -sL "https://pypi.python.org/pypi/certbot/json" |jq -r '. | .info.version') EXT_RELEASE=$(curl -sL "https://pypi.python.org/pypi/certbot/json" |jq -r '. | .info.version')
echo "Type is \`pip_version\`" >> $GITHUB_STEP_SUMMARY
if grep -q "^swag_master_${EXT_RELEASE}" <<< "${SKIP_EXTERNAL_TRIGGER}"; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Github organizational variable \`SKIP_EXTERNAL_TRIGGER\` matches current external release; skipping trigger." >> $GITHUB_STEP_SUMMARY
exit 0
fi
if [ -z "${EXT_RELEASE}" ] || [ "${EXT_RELEASE}" == "null" ]; then if [ -z "${EXT_RELEASE}" ] || [ "${EXT_RELEASE}" == "null" ]; then
echo "**** Can't retrieve external version, exiting ****" echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Can't retrieve external version, exiting" >> $GITHUB_STEP_SUMMARY
FAILURE_REASON="Can't retrieve external version for swag branch master" FAILURE_REASON="Can't retrieve external version for swag branch master"
GHA_TRIGGER_URL="https://github.com/linuxserver/docker-swag/actions/runs/${{ github.run_id }}" GHA_TRIGGER_URL="https://github.com/linuxserver/docker-swag/actions/runs/${{ github.run_id }}"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680, curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680,
@@ -30,25 +46,43 @@ jobs:
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }} "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
exit 1 exit 1
fi fi
EXT_RELEASE=$(echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g') EXT_RELEASE_SANITIZED=$(echo ${EXT_RELEASE} | sed 's/[~,%@+;:/]//g')
echo "**** External version: ${EXT_RELEASE} ****" echo "Sanitized external version: \`${EXT_RELEASE_SANITIZED}\`" >> $GITHUB_STEP_SUMMARY
echo "External version: ${EXT_RELEASE}" >> $GITHUB_STEP_SUMMARY echo "Retrieving last pushed version" >> $GITHUB_STEP_SUMMARY
echo "**** Retrieving last pushed version ****"
image="linuxserver/swag" image="linuxserver/swag"
tag="latest" tag="latest"
token=$(curl -sX GET \ token=$(curl -sX GET \
"https://ghcr.io/token?scope=repository%3Alinuxserver%2Fswag%3Apull" \ "https://ghcr.io/token?scope=repository%3Alinuxserver%2Fswag%3Apull" \
| jq -r '.token') | jq -r '.token')
multidigest=$(curl -s \ multidigest=$(curl -s \
--header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
--header "Accept: application/vnd.oci.image.index.v1+json" \
--header "Authorization: Bearer ${token}" \
"https://ghcr.io/v2/${image}/manifests/${tag}")
if jq -e '.layers // empty' <<< "${multidigest}" >/dev/null 2>&1; then
# If there's a layer element it's a single-arch manifest so just get that digest
digest=$(jq -r '.config.digest' <<< "${multidigest}")
else
# Otherwise it's multi-arch or has manifest annotations
if jq -e '.manifests[]?.annotations // empty' <<< "${multidigest}" >/dev/null 2>&1; then
# Check for manifest annotations and delete if found
multidigest=$(jq 'del(.manifests[] | select(.annotations))' <<< "${multidigest}")
fi
if [[ $(jq '.manifests | length' <<< "${multidigest}") -gt 1 ]]; then
# If there's still more than one digest, it's multi-arch
multidigest=$(jq -r ".manifests[] | select(.platform.architecture == \"amd64\").digest?" <<< "${multidigest}")
else
# Otherwise it's single arch
multidigest=$(jq -r ".manifests[].digest?" <<< "${multidigest}")
fi
if digest=$(curl -s \
--header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ --header "Accept: application/vnd.docker.distribution.manifest.v2+json" \
--header "Accept: application/vnd.oci.image.manifest.v1+json" \
--header "Authorization: Bearer ${token}" \ --header "Authorization: Bearer ${token}" \
"https://ghcr.io/v2/${image}/manifests/${tag}" \ "https://ghcr.io/v2/${image}/manifests/${multidigest}"); then
| jq -r 'first(.manifests[].digest)') digest=$(jq -r '.config.digest' <<< "${digest}");
digest=$(curl -s \ fi
--header "Accept: application/vnd.docker.distribution.manifest.v2+json" \ fi
--header "Authorization: Bearer ${token}" \
"https://ghcr.io/v2/${image}/manifests/${multidigest}" \
| jq -r '.config.digest')
image_info=$(curl -sL \ image_info=$(curl -sL \
--header "Authorization: Bearer ${token}" \ --header "Authorization: Bearer ${token}" \
"https://ghcr.io/v2/${image}/blobs/${digest}") "https://ghcr.io/v2/${image}/blobs/${digest}")
@@ -60,45 +94,54 @@ jobs:
IMAGE_RELEASE=$(echo ${image_info} | jq -r '.Labels.build_version' | awk '{print $3}') IMAGE_RELEASE=$(echo ${image_info} | jq -r '.Labels.build_version' | awk '{print $3}')
IMAGE_VERSION=$(echo ${IMAGE_RELEASE} | awk -F'-ls' '{print $1}') IMAGE_VERSION=$(echo ${IMAGE_RELEASE} | awk -F'-ls' '{print $1}')
if [ -z "${IMAGE_VERSION}" ]; then if [ -z "${IMAGE_VERSION}" ]; then
echo "**** Can't retrieve last pushed version, exiting ****" echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "Can't retrieve last pushed version, exiting" >> $GITHUB_STEP_SUMMARY
FAILURE_REASON="Can't retrieve last pushed version for swag tag latest" FAILURE_REASON="Can't retrieve last pushed version for swag tag latest"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680, curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 16711680,
"description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}], "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }} "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
exit 1 exit 1
fi fi
echo "**** Last pushed version: ${IMAGE_VERSION} ****" echo "Last pushed version: \`${IMAGE_VERSION}\`" >> $GITHUB_STEP_SUMMARY
echo "Last pushed version: ${IMAGE_VERSION}" >> $GITHUB_STEP_SUMMARY if [ "${EXT_RELEASE_SANITIZED}" == "${IMAGE_VERSION}" ]; then
if [ "${EXT_RELEASE}" == "${IMAGE_VERSION}" ]; then echo "Sanitized version \`${EXT_RELEASE_SANITIZED}\` already pushed, exiting" >> $GITHUB_STEP_SUMMARY
echo "**** Version ${EXT_RELEASE} already pushed, exiting ****"
echo "Version ${EXT_RELEASE} already pushed, exiting" >> $GITHUB_STEP_SUMMARY
exit 0 exit 0
elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
echo "**** New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting ****" echo "New version \`${EXT_RELEASE}\` found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
echo "New version ${EXT_RELEASE} found; but there already seems to be an active build on Jenkins; exiting" >> $GITHUB_STEP_SUMMARY
exit 0 exit 0
else else
echo "**** New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build ****" if [[ "${artifacts_found}" == "false" ]]; then
echo "New version ${EXT_RELEASE} found; old version was ${IMAGE_VERSION}. Triggering new build" >> $GITHUB_STEP_SUMMARY echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
response=$(curl -iX POST \ echo "> New version detected, but not all artifacts are published yet; skipping trigger" >> $GITHUB_STEP_SUMMARY
https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/buildWithParameters?PACKAGE_CHECK=false \ FAILURE_REASON="New version ${EXT_RELEASE} for swag tag latest is detected, however not all artifacts are uploaded to upstream release yet. Will try again later."
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|") curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
echo "**** Jenkins job queue url: ${response%$'\r'} ****" "description": "**Trigger Failed** \n**Reason:** '"${FAILURE_REASON}"' \n"}],
echo "**** Sleeping 10 seconds until job starts ****" "username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
sleep 10 else
buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url') printf "\n## Trigger new build\n\n" >> $GITHUB_STEP_SUMMARY
buildurl="${buildurl%$'\r'}" echo "New sanitized version \`${EXT_RELEASE_SANITIZED}\` found; old version was \`${IMAGE_VERSION}\`. Triggering new build" >> $GITHUB_STEP_SUMMARY
echo "**** Jenkins job build url: ${buildurl} ****" if [[ "${artifacts_found}" == "true" ]]; then
echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY echo "All artifacts seem to be uploaded." >> $GITHUB_STEP_SUMMARY
echo "**** Attempting to change the Jenkins job description ****" fi
curl -iX POST \ response=$(curl -iX POST \
"${buildurl}submitDescription" \ https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/buildWithParameters?PACKAGE_CHECK=false \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \ --user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
--data-urlencode "description=GHA external trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \ echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY
--data-urlencode "Submit=Submit" echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY
echo "**** Notifying Discord ****" sleep 10
TRIGGER_REASON="A version change was detected for swag tag latest. Old version:${IMAGE_VERSION} New version:${EXT_RELEASE}" buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903, buildurl="${buildurl%$'\r'}"
"description": "**Build Triggered** \n**Reason:** '"${TRIGGER_REASON}"' \n**Build URL:** '"${buildurl}display/redirect"' \n"}], echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }} echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY
curl -iX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA external trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"
echo "**** Notifying Discord ****"
TRIGGER_REASON="A version change was detected for swag tag latest. Old version:${IMAGE_VERSION} New version:${EXT_RELEASE_SANITIZED}"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
"description": "**Build Triggered** \n**Reason:** '"${TRIGGER_REASON}"' \n**Build URL:** '"${buildurl}display/redirect"' \n"}],
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
fi
fi fi

View File

@@ -5,6 +5,9 @@ on:
- cron: '2 * * * *' - cron: '2 * * * *'
workflow_dispatch: workflow_dispatch:
permissions:
contents: read
jobs: jobs:
external-trigger-scheduler: external-trigger-scheduler:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -15,31 +18,31 @@ jobs:
- name: External Trigger Scheduler - name: External Trigger Scheduler
run: | run: |
echo "**** Branches found: ****" printf "# External trigger scheduler for docker-swag\n\n" >> $GITHUB_STEP_SUMMARY
git for-each-ref --format='%(refname:short)' refs/remotes printf "Found the branches:\n\n%s\n" "$(git for-each-ref --format='- %(refname:lstrip=3)' refs/remotes)" >> $GITHUB_STEP_SUMMARY
for br in $(git for-each-ref --format='%(refname:short)' refs/remotes) for br in $(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes)
do do
br=$(echo "$br" | sed 's|origin/||g') if [[ "${br}" == "HEAD" ]]; then
echo "**** Evaluating branch ${br} ****" printf "\nSkipping %s.\n" ${br} >> $GITHUB_STEP_SUMMARY
continue
fi
printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY
ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/jenkins-vars.yml) ls_jenkins_vars=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/jenkins-vars.yml)
ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch') ls_branch=$(echo "${ls_jenkins_vars}" | yq -r '.ls_branch')
ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type') ls_trigger=$(echo "${ls_jenkins_vars}" | yq -r '.external_type')
if [[ "${br}" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then if [[ "${br}" == "${ls_branch}" ]] && [[ "${ls_trigger}" != "os" ]]; then
echo "**** Branch ${br} appears to be live and trigger is not os; checking workflow. ****" echo "Branch appears to be live and trigger is not os; checking workflow." >> $GITHUB_STEP_SUMMARY
if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/.github/workflows/external_trigger.yml > /dev/null 2>&1; then if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/.github/workflows/external_trigger.yml > /dev/null 2>&1; then
echo "**** Workflow exists. Triggering external trigger workflow for branch ${br} ****." echo "Triggering external trigger workflow for branch." >> $GITHUB_STEP_SUMMARY
echo "Triggering external trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY
curl -iX POST \ curl -iX POST \
-H "Authorization: token ${{ secrets.CR_PAT }}" \ -H "Authorization: token ${{ secrets.CR_PAT }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-d "{\"ref\":\"refs/heads/${br}\"}" \ -d "{\"ref\":\"refs/heads/${br}\"}" \
https://api.github.com/repos/linuxserver/docker-swag/actions/workflows/external_trigger.yml/dispatches https://api.github.com/repos/linuxserver/docker-swag/actions/workflows/external_trigger.yml/dispatches
else else
echo "**** Workflow doesn't exist; skipping trigger. ****" echo "Skipping branch due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
echo "Skipping branch ${br} due to no external trigger workflow present." >> $GITHUB_STEP_SUMMARY
fi fi
else else
echo "**** ${br} is either a dev branch, or has no external version; skipping trigger. ****" echo "Skipping branch due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
echo "Skipping branch ${br} due to being detected as dev branch or having no external version." >> $GITHUB_STEP_SUMMARY
fi fi
done done

View File

@@ -2,8 +2,14 @@ name: Greetings
on: [pull_request_target, issues] on: [pull_request_target, issues]
permissions:
contents: read
jobs: jobs:
greeting: greeting:
permissions:
issues: write
pull-requests: write
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/first-interaction@v1 - uses: actions/first-interaction@v1

View File

@@ -1,42 +0,0 @@
name: Package Trigger Main
on:
workflow_dispatch:
jobs:
package-trigger-master:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.1
- name: Package Trigger
if: github.ref == 'refs/heads/master'
run: |
if [ -n "${{ secrets.PAUSE_PACKAGE_TRIGGER_SWAG_MASTER }}" ]; then
echo "**** Github secret PAUSE_PACKAGE_TRIGGER_SWAG_MASTER is set; skipping trigger. ****"
echo "Github secret \`PAUSE_PACKAGE_TRIGGER_SWAG_MASTER\` is set; skipping trigger." >> $GITHUB_STEP_SUMMARY
exit 0
fi
if [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/lastBuild/api/json | jq -r '.building') == "true" ]; then
echo "**** There already seems to be an active build on Jenkins; skipping package trigger ****"
echo "There already seems to be an active build on Jenkins; skipping package trigger" >> $GITHUB_STEP_SUMMARY
exit 0
fi
echo "**** Package trigger running off of master branch. To disable, set a Github secret named \"PAUSE_PACKAGE_TRIGGER_SWAG_MASTER\". ****"
echo "Package trigger running off of master branch. To disable, set a Github secret named \`PAUSE_PACKAGE_TRIGGER_SWAG_MASTER\`" >> $GITHUB_STEP_SUMMARY
response=$(curl -iX POST \
https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/buildWithParameters?PACKAGE_CHECK=true \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
echo "**** Jenkins job queue url: ${response%$'\r'} ****"
echo "**** Sleeping 10 seconds until job starts ****"
sleep 10
buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
buildurl="${buildurl%$'\r'}"
echo "**** Jenkins job build url: ${buildurl} ****"
echo "Jenkins job build url: ${buildurl}" >> $GITHUB_STEP_SUMMARY
echo "**** Attempting to change the Jenkins job description ****"
curl -iX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"

View File

@@ -5,6 +5,9 @@ on:
- cron: '1 3 * * 6' - cron: '1 3 * * 6'
workflow_dispatch: workflow_dispatch:
permissions:
contents: read
jobs: jobs:
package-trigger-scheduler: package-trigger-scheduler:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -14,37 +17,87 @@ jobs:
fetch-depth: '0' fetch-depth: '0'
- name: Package Trigger Scheduler - name: Package Trigger Scheduler
env:
SKIP_PACKAGE_TRIGGER: ${{ vars.SKIP_PACKAGE_TRIGGER }}
run: | run: |
echo "**** Branches found: ****" printf "# Package trigger scheduler for docker-swag\n\n" >> $GITHUB_STEP_SUMMARY
git for-each-ref --format='%(refname:short)' refs/remotes printf "Found the branches:\n\n%s\n" "$(git for-each-ref --format='- %(refname:lstrip=3)' refs/remotes)" >> $GITHUB_STEP_SUMMARY
for br in $(git for-each-ref --format='%(refname:short)' refs/remotes) for br in $(git for-each-ref --format='%(refname:lstrip=3)' refs/remotes)
do do
br=$(echo "$br" | sed 's|origin/||g') if [[ "${br}" == "HEAD" ]]; then
echo "**** Evaluating branch ${br} ****" printf "\nSkipping %s.\n" ${br} >> $GITHUB_STEP_SUMMARY
ls_branch=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/jenkins-vars.yml | yq -r '.ls_branch') continue
if [ "${br}" == "${ls_branch}" ]; then fi
echo "**** Branch ${br} appears to be live; checking workflow. ****" printf "\n## Evaluating \`%s\`\n\n" ${br} >> $GITHUB_STEP_SUMMARY
if curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/.github/workflows/package_trigger.yml > /dev/null 2>&1; then JENKINS_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/jenkins-vars.yml)
echo "**** Workflow exists. Triggering package trigger workflow for branch ${br}. ****" if ! curl -sfX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/Jenkinsfile >/dev/null 2>&1; then
echo "Triggering package trigger workflow for branch ${br}" >> $GITHUB_STEP_SUMMARY echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
triggered_branches="${triggered_branches}${br} " echo "> No Jenkinsfile found. Branch is either deprecated or is an early dev branch." >> $GITHUB_STEP_SUMMARY
curl -iX POST \ skipped_branches="${skipped_branches}${br} "
-H "Authorization: token ${{ secrets.CR_PAT }}" \ elif [[ "${br}" == $(yq -r '.ls_branch' <<< "${JENKINS_VARS}") ]]; then
-H "Accept: application/vnd.github.v3+json" \ echo "Branch appears to be live; checking workflow." >> $GITHUB_STEP_SUMMARY
-d "{\"ref\":\"refs/heads/${br}\"}" \ README_VARS=$(curl -sX GET https://raw.githubusercontent.com/linuxserver/docker-swag/${br}/readme-vars.yml)
https://api.github.com/repos/linuxserver/docker-swag/actions/workflows/package_trigger.yml/dispatches if [[ $(yq -r '.project_deprecation_status' <<< "${README_VARS}") == "true" ]]; then
sleep 30 echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Branch appears to be deprecated; skipping trigger." >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
elif [[ $(yq -r '.skip_package_check' <<< "${JENKINS_VARS}") == "true" ]]; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Skipping branch ${br} due to \`skip_package_check\` being set in \`jenkins-vars.yml\`." >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
elif grep -q "^swag_${br}" <<< "${SKIP_PACKAGE_TRIGGER}"; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Github organizational variable \`SKIP_PACKAGE_TRIGGER\` contains \`swag_${br}\`; skipping trigger." >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
elif [ $(curl -s https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/${br}/lastBuild/api/json | jq -r '.building' 2>/dev/null) == "true" ]; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> There already seems to be an active build on Jenkins; skipping package trigger for ${br}" >> $GITHUB_STEP_SUMMARY
skipped_branches="${skipped_branches}${br} "
else else
echo "**** Workflow doesn't exist; skipping trigger. ****" echo "> [!NOTE]" >> $GITHUB_STEP_SUMMARY
echo "Skipping branch ${br} due to no package trigger workflow present." >> $GITHUB_STEP_SUMMARY echo "> Triggering package trigger for branch ${br}" >> $GITHUB_STEP_SUMMARY
printf "> To disable, add \`swag_%s\` into the Github organizational variable \`SKIP_PACKAGE_TRIGGER\`.\n\n" "${br}" >> $GITHUB_STEP_SUMMARY
triggered_branches="${triggered_branches}${br} "
response=$(curl -iX POST \
https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/${br}/buildWithParameters?PACKAGE_CHECK=true \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} | grep -i location | sed "s|^[L|l]ocation: \(.*\)|\1|")
if [[ -z "${response}" ]]; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Jenkins build could not be triggered. Skipping branch."
continue
fi
echo "Jenkins [job queue url](${response%$'\r'})" >> $GITHUB_STEP_SUMMARY
echo "Sleeping 10 seconds until job starts" >> $GITHUB_STEP_SUMMARY
sleep 10
buildurl=$(curl -s "${response%$'\r'}api/json" | jq -r '.executable.url')
buildurl="${buildurl%$'\r'}"
echo "Jenkins job [build url](${buildurl})" >> $GITHUB_STEP_SUMMARY
echo "Attempting to change the Jenkins job description" >> $GITHUB_STEP_SUMMARY
if ! curl -ifX POST \
"${buildurl}submitDescription" \
--user ${{ secrets.JENKINS_USER }}:${{ secrets.JENKINS_TOKEN }} \
--data-urlencode "description=GHA package trigger https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
--data-urlencode "Submit=Submit"; then
echo "> [!WARNING]" >> $GITHUB_STEP_SUMMARY
echo "> Unable to change the Jenkins job description."
fi
sleep 20
fi fi
else else
echo "**** ${br} appears to be a dev branch; skipping trigger. ****"
echo "Skipping branch ${br} due to being detected as dev branch." >> $GITHUB_STEP_SUMMARY echo "Skipping branch ${br} due to being detected as dev branch." >> $GITHUB_STEP_SUMMARY
fi fi
done done
echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****" if [[ -n "${triggered_branches}" ]] || [[ -n "${skipped_branches}" ]]; then
echo "**** Notifying Discord ****" if [[ -n "${triggered_branches}" ]]; then
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903, NOTIFY_BRANCHES="**Triggered:** ${triggered_branches} \n"
"description": "**Package Check Build(s) Triggered for swag** \n**Branch(es):** '"${triggered_branches}"' \n**Build URL:** '"https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-swag/activity/"' \n"}], NOTIFY_BUILD_URL="**Build URL:** https://ci.linuxserver.io/blue/organizations/jenkins/Docker-Pipeline-Builders%2Fdocker-swag/activity/ \n"
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }} echo "**** Package check build(s) triggered for branch(es): ${triggered_branches} ****"
fi
if [[ -n "${skipped_branches}" ]]; then
NOTIFY_BRANCHES="${NOTIFY_BRANCHES}**Skipped:** ${skipped_branches} \n"
fi
echo "**** Notifying Discord ****"
curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://cdn.discordapp.com/avatars/354986384542662657/df91181b3f1cf0ef1592fbe18e0962d7.png","embeds": [{"color": 9802903,
"description": "**Package Check Build(s) for swag** \n'"${NOTIFY_BRANCHES}"''"${NOTIFY_BUILD_URL}"'"}],
"username": "Github Actions"}' ${{ secrets.DISCORD_WEBHOOK }}
fi

0
.github/workflows/permissions.yml vendored Executable file → Normal file
View File

1
.gitignore vendored
View File

@@ -1 +1,2 @@
.idea
.jenkins-external .jenkins-external

View File

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
FROM ghcr.io/linuxserver/baseimage-alpine-nginx:3.20 FROM ghcr.io/linuxserver/baseimage-alpine-nginx:3.22
# set version label # set version label
ARG BUILD_DATE ARG BUILD_DATE
@@ -10,8 +10,10 @@ LABEL build_version="Linuxserver.io version:- ${VERSION} Build-date:- ${BUILD_DA
LABEL maintainer="nemchik" LABEL maintainer="nemchik"
# environment settings # environment settings
ENV DHLEVEL=2048 ONLY_SUBDOMAINS=false AWS_CONFIG_FILE=/config/dns-conf/route53.ini ENV DHLEVEL=2048 \
ENV S6_BEHAVIOUR_IF_STAGE2_FAILS=2 ONLY_SUBDOMAINS=false \
AWS_CONFIG_FILE=/config/dns-conf/route53.ini \
S6_BEHAVIOUR_IF_STAGE2_FAILS=2
RUN \ RUN \
echo "**** install build packages ****" && \ echo "**** install build packages ****" && \
@@ -27,6 +29,7 @@ RUN \
apk add --no-cache \ apk add --no-cache \
fail2ban \ fail2ban \
gnupg \ gnupg \
inotify-tools \
iptables-legacy \ iptables-legacy \
memcached \ memcached \
nginx-mod-http-brotli \ nginx-mod-http-brotli \
@@ -46,37 +49,38 @@ RUN \
nginx-mod-stream \ nginx-mod-stream \
nginx-mod-stream-geoip2 \ nginx-mod-stream-geoip2 \
nginx-vim \ nginx-vim \
php83-bcmath \ php84-bcmath \
php83-bz2 \ php84-bz2 \
php83-dom \ php84-dom \
php83-exif \ php84-exif \
php83-ftp \ php84-ftp \
php83-gd \ php84-gd \
php83-gmp \ php84-gmp \
php83-imap \ php84-imap \
php83-intl \ php84-intl \
php83-ldap \ php84-ldap \
php83-mysqli \ php84-mysqli \
php83-mysqlnd \ php84-mysqlnd \
php83-opcache \ php84-opcache \
php83-pdo_mysql \ php84-pdo_mysql \
php83-pdo_odbc \ php84-pdo_odbc \
php83-pdo_pgsql \ php84-pdo_pgsql \
php83-pdo_sqlite \ php84-pdo_sqlite \
php83-pear \ php84-pear \
php83-pecl-apcu \ php84-pecl-apcu \
php83-pecl-mcrypt \ php84-pecl-memcached \
php83-pecl-memcached \ php84-pecl-redis \
php83-pecl-redis \ php84-pgsql \
php83-pgsql \ php84-posix \
php83-posix \ php84-soap \
php83-soap \ php84-sockets \
php83-sockets \ php84-sodium \
php83-sodium \ php84-sqlite3 \
php83-sqlite3 \ php84-tokenizer \
php83-tokenizer \ php84-xmlreader \
php83-xmlreader \ php84-xsl \
php83-xsl \ python3 \
py3-jinja2 \
whois && \ whois && \
echo "**** install certbot plugins ****" && \ echo "**** install certbot plugins ****" && \
if [ -z ${CERTBOT_VERSION+x} ]; then \ if [ -z ${CERTBOT_VERSION+x} ]; then \
@@ -86,7 +90,7 @@ RUN \
pip install -U --no-cache-dir \ pip install -U --no-cache-dir \
pip \ pip \
wheel && \ wheel && \
pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.20/ \ pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.22/ \
certbot==${CERTBOT_VERSION} \ certbot==${CERTBOT_VERSION} \
certbot-dns-acmedns \ certbot-dns-acmedns \
certbot-dns-aliyun \ certbot-dns-aliyun \
@@ -148,9 +152,9 @@ RUN \
rm -f /etc/nginx/conf.d/stream.conf && \ rm -f /etc/nginx/conf.d/stream.conf && \
echo "**** correct ip6tables legacy issue ****" && \ echo "**** correct ip6tables legacy issue ****" && \
rm \ rm \
/sbin/ip6tables && \ /usr/sbin/ip6tables && \
ln -s \ ln -s \
/sbin/ip6tables-nft /sbin/ip6tables && \ /usr/sbin/ip6tables-nft /usr/sbin/ip6tables && \
echo "**** remove unnecessary fail2ban filters ****" && \ echo "**** remove unnecessary fail2ban filters ****" && \
rm \ rm \
/etc/fail2ban/jail.d/alpine-ssh.conf && \ /etc/fail2ban/jail.d/alpine-ssh.conf && \

View File

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
FROM ghcr.io/linuxserver/baseimage-alpine-nginx:arm64v8-3.20 FROM ghcr.io/linuxserver/baseimage-alpine-nginx:arm64v8-3.22
# set version label # set version label
ARG BUILD_DATE ARG BUILD_DATE
@@ -10,8 +10,10 @@ LABEL build_version="Linuxserver.io version:- ${VERSION} Build-date:- ${BUILD_DA
LABEL maintainer="nemchik" LABEL maintainer="nemchik"
# environment settings # environment settings
ENV DHLEVEL=2048 ONLY_SUBDOMAINS=false AWS_CONFIG_FILE=/config/dns-conf/route53.ini ENV DHLEVEL=2048 \
ENV S6_BEHAVIOUR_IF_STAGE2_FAILS=2 ONLY_SUBDOMAINS=false \
AWS_CONFIG_FILE=/config/dns-conf/route53.ini \
S6_BEHAVIOUR_IF_STAGE2_FAILS=2
RUN \ RUN \
echo "**** install build packages ****" && \ echo "**** install build packages ****" && \
@@ -27,6 +29,7 @@ RUN \
apk add --no-cache \ apk add --no-cache \
fail2ban \ fail2ban \
gnupg \ gnupg \
inotify-tools \
iptables-legacy \ iptables-legacy \
memcached \ memcached \
nginx-mod-http-brotli \ nginx-mod-http-brotli \
@@ -46,37 +49,38 @@ RUN \
nginx-mod-stream \ nginx-mod-stream \
nginx-mod-stream-geoip2 \ nginx-mod-stream-geoip2 \
nginx-vim \ nginx-vim \
php83-bcmath \ php84-bcmath \
php83-bz2 \ php84-bz2 \
php83-dom \ php84-dom \
php83-exif \ php84-exif \
php83-ftp \ php84-ftp \
php83-gd \ php84-gd \
php83-gmp \ php84-gmp \
php83-imap \ php84-imap \
php83-intl \ php84-intl \
php83-ldap \ php84-ldap \
php83-mysqli \ php84-mysqli \
php83-mysqlnd \ php84-mysqlnd \
php83-opcache \ php84-opcache \
php83-pdo_mysql \ php84-pdo_mysql \
php83-pdo_odbc \ php84-pdo_odbc \
php83-pdo_pgsql \ php84-pdo_pgsql \
php83-pdo_sqlite \ php84-pdo_sqlite \
php83-pear \ php84-pear \
php83-pecl-apcu \ php84-pecl-apcu \
php83-pecl-mcrypt \ php84-pecl-memcached \
php83-pecl-memcached \ php84-pecl-redis \
php83-pecl-redis \ php84-pgsql \
php83-pgsql \ php84-posix \
php83-posix \ php84-soap \
php83-soap \ php84-sockets \
php83-sockets \ php84-sodium \
php83-sodium \ php84-sqlite3 \
php83-sqlite3 \ php84-tokenizer \
php83-tokenizer \ php84-xmlreader \
php83-xmlreader \ php84-xsl \
php83-xsl \ python3 \
py3-jinja2 \
whois && \ whois && \
echo "**** install certbot plugins ****" && \ echo "**** install certbot plugins ****" && \
if [ -z ${CERTBOT_VERSION+x} ]; then \ if [ -z ${CERTBOT_VERSION+x} ]; then \
@@ -86,7 +90,7 @@ RUN \
pip install -U --no-cache-dir \ pip install -U --no-cache-dir \
pip \ pip \
wheel && \ wheel && \
pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.20/ \ pip install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.22/ \
certbot==${CERTBOT_VERSION} \ certbot==${CERTBOT_VERSION} \
certbot-dns-acmedns \ certbot-dns-acmedns \
certbot-dns-aliyun \ certbot-dns-aliyun \
@@ -148,9 +152,9 @@ RUN \
rm -f /etc/nginx/conf.d/stream.conf && \ rm -f /etc/nginx/conf.d/stream.conf && \
echo "**** correct ip6tables legacy issue ****" && \ echo "**** correct ip6tables legacy issue ****" && \
rm \ rm \
/sbin/ip6tables && \ /usr/sbin/ip6tables && \
ln -s \ ln -s \
/sbin/ip6tables-nft /sbin/ip6tables && \ /usr/sbin/ip6tables-nft /usr/sbin/ip6tables && \
echo "**** remove unnecessary fail2ban filters ****" && \ echo "**** remove unnecessary fail2ban filters ****" && \
rm \ rm \
/etc/fail2ban/jail.d/alpine-ssh.conf && \ /etc/fail2ban/jail.d/alpine-ssh.conf && \

438
Jenkinsfile vendored
View File

@@ -8,7 +8,7 @@ pipeline {
} }
// Input to determine if this is a package check // Input to determine if this is a package check
parameters { parameters {
string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK') string(defaultValue: 'false', description: 'package check run', name: 'PACKAGE_CHECK')
} }
// Configuration for the variables used for this specific repo // Configuration for the variables used for this specific repo
environment { environment {
@@ -17,6 +17,8 @@ pipeline {
GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0') GITLAB_TOKEN=credentials('b6f0f1dd-6952-4cf6-95d1-9c06380283f0')
GITLAB_NAMESPACE=credentials('gitlab-namespace-id') GITLAB_NAMESPACE=credentials('gitlab-namespace-id')
DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat') DOCKERHUB_TOKEN=credentials('docker-hub-ci-pat')
QUAYIO_API_TOKEN=credentials('quayio-repo-api-token')
GIT_SIGNING_KEY=credentials('484fbca6-9a4f-455e-b9e3-97ac98785f5f')
EXT_PIP = 'certbot' EXT_PIP = 'certbot'
BUILD_VERSION_ARG = 'CERTBOT_VERSION' BUILD_VERSION_ARG = 'CERTBOT_VERSION'
LS_USER = 'linuxserver' LS_USER = 'linuxserver'
@@ -27,25 +29,51 @@ pipeline {
PR_DOCKERHUB_IMAGE = 'lspipepr/swag' PR_DOCKERHUB_IMAGE = 'lspipepr/swag'
DIST_IMAGE = 'alpine' DIST_IMAGE = 'alpine'
MULTIARCH='true' MULTIARCH='true'
CI='true' CI='false'
CI_WEB='false' CI_WEB='false'
CI_PORT='80' CI_PORT='80'
CI_SSL='false' CI_SSL='false'
CI_DELAY='30' CI_DELAY='30'
CI_DOCKERENV='TEST_RUN=1' CI_DOCKERENV=''
CI_AUTH='' CI_AUTH=''
CI_WEBPATH='' CI_WEBPATH=''
} }
stages { stages {
stage("Set git config"){
steps{
sh '''#!/bin/bash
cat ${GIT_SIGNING_KEY} > /config/.ssh/id_sign
chmod 600 /config/.ssh/id_sign
ssh-keygen -y -f /config/.ssh/id_sign > /config/.ssh/id_sign.pub
echo "Using $(ssh-keygen -lf /config/.ssh/id_sign) to sign commits"
git config --global gpg.format ssh
git config --global user.signingkey /config/.ssh/id_sign
git config --global commit.gpgsign true
'''
}
}
// Setup all the basic environment variables needed for the build // Setup all the basic environment variables needed for the build
stage("Set ENV Variables base"){ stage("Set ENV Variables base"){
steps{ steps{
echo "Running on node: ${NODE_NAME}"
sh '''#! /bin/bash sh '''#! /bin/bash
containers=$(docker ps -aq) echo "Pruning builder"
docker builder prune -f --builder container || :
containers=$(docker ps -q)
if [[ -n "${containers}" ]]; then if [[ -n "${containers}" ]]; then
docker stop ${containers} BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
for container in ${containers}; do
if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
echo "skipping buildx container in docker stop"
else
echo "Stopping container ${container}"
docker stop ${container}
fi
done
fi fi
docker system prune -af --volumes || : ''' docker system prune -f --volumes || :
docker image prune -af || :
'''
script{ script{
env.EXIT_STATUS = '' env.EXIT_STATUS = ''
env.LS_RELEASE = sh( env.LS_RELEASE = sh(
@@ -66,8 +94,12 @@ pipeline {
env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/commit/' + env.GIT_COMMIT
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/' env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DOCKERHUB_IMAGE + '/tags/'
env.PULL_REQUEST = env.CHANGE_ID env.PULL_REQUEST = env.CHANGE_ID
env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./.github/workflows/package_trigger.yml ./root/donate.txt' env.TEMPLATED_FILES = 'Jenkinsfile README.md LICENSE .editorconfig ./.github/CONTRIBUTING.md ./.github/FUNDING.yml ./.github/ISSUE_TEMPLATE/config.yml ./.github/ISSUE_TEMPLATE/issue.bug.yml ./.github/ISSUE_TEMPLATE/issue.feature.yml ./.github/PULL_REQUEST_TEMPLATE.md ./.github/workflows/external_trigger_scheduler.yml ./.github/workflows/greetings.yml ./.github/workflows/package_trigger_scheduler.yml ./.github/workflows/call_issue_pr_tracker.yml ./.github/workflows/call_issues_cron.yml ./.github/workflows/permissions.yml ./.github/workflows/external_trigger.yml ./root/donate.txt'
if ( env.SYFT_IMAGE_TAG == null ) {
env.SYFT_IMAGE_TAG = 'latest'
}
} }
echo "Using syft image tag ${SYFT_IMAGE_TAG}"
sh '''#! /bin/bash sh '''#! /bin/bash
echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" ''' echo "The default github branch detected as ${GH_DEFAULT_BRANCH}" '''
script{ script{
@@ -175,6 +207,7 @@ pipeline {
env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER env.VERSION_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
env.META_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER env.META_TAG = env.EXT_RELEASE_CLEAN + '-ls' + env.LS_TAG_NUMBER
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
} }
} }
} }
@@ -199,6 +232,7 @@ pipeline {
env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA env.META_TAG = env.EXT_RELEASE_CLEAN + '-pkg-' + env.PACKAGE_TAG + '-dev-' + env.COMMIT_SHA
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/' env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.DEV_DOCKERHUB_IMAGE + '/tags/'
env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
} }
} }
} }
@@ -223,6 +257,7 @@ pipeline {
env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN env.EXT_RELEASE_TAG = 'version-' + env.EXT_RELEASE_CLEAN
env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST env.CODE_URL = 'https://github.com/' + env.LS_USER + '/' + env.LS_REPO + '/pull/' + env.PULL_REQUEST
env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/' env.DOCKERHUB_LINK = 'https://hub.docker.com/r/' + env.PR_DOCKERHUB_IMAGE + '/tags/'
env.BUILDCACHE = 'docker.io/lsiodev/buildcache,registry.gitlab.com/linuxserver.io/docker-jenkins-builder/lsiodev-buildcache,ghcr.io/linuxserver/lsiodev-buildcache,quay.io/linuxserver.io/lsiodev-buildcache'
} }
} }
} }
@@ -295,7 +330,7 @@ pipeline {
echo "Jenkinsfile is up to date." echo "Jenkinsfile is up to date."
fi fi
echo "Starting Stage 2 - Delete old templates" echo "Starting Stage 2 - Delete old templates"
OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml" OLD_TEMPLATES=".github/ISSUE_TEMPLATE.md .github/ISSUE_TEMPLATE/issue.bug.md .github/ISSUE_TEMPLATE/issue.feature.md .github/workflows/call_invalid_helper.yml .github/workflows/stale.yml .github/workflows/package_trigger.yml"
for i in ${OLD_TEMPLATES}; do for i in ${OLD_TEMPLATES}; do
if [[ -f "${i}" ]]; then if [[ -f "${i}" ]]; then
TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}" TEMPLATES_TO_DELETE="${i} ${TEMPLATES_TO_DELETE}"
@@ -319,6 +354,35 @@ pipeline {
else else
echo "No templates to delete" echo "No templates to delete"
fi fi
echo "Starting Stage 2.5 - Update init diagram"
if ! grep -q 'init_diagram:' readme-vars.yml; then
echo "Adding the key 'init_diagram' to readme-vars.yml"
sed -i '\\|^#.*changelog.*$|d' readme-vars.yml
sed -i 's|^changelogs:|# init diagram\\ninit_diagram:\\n\\n# changelog\\nchangelogs:|' readme-vars.yml
fi
mkdir -p ${TEMPDIR}/d2
docker run --rm -v ${TEMPDIR}/d2:/output -e PUID=$(id -u) -e PGID=$(id -g) -e RAW="true" ghcr.io/linuxserver/d2-builder:latest ${CONTAINER_NAME}:latest
ls -al ${TEMPDIR}/d2
yq -ei ".init_diagram |= load_str(\\"${TEMPDIR}/d2/${CONTAINER_NAME}-latest.d2\\")" readme-vars.yml
if [[ $(md5sum readme-vars.yml | cut -c1-8) != $(md5sum ${TEMPDIR}/docker-${CONTAINER_NAME}/readme-vars.yml | cut -c1-8) ]]; then
echo "'init_diagram' has been updated. Updating repo and exiting build, new one will trigger based on commit."
mkdir -p ${TEMPDIR}/repo
git clone https://github.com/${LS_USER}/${LS_REPO}.git ${TEMPDIR}/repo/${LS_REPO}
cd ${TEMPDIR}/repo/${LS_REPO}
git checkout -f master
cp ${WORKSPACE}/readme-vars.yml ${TEMPDIR}/repo/${LS_REPO}/readme-vars.yml
git add readme-vars.yml
git commit -m 'Bot Updating Templated Files'
git pull https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
git push https://LinuxServer-CI:${GITHUB_TOKEN}@github.com/${LS_USER}/${LS_REPO}.git master
echo "true" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
echo "Updating templates and exiting build, new one will trigger based on commit"
rm -Rf ${TEMPDIR}
exit 0
else
echo "false" > /tmp/${COMMIT_SHA}-${BUILD_NUMBER}
echo "Init diagram is unchanged"
fi
echo "Starting Stage 3 - Update templates" echo "Starting Stage 3 - Update templates"
CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8) CURRENTHASH=$(grep -hs ^ ${TEMPLATED_FILES} | md5sum | cut -c1-8)
cd ${TEMPDIR}/docker-${CONTAINER_NAME} cd ${TEMPDIR}/docker-${CONTAINER_NAME}
@@ -381,9 +445,9 @@ pipeline {
echo "Updating Unraid template" echo "Updating Unraid template"
cd ${TEMPDIR}/unraid/templates/ cd ${TEMPDIR}/unraid/templates/
GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||') GH_TEMPLATES_DEFAULT_BRANCH=$(git remote show origin | grep "HEAD branch:" | sed 's|.*HEAD branch: ||')
if grep -wq "${CONTAINER_NAME}" ${TEMPDIR}/unraid/templates/unraid/ignore.list && [[ -f ${TEMPDIR}/unraid/templates/unraid/deprecated/${CONTAINER_NAME}.xml ]]; then if grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list && [[ -f ${TEMPDIR}/unraid/templates/unraid/deprecated/${CONTAINER_NAME}.xml ]]; then
echo "Image is on the ignore list, and already in the deprecation folder." echo "Image is on the ignore list, and already in the deprecation folder."
elif grep -wq "${CONTAINER_NAME}" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then elif grep -wq "^${CONTAINER_NAME}$" ${TEMPDIR}/unraid/templates/unraid/ignore.list; then
echo "Image is on the ignore list, marking Unraid template as deprecated" echo "Image is on the ignore list, marking Unraid template as deprecated"
cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/ cp ${TEMPDIR}/docker-${CONTAINER_NAME}/.jenkins-external/${CONTAINER_NAME}.xml ${TEMPDIR}/unraid/templates/unraid/
git add -u unraid/${CONTAINER_NAME}.xml git add -u unraid/${CONTAINER_NAME}.xml
@@ -476,10 +540,10 @@ pipeline {
} }
} }
/* ####################### /* #######################
GitLab Mirroring GitLab Mirroring and Quay.io Repo Visibility
####################### */ ####################### */
// Ping into Gitlab to mirror this repo and have a registry endpoint // Ping into Gitlab to mirror this repo and have a registry endpoint & mark this repo on Quay.io as public
stage("GitLab Mirror"){ stage("GitLab Mirror and Quay.io Visibility"){
when { when {
environment name: 'EXIT_STATUS', value: '' environment name: 'EXIT_STATUS', value: ''
} }
@@ -495,6 +559,8 @@ pipeline {
"visibility":"public"}' ''' "visibility":"public"}' '''
sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \ sh '''curl -H "Private-Token: ${GITLAB_TOKEN}" -X PUT "https://gitlab.com/api/v4/projects/Linuxserver.io%2F${LS_REPO}" \
-d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" ''' -d "mirror=true&import_url=https://github.com/linuxserver/${LS_REPO}.git" '''
sh '''curl -H "Content-Type: application/json" -H "Authorization: Bearer ${QUAYIO_API_TOKEN}" -X POST "https://quay.io/api/v1/repository${QUAYIMAGE/quay.io/}/changevisibility" \
-d '{"visibility":"public"}' ||: '''
} }
} }
/* ############### /* ###############
@@ -525,8 +591,42 @@ pipeline {
--label \"org.opencontainers.image.title=Swag\" \ --label \"org.opencontainers.image.title=Swag\" \
--label \"org.opencontainers.image.description=SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.\" \ --label \"org.opencontainers.image.description=SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.\" \
--no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \ --no-cache --pull -t ${IMAGE}:${META_TAG} --platform=linux/amd64 \
--provenance=false --sbom=false \ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh '''#! /bin/bash
set -e
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
} }
} }
// Build MultiArch Docker containers for push to LS Repo // Build MultiArch Docker containers for push to LS Repo
@@ -557,8 +657,42 @@ pipeline {
--label \"org.opencontainers.image.title=Swag\" \ --label \"org.opencontainers.image.title=Swag\" \
--label \"org.opencontainers.image.description=SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.\" \ --label \"org.opencontainers.image.description=SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.\" \
--no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \ --no-cache --pull -t ${IMAGE}:amd64-${META_TAG} --platform=linux/amd64 \
--provenance=false --sbom=false \ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh '''#! /bin/bash
set -e
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:amd64-${META_TAG} ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:amd64-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
}
} }
} }
stage('Build ARM64') { stage('Build ARM64') {
@@ -567,10 +701,6 @@ pipeline {
} }
steps { steps {
echo "Running on node: ${NODE_NAME}" echo "Running on node: ${NODE_NAME}"
echo 'Logging into Github'
sh '''#! /bin/bash
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
'''
sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64" sh "sed -r -i 's|(^FROM .*)|\\1\\n\\nENV LSIO_FIRST_PARTY=true|g' Dockerfile.aarch64"
sh "docker buildx build \ sh "docker buildx build \
--label \"org.opencontainers.image.created=${GITHUB_DATE}\" \ --label \"org.opencontainers.image.created=${GITHUB_DATE}\" \
@@ -586,18 +716,50 @@ pipeline {
--label \"org.opencontainers.image.title=Swag\" \ --label \"org.opencontainers.image.title=Swag\" \
--label \"org.opencontainers.image.description=SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.\" \ --label \"org.opencontainers.image.description=SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.\" \
--no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \ --no-cache --pull -f Dockerfile.aarch64 -t ${IMAGE}:arm64v8-${META_TAG} --platform=linux/arm64 \
--provenance=false --sbom=false \ --provenance=true --sbom=true --builder=container --load \
--build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ." --build-arg ${BUILD_VERSION_ARG}=${EXT_RELEASE} --build-arg VERSION=\"${VERSION_TAG}\" --build-arg BUILD_DATE=${GITHUB_DATE} ."
sh "docker tag ${IMAGE}:arm64v8-${META_TAG} ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}" sh '''#! /bin/bash
retry(5) { set -e
sh "docker push ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}" IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker tag ${IMAGE}:arm64v8-${META_TAG} ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
done
'''
withCredentials([
[
$class: 'UsernamePasswordMultiBinding',
credentialsId: 'Quay.io-Robot',
usernameVariable: 'QUAYUSER',
passwordVariable: 'QUAYPASS'
]
]) {
retry_backoff(5,5) {
sh '''#! /bin/bash
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
if [[ "${PACKAGE_CHECK}" != "true" ]]; then
IFS=',' read -ra CACHE <<< "$BUILDCACHE"
for i in "${CACHE[@]}"; do
docker push ${i}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} &
done
for p in $(jobs -p); do
wait "$p" || { echo "job $p failed" >&2; exit 1; }
done
fi
'''
}
} }
sh '''#! /bin/bash sh '''#! /bin/bash
containers=$(docker ps -aq) containers=$(docker ps -aq)
if [[ -n "${containers}" ]]; then if [[ -n "${containers}" ]]; then
docker stop ${containers} docker stop ${containers}
fi fi
docker system prune -af --volumes || : ''' docker system prune -f --volumes || :
docker image prune -af || :
'''
} }
} }
} }
@@ -622,7 +784,7 @@ pipeline {
docker run --rm \ docker run --rm \
-v /var/run/docker.sock:/var/run/docker.sock:ro \ -v /var/run/docker.sock:/var/run/docker.sock:ro \
-v ${TEMPDIR}:/tmp \ -v ${TEMPDIR}:/tmp \
ghcr.io/anchore/syft:latest \ ghcr.io/anchore/syft:${SYFT_IMAGE_TAG} \
${LOCAL_CONTAINER} -o table=/tmp/package_versions.txt ${LOCAL_CONTAINER} -o table=/tmp/package_versions.txt
NEW_PACKAGE_TAG=$(md5sum ${TEMPDIR}/package_versions.txt | cut -c1-8 ) NEW_PACKAGE_TAG=$(md5sum ${TEMPDIR}/package_versions.txt | cut -c1-8 )
echo "Package tag sha from current packages in buit container is ${NEW_PACKAGE_TAG} comparing to old ${PACKAGE_TAG} from github" echo "Package tag sha from current packages in buit container is ${NEW_PACKAGE_TAG} comparing to old ${PACKAGE_TAG} from github"
@@ -701,6 +863,14 @@ pipeline {
} }
sh '''#! /bin/bash sh '''#! /bin/bash
set -e set -e
if grep -q 'docker-baseimage' <<< "${LS_REPO}"; then
echo "Detected baseimage, setting LSIO_FIRST_PARTY=true"
if [ -n "${CI_DOCKERENV}" ]; then
CI_DOCKERENV="LSIO_FIRST_PARTY=true|${CI_DOCKERENV}"
else
CI_DOCKERENV="LSIO_FIRST_PARTY=true"
fi
fi
docker pull ghcr.io/linuxserver/ci:latest docker pull ghcr.io/linuxserver/ci:latest
if [ "${MULTIARCH}" == "true" ]; then if [ "${MULTIARCH}" == "true" ]; then
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64 docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64
@@ -713,6 +883,7 @@ pipeline {
-e DOCKER_LOGS_TIMEOUT=\"${CI_DELAY}\" \ -e DOCKER_LOGS_TIMEOUT=\"${CI_DELAY}\" \
-e TAGS=\"${CI_TAGS}\" \ -e TAGS=\"${CI_TAGS}\" \
-e META_TAG=\"${META_TAG}\" \ -e META_TAG=\"${META_TAG}\" \
-e RELEASE_TAG=\"latest\" \
-e PORT=\"${CI_PORT}\" \ -e PORT=\"${CI_PORT}\" \
-e SSL=\"${CI_SSL}\" \ -e SSL=\"${CI_SSL}\" \
-e BASE=\"${DIST_IMAGE}\" \ -e BASE=\"${DIST_IMAGE}\" \
@@ -722,6 +893,8 @@ pipeline {
-e WEB_SCREENSHOT=\"${CI_WEB}\" \ -e WEB_SCREENSHOT=\"${CI_WEB}\" \
-e WEB_AUTH=\"${CI_AUTH}\" \ -e WEB_AUTH=\"${CI_AUTH}\" \
-e WEB_PATH=\"${CI_WEBPATH}\" \ -e WEB_PATH=\"${CI_WEBPATH}\" \
-e NODE_NAME=\"${NODE_NAME}\" \
-e SYFT_IMAGE_TAG=\"${CI_SYFT_IMAGE_TAG:-${SYFT_IMAGE_TAG}}\" \
-t ghcr.io/linuxserver/ci:latest \ -t ghcr.io/linuxserver/ci:latest \
python3 test_build.py''' python3 test_build.py'''
} }
@@ -737,37 +910,23 @@ pipeline {
environment name: 'EXIT_STATUS', value: '' environment name: 'EXIT_STATUS', value: ''
} }
steps { steps {
withCredentials([ retry_backoff(5,5) {
[ sh '''#! /bin/bash
$class: 'UsernamePasswordMultiBinding', set -e
credentialsId: 'Quay.io-Robot', for PUSHIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
usernameVariable: 'QUAYUSER', [[ ${PUSHIMAGE%%/*} =~ \\. ]] && PUSHIMAGEPLUS="${PUSHIMAGE}" || PUSHIMAGEPLUS="docker.io/${PUSHIMAGE}"
passwordVariable: 'QUAYPASS' IFS=',' read -ra CACHE <<< "$BUILDCACHE"
] for i in "${CACHE[@]}"; do
]) { if [[ "${PUSHIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
retry(5) { CACHEIMAGE=${i}
sh '''#! /bin/bash fi
set -e
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin
for PUSHIMAGE in "${GITHUBIMAGE}" "${GITLABIMAGE}" "${QUAYIMAGE}" "${IMAGE}"; do
docker tag ${IMAGE}:${META_TAG} ${PUSHIMAGE}:${META_TAG}
docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:latest
docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${EXT_RELEASE_TAG}
if [ -n "${SEMVER}" ]; then
docker tag ${PUSHIMAGE}:${META_TAG} ${PUSHIMAGE}:${SEMVER}
fi
docker push ${PUSHIMAGE}:latest
docker push ${PUSHIMAGE}:${META_TAG}
docker push ${PUSHIMAGE}:${EXT_RELEASE_TAG}
if [ -n "${SEMVER}" ]; then
docker push ${PUSHIMAGE}:${SEMVER}
fi
done done
''' docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${META_TAG} -t ${PUSHIMAGE}:latest -t ${PUSHIMAGE}:${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
} if [ -n "${SEMVER}" ]; then
docker buildx imagetools create --prefer-index=false -t ${PUSHIMAGE}:${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
fi
done
'''
} }
} }
} }
@@ -778,57 +937,34 @@ pipeline {
environment name: 'EXIT_STATUS', value: '' environment name: 'EXIT_STATUS', value: ''
} }
steps { steps {
withCredentials([ retry_backoff(5,5) {
[ sh '''#! /bin/bash
$class: 'UsernamePasswordMultiBinding', set -e
credentialsId: 'Quay.io-Robot', for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
usernameVariable: 'QUAYUSER', [[ ${MANIFESTIMAGE%%/*} =~ \\. ]] && MANIFESTIMAGEPLUS="${MANIFESTIMAGE}" || MANIFESTIMAGEPLUS="docker.io/${MANIFESTIMAGE}"
passwordVariable: 'QUAYPASS' IFS=',' read -ra CACHE <<< "$BUILDCACHE"
] for i in "${CACHE[@]}"; do
]) { if [[ "${MANIFESTIMAGEPLUS}" == "$(cut -d "/" -f1 <<< ${i})"* ]]; then
retry(5) { CACHEIMAGE=${i}
sh '''#! /bin/bash fi
set -e done
echo $DOCKERHUB_TOKEN | docker login -u linuxserverci --password-stdin docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${META_TAG} -t ${MANIFESTIMAGE}:amd64-latest -t ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
echo $GITHUB_TOKEN | docker login ghcr.io -u LinuxServer-CI --password-stdin docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${META_TAG} -t ${MANIFESTIMAGE}:arm64v8-latest -t ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
echo $GITLAB_TOKEN | docker login registry.gitlab.com -u LinuxServer.io --password-stdin if [ -n "${SEMVER}" ]; then
echo $QUAYPASS | docker login quay.io -u $QUAYUSER --password-stdin docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:amd64-${SEMVER} ${CACHEIMAGE}:amd64-${COMMIT_SHA}-${BUILD_NUMBER}
if [ "${CI}" == "false" ]; then docker buildx imagetools create --prefer-index=false -t ${MANIFESTIMAGE}:arm64v8-${SEMVER} ${CACHEIMAGE}:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER}
docker pull ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} --platform=arm64
docker tag ghcr.io/linuxserver/lsiodev-buildcache:arm64v8-${COMMIT_SHA}-${BUILD_NUMBER} ${IMAGE}:arm64v8-${META_TAG}
fi fi
for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do done
docker tag ${IMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-latest docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker tag ${IMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-latest docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG} if [ -n "${SEMVER}" ]; then
if [ -n "${SEMVER}" ]; then docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
docker tag ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:amd64-${SEMVER} fi
docker tag ${MANIFESTIMAGE}:arm64v8-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${SEMVER} done
fi '''
docker push ${MANIFESTIMAGE}:amd64-${META_TAG}
docker push ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG}
docker push ${MANIFESTIMAGE}:amd64-latest
docker push ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker push ${MANIFESTIMAGE}:arm64v8-latest
docker push ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
if [ -n "${SEMVER}" ]; then
docker push ${MANIFESTIMAGE}:amd64-${SEMVER}
docker push ${MANIFESTIMAGE}:arm64v8-${SEMVER}
fi
done
for MANIFESTIMAGE in "${IMAGE}" "${GITLABIMAGE}" "${GITHUBIMAGE}" "${QUAYIMAGE}"; do
docker buildx imagetools create -t ${MANIFESTIMAGE}:latest ${MANIFESTIMAGE}:amd64-latest ${MANIFESTIMAGE}:arm64v8-latest
docker buildx imagetools create -t ${MANIFESTIMAGE}:${META_TAG} ${MANIFESTIMAGE}:amd64-${META_TAG} ${MANIFESTIMAGE}:arm64v8-${META_TAG}
docker buildx imagetools create -t ${MANIFESTIMAGE}:${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:amd64-${EXT_RELEASE_TAG} ${MANIFESTIMAGE}:arm64v8-${EXT_RELEASE_TAG}
if [ -n "${SEMVER}" ]; then
docker buildx imagetools create -t ${MANIFESTIMAGE}:${SEMVER} ${MANIFESTIMAGE}:amd64-${SEMVER} ${MANIFESTIMAGE}:arm64v8-${SEMVER}
fi
done
'''
}
} }
} }
} }
@@ -849,14 +985,14 @@ pipeline {
"object": "'${COMMIT_SHA}'",\ "object": "'${COMMIT_SHA}'",\
"message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\ "message": "Tagging Release '${EXT_RELEASE_CLEAN}'-ls'${LS_TAG_NUMBER}' to master",\
"type": "commit",\ "type": "commit",\
"tagger": {"name": "LinuxServer Jenkins","email": "jenkins@linuxserver.io","date": "'${GITHUB_DATE}'"}}' ''' "tagger": {"name": "LinuxServer-CI","email": "ci@linuxserver.io","date": "'${GITHUB_DATE}'"}}' '''
echo "Pushing New release for Tag" echo "Pushing New release for Tag"
sh '''#! /bin/bash sh '''#! /bin/bash
echo "Updating PIP version of ${EXT_PIP} to ${EXT_RELEASE_CLEAN}" > releasebody.json echo "Updating PIP version of ${EXT_PIP} to ${EXT_RELEASE_CLEAN}" > releasebody.json
echo '{"tag_name":"'${META_TAG}'",\ echo '{"tag_name":"'${META_TAG}'",\
"target_commitish": "master",\ "target_commitish": "master",\
"name": "'${META_TAG}'",\ "name": "'${META_TAG}'",\
"body": "**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**PIP Changes:**\\n\\n' > start "body": "**CI Report:**\\n\\n'${CI_URL:-N/A}'\\n\\n**LinuxServer Changes:**\\n\\n'${LS_RELEASE_NOTES}'\\n\\n**Remote Changes:**\\n\\n' > start
printf '","draft": false,"prerelease": false}' >> releasebody.json printf '","draft": false,"prerelease": false}' >> releasebody.json
paste -d'\\0' start releasebody.json > releasebody.json.done paste -d'\\0' start releasebody.json > releasebody.json.done
curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done''' curl -H "Authorization: token ${GITHUB_TOKEN}" -X POST https://api.github.com/repos/${LS_USER}/${LS_REPO}/releases -d @releasebody.json.done'''
@@ -981,32 +1117,94 @@ EOF
###################### */ ###################### */
post { post {
always { always {
sh '''#!/bin/bash
rm -rf /config/.ssh/id_sign
rm -rf /config/.ssh/id_sign.pub
git config --global --unset gpg.format
git config --global --unset user.signingkey
git config --global --unset commit.gpgsign
'''
script{ script{
env.JOB_DATE = sh(
script: '''date '+%Y-%m-%dT%H:%M:%S%:z' ''',
returnStdout: true).trim()
if (env.EXIT_STATUS == "ABORTED"){ if (env.EXIT_STATUS == "ABORTED"){
sh 'echo "build aborted"' sh 'echo "build aborted"'
} }else{
else if (currentBuild.currentResult == "SUCCESS"){ if (currentBuild.currentResult == "SUCCESS"){
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 1681177,\ if (env.GITHUBIMAGE =~ /lspipepr/){
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** Success\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\ env.JOB_WEBHOOK_STATUS='Success'
"username": "Jenkins"}' ${BUILDS_DISCORD} ''' env.JOB_WEBHOOK_COLOUR=3957028
} env.JOB_WEBHOOK_FOOTER='PR Build'
else { }else if (env.GITHUBIMAGE =~ /lsiodev/){
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"color": 16711680,\ env.JOB_WEBHOOK_STATUS='Success'
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** failure\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\ env.JOB_WEBHOOK_COLOUR=3957028
env.JOB_WEBHOOK_FOOTER='Dev Build'
}else{
env.JOB_WEBHOOK_STATUS='Success'
env.JOB_WEBHOOK_COLOUR=1681177
env.JOB_WEBHOOK_FOOTER='Live Build'
}
}else{
if (env.GITHUBIMAGE =~ /lspipepr/){
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=12669523
env.JOB_WEBHOOK_FOOTER='PR Build'
}else if (env.GITHUBIMAGE =~ /lsiodev/){
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=12669523
env.JOB_WEBHOOK_FOOTER='Dev Build'
}else{
env.JOB_WEBHOOK_STATUS='Failure'
env.JOB_WEBHOOK_COLOUR=16711680
env.JOB_WEBHOOK_FOOTER='Live Build'
}
}
sh ''' curl -X POST -H "Content-Type: application/json" --data '{"avatar_url": "https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/jenkins-avatar.png","embeds": [{"'color'": '${JOB_WEBHOOK_COLOUR}',\
"footer": {"text" : "'"${JOB_WEBHOOK_FOOTER}"'"},\
"timestamp": "'${JOB_DATE}'",\
"description": "**Build:** '${BUILD_NUMBER}'\\n**CI Results:** '${CI_URL}'\\n**ShellCheck Results:** '${SHELLCHECK_URL}'\\n**Status:** '${JOB_WEBHOOK_STATUS}'\\n**Job:** '${RUN_DISPLAY_URL}'\\n**Change:** '${CODE_URL}'\\n**External Release:**: '${RELEASE_LINK}'\\n**DockerHub:** '${DOCKERHUB_LINK}'\\n"}],\
"username": "Jenkins"}' ${BUILDS_DISCORD} ''' "username": "Jenkins"}' ${BUILDS_DISCORD} '''
} }
} }
} }
cleanup { cleanup {
sh '''#! /bin/bash sh '''#! /bin/bash
echo "Performing docker system prune!!" echo "Pruning builder!!"
containers=$(docker ps -aq) docker builder prune -f --builder container || :
containers=$(docker ps -q)
if [[ -n "${containers}" ]]; then if [[ -n "${containers}" ]]; then
docker stop ${containers} BUILDX_CONTAINER_ID=$(docker ps -qf 'name=buildx_buildkit')
for container in ${containers}; do
if [[ "${container}" == "${BUILDX_CONTAINER_ID}" ]]; then
echo "skipping buildx container in docker stop"
else
echo "Stopping container ${container}"
docker stop ${container}
fi
done
fi fi
docker system prune -af --volumes || : docker system prune -f --volumes || :
docker image prune -af || :
''' '''
cleanWs() cleanWs()
} }
} }
} }
// Run the closure, retrying on failure with exponential backoff.
// Waits power_base^attempt between attempts (via the pipeline `sleep`
// step) and rethrows the final error once the attempt budget is spent.
def retry_backoff(int max_attempts, int power_base, Closure c) {
    for (int attempt = 0; attempt < max_attempts; attempt++) {
        try {
            c()
            return
        } catch (err) {
            // Out of retries: surface the last failure to the caller.
            if ((attempt + 1) >= max_attempts) {
                throw err
            }
            sleep(power_base ** attempt)
        }
    }
    return
}

0
LICENSE Executable file → Normal file
View File

158
README.md
View File

@@ -3,9 +3,8 @@
[![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png)](https://linuxserver.io) [![linuxserver.io](https://raw.githubusercontent.com/linuxserver/docker-templates/master/linuxserver.io/img/linuxserver_medium.png)](https://linuxserver.io)
[![Blog](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=Blog)](https://blog.linuxserver.io "all the things you can do with our containers including How-To guides, opinions and much more!") [![Blog](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=Blog)](https://blog.linuxserver.io "all the things you can do with our containers including How-To guides, opinions and much more!")
[![Discord](https://img.shields.io/discord/354974912613449730.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Discord&logo=discord)](https://discord.gg/YWrKVTn "realtime support / chat with the community and the team.") [![Discord](https://img.shields.io/discord/354974912613449730.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Discord&logo=discord)](https://linuxserver.io/discord "realtime support / chat with the community and the team.")
[![Discourse](https://img.shields.io/discourse/https/discourse.linuxserver.io/topics.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&logo=discourse)](https://discourse.linuxserver.io "post on our community forum.") [![Discourse](https://img.shields.io/discourse/https/discourse.linuxserver.io/topics.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&logo=discourse)](https://discourse.linuxserver.io "post on our community forum.")
[![Fleet](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=Fleet)](https://fleet.linuxserver.io "an online web interface which displays all of our maintained images.")
[![GitHub](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=GitHub&logo=github)](https://github.com/linuxserver "view the source for all of our repositories.") [![GitHub](https://img.shields.io/static/v1.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=linuxserver.io&message=GitHub&logo=github)](https://github.com/linuxserver "view the source for all of our repositories.")
[![Open Collective](https://img.shields.io/opencollective/all/linuxserver.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Supporters&logo=open%20collective)](https://opencollective.com/linuxserver "please consider helping us by either donating or contributing to our budget") [![Open Collective](https://img.shields.io/opencollective/all/linuxserver.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=Supporters&logo=open%20collective)](https://opencollective.com/linuxserver "please consider helping us by either donating or contributing to our budget")
@@ -20,9 +19,8 @@ The [LinuxServer.io](https://linuxserver.io) team brings you another container r
Find us at: Find us at:
* [Blog](https://blog.linuxserver.io) - all the things you can do with our containers including How-To guides, opinions and much more! * [Blog](https://blog.linuxserver.io) - all the things you can do with our containers including How-To guides, opinions and much more!
* [Discord](https://discord.gg/YWrKVTn) - realtime support / chat with the community and the team. * [Discord](https://linuxserver.io/discord) - realtime support / chat with the community and the team.
* [Discourse](https://discourse.linuxserver.io) - post on our community forum. * [Discourse](https://discourse.linuxserver.io) - post on our community forum.
* [Fleet](https://fleet.linuxserver.io) - an online web interface which displays all of our maintained images.
* [GitHub](https://github.com/linuxserver) - view the source for all of our repositories. * [GitHub](https://github.com/linuxserver) - view the source for all of our repositories.
* [Open Collective](https://opencollective.com/linuxserver) - please consider helping us by either donating or contributing to our budget * [Open Collective](https://opencollective.com/linuxserver) - please consider helping us by either donating or contributing to our budget
@@ -37,7 +35,6 @@ Find us at:
[![Docker Pulls](https://img.shields.io/docker/pulls/linuxserver/swag.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=pulls&logo=docker)](https://hub.docker.com/r/linuxserver/swag) [![Docker Pulls](https://img.shields.io/docker/pulls/linuxserver/swag.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=pulls&logo=docker)](https://hub.docker.com/r/linuxserver/swag)
[![Docker Stars](https://img.shields.io/docker/stars/linuxserver/swag.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=stars&logo=docker)](https://hub.docker.com/r/linuxserver/swag) [![Docker Stars](https://img.shields.io/docker/stars/linuxserver/swag.svg?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=stars&logo=docker)](https://hub.docker.com/r/linuxserver/swag)
[![Jenkins Build](https://img.shields.io/jenkins/build?labelColor=555555&logoColor=ffffff&style=for-the-badge&jobUrl=https%3A%2F%2Fci.linuxserver.io%2Fjob%2FDocker-Pipeline-Builders%2Fjob%2Fdocker-swag%2Fjob%2Fmaster%2F&logo=jenkins)](https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/) [![Jenkins Build](https://img.shields.io/jenkins/build?labelColor=555555&logoColor=ffffff&style=for-the-badge&jobUrl=https%3A%2F%2Fci.linuxserver.io%2Fjob%2FDocker-Pipeline-Builders%2Fjob%2Fdocker-swag%2Fjob%2Fmaster%2F&logo=jenkins)](https://ci.linuxserver.io/job/Docker-Pipeline-Builders/job/docker-swag/job/master/)
[![LSIO CI](https://img.shields.io/badge/dynamic/yaml?color=94398d&labelColor=555555&logoColor=ffffff&style=for-the-badge&label=CI&query=CI&url=https%3A%2F%2Fci-tests.linuxserver.io%2Flinuxserver%2Fswag%2Flatest%2Fci-status.yml)](https://ci-tests.linuxserver.io/linuxserver/swag/latest/index.html)
SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention. SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention.
@@ -55,7 +52,6 @@ The architectures supported by this image are:
| :----: | :----: | ---- | | :----: | :----: | ---- |
| x86-64 | ✅ | amd64-\<version tag\> | | x86-64 | ✅ | amd64-\<version tag\> |
| arm64 | ✅ | arm64v8-\<version tag\> | | arm64 | ✅ | arm64v8-\<version tag\> |
| armhf | ❌ | |
## Application Setup ## Application Setup
@@ -71,7 +67,7 @@ The architectures supported by this image are:
1. Certs that only cover your main subdomain (ie. `yoursubdomain.duckdns.org`, leave the `SUBDOMAINS` variable empty) 1. Certs that only cover your main subdomain (ie. `yoursubdomain.duckdns.org`, leave the `SUBDOMAINS` variable empty)
2. Certs that cover sub-subdomains of your main subdomain (ie. `*.yoursubdomain.duckdns.org`, set the `SUBDOMAINS` variable to `wildcard`) 2. Certs that cover sub-subdomains of your main subdomain (ie. `*.yoursubdomain.duckdns.org`, set the `SUBDOMAINS` variable to `wildcard`)
* `--cap-add=NET_ADMIN` is required for fail2ban to modify iptables * `--cap-add=NET_ADMIN` is required for fail2ban to modify iptables
* After setup, navigate to `https://yourdomain.url` to access the default homepage (http access through port 80 is disabled by default, you can enable it by editing the default site config at `/config/nginx/site-confs/default.conf`). * After setup, navigate to `https://example.com` to access the default homepage (http access through port 80 is disabled by default, you can enable it by editing the default site config at `/config/nginx/site-confs/default.conf`).
* Certs are checked nightly and if expiration is within 30 days, renewal is attempted. If your cert is about to expire in less than 30 days, check the logs under `/config/log/letsencrypt` to see why the renewals have been failing. It is recommended to input your e-mail in docker parameters so you receive expiration notices from Let's Encrypt in those circumstances. * Certs are checked nightly and if expiration is within 30 days, renewal is attempted. If your cert is about to expire in less than 30 days, check the logs under `/config/log/letsencrypt` to see why the renewals have been failing. It is recommended to input your e-mail in docker parameters so you receive expiration notices from Let's Encrypt in those circumstances.
### Certbot Plugins ### Certbot Plugins
@@ -89,6 +85,88 @@ INSTALL_PIP_PACKAGES=certbot-dns-<plugin>
Set the required credentials (usually found in the plugin documentation) in `/config/dns-conf/<plugin>.ini`. Set the required credentials (usually found in the plugin documentation) in `/config/dns-conf/<plugin>.ini`.
It is recommended to attempt obtaining a certificate with `STAGING=true` first to make sure the plugin is working as expected. It is recommended to attempt obtaining a certificate with `STAGING=true` first to make sure the plugin is working as expected.
### Dynamic Reverse Proxy Configuration via Environment Variables
SWAG can dynamically generate reverse proxy configuration files directly from environment variables, bypassing the need to manage individual `.conf` files. When any `PROXY_CONFIG_*` variable is detected, this mode is activated, and any existing `.conf` files in `/config/nginx/proxy-confs/` will be removed at startup.
**Service Definition**
Each reverse proxy service is defined by an environment variable following the format `PROXY_CONFIG_<SERVICE_NAME>`. The service name will be used as the subdomain (e.g., `SERVICE_NAME.yourdomain.com`), with the special exception of `DEFAULT` (see below). The value of the variable must be a valid JSON object.
```yaml
environment:
# Configure the default site (root domain) to proxy to a dashboard service
- 'PROXY_CONFIG_DEFAULT={"name": "dashboard", "port": 80, "auth": "authelia", "quic": true}'
# Simple subdomain service
- 'PROXY_CONFIG_HOMARR={"port": 7575, "auth": "authelia"}'
# Service with a boolean flag for HTTPS backend and QUIC enabled
- 'PROXY_CONFIG_HEIMDALL={"port": 443, "https": true, "quic": true}'
# Complex service with nested objects and lists (incomplete example for syntax)
- 'PROXY_CONFIG_PLEX={
"port": 32400,
"proxy_redirect_off": true,
"buffering_off": true,
"proxy_set_headers": [
{"key": "X-Plex-Client-Identifier", "value": "$$http_x_plex_client_identifier"},
{"key": "X-Plex-Device", "value": "$$http_x_plex_device"}
],
"extra_locations": [
{"path": "/library/streams/", "custom_directives": ["proxy_pass_request_headers off"]}
]
}'
```
The available keys in the JSON object correspond to the options in the underlying Nginx template. Common keys include `port`, `https`, `quic`, `auth`, `buffering_off`, `proxy_set_headers`, and `extra_locations`.
**Configuring the Default Site (Root Domain)**
To configure the service that responds on your root domain (e.g., `https://yourdomain.com`), use the special service name `DEFAULT`.
* The environment variable is `PROXY_CONFIG_DEFAULT`.
* Unlike subdomain services, the `DEFAULT` configuration **must** include a `"name"` key in its JSON value. This key specifies the name of the container that SWAG should proxy traffic to.
* If `PROXY_CONFIG_DEFAULT` is not set, the container will serve the standard SWAG welcome page on the root domain.
Example:
```yaml
environment:
# This will proxy https://yourdomain.com to the 'dashboard' container on port 80
- 'PROXY_CONFIG_DEFAULT={"name": "dashboard", "port": 80, "auth": "none"}'
```
**Authentication Management**
Authentication can be managed globally or per-service with a clear order of precedence.
1. **Per-Service Override (Highest Priority):** Add an `auth` key directly inside the service's JSON configuration.
* `"auth": "authelia"`: Enables Authelia for this service.
* `"auth": "basic"`: Enables Basic Authentication for this service (see below).
* `"auth": "none"`: Explicitly disables authentication for this service.
2. **Global Exclusions:** A comma-separated list of service names to exclude from the global authenticator.
* `PROXY_AUTH_EXCLUDE=ntfy,public-dashboard`
3. **Global Default (Lowest Priority):** A single variable sets the default authentication provider for all services that don't have a per-service override and are not in the exclusion list.
* `PROXY_AUTH_PROVIDER=authelia` (can be `ldap`, `authentik`, etc.)
**Basic Authentication**
If you set `"auth": "basic"` for any service, you must also provide the credentials using these two environment variables. The container will automatically create the necessary `.htpasswd` file.
* `PROXY_AUTH_BASIC_USER`: The username for basic authentication.
* `PROXY_AUTH_BASIC_PASS`: The password for basic authentication.
Example:
```yaml
environment:
- 'PROXY_CONFIG_PORTAINER={"port": 9000, "auth": "basic"}'
- PROXY_AUTH_BASIC_USER=myadmin
- PROXY_AUTH_BASIC_PASS=supersecretpassword
```
### Security and password protection ### Security and password protection
* The container detects changes to url and subdomains, revokes existing certs and generates new ones during start. * The container detects changes to url and subdomains, revokes existing certs and generates new ones during start.
@@ -130,7 +208,7 @@ This will *ask* Google et al not to index and list your site. Be careful with th
* You can check which jails are active via `docker exec -it swag fail2ban-client status` * You can check which jails are active via `docker exec -it swag fail2ban-client status`
* You can check the status of a specific jail via `docker exec -it swag fail2ban-client status <jail name>` * You can check the status of a specific jail via `docker exec -it swag fail2ban-client status <jail name>`
* You can unban an IP via `docker exec -it swag fail2ban-client set <jail name> unbanip <IP>` * You can unban an IP via `docker exec -it swag fail2ban-client set <jail name> unbanip <IP>`
* A list of commands can be found here: <https://www.fail2ban.org/wiki/index.php/Commands> * A list of commands for fail2ban-client can be found [here](https://manpages.ubuntu.com/manpages/noble/man1/fail2ban-client.1.html)
### Updating configs ### Updating configs
@@ -146,14 +224,36 @@ This will *ask* Google et al not to index and list your site. Be careful with th
* Proxy sample files WILL be updated, however your renamed (enabled) proxy files will not. * Proxy sample files WILL be updated, however your renamed (enabled) proxy files will not.
* You can check the new sample and adjust your active config as needed. * You can check the new sample and adjust your active config as needed.
### QUIC support
This image supports QUIC (also known as HTTP/3) but it must be explicitly enabled in each proxy conf, and the default conf, because if the listener is enabled and you don't expose 443/UDP, it can break connections with some browsers.
To enable QUIC, expose 443/UDP to your clients, then uncomment both QUIC listeners in all of your active proxy confs, as well as the default conf, and restart the container.
You should also uncomment the `Alt-Svc` header in your `ssl.conf` so that browsers are aware that you offer QUIC connectivity.
It is [recommended](https://quic-go.net/docs/quic/optimizations/#udp-buffer-sizes) to increase the UDP send/receive buffer **on the host** by setting the `net.core.rmem_max` and `net.core.wmem_max` sysctls. Suggested values are 4-16Mb (4194304-16777216 bytes). For persistence between reboots use `/etc/sysctl.d/`.
### Migration from the old `linuxserver/letsencrypt` image ### Migration from the old `linuxserver/letsencrypt` image
Please follow the instructions [on this blog post](https://www.linuxserver.io/blog/2020-08-21-introducing-swag#migrate). Please follow the instructions [on this blog post](https://www.linuxserver.io/blog/2020-08-21-introducing-swag#migrate).
## Read-Only Operation
This image can be run with a read-only container filesystem. For details please [read the docs](https://docs.linuxserver.io/misc/read-only/).
### Caveats
* `/tmp` must be mounted to tmpfs
* fail2ban will not be available
## Usage ## Usage
To help you get started creating a container from this image you can either use docker-compose or the docker cli. To help you get started creating a container from this image you can either use docker-compose or the docker cli.
>[!NOTE]
>Unless a parameter is flagged as 'optional', it is *mandatory* and a value must be provided.
### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose)) ### docker-compose (recommended, [click here for more info](https://docs.linuxserver.io/general/docker-compose))
```yaml ```yaml
@@ -168,7 +268,7 @@ services:
- PUID=1000 - PUID=1000
- PGID=1000 - PGID=1000
- TZ=Etc/UTC - TZ=Etc/UTC
- URL=yourdomain.url - URL=example.com
- VALIDATION=http - VALIDATION=http
- SUBDOMAINS=www, #optional - SUBDOMAINS=www, #optional
- CERTPROVIDER= #optional - CERTPROVIDER= #optional
@@ -178,11 +278,15 @@ services:
- ONLY_SUBDOMAINS=false #optional - ONLY_SUBDOMAINS=false #optional
- EXTRA_DOMAINS= #optional - EXTRA_DOMAINS= #optional
- STAGING=false #optional - STAGING=false #optional
- DISABLE_F2B= #optional
- SWAG_AUTORELOAD= #optional
- SWAG_AUTORELOAD_WATCHLIST= #optional
volumes: volumes:
- /path/to/swag/config:/config - /path/to/swag/config:/config
ports: ports:
- 443:443 - 443:443
- 80:80 #optional - 80:80 #optional
- 443:443/udp #optional
restart: unless-stopped restart: unless-stopped
``` ```
@@ -195,7 +299,7 @@ docker run -d \
-e PUID=1000 \ -e PUID=1000 \
-e PGID=1000 \ -e PGID=1000 \
-e TZ=Etc/UTC \ -e TZ=Etc/UTC \
-e URL=yourdomain.url \ -e URL=example.com \
-e VALIDATION=http \ -e VALIDATION=http \
-e SUBDOMAINS=www, `#optional` \ -e SUBDOMAINS=www, `#optional` \
-e CERTPROVIDER= `#optional` \ -e CERTPROVIDER= `#optional` \
@@ -205,8 +309,12 @@ docker run -d \
-e ONLY_SUBDOMAINS=false `#optional` \ -e ONLY_SUBDOMAINS=false `#optional` \
-e EXTRA_DOMAINS= `#optional` \ -e EXTRA_DOMAINS= `#optional` \
-e STAGING=false `#optional` \ -e STAGING=false `#optional` \
-e DISABLE_F2B= `#optional` \
-e SWAG_AUTORELOAD= `#optional` \
-e SWAG_AUTORELOAD_WATCHLIST= `#optional` \
-p 443:443 \ -p 443:443 \
-p 80:80 `#optional` \ -p 80:80 `#optional` \
-p 443:443/udp `#optional` \
-v /path/to/swag/config:/config \ -v /path/to/swag/config:/config \
--restart unless-stopped \ --restart unless-stopped \
lscr.io/linuxserver/swag:latest lscr.io/linuxserver/swag:latest
@@ -218,22 +326,28 @@ Containers are configured using parameters passed at runtime (such as those abov
| Parameter | Function | | Parameter | Function |
| :----: | --- | | :----: | --- |
| `-p 443` | Https port | | `-p 443:443` | HTTPS port |
| `-p 80` | Http port (required for http validation and http -> https redirect) | | `-p 80` | HTTP port (required for HTTP validation and HTTP -> HTTPS redirect) |
| `-p 443/udp` | QUIC (HTTP/3) port. Must be enabled in the default and proxy confs. |
| `-e PUID=1000` | for UserID - see below for explanation | | `-e PUID=1000` | for UserID - see below for explanation |
| `-e PGID=1000` | for GroupID - see below for explanation | | `-e PGID=1000` | for GroupID - see below for explanation |
| `-e TZ=Etc/UTC` | specify a timezone to use, see this [list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List). | | `-e TZ=Etc/UTC` | specify a timezone to use, see this [list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List). |
| `-e URL=yourdomain.url` | Top url you have control over (`customdomain.com` if you own it, or `customsubdomain.ddnsprovider.com` if dynamic dns). | | `-e URL=example.com` | Top url you have control over (e.g. `example.com` if you own it, or `customsubdomain.example.com` if dynamic dns). |
| `-e VALIDATION=http` | Certbot validation method to use, options are `http` or `dns` (`dns` method also requires `DNSPLUGIN` variable set). | | `-e VALIDATION=http` | Certbot validation method to use, options are `http` or `dns` (`dns` method also requires `DNSPLUGIN` variable set). |
| `-e SUBDOMAINS=www,` | Subdomains you'd like the cert to cover (comma separated, no spaces) ie. `www,ftp,cloud`. For a wildcard cert, set this *exactly* to `wildcard` (wildcard cert is available via `dns` validation only) | | `-e SUBDOMAINS=www,` | Subdomains you'd like the cert to cover (comma separated, no spaces) ie. `www,ftp,cloud`. For a wildcard cert, set this *exactly* to `wildcard` (wildcard cert is available via `dns` validation only) |
| `-e CERTPROVIDER=` | Optionally define the cert provider. Set to `zerossl` for ZeroSSL certs (requires existing [ZeroSSL account](https://app.zerossl.com/signup) and the e-mail address entered in `EMAIL` env var). Otherwise defaults to Let's Encrypt. | | `-e CERTPROVIDER=` | Optionally define the cert provider. Set to `zerossl` for ZeroSSL certs (requires existing [ZeroSSL account](https://app.zerossl.com/signup) and the e-mail address entered in `EMAIL` env var). Otherwise defaults to Let's Encrypt. |
| `-e DNSPLUGIN=cloudflare` | Required if `VALIDATION` is set to `dns`. Options are `acmedns`, `aliyun`, `azure`, `bunny`, `cloudflare`, `cpanel`, `desec`, `digitalocean`, `directadmin`, `dnsimple`, `dnsmadeeasy`, `dnspod`, `do`, `domeneshop`, `dreamhost`, `duckdns`, `dynudns`, `freedns`, `gandi`, `gehirn`, `glesys`, `godaddy`, `google`, `he`, `hetzner`, `infomaniak`, `inwx`, `ionos`, `linode`, `loopia`, `luadns`, `namecheap`, `netcup`, `njalla`, `nsone`, `ovh`, `porkbun`, `rfc2136`, `route53`, `sakuracloud`, `standalone`, `transip`, and `vultr`. Also need to enter the credentials into the corresponding ini (or json for some plugins) file under `/config/dns-conf`. | | `-e DNSPLUGIN=cloudflare` | Required if `VALIDATION` is set to `dns`. Options are `acmedns`, `aliyun`, `azure`, `bunny`, `cloudflare`, `cpanel`, `desec`, `digitalocean`, `directadmin`, `dnsimple`, `dnsmadeeasy`, `dnspod`, `do`, `domeneshop`, `dreamhost`, `duckdns`, `dynu`, `freedns`, `gandi`, `gehirn`, `glesys`, `godaddy`, `google`, `he`, `hetzner`, `infomaniak`, `inwx`, `ionos`, `linode`, `loopia`, `luadns`, `namecheap`, `netcup`, `njalla`, `nsone`, `ovh`, `porkbun`, `rfc2136`, `route53`, `sakuracloud`, `standalone`, `transip`, and `vultr`. Also need to enter the credentials into the corresponding ini (or json for some plugins) file under `/config/dns-conf`. |
| `-e PROPAGATION=` | Optionally override (in seconds) the default propagation time for the dns plugins. | | `-e PROPAGATION=` | Optionally override (in seconds) the default propagation time for the dns plugins. |
| `-e EMAIL=` | Optional e-mail address used for cert expiration notifications (Required for ZeroSSL). | | `-e EMAIL=` | Optional e-mail address used for cert expiration notifications (Required for ZeroSSL). |
| `-e ONLY_SUBDOMAINS=false` | If you wish to get certs only for certain subdomains, but not the main domain (main domain may be hosted on another machine and cannot be validated), set this to `true` | | `-e ONLY_SUBDOMAINS=false` | If you wish to get certs only for certain subdomains, but not the main domain (main domain may be hosted on another machine and cannot be validated), set this to `true` |
| `-e EXTRA_DOMAINS=` | Additional fully qualified domain names (comma separated, no spaces) ie. `extradomain.com,subdomain.anotherdomain.org,*.anotherdomain.org` | | `-e EXTRA_DOMAINS=` | Additional fully qualified domain names (comma separated, no spaces) ie. `example.net,subdomain.example.net,*.example.org` |
| `-e STAGING=false` | Set to `true` to retrieve certs in staging mode. Rate limits will be much higher, but the resulting cert will not pass the browser's security test. Only to be used for testing purposes. | | `-e STAGING=false` | Set to `true` to retrieve certs in staging mode. Rate limits will be much higher, but the resulting cert will not pass the browser's security test. Only to be used for testing purposes. |
| `-e DISABLE_F2B=` | Set to `true` to disable the Fail2ban service in the container, if you're already running it elsewhere or using a different IPS. |
| `-e SWAG_AUTORELOAD=` | Set to `true` to enable automatic reloading of confs on change without stopping/restarting nginx. Your filesystem must support inotify. This functionality was previously offered [via mod](https://github.com/linuxserver/docker-mods/tree/swag-auto-reload). |
| `-e SWAG_AUTORELOAD_WATCHLIST=` | A [pipe](https://en.wikipedia.org/wiki/Vertical_bar)-separated list of additional folders for auto reload to watch in addition to `/config/nginx` |
| `-v /config` | Persistent config files | | `-v /config` | Persistent config files |
| `--read-only=true` | Run container with a read-only filesystem. Please [read the docs](https://docs.linuxserver.io/misc/read-only/). |
| `--cap-add=NET_ADMIN` | Required for fail2Ban to be able to modify iptables rules. |
### Portainer notice ### Portainer notice
@@ -375,7 +489,8 @@ Below are the instructions for updating containers:
### Image Update Notifications - Diun (Docker Image Update Notifier) ### Image Update Notifications - Diun (Docker Image Update Notifier)
**tip**: We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported. >[!TIP]
>We recommend [Diun](https://crazymax.dev/diun/) for update notifications. Other tools that automatically update containers unattended are not recommended or supported.
## Building locally ## Building locally
@@ -390,16 +505,23 @@ docker build \
-t lscr.io/linuxserver/swag:latest . -t lscr.io/linuxserver/swag:latest .
``` ```
The ARM variants can be built on x86_64 hardware using `multiarch/qemu-user-static` The ARM variants can be built on x86_64 hardware and vice versa using `lscr.io/linuxserver/qemu-static`
```bash ```bash
docker run --rm --privileged multiarch/qemu-user-static:register --reset docker run --rm --privileged lscr.io/linuxserver/qemu-static --reset
``` ```
Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`. Once registered you can define the dockerfile to use with `-f Dockerfile.aarch64`.
## Versions ## Versions
* **02.09.25:** - Add ability to define proxy configurations via environment variables.
* **18.07.25:** - Rebase to Alpine 3.22 with PHP 8.4. Add QUIC support. Drop PHP bindings for mcrypt as it is no longer maintained.
* **05.05.25:** - Disable Certbot's built in log rotation.
* **19.01.25:** - Add [Auto Reload](https://github.com/linuxserver/docker-mods/tree/swag-auto-reload) functionality to SWAG.
* **17.12.24:** - Rebase to Alpine 3.21.
* **21.10.24:** - Fix naming issue with Dynu plugin. If you are using Dynu, please make sure your credentials are set in /config/dns-conf/dynu.ini and your DNSPLUGIN variable is set to dynu (not dynudns).
* **30.08.24:** - Fix zerossl cert revocation.
* **24.07.14:** - Rebase to Alpine 3.20. Remove deprecated Google Domains certbot plugin. Existing users should update their nginx confs to avoid http2 deprecation warnings. * **24.07.14:** - Rebase to Alpine 3.20. Remove deprecated Google Domains certbot plugin. Existing users should update their nginx confs to avoid http2 deprecation warnings.
* **01.07.24:** - Fall back to iptables-legacy if iptables doesn't work. * **01.07.24:** - Fall back to iptables-legacy if iptables doesn't work.
* **23.03.24:** - Fix perms on the generated `priv-fullchain-bundle.pem`. * **23.03.24:** - Fix perms on the generated `priv-fullchain-bundle.pem`.

View File

@@ -17,12 +17,12 @@ repo_vars:
- PR_DOCKERHUB_IMAGE = 'lspipepr/swag' - PR_DOCKERHUB_IMAGE = 'lspipepr/swag'
- DIST_IMAGE = 'alpine' - DIST_IMAGE = 'alpine'
- MULTIARCH='true' - MULTIARCH='true'
- CI='true' - CI='false'
- CI_WEB='false' - CI_WEB='false'
- CI_PORT='80' - CI_PORT='80'
- CI_SSL='false' - CI_SSL='false'
- CI_DELAY='30' - CI_DELAY='30'
- CI_DOCKERENV='TEST_RUN=1' - CI_DOCKERENV=''
- CI_AUTH='' - CI_AUTH=''
- CI_WEBPATH='' - CI_WEBPATH=''
sponsor_links: sponsor_links:

View File

@@ -1,363 +1,372 @@
NAME VERSION TYPE NAME VERSION TYPE
Simple Launcher 1.1.0.14 dotnet (+5 duplicates) Simple Launcher 1.1.0.14 binary (+5 duplicates)
acme 2.11.0 python acl-libs 2.3.2-r1 apk
alpine-baselayout 3.6.5-r0 apk acme 4.2.0 python
alpine-baselayout-data 3.6.5-r0 apk alpine-baselayout 3.7.0-r0 apk
alpine-keys 2.4-r1 apk alpine-baselayout-data 3.7.0-r0 apk
alpine-release 3.20.2-r0 apk alpine-keys 2.5-r0 apk
aom-libs 3.9.1-r0 apk alpine-release 3.22.1-r0 apk
apache2-utils 2.4.62-r0 apk aom-libs 3.12.1-r0 apk
apk-tools 2.14.4-r0 apk apache2-utils 2.4.65-r0 apk
apr 1.7.4-r0 apk apk-tools 2.14.9-r2 apk
apr-util 1.6.3-r1 apk apr 1.7.5-r0 apk
argon2-libs 20190702-r5 apk apr-util 1.6.3-r1 apk
attrs 24.2.0 python argon2-libs 20190702-r5 apk
autocommand 2.2.2 python attrs 25.3.0 python
azure-common 1.1.28 python autocommand 2.2.2 python
azure-core 1.30.2 python azure-common 1.1.28 python
azure-identity 1.17.1 python azure-core 1.35.0 python
azure-mgmt-core 1.4.0 python azure-identity 1.24.0 python
azure-mgmt-dns 8.1.0 python azure-mgmt-core 1.6.0 python
backports-tarfile 1.2.0 python azure-mgmt-dns 9.0.0 python
bash 5.2.26-r0 apk backports-tarfile 1.2.0 python
beautifulsoup4 4.12.3 python bash 5.2.37-r0 apk
boto3 1.34.161 python beautifulsoup4 4.13.5 python
botocore 1.34.161 python boto3 1.40.21 python
brotli-libs 1.1.0-r2 apk botocore 1.40.21 python
bs4 0.0.2 python brotli-libs 1.1.0-r2 apk
busybox 1.36.1-r29 apk bs4 0.0.2 python
busybox-binsh 1.36.1-r29 apk busybox 1.37.0-r19 apk
c-ares 1.28.1-r0 apk busybox-binsh 1.37.0-r19 apk
c-client 2007f-r15 apk c-ares 1.34.5-r0 apk
ca-certificates 20240705-r0 apk c-client 2007f-r15 apk
ca-certificates-bundle 20240705-r0 apk ca-certificates 20250619-r0 apk
cachetools 5.4.0 python ca-certificates-bundle 20250619-r0 apk
catatonit 0.2.0-r0 apk cachetools 5.5.2 python
certbot 2.11.0 python catatonit 0.2.1-r0 apk
certbot-dns-acmedns 0.1.0 python certbot 4.2.0 python
certbot-dns-aliyun 2.0.0 python certbot-dns-acmedns 0.1.0 python
certbot-dns-azure 2.5.0 python certbot-dns-aliyun 2.0.0 python
certbot-dns-bunny 0.0.9 python certbot-dns-azure 1.5.0 python
certbot-dns-cloudflare 2.11.0 python certbot-dns-bunny 3.0.0 python
certbot-dns-cpanel 0.4.0 python certbot-dns-cloudflare 4.2.0 python
certbot-dns-desec 1.2.1 python certbot-dns-cpanel 0.4.0 python
certbot-dns-digitalocean 2.11.0 python certbot-dns-desec 1.2.1 python
certbot-dns-directadmin 1.0.4 python certbot-dns-digitalocean 4.2.0 python
certbot-dns-dnsimple 2.11.0 python certbot-dns-directadmin 1.0.15 python
certbot-dns-dnsmadeeasy 2.11.0 python certbot-dns-dnsimple 4.2.0 python
certbot-dns-dnspod 0.1.0 python certbot-dns-dnsmadeeasy 4.2.0 python
certbot-dns-do 0.31.0 python certbot-dns-dnspod 0.1.0 python
certbot-dns-domeneshop 0.2.9 python certbot-dns-do 0.31.0 python
certbot-dns-dreamhost 1.0 python certbot-dns-domeneshop 0.2.9 python
certbot-dns-duckdns 1.3 python certbot-dns-dreamhost 1.0 python
certbot-dns-dynudns 0.0.6 python certbot-dns-duckdns 1.6 python
certbot-dns-freedns 0.2.0 python certbot-dns-dynudns 0.0.6 python
certbot-dns-gehirn 2.11.0 python certbot-dns-freedns 0.2.0 python
certbot-dns-glesys 2.1.0 python certbot-dns-gehirn 4.2.0 python
certbot-dns-godaddy 2.8.0 python certbot-dns-glesys 2.1.0 python
certbot-dns-google 2.11.0 python certbot-dns-godaddy 2.8.0 python
certbot-dns-he 1.0.0 python certbot-dns-google 4.2.0 python
certbot-dns-hetzner 2.0.1 python certbot-dns-he 1.0.0 python
certbot-dns-infomaniak 0.2.2 python certbot-dns-hetzner 2.0.1 python
certbot-dns-inwx 2.2.0 python certbot-dns-infomaniak 0.2.3 python
certbot-dns-ionos 2024.1.8 python certbot-dns-inwx 3.0.3 python
certbot-dns-linode 2.11.0 python certbot-dns-ionos 2024.11.9 python
certbot-dns-loopia 1.0.1 python certbot-dns-linode 4.2.0 python
certbot-dns-luadns 2.11.0 python certbot-dns-loopia 1.0.1 python
certbot-dns-namecheap 1.0.0 python certbot-dns-luadns 4.2.0 python
certbot-dns-netcup 1.4.3 python certbot-dns-namecheap 1.0.0 python
certbot-dns-njalla 1.0.0 python certbot-dns-netcup 1.4.4 python
certbot-dns-nsone 2.11.0 python certbot-dns-njalla 2.0.2 python
certbot-dns-ovh 2.11.0 python certbot-dns-nsone 4.2.0 python
certbot-dns-porkbun 0.8 python certbot-dns-ovh 4.2.0 python
certbot-dns-rfc2136 2.11.0 python certbot-dns-porkbun 0.10.1 python
certbot-dns-route53 2.11.0 python certbot-dns-rfc2136 4.2.0 python
certbot-dns-sakuracloud 2.11.0 python certbot-dns-route53 4.2.0 python
certbot-dns-standalone 1.1 python certbot-dns-sakuracloud 4.2.0 python
certbot-dns-transip 0.5.2 python certbot-dns-standalone 1.2.1 python
certbot-dns-vultr 1.1.0 python certbot-dns-transip 0.5.2 python
certbot-plugin-gandi 1.5.0 python certbot-dns-vultr 1.1.0 python
certifi 2024.7.4 python certbot-plugin-gandi 1.5.0 python
cffi 1.17.0 python certifi 2025.8.3 python
charset-normalizer 3.3.2 python cffi 1.17.1 python
cloudflare 2.19.4 python charset-normalizer 3.4.3 python
composer 2.7.7 binary cli UNKNOWN binary
configargparse 1.7 python cli-32 UNKNOWN binary
configobj 5.0.8 python cli-64 UNKNOWN binary
coreutils 9.5-r1 apk cli-arm64 UNKNOWN binary
coreutils-env 9.5-r1 apk cloudflare 2.19.4 python
coreutils-fmt 9.5-r1 apk composer 2.8.11 binary
coreutils-sha512sum 9.5-r1 apk configargparse 1.7.1 python
cryptography 43.0.0 python configobj 5.0.9 python
curl 8.9.0-r0 apk coreutils 9.7-r1 apk
distro 1.9.0 python coreutils-env 9.7-r1 apk
dns-lexicon 3.17.0 python coreutils-fmt 9.7-r1 apk
dnslib 0.9.25 python coreutils-sha512sum 9.7-r1 apk
dnspython 2.6.1 python cryptography 45.0.6 python
domeneshop 0.4.4 python curl 8.14.1-r1 apk
fail2ban 1.1.0 python distro 1.9.0 python
fail2ban 1.1.0-r0 apk dns-lexicon 3.21.1 python
fail2ban-pyc 1.1.0-r0 apk dnslib 0.9.26 python
filelock 3.15.4 python dnspython 2.7.0 python
findutils 4.9.0-r5 apk domeneshop 0.4.4 python
fontconfig 2.15.0-r1 apk fail2ban 1.1.0 python
freetype 2.13.2-r0 apk fail2ban 1.1.0-r3 apk
future 1.0.0 python fail2ban-pyc 1.1.0-r3 apk
gdbm 1.23-r1 apk filelock 3.19.1 python
git 2.45.2-r0 apk findutils 4.10.0-r0 apk
git-init-template 2.45.2-r0 apk fontconfig 2.15.0-r3 apk
git-perl 2.45.2-r0 apk freetype 2.13.3-r0 apk
gmp 6.3.0-r1 apk future 1.0.0 python
gnupg 2.4.5-r0 apk gdbm 1.24-r0 apk
gnupg-dirmngr 2.4.5-r0 apk git 2.49.1-r0 apk
gnupg-gpgconf 2.4.5-r0 apk git-init-template 2.49.1-r0 apk
gnupg-keyboxd 2.4.5-r0 apk git-perl 2.49.1-r0 apk
gnupg-utils 2.4.5-r0 apk gmp 6.3.0-r3 apk
gnupg-wks-client 2.4.5-r0 apk gnupg 2.4.7-r0 apk
gnutls 3.8.5-r0 apk gnupg-dirmngr 2.4.7-r0 apk
google-api-core 2.19.1 python gnupg-gpgconf 2.4.7-r0 apk
google-api-python-client 2.141.0 python gnupg-keyboxd 2.4.7-r0 apk
google-auth 2.33.0 python gnupg-utils 2.4.7-r0 apk
google-auth-httplib2 0.2.0 python gnupg-wks-client 2.4.7-r0 apk
googleapis-common-protos 1.63.2 python gnutls 3.8.8-r0 apk
gpg 2.4.5-r0 apk google-api-core 2.25.1 python
gpg-agent 2.4.5-r0 apk google-api-python-client 2.179.0 python
gpg-wks-server 2.4.5-r0 apk google-auth 2.40.3 python
gpgsm 2.4.5-r0 apk google-auth-httplib2 0.2.0 python
gpgv 2.4.5-r0 apk googleapis-common-protos 1.70.0 python
httplib2 0.22.0 python gpg 2.4.7-r0 apk
icu-data-en 74.2-r0 apk gpg-agent 2.4.7-r0 apk
icu-libs 74.2-r0 apk gpg-wks-server 2.4.7-r0 apk
idna 3.7 python gpgsm 2.4.7-r0 apk
importlib-metadata 8.0.0 python gpgv 2.4.7-r0 apk
importlib-resources 6.4.0 python gui UNKNOWN binary
inflect 7.3.1 python gui-32 UNKNOWN binary
iptables 1.8.10-r3 apk gui-64 UNKNOWN binary
iptables-legacy 1.8.10-r3 apk gui-arm64 UNKNOWN binary
isodate 0.6.1 python httplib2 0.30.0 python
jaraco-context 5.3.0 python icu-data-en 76.1-r1 apk
jaraco-functools 4.0.1 python icu-libs 76.1-r1 apk
jaraco-text 3.12.1 python idna 3.10 python
jmespath 1.0.1 python importlib-metadata 8.0.0 python
josepy 1.14.0 python inflect 7.3.1 python
jq 1.7.1-r0 apk inotify-tools 4.23.9.0-r0 apk
jsonlines 4.0.0 python inotify-tools-libs 4.23.9.0-r0 apk
jsonpickle 3.2.2 python inwx-domrobot 3.2.0 python
libacl 2.3.2-r0 apk iptables 1.8.11-r1 apk
libassuan 2.5.7-r0 apk iptables-legacy 1.8.11-r1 apk
libattr 2.5.2-r0 apk isodate 0.7.2 python
libavif 1.0.4-r0 apk jaraco-collections 5.1.0 python
libbsd 0.12.2-r0 apk jaraco-context 5.3.0 python
libbz2 1.0.8-r6 apk jaraco-functools 4.0.1 python
libcrypto3 3.3.1-r3 apk jaraco-text 3.12.1 python
libcurl 8.9.0-r0 apk jinja2 3.1.6 python
libdav1d 1.4.2-r0 apk jmespath 1.0.1 python
libedit 20240517.3.1-r0 apk josepy 2.1.0 python
libevent 2.1.12-r7 apk jq 1.8.0-r0 apk
libexpat 2.6.2-r0 apk jsonlines 4.0.0 python
libffi 3.4.6-r0 apk jsonpickle 4.1.1 python
libgcc 13.2.1_git20240309-r0 apk libapk2 2.14.9-r2 apk
libgcrypt 1.10.3-r0 apk libassuan 2.5.7-r0 apk
libgd 2.3.3-r9 apk libattr 2.5.2-r2 apk
libgpg-error 1.49-r0 apk libavif 1.3.0-r0 apk
libice 1.1.1-r6 apk libbsd 0.12.2-r0 apk
libidn2 2.3.7-r0 apk libbz2 1.0.8-r6 apk
libintl 0.22.5-r0 apk libcrypto3 3.5.2-r0 apk
libip4tc 1.8.10-r3 apk libcurl 8.14.1-r1 apk
libip6tc 1.8.10-r3 apk libdav1d 1.5.1-r0 apk
libjpeg-turbo 3.0.3-r0 apk libedit 20250104.3.1-r1 apk
libksba 1.6.6-r0 apk libevent 2.1.12-r8 apk
libldap 2.6.7-r0 apk libexpat 2.7.1-r0 apk
libmaxminddb-libs 1.9.1-r0 apk libffi 3.4.8-r0 apk
libmcrypt 2.5.8-r10 apk libgcc 14.2.0-r6 apk
libmd 1.1.0-r0 apk libgcrypt 1.10.3-r1 apk
libmemcached-libs 1.1.4-r1 apk libgd 2.3.3-r10 apk
libmnl 1.0.5-r2 apk libgpg-error 1.55-r0 apk
libncursesw 6.4_p20240420-r0 apk libice 1.1.2-r0 apk
libnftnl 1.2.6-r0 apk libidn2 2.3.7-r0 apk
libpanelw 6.4_p20240420-r0 apk libintl 0.24.1-r0 apk
libpng 1.6.43-r0 apk libip4tc 1.8.11-r1 apk
libpq 16.3-r0 apk libip6tc 1.8.11-r1 apk
libproc2 4.0.4-r0 apk libjpeg-turbo 3.1.0-r0 apk
libpsl 0.21.5-r1 apk libksba 1.6.7-r0 apk
libsasl 2.1.28-r6 apk libldap 2.6.8-r0 apk
libseccomp 2.5.5-r1 apk libmaxminddb-libs 1.9.1-r0 apk
libsharpyuv 1.3.2-r0 apk libmd 1.1.0-r0 apk
libsm 1.2.4-r4 apk libmemcached-libs 1.1.4-r1 apk
libsodium 1.0.19-r0 apk libmnl 1.0.5-r2 apk
libssl3 3.3.1-r3 apk libncursesw 6.5_p20250503-r0 apk
libstdc++ 13.2.1_git20240309-r0 apk libnftnl 1.2.9-r0 apk
libtasn1 4.19.0-r2 apk libpanelw 6.5_p20250503-r0 apk
libunistring 1.2-r0 apk libpng 1.6.47-r0 apk
libuuid 2.40.1-r1 apk libpq 17.6-r0 apk
libwebp 1.3.2-r0 apk libproc2 4.0.4-r3 apk
libx11 1.8.9-r1 apk libpsl 0.21.5-r3 apk
libxau 1.0.11-r4 apk libsasl 2.1.28-r8 apk
libxcb 1.16.1-r0 apk libseccomp 2.6.0-r0 apk
libxdmcp 1.1.5-r1 apk libsharpyuv 1.5.0-r0 apk
libxext 1.3.6-r2 apk libsm 1.2.5-r0 apk
libxml2 2.12.7-r0 apk libsodium 1.0.20-r0 apk
libxpm 3.5.17-r0 apk libssl3 3.5.2-r0 apk
libxslt 1.1.39-r1 apk libstdc++ 14.2.0-r6 apk
libxt 1.3.0-r5 apk libtasn1 4.20.0-r0 apk
libxtables 1.8.10-r3 apk libunistring 1.3-r0 apk
libzip 1.10.1-r0 apk libuuid 2.41-r9 apk
linux-pam 1.6.0-r0 apk libwebp 1.5.0-r0 apk
logrotate 3.21.0-r1 apk libx11 1.8.11-r0 apk
loopialib 0.2.0 python libxau 1.0.12-r0 apk
lxml 5.3.0 python libxcb 1.17.0-r0 apk
lz4-libs 1.9.4-r5 apk libxdmcp 1.1.5-r1 apk
memcached 1.6.27-r0 apk libxext 1.3.6-r2 apk
mock 5.1.0 python libxml2 2.13.8-r0 apk
more-itertools 10.3.0 python libxpm 3.5.17-r0 apk
mpdecimal 4.0.0-r0 apk libxslt 1.1.43-r3 apk
msal 1.30.0 python libxt 1.3.1-r0 apk
msal-extensions 1.2.0 python libxtables 1.8.11-r1 apk
musl 1.2.5-r0 apk libyuv 0.0.1887.20251502-r1 apk
musl-utils 1.2.5-r0 apk libzip 1.11.4-r0 apk
my-test-package 1.0 python linux-pam 1.7.0-r4 apk
nano 8.0-r0 apk logrotate 3.21.0-r1 apk
ncurses-terminfo-base 6.4_p20240420-r0 apk loopialib 0.2.0 python
netcat-openbsd 1.226-r0 apk lxml 6.0.1 python
nettle 3.9.1-r0 apk lz4-libs 1.10.0-r0 apk
nghttp2-libs 1.62.1-r0 apk markupsafe 3.0.2 python
nginx 1.26.2-r0 apk memcached 1.6.32-r0 apk
nginx-mod-devel-kit 1.26.2-r0 apk mock 5.2.0 python
nginx-mod-http-brotli 1.26.2-r0 apk more-itertools 10.3.0 python
nginx-mod-http-dav-ext 1.26.2-r0 apk mpdecimal 4.0.1-r0 apk
nginx-mod-http-echo 1.26.2-r0 apk msal 1.33.0 python
nginx-mod-http-fancyindex 1.26.2-r0 apk msal-extensions 1.3.1 python
nginx-mod-http-geoip2 1.26.2-r0 apk musl 1.2.5-r10 apk
nginx-mod-http-headers-more 1.26.2-r0 apk musl-utils 1.2.5-r10 apk
nginx-mod-http-image-filter 1.26.2-r0 apk my-test-package 1.0 python
nginx-mod-http-perl 1.26.2-r0 apk nano 8.4-r0 apk
nginx-mod-http-redis2 1.26.2-r0 apk ncurses-terminfo-base 6.5_p20250503-r0 apk
nginx-mod-http-set-misc 1.26.2-r0 apk netcat-openbsd 1.229.1-r0 apk
nginx-mod-http-upload-progress 1.26.2-r0 apk nettle 3.10.1-r0 apk
nginx-mod-http-xslt-filter 1.26.2-r0 apk nghttp2-libs 1.65.0-r0 apk
nginx-mod-mail 1.26.2-r0 apk nginx 1.28.0-r3 apk
nginx-mod-rtmp 1.26.2-r0 apk nginx-mod-devel-kit 1.28.0-r3 apk
nginx-mod-stream 1.26.2-r0 apk nginx-mod-http-brotli 1.28.0-r3 apk
nginx-mod-stream-geoip2 1.26.2-r0 apk nginx-mod-http-dav-ext 1.28.0-r3 apk
nginx-vim 1.26.2-r0 apk nginx-mod-http-echo 1.28.0-r3 apk
npth 1.6-r4 apk nginx-mod-http-fancyindex 1.28.0-r3 apk
oniguruma 6.9.9-r0 apk nginx-mod-http-geoip2 1.28.0-r3 apk
openssl 3.3.1-r3 apk nginx-mod-http-headers-more 1.28.0-r3 apk
ordered-set 4.1.0 python nginx-mod-http-image-filter 1.28.0-r3 apk
p11-kit 0.25.3-r0 apk nginx-mod-http-perl 1.28.0-r3 apk
packaging 24.1 python nginx-mod-http-redis2 1.28.0-r3 apk
parsedatetime 2.6 python nginx-mod-http-set-misc 1.28.0-r3 apk
pcre 8.45-r3 apk nginx-mod-http-upload-progress 1.28.0-r3 apk
pcre2 10.43-r0 apk nginx-mod-http-xslt-filter 1.28.0-r3 apk
perl 5.38.2-r0 apk nginx-mod-mail 1.28.0-r3 apk
perl-error 0.17029-r2 apk nginx-mod-rtmp 1.28.0-r3 apk
perl-git 2.45.2-r0 apk nginx-mod-stream 1.28.0-r3 apk
php83 8.3.10-r0 apk nginx-mod-stream-geoip2 1.28.0-r3 apk
php83-bcmath 8.3.10-r0 apk nginx-vim 1.28.0-r3 apk
php83-bz2 8.3.10-r0 apk npth 1.8-r0 apk
php83-common 8.3.10-r0 apk oniguruma 6.9.10-r0 apk
php83-ctype 8.3.10-r0 apk openssl 3.5.2-r0 apk
php83-curl 8.3.10-r0 apk p11-kit 0.25.5-r2 apk
php83-dom 8.3.10-r0 apk packaging 24.2 python
php83-exif 8.3.10-r0 apk parsedatetime 2.6 python
php83-fileinfo 8.3.10-r0 apk pcre2 10.43-r1 apk
php83-fpm 8.3.10-r0 apk perl 5.40.3-r0 apk
php83-ftp 8.3.10-r0 apk perl-error 0.17030-r0 apk
php83-gd 8.3.10-r0 apk perl-git 2.49.1-r0 apk
php83-gmp 8.3.10-r0 apk php84 8.4.11-r0 apk
php83-iconv 8.3.10-r0 apk php84-bcmath 8.4.11-r0 apk
php83-imap 8.3.10-r0 apk php84-bz2 8.4.11-r0 apk
php83-intl 8.3.10-r0 apk php84-common 8.4.11-r0 apk
php83-ldap 8.3.10-r0 apk php84-ctype 8.4.11-r0 apk
php83-mbstring 8.3.10-r0 apk php84-curl 8.4.11-r0 apk
php83-mysqli 8.3.10-r0 apk php84-dom 8.4.11-r0 apk
php83-mysqlnd 8.3.10-r0 apk php84-exif 8.4.11-r0 apk
php83-opcache 8.3.10-r0 apk php84-fileinfo 8.4.11-r0 apk
php83-openssl 8.3.10-r0 apk php84-fpm 8.4.11-r0 apk
php83-pdo 8.3.10-r0 apk php84-ftp 8.4.11-r0 apk
php83-pdo_mysql 8.3.10-r0 apk php84-gd 8.4.11-r0 apk
php83-pdo_odbc 8.3.10-r0 apk php84-gmp 8.4.11-r0 apk
php83-pdo_pgsql 8.3.10-r0 apk php84-iconv 8.4.11-r0 apk
php83-pdo_sqlite 8.3.10-r0 apk php84-intl 8.4.11-r0 apk
php83-pear 8.3.10-r0 apk php84-ldap 8.4.11-r0 apk
php83-pecl-apcu 5.1.23-r0 apk php84-mbstring 8.4.11-r0 apk
php83-pecl-igbinary 3.2.15-r0 apk php84-mysqli 8.4.11-r0 apk
php83-pecl-mcrypt 1.0.7-r0 apk php84-mysqlnd 8.4.11-r0 apk
php83-pecl-memcached 3.2.0-r0 apk php84-opcache 8.4.11-r0 apk
php83-pecl-msgpack 2.2.0-r2 apk php84-openssl 8.4.11-r0 apk
php83-pecl-redis 6.0.2-r0 apk php84-pdo 8.4.11-r0 apk
php83-pgsql 8.3.10-r0 apk php84-pdo_mysql 8.4.11-r0 apk
php83-phar 8.3.10-r0 apk php84-pdo_odbc 8.4.11-r0 apk
php83-posix 8.3.10-r0 apk php84-pdo_pgsql 8.4.11-r0 apk
php83-session 8.3.10-r0 apk php84-pdo_sqlite 8.4.11-r0 apk
php83-simplexml 8.3.10-r0 apk php84-pear 8.4.11-r0 apk
php83-soap 8.3.10-r0 apk php84-pecl-apcu 5.1.27-r0 apk
php83-sockets 8.3.10-r0 apk php84-pecl-igbinary 3.2.16-r1 apk
php83-sodium 8.3.10-r0 apk php84-pecl-imap 1.0.3-r0 apk
php83-sqlite3 8.3.10-r0 apk php84-pecl-memcached 3.3.0-r0 apk
php83-tokenizer 8.3.10-r0 apk php84-pecl-msgpack 3.0.0-r0 apk
php83-xml 8.3.10-r0 apk php84-pecl-redis 6.2.0-r0 apk
php83-xmlreader 8.3.10-r0 apk php84-pgsql 8.4.11-r0 apk
php83-xmlwriter 8.3.10-r0 apk php84-phar 8.4.11-r0 apk
php83-xsl 8.3.10-r0 apk php84-posix 8.4.11-r0 apk
php83-zip 8.3.10-r0 apk php84-session 8.4.11-r0 apk
pinentry 1.3.0-r0 apk php84-simplexml 8.4.11-r0 apk
pip 24.2 python php84-soap 8.4.11-r0 apk
pkb-client 1.2 python php84-sockets 8.4.11-r0 apk
platformdirs 4.2.2 python php84-sodium 8.4.11-r0 apk
popt 1.19-r3 apk php84-sqlite3 8.4.11-r0 apk
portalocker 2.10.1 python php84-tokenizer 8.4.11-r0 apk
procps-ng 4.0.4-r0 apk php84-xml 8.4.11-r0 apk
proto-plus 1.24.0 python php84-xmlreader 8.4.11-r0 apk
protobuf 5.27.3 python php84-xmlwriter 8.4.11-r0 apk
pyacmedns 0.4 python php84-xsl 8.4.11-r0 apk
pyasn1 0.6.0 python php84-zip 8.4.11-r0 apk
pyasn1-modules 0.4.0 python pinentry 1.3.1-r0 apk
pyc 3.12.3-r1 apk pip 25.2 python
pycparser 2.22 python pkb-client 2.2.0 python
pyjwt 2.9.0 python platformdirs 4.2.2 python
pynamecheap 0.0.3 python popt 1.19-r4 apk
pyopenssl 24.2.1 python procps-ng 4.0.4-r3 apk
pyotp 2.9.0 python proto-plus 1.26.1 python
pyparsing 3.1.2 python protobuf 6.32.0 python
pyrfc3339 1.1 python pyacmedns 0.4 python
python-dateutil 2.9.0.post0 python pyasn1 0.6.1 python
python-digitalocean 1.17.0 python pyasn1-modules 0.4.2 python
python-transip 0.6.0 python pyc 3.12.11-r0 apk
python3 3.12.3-r1 apk pycparser 2.22 python
python3-pyc 3.12.3-r1 apk pyjwt 2.10.1 python
python3-pycache-pyc0 3.12.3-r1 apk pynamecheap 0.0.3 python
pytz 2024.1 python pyopenssl 25.1.0 python
pyyaml 6.0.2 python pyotp 2.9.0 python
readline 8.2.10-r0 apk pyparsing 3.2.3 python
requests 2.32.3 python pyrfc3339 2.1.0 python
requests-file 2.1.0 python python-dateutil 2.9.0.post0 python
requests-mock 1.12.1 python python-digitalocean 1.17.0 python
rsa 4.9 python python-transip 0.6.0 python
s3transfer 0.10.2 python python3 3.12.11-r0 apk
scanelf 1.3.7-r2 apk python3-pyc 3.12.11-r0 apk
setuptools 72.2.0 python python3-pycache-pyc0 3.12.11-r0 apk
shadow 4.15.1-r0 apk pyyaml 6.0.2 python
six 1.16.0 python readline 8.2.13-r1 apk
skalibs 2.14.1.1-r0 apk requests 2.32.5 python
soupsieve 2.6 python requests-file 2.1.0 python
sqlite-libs 3.45.3-r1 apk requests-mock 1.12.1 python
ssl_client 1.36.1-r29 apk rsa 4.9.1 python
tiff 4.6.0t-r0 apk s3transfer 0.13.1 python
tldextract 5.1.2 python scanelf 1.3.8-r1 apk
tomli 2.0.1 python setuptools 80.9.0 python
typeguard 4.3.0 python shadow 4.17.3-r0 apk
typing-extensions 4.12.2 python (+1 duplicate) six 1.17.0 python
tzdata 2024a-r1 apk skalibs-libs 2.14.4.0-r0 apk
unixodbc 2.3.12-r0 apk soupsieve 2.8 python
uritemplate 4.1.1 python sqlite-libs 3.49.2-r1 apk
urllib3 2.2.2 python ssl_client 1.37.0-r19 apk
utmps-libs 0.1.2.2-r1 apk tiff 4.7.0-r0 apk
wheel 0.43.0 python tldextract 5.3.0 python
wheel 0.44.0 python tomli 2.0.1 python
whois 5.5.23-r0 apk typeguard 4.3.0 python
xz-libs 5.6.2-r0 apk typing-extensions 4.12.2 python
zipp 3.19.2 python typing-extensions 4.15.0 python
zlib 1.3.1-r1 apk tzdata 2025b-r0 apk
zope-interface 7.0.1 python unixodbc 2.3.12-r0 apk
zstd-libs 1.5.6-r0 apk uritemplate 4.2.0 python
urllib3 2.5.0 python
utmps-libs 0.1.3.1-r0 apk
wheel 0.45.1 python (+1 duplicate)
whois 5.6.3-r0 apk
xz-libs 5.8.1-r0 apk
zipp 3.19.2 python
zlib 1.3.1-r2 apk
zope-interface 7.2 python
zstd-libs 1.5.7-r0 apk

View File

@@ -6,44 +6,49 @@ project_url: "https://linuxserver.io"
project_logo: "https://github.com/linuxserver/docker-templates/raw/master/linuxserver.io/img/swag.gif" project_logo: "https://github.com/linuxserver/docker-templates/raw/master/linuxserver.io/img/swag.gif"
project_blurb: "SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention." project_blurb: "SWAG - Secure Web Application Gateway (formerly known as letsencrypt, no relation to Let's Encrypt™) sets up an Nginx webserver and reverse proxy with php support and a built-in certbot client that automates free SSL server certificate generation and renewal processes (Let's Encrypt and ZeroSSL). It also contains fail2ban for intrusion prevention."
project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}" project_lsio_github_repo_url: "https://github.com/linuxserver/docker-{{ project_name }}"
project_categories: "Reverse Proxy"
# supported architectures # supported architectures
available_architectures: available_architectures:
- { arch: "{{ arch_x86_64 }}", tag: "amd64-latest"} - {arch: "{{ arch_x86_64 }}", tag: "amd64-latest"}
- { arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"} - {arch: "{{ arch_arm64 }}", tag: "arm64v8-latest"}
# container parameters # container parameters
common_param_env_vars_enabled: true common_param_env_vars_enabled: true
param_container_name: "{{ project_name }}" param_container_name: "{{ project_name }}"
param_usage_include_env: true param_usage_include_env: true
param_env_vars: param_env_vars:
- { env_var: "URL", env_value: "yourdomain.url", desc: "Top url you have control over (`customdomain.com` if you own it, or `customsubdomain.ddnsprovider.com` if dynamic dns)." } - {env_var: "URL", env_value: "example.com", desc: "Top url you have control over (e.g. `example.com` if you own it, or `customsubdomain.example.com` if dynamic dns)."}
- { env_var: "VALIDATION", env_value: "http", desc: "Certbot validation method to use, options are `http` or `dns` (`dns` method also requires `DNSPLUGIN` variable set).", env_options: ["http", "dns"] } - {env_var: "VALIDATION", env_value: "http", desc: "Certbot validation method to use, options are `http` or `dns` (`dns` method also requires `DNSPLUGIN` variable set).", env_options: ["http", "dns"]}
param_usage_include_vols: true param_usage_include_vols: true
param_volumes: param_volumes:
- { vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/config", desc: "Persistent config files" } - {vol_path: "/config", vol_host_path: "/path/to/{{ project_name }}/config", desc: "Persistent config files"}
param_usage_include_ports: true param_usage_include_ports: true
param_ports: param_ports:
- { external_port: "443", internal_port: "443", port_desc: "Https port" } - {external_port: "443", internal_port: "443", port_desc: "HTTPS port"}
cap_add_param: true cap_add_param: true
cap_add_param_vars: cap_add_param_vars:
- { cap_add_var: "NET_ADMIN" } - {cap_add_var: "NET_ADMIN", desc: "Required for fail2ban to be able to modify iptables rules."}
# optional container parameters # optional container parameters
opt_param_usage_include_env: true opt_param_usage_include_env: true
opt_param_env_vars: opt_param_env_vars:
- { env_var: "SUBDOMAINS", env_value: "www,", desc: "Subdomains you'd like the cert to cover (comma separated, no spaces) ie. `www,ftp,cloud`. For a wildcard cert, set this *exactly* to `wildcard` (wildcard cert is available via `dns` validation only)" } - {env_var: "SUBDOMAINS", env_value: "www,", desc: "Subdomains you'd like the cert to cover (comma separated, no spaces) ie. `www,ftp,cloud`. For a wildcard cert, set this *exactly* to `wildcard` (wildcard cert is available via `dns` validation only)"}
- { env_var: "CERTPROVIDER", env_value: "", desc: "Optionally define the cert provider. Set to `zerossl` for ZeroSSL certs (requires existing [ZeroSSL account](https://app.zerossl.com/signup) and the e-mail address entered in `EMAIL` env var). Otherwise defaults to Let's Encrypt." } - {env_var: "CERTPROVIDER", env_value: "", desc: "Optionally define the cert provider. Set to `zerossl` for ZeroSSL certs (requires existing [ZeroSSL account](https://app.zerossl.com/signup) and the e-mail address entered in `EMAIL` env var). Otherwise defaults to Let's Encrypt."}
- { env_var: "DNSPLUGIN", env_value: "cloudflare", desc: "Required if `VALIDATION` is set to `dns`. Options are `acmedns`, `aliyun`, `azure`, `bunny`, `cloudflare`, `cpanel`, `desec`, `digitalocean`, `directadmin`, `dnsimple`, `dnsmadeeasy`, `dnspod`, `do`, `domeneshop`, `dreamhost`, `duckdns`, `dynudns`, `freedns`, `gandi`, `gehirn`, `glesys`, `godaddy`, `google`, `he`, `hetzner`, `infomaniak`, `inwx`, `ionos`, `linode`, `loopia`, `luadns`, `namecheap`, `netcup`, `njalla`, `nsone`, `ovh`, `porkbun`, `rfc2136`, `route53`, `sakuracloud`, `standalone`, `transip`, and `vultr`. Also need to enter the credentials into the corresponding ini (or json for some plugins) file under `/config/dns-conf`." } - {env_var: "DNSPLUGIN", env_value: "cloudflare", desc: "Required if `VALIDATION` is set to `dns`. Options are `acmedns`, `aliyun`, `azure`, `bunny`, `cloudflare`, `cpanel`, `desec`, `digitalocean`, `directadmin`, `dnsimple`, `dnsmadeeasy`, `dnspod`, `do`, `domeneshop`, `dreamhost`, `duckdns`, `dynu`, `freedns`, `gandi`, `gehirn`, `glesys`, `godaddy`, `google`, `he`, `hetzner`, `infomaniak`, `inwx`, `ionos`, `linode`, `loopia`, `luadns`, `namecheap`, `netcup`, `njalla`, `nsone`, `ovh`, `porkbun`, `rfc2136`, `route53`, `sakuracloud`, `standalone`, `transip`, and `vultr`. Also need to enter the credentials into the corresponding ini (or json for some plugins) file under `/config/dns-conf`."}
- { env_var: "PROPAGATION", env_value: "", desc: "Optionally override (in seconds) the default propagation time for the dns plugins." } - {env_var: "PROPAGATION", env_value: "", desc: "Optionally override (in seconds) the default propagation time for the dns plugins."}
- { env_var: "EMAIL", env_value: "", desc: "Optional e-mail address used for cert expiration notifications (Required for ZeroSSL)." } - {env_var: "EMAIL", env_value: "", desc: "Optional e-mail address used for cert expiration notifications (Required for ZeroSSL)."}
- { env_var: "ONLY_SUBDOMAINS", env_value: "false", desc: "If you wish to get certs only for certain subdomains, but not the main domain (main domain may be hosted on another machine and cannot be validated), set this to `true`" } - {env_var: "ONLY_SUBDOMAINS", env_value: "false", desc: "If you wish to get certs only for certain subdomains, but not the main domain (main domain may be hosted on another machine and cannot be validated), set this to `true`"}
- { env_var: "EXTRA_DOMAINS", env_value: "", desc: "Additional fully qualified domain names (comma separated, no spaces) ie. `extradomain.com,subdomain.anotherdomain.org,*.anotherdomain.org`" } - {env_var: "EXTRA_DOMAINS", env_value: "", desc: "Additional fully qualified domain names (comma separated, no spaces) ie. `example.net,subdomain.example.net,*.example.org`"}
- { env_var: "STAGING", env_value: "false", desc: "Set to `true` to retrieve certs in staging mode. Rate limits will be much higher, but the resulting cert will not pass the browser's security test. Only to be used for testing purposes." } - {env_var: "STAGING", env_value: "false", desc: "Set to `true` to retrieve certs in staging mode. Rate limits will be much higher, but the resulting cert will not pass the browser's security test. Only to be used for testing purposes."}
- {env_var: "DISABLE_F2B", env_value: "", desc: "Set to `true` to disable the Fail2ban service in the container, if you're already running it elsewhere or using a different IPS."}
- {env_var: "SWAG_AUTORELOAD", env_value: "", desc: "Set to `true` to enable automatic reloading of confs on change without stopping/restarting nginx. Your filesystem must support inotify. This functionality was previously offered [via mod](https://github.com/linuxserver/docker-mods/tree/swag-auto-reload)."}
- {env_var: "SWAG_AUTORELOAD_WATCHLIST", env_value: "", desc: "A [pipe](https://en.wikipedia.org/wiki/Vertical_bar)-separated list of additional folders for auto reload to watch in addition to `/config/nginx`"}
opt_param_usage_include_ports: true opt_param_usage_include_ports: true
opt_param_ports: opt_param_ports:
- { external_port: "80", internal_port: "80", port_desc: "Http port (required for http validation and http -> https redirect)" } - {external_port: "80", internal_port: "80", port_desc: "HTTP port (required for HTTP validation and HTTP -> HTTPS redirect)"}
- {external_port: "443", internal_port: "443/udp", port_desc: "QUIC (HTTP/3) port. Must be enabled in the default and proxy confs."}
readonly_supported: true
readonly_message: |
* `/tmp` must be mounted to tmpfs
* fail2ban will not be available
# application setup block # application setup block
app_setup_block_enabled: true app_setup_block_enabled: true
app_setup_block: | app_setup_block: |
@@ -59,7 +64,7 @@ app_setup_block: |
1. Certs that only cover your main subdomain (ie. `yoursubdomain.duckdns.org`, leave the `SUBDOMAINS` variable empty) 1. Certs that only cover your main subdomain (ie. `yoursubdomain.duckdns.org`, leave the `SUBDOMAINS` variable empty)
2. Certs that cover sub-subdomains of your main subdomain (ie. `*.yoursubdomain.duckdns.org`, set the `SUBDOMAINS` variable to `wildcard`) 2. Certs that cover sub-subdomains of your main subdomain (ie. `*.yoursubdomain.duckdns.org`, set the `SUBDOMAINS` variable to `wildcard`)
* `--cap-add=NET_ADMIN` is required for fail2ban to modify iptables * `--cap-add=NET_ADMIN` is required for fail2ban to modify iptables
* After setup, navigate to `https://yourdomain.url` to access the default homepage (http access through port 80 is disabled by default, you can enable it by editing the default site config at `/config/nginx/site-confs/default.conf`). * After setup, navigate to `https://example.com` to access the default homepage (http access through port 80 is disabled by default, you can enable it by editing the default site config at `/config/nginx/site-confs/default.conf`).
* Certs are checked nightly and if expiration is within 30 days, renewal is attempted. If your cert is about to expire in less than 30 days, check the logs under `/config/log/letsencrypt` to see why the renewals have been failing. It is recommended to input your e-mail in docker parameters so you receive expiration notices from Let's Encrypt in those circumstances. * Certs are checked nightly and if expiration is within 30 days, renewal is attempted. If your cert is about to expire in less than 30 days, check the logs under `/config/log/letsencrypt` to see why the renewals have been failing. It is recommended to input your e-mail in docker parameters so you receive expiration notices from Let's Encrypt in those circumstances.
### Certbot Plugins ### Certbot Plugins
@@ -77,6 +82,88 @@ app_setup_block: |
Set the required credentials (usually found in the plugin documentation) in `/config/dns-conf/<plugin>.ini`. Set the required credentials (usually found in the plugin documentation) in `/config/dns-conf/<plugin>.ini`.
It is recommended to attempt obtaining a certificate with `STAGING=true` first to make sure the plugin is working as expected. It is recommended to attempt obtaining a certificate with `STAGING=true` first to make sure the plugin is working as expected.
### Dynamic Reverse Proxy Configuration via Environment Variables
SWAG can dynamically generate reverse proxy configuration files directly from environment variables, bypassing the need to manage individual `.conf` files. When any `PROXY_CONFIG_*` variable is detected, this mode is activated, and any existing `.conf` files in `/config/nginx/proxy-confs/` will be removed at startup.
**Service Definition**
Each reverse proxy service is defined by an environment variable following the format `PROXY_CONFIG_<SERVICE_NAME>`. The service name will be used as the subdomain (e.g., `SERVICE_NAME.yourdomain.com`), with the special exception of `DEFAULT` (see below). The value of the variable must be a valid JSON object.
```yaml
environment:
# Configure the default site (root domain) to proxy to a dashboard service
- 'PROXY_CONFIG_DEFAULT={"name": "dashboard", "port": 80, "auth": "authelia", "quic": true}'
# Simple subdomain service
- 'PROXY_CONFIG_HOMARR={"port": 7575, "auth": "authelia"}'
# Service with a boolean flag for HTTPS backend and QUIC enabled
- 'PROXY_CONFIG_HEIMDALL={"port": 443, "https": true, "quic": true}'
  # Complex service with nested objects and lists (abridged example illustrating the syntax)
- 'PROXY_CONFIG_PLEX={
"port": 32400,
"proxy_redirect_off": true,
"buffering_off": true,
"proxy_set_headers": [
{"key": "X-Plex-Client-Identifier", "value": "$$http_x_plex_client_identifier"},
{"key": "X-Plex-Device", "value": "$$http_x_plex_device"}
],
"extra_locations": [
{"path": "/library/streams/", "custom_directives": ["proxy_pass_request_headers off"]}
]
}'
```
The available keys in the JSON object correspond to the options in the underlying Nginx template. Common keys include `port`, `https`, `quic`, `auth`, `buffering_off`, `proxy_set_headers`, and `extra_locations`.
**Configuring the Default Site (Root Domain)**
To configure the service that responds on your root domain (e.g., `https://yourdomain.com`), use the special service name `DEFAULT`.
* The environment variable is `PROXY_CONFIG_DEFAULT`.
* Unlike subdomain services, the `DEFAULT` configuration **must** include a `"name"` key in its JSON value. This key specifies the name of the container that SWAG should proxy traffic to.
* If `PROXY_CONFIG_DEFAULT` is not set, the container will serve the standard SWAG welcome page on the root domain.
Example:
```yaml
environment:
# This will proxy https://yourdomain.com to the 'dashboard' container on port 80
- 'PROXY_CONFIG_DEFAULT={"name": "dashboard", "port": 80, "auth": "none"}'
```
**Authentication Management**
Authentication can be managed globally or per-service with a clear order of precedence.
1. **Per-Service Override (Highest Priority):** Add an `auth` key directly inside the service's JSON configuration.
* `"auth": "authelia"`: Enables Authelia for this service.
* `"auth": "basic"`: Enables Basic Authentication for this service (see below).
* `"auth": "none"`: Explicitly disables authentication for this service.
2. **Global Exclusions:** A comma-separated list of service names to exclude from the global authenticator.
* `PROXY_AUTH_EXCLUDE=ntfy,public-dashboard`
3. **Global Default (Lowest Priority):** A single variable sets the default authentication provider for all services that don't have a per-service override and are not in the exclusion list.
* `PROXY_AUTH_PROVIDER=authelia` (can be `ldap`, `authentik`, etc.)
**Basic Authentication**
If you set `"auth": "basic"` for any service, you must also provide the credentials using these two environment variables. The container will automatically create the necessary `.htpasswd` file.
* `PROXY_AUTH_BASIC_USER`: The username for basic authentication.
* `PROXY_AUTH_BASIC_PASS`: The password for basic authentication.
Example:
```yaml
environment:
- 'PROXY_CONFIG_PORTAINER={"port": 9000, "auth": "basic"}'
- PROXY_AUTH_BASIC_USER=myadmin
- PROXY_AUTH_BASIC_PASS=supersecretpassword
```
### Security and password protection ### Security and password protection
* The container detects changes to url and subdomains, revokes existing certs and generates new ones during start. * The container detects changes to url and subdomains, revokes existing certs and generates new ones during start.
@@ -118,7 +205,7 @@ app_setup_block: |
* You can check which jails are active via `docker exec -it swag fail2ban-client status` * You can check which jails are active via `docker exec -it swag fail2ban-client status`
* You can check the status of a specific jail via `docker exec -it swag fail2ban-client status <jail name>` * You can check the status of a specific jail via `docker exec -it swag fail2ban-client status <jail name>`
* You can unban an IP via `docker exec -it swag fail2ban-client set <jail name> unbanip <IP>` * You can unban an IP via `docker exec -it swag fail2ban-client set <jail name> unbanip <IP>`
* A list of commands can be found here: <https://www.fail2ban.org/wiki/index.php/Commands> * A list of commands for fail2ban-client can be found [here](https://manpages.ubuntu.com/manpages/noble/man1/fail2ban-client.1.html)
### Updating configs ### Updating configs
@@ -134,95 +221,175 @@ app_setup_block: |
* Proxy sample files WILL be updated, however your renamed (enabled) proxy files will not. * Proxy sample files WILL be updated, however your renamed (enabled) proxy files will not.
* You can check the new sample and adjust your active config as needed. * You can check the new sample and adjust your active config as needed.
### QUIC support
This image supports QUIC (also known as HTTP/3) but it must be explicitly enabled in each proxy conf, and the default conf, because if the listener is enabled and you don't expose 443/UDP, it can break connections with some browsers.
To enable QUIC, expose 443/UDP to your clients, then uncomment both QUIC listeners in all of your active proxy confs, as well as the default conf, and restart the container.
You should also uncomment the `Alt-Svc` header in your `ssl.conf` so that browsers are aware that you offer QUIC connectivity.
It is [recommended](https://quic-go.net/docs/quic/optimizations/#udp-buffer-sizes) to increase the UDP send/receive buffer **on the host** by setting the `net.core.rmem_max` and `net.core.wmem_max` sysctls. Suggested values are 4-16Mb (4194304-16777216 bytes). For persistence between reboots use `/etc/sysctl.d/`.
### Migration from the old `linuxserver/letsencrypt` image ### Migration from the old `linuxserver/letsencrypt` image
Please follow the instructions [on this blog post](https://www.linuxserver.io/blog/2020-08-21-introducing-swag#migrate). Please follow the instructions [on this blog post](https://www.linuxserver.io/blog/2020-08-21-introducing-swag#migrate).
# init diagram
init_diagram: |
"swag:latest": {
docker-mods
base {
fix-attr +\nlegacy cont-init
}
docker-mods -> base
legacy-services
custom services
init-services -> legacy-services
init-services -> custom services
custom services -> legacy-services
legacy-services -> ci-service-check
init-migrations -> init-adduser
init-swag-config -> init-certbot-config
init-nginx-end -> init-config
init-os-end -> init-config
init-config -> init-config-end
init-crontab-config -> init-config-end
init-outdated-config -> init-config-end
init-config -> init-crontab-config
init-mods-end -> init-custom-files
init-adduser -> init-device-perms
base -> init-envfile
init-swag-samples -> init-fail2ban-config
init-os-end -> init-folders
init-php -> init-keygen
base -> init-migrations
init-config-end -> init-mods
init-mods-package-install -> init-mods-end
init-mods -> init-mods-package-install
init-samples -> init-nginx
init-version-checks -> init-nginx-end
init-adduser -> init-os-end
init-device-perms -> init-os-end
init-envfile -> init-os-end
init-renew -> init-outdated-config
init-keygen -> init-permissions
init-certbot-config -> init-permissions-config
init-nginx -> init-php
init-permissions-config -> init-renew
init-config -> init-require-url
init-folders -> init-samples
init-custom-files -> init-services
init-fail2ban-config -> init-swag-config
init-require-url -> init-swag-folders
init-swag-folders -> init-swag-samples
init-permissions -> init-version-checks
init-services -> svc-cron
svc-cron -> legacy-services
init-services -> svc-fail2ban
svc-fail2ban -> legacy-services
init-services -> svc-nginx
svc-nginx -> legacy-services
init-services -> svc-php-fpm
svc-php-fpm -> legacy-services
init-services -> svc-swag-auto-reload
svc-swag-auto-reload -> legacy-services
}
Base Images: {
"baseimage-alpine-nginx:3.22" <- "baseimage-alpine:3.22"
}
"swag:latest" <- Base Images
# changelog # changelog
changelogs: changelogs:
- { date: "24.07.14:", desc: "Rebase to Alpine 3.20. Remove deprecated Google Domains certbot plugin. Existing users should update their nginx confs to avoid http2 deprecation warnings."} - {date: "02.09.25:", desc: "Add ability to define proxy configurations via environment variables."}
- { date: "01.07.24:", desc: "Fall back to iptables-legacy if iptables doesn't work." } - {date: "18.07.25:", desc: "Rebase to Alpine 3.22 with PHP 8.4. Add QUIC support. Drop PHP bindings for mcrypt as it is no longer maintained."}
- { date: "23.03.24:", desc: "Fix perms on the generated `priv-fullchain-bundle.pem`." } - {date: "05.05.25:", desc: "Disable Certbot's built in log rotation."}
- { date: "14.03.24:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-location.conf, authelia-server.conf - Update Authelia conf samples with support for 4.38." } - {date: "19.01.25:", desc: "Add [Auto Reload](https://github.com/linuxserver/docker-mods/tree/swag-auto-reload) functionality to SWAG."}
- { date: "11.03.24:", desc: "Restore support for DynuDNS using `certbot-dns-dynudns`." } - {date: "17.12.24:", desc: "Rebase to Alpine 3.21."}
- { date: "06.03.24:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) site-confs/default.conf - Cleanup default site conf." } - {date: "21.10.24:", desc: "Fix naming issue with Dynu plugin. If you are using Dynu, please make sure your credentials are set in /config/dns-conf/dynu.ini and your DNSPLUGIN variable is set to dynu (not dynudns)."}
- { date: "04.03.24:", desc: "Remove `stream.conf` inside the container to allow users to include their own block in `nginx.conf`." } - {date: "30.08.24:", desc: "Fix zerossl cert revocation."}
- { date: "23.01.24:", desc: "Rebase to Alpine 3.19 with php 8.3, add root periodic crontabs for logrotate." } - {date: "24.07.14:", desc: "Rebase to Alpine 3.20. Remove deprecated Google Domains certbot plugin. Existing users should update their nginx confs to avoid http2 deprecation warnings."}
- { date: "01.01.24:", desc: "Add GleSYS DNS plugin." } - {date: "01.07.24:", desc: "Fall back to iptables-legacy if iptables doesn't work."}
- { date: "11.12.23:", desc: "Deprecate certbot-dns-dynu to resolve dependency conflicts with other plugins." } - {date: "23.03.24:", desc: "Fix perms on the generated `priv-fullchain-bundle.pem`."}
- { date: "30.11.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) site-confs/default.conf - Fix index.php being downloaded on 404." } - {date: "14.03.24:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-location.conf, authelia-server.conf - Update Authelia conf samples with support for 4.38."}
- { date: "23.11.23:", desc: "Run certbot as root to allow fix http validation." } - {date: "11.03.24:", desc: "Restore support for DynuDNS using `certbot-dns-dynudns`."}
- { date: "01.10.23:", desc: "Fix \"unrecognized arguments\" issue in DirectAdmin DNS plugin." } - {date: "06.03.24:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) site-confs/default.conf - Cleanup default site conf."}
- { date: "28.08.23:", desc: "Add Namecheap DNS plugin." } - {date: "04.03.24:", desc: "Remove `stream.conf` inside the container to allow users to include their own block in `nginx.conf`."}
- { date: "12.08.23:", desc: "Add FreeDNS plugin. Detect certbot DNS authenticators using CLI." } - {date: "23.01.24:", desc: "Rebase to Alpine 3.19 with php 8.3, add root periodic crontabs for logrotate."}
- { date: "07.08.23:", desc: "Add Bunny DNS Configuration." } - {date: "01.01.24:", desc: "Add GleSYS DNS plugin."}
- { date: "27.07.23:", desc: "Added support for dreamhost validation." } - {date: "11.12.23:", desc: "Deprecate certbot-dns-dynu to resolve dependency conflicts with other plugins."}
- { date: "25.05.23:", desc: "Rebase to Alpine 3.18, deprecate armhf." } - {date: "30.11.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) site-confs/default.conf - Fix index.php being downloaded on 404."}
- { date: "27.04.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-location.conf, authelia-server.conf, authentik-location.conf, authentik-server.conf - Simplify auth configs and fix Set-Cookie header bug." } - {date: "23.11.23:", desc: "Run certbot as root to allow fix http validation."}
- { date: "13.04.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, authelia-location.conf, authentik-location.conf, and site-confs/default.conf - Move ssl.conf include to default.conf. Remove Authorization headers in authelia. Sort proxy_set_header in authelia and authentik." } - {date: "01.10.23:", desc: "Fix \"unrecognized arguments\" issue in DirectAdmin DNS plugin."}
- { date: "25.03.23:", desc: "Fix renewal post hook." } - {date: "28.08.23:", desc: "Add Namecheap DNS plugin."}
- { date: "10.03.23:", desc: "Cleanup unused csr and keys folders. See [certbot 2.3.0 release notes](https://github.com/certbot/certbot/releases/tag/v2.3.0)." } - {date: "12.08.23:", desc: "Add FreeDNS plugin. Detect certbot DNS authenticators using CLI."}
- { date: "09.03.23:", desc: "Add Google Domains DNS support, `google-domains`." } - {date: "07.08.23:", desc: "Add Bunny DNS Configuration."}
- { date: "02.03.23:", desc: "Set permissions on crontabs during init." } - {date: "27.07.23:", desc: "Added support for dreamhost validation."}
- { date: "09.02.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) proxy.conf, authelia-location.conf and authelia-server.conf - Add Authentik configs, update Authelia configs." } - {date: "25.05.23:", desc: "Rebase to Alpine 3.18, deprecate armhf."}
- { date: "06.02.23:", desc: "Add porkbun support back in." } - {date: "27.04.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-location.conf, authelia-server.conf, authentik-location.conf, authentik-server.conf - Simplify auth configs and fix Set-Cookie header bug."}
- { date: "21.01.23:", desc: "Unpin certbot version (allow certbot 2.x). !!BREAKING CHANGE!! We are temporarily removing the certbot porkbun plugin until a new version is released that is compatible with certbot 2.x." } - {date: "13.04.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, authelia-location.conf, authentik-location.conf, and site-confs/default.conf - Move ssl.conf include to default.conf. Remove Authorization headers in authelia. Sort proxy_set_header in authelia and authentik."}
- { date: "20.01.23:", desc: "Rebase to alpine 3.17 with php8.1." } - {date: "25.03.23:", desc: "Fix renewal post hook."}
- { date: "16.01.23:", desc: "Remove nchan module because it keeps causing crashes." } - {date: "10.03.23:", desc: "Cleanup unused csr and keys folders. See [certbot 2.3.0 release notes](https://github.com/certbot/certbot/releases/tag/v2.3.0)."}
- { date: "08.12.22:", desc: "Revamp certbot init."} - {date: "09.03.23:", desc: "Add Google Domains DNS support, `google-domains`."}
- { date: "03.12.22:", desc: "Remove defunct cloudxns plugin."} - {date: "02.03.23:", desc: "Set permissions on crontabs during init."}
- { date: "22.11.22:", desc: "Pin acme to the same version as certbot."} - {date: "09.02.23:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) proxy.conf, authelia-location.conf and authelia-server.conf - Add Authentik configs, update Authelia configs."}
- { date: "22.11.22:", desc: "Pin certbot to 1.32.0 until plugin compatibility improves."} - {date: "06.02.23:", desc: "Add porkbun support back in."}
- { date: "05.11.22:", desc: "Update acmedns plugin handling."} - {date: "21.01.23:", desc: "Unpin certbot version (allow certbot 2.x). !!BREAKING CHANGE!! We are temporarily removing the certbot porkbun plugin until a new version is released that is compatible with certbot 2.x."}
- { date: "06.10.22:", desc: "Switch to certbot-dns-duckdns. Update cpanel and gandi dns plugin handling. Minor adjustments to init logic." } - {date: "20.01.23:", desc: "Rebase to alpine 3.17 with php8.1."}
- { date: "05.10.22:", desc: "Use certbot file hooks instead of command line hooks" } - {date: "16.01.23:", desc: "Remove nchan module because it keeps causing crashes."}
- { date: "04.10.22:", desc: "Add godaddy and porkbun dns plugins." } - {date: "08.12.22:", desc: "Revamp certbot init."}
- { date: "03.10.22:", desc: "Add default_server back to default site conf's https listen." } - {date: "03.12.22:", desc: "Remove defunct cloudxns plugin."}
- { date: "22.09.22:", desc: "Added support for DO DNS validation." } - {date: "22.11.22:", desc: "Pin acme to the same version as certbot."}
- { date: "22.09.22:", desc: "Added certbot-dns-acmedns for DNS01 validation." } - {date: "22.11.22:", desc: "Pin certbot to 1.32.0 until plugin compatibility improves."}
- { date: "20.08.22:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf - Rebasing to alpine 3.15 with php8. Restructure nginx configs ([see changes announcement](https://info.linuxserver.io/issues/2022-08-20-nginx-base))." } - {date: "05.11.22:", desc: "Update acmedns plugin handling."}
- { date: "10.08.22:", desc: "Added support for Dynu DNS validation." } - {date: "06.10.22:", desc: "Switch to certbot-dns-duckdns. Update cpanel and gandi dns plugin handling. Minor adjustments to init logic."}
- { date: "18.05.22:", desc: "Added support for Azure DNS validation." } - {date: "05.10.22:", desc: "Use certbot file hooks instead of command line hooks"}
- { date: "09.04.22:", desc: "Added certbot-dns-loopia for DNS01 validation." } - {date: "04.10.22:", desc: "Add godaddy and porkbun dns plugins."}
- { date: "05.04.22:", desc: "Added support for standalone DNS validation." } - {date: "03.10.22:", desc: "Add default_server back to default site conf's https listen."}
- { date: "28.03.22:", desc: "created a logfile for fail2ban nginx-unauthorized in /etc/cont-init.d/50-config" } - {date: "22.09.22:", desc: "Added support for DO DNS validation."}
- { date: "09.01.22:", desc: "Added a fail2ban jail for nginx unauthorized" } - {date: "22.09.22:", desc: "Added certbot-dns-acmedns for DNS01 validation."}
- { date: "21.12.21:", desc: "Fixed issue with iptables not working as expected" } - {date: "20.08.22:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf - Rebasing to alpine 3.15 with php8. Restructure nginx configs ([see changes announcement](https://info.linuxserver.io/issues/2022-08-20-nginx-base))."}
- { date: "30.11.21:", desc: "Move maxmind to a [new mod](https://github.com/linuxserver/docker-mods/tree/swag-maxmind)" } - {date: "10.08.22:", desc: "Added support for Dynu DNS validation."}
- { date: "22.11.21:", desc: "Added support for Infomaniak DNS for certificate generation." } - {date: "18.05.22:", desc: "Added support for Azure DNS validation."}
- { date: "20.11.21:", desc: "Added support for dnspod validation." } - {date: "09.04.22:", desc: "Added certbot-dns-loopia for DNS01 validation."}
- { date: "15.11.21:", desc: "Added support for deSEC DNS for wildcard certificate generation." } - {date: "05.04.22:", desc: "Added support for standalone DNS validation."}
- { date: "26.10.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) proxy.conf - Mitigate <https://httpoxy.org/> vulnerabilities. Ref: <https://www.nginx.com/blog/mitigating-the-httpoxy-vulnerability-with-nginx#Defeating-the-Attack-using-NGINX-and-NGINX-Plus>" } - {date: "28.03.22:", desc: "created a logfile for fail2ban nginx-unauthorized in /etc/cont-init.d/50-config"}
- { date: "23.10.21:", desc: "Fix Hurricane Electric (HE) DNS validation." } - {date: "09.01.22:", desc: "Added a fail2ban jail for nginx unauthorized"}
- { date: "12.10.21:", desc: "Fix deprecated LE root cert check to fix failures when using `STAGING=true`, and failures in revoking." } - {date: "21.12.21:", desc: "Fixed issue with iptables not working as expected"}
- { date: "06.10.21:", desc: "Added support for Hurricane Electric (HE) DNS validation. Added lxml build deps." } - {date: "30.11.21:", desc: "Move maxmind to a [new mod](https://github.com/linuxserver/docker-mods/tree/swag-maxmind)"}
- { date: "01.10.21:", desc: "Check if the cert uses the old LE root cert, revoke and regenerate if necessary. [Here's more info](https://twitter.com/letsencrypt/status/1443621997288767491) on LE root cert expiration" } - {date: "22.11.21:", desc: "Added support for Infomaniak DNS for certificate generation."}
- { date: "19.09.21:", desc: "Add an optional header to opt out of Google FLoC in `ssl.conf`." } - {date: "20.11.21:", desc: "Added support for dnspod validation."}
- { date: "17.09.21:", desc: "Mark `SUBDOMAINS` var as optional." } - {date: "15.11.21:", desc: "Added support for deSEC DNS for wildcard certificate generation."}
- { date: "01.08.21:", desc: "Add support for ionos dns validation." } - {date: "26.10.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) proxy.conf - Mitigate <https://httpoxy.org/> vulnerabilities. Ref: <https://www.nginx.com/blog/mitigating-the-httpoxy-vulnerability-with-nginx#Defeating-the-Attack-using-NGINX-and-NGINX-Plus>"}
- { date: "15.07.21:", desc: "Fix libmaxminddb issue due to upstream change." } - {date: "23.10.21:", desc: "Fix Hurricane Electric (HE) DNS validation."}
- { date: "07.07.21:", desc: "Rebase to alpine 3.14." } - {date: "12.10.21:", desc: "Fix deprecated LE root cert check to fix failures when using `STAGING=true`, and failures in revoking."}
- { date: "24.06.21:", desc: "Update default nginx conf folder." } - {date: "06.10.21:", desc: "Added support for Hurricane Electric (HE) DNS validation. Added lxml build deps."}
- { date: "28.05.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-server.conf - Use `resolver.conf` and patch for `CVE-2021-32637`." } - {date: "01.10.21:", desc: "Check if the cert uses the old LE root cert, revoke and regenerate if necessary. [Here's more info](https://twitter.com/letsencrypt/status/1443621997288767491) on LE root cert expiration"}
- { date: "20.05.21:", desc: "Modify resolver.conf generation to detect and ignore ipv6." } - {date: "19.09.21:", desc: "Add an optional header to opt out of Google FLoC in `ssl.conf`."}
- { date: "14.05.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, ssl.conf, proxy.conf, and the default site-conf - Rework nginx.conf to be inline with alpine upstream and relocate lines from other files. Use linuxserver.io wheel index for pip packages. Switch to using [ffdhe4096](https://ssl-config.mozilla.org/ffdhe4096.txt) for `dhparams.pem` per [RFC7919](https://datatracker.ietf.org/doc/html/rfc7919). Added `worker_processes.conf`, which sets the number of nginx workers, and `resolver.conf`, which sets the dns resolver. Both conf files are auto-generated only on first start and can be user modified later." } - {date: "17.09.21:", desc: "Mark `SUBDOMAINS` var as optional."}
- { date: "21.04.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-server.conf and authelia-location.conf - Add remote name/email headers and pass http method." } - {date: "01.08.21:", desc: "Add support for ionos dns validation."}
- { date: "12.04.21:", desc: "Add php7-gmp and php7-pecl-mailparse." } - {date: "15.07.21:", desc: "Fix libmaxminddb issue due to upstream change."}
- { date: "12.04.21:", desc: "Add support for vultr dns validation." } - {date: "07.07.21:", desc: "Rebase to alpine 3.14."}
- { date: "14.03.21:", desc: "Add support for directadmin dns validation." } - {date: "24.06.21:", desc: "Update default nginx conf folder."}
- { date: "12.02.21:", desc: "Clean up rust/cargo cache, which ballooned the image size in the last couple of builds." } - {date: "28.05.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-server.conf - Use `resolver.conf` and patch for `CVE-2021-32637`."}
- { date: "10.02.21:", desc: "Fix aliyun, domeneshop, inwx and transip dns confs for existing users." } - {date: "20.05.21:", desc: "Modify resolver.conf generation to detect and ignore ipv6."}
- { date: "09.02.21:", desc: "Rebasing to alpine 3.13. Add nginx mods brotli and dav-ext. Remove nginx mods lua and lua-upstream (due to regression over the last couple of years)." } - {date: "14.05.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, ssl.conf, proxy.conf, and the default site-conf - Rework nginx.conf to be inline with alpine upstream and relocate lines from other files. Use linuxserver.io wheel index for pip packages. Switch to using [ffdhe4096](https://ssl-config.mozilla.org/ffdhe4096.txt) for `dhparams.pem` per [RFC7919](https://datatracker.ietf.org/doc/html/rfc7919). Added `worker_processes.conf`, which sets the number of nginx workers, and `resolver.conf`, which sets the dns resolver. Both conf files are auto-generated only on first start and can be user modified later."}
- { date: "26.01.21:", desc: "Add support for hetzner dns validation." } - {date: "21.04.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) authelia-server.conf and authelia-location.conf - Add remote name/email headers and pass http method."}
- { date: "20.01.21:", desc: "Add check for ZeroSSL EAB retrieval." } - {date: "12.04.21:", desc: "Add php7-gmp and php7-pecl-mailparse."}
- { date: "08.01.21:", desc: "Add support for getting certs from [ZeroSSL](https://zerossl.com/) via optional `CERTPROVIDER` env var. Update aliyun, domeneshop, inwx and transip dns plugins with the new plugin names. Hide `donoteditthisfile.conf` because users were editing it despite its name. Suppress harmless error when no proxy confs are enabled." } - {date: "12.04.21:", desc: "Add support for vultr dns validation."}
- { date: "03.01.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) /config/nginx/site-confs/default.conf - Add helper pages to aid troubleshooting" } - {date: "14.03.21:", desc: "Add support for directadmin dns validation."}
- { date: "10.12.20:", desc: "Add support for njalla dns validation" } - {date: "12.02.21:", desc: "Clean up rust/cargo cache, which ballooned the image size in the last couple of builds."}
- { date: "09.12.20:", desc: "Check for template/conf updates and notify in the log. Add support for gehirn and sakuracloud dns validation." } - {date: "10.02.21:", desc: "Fix aliyun, domeneshop, inwx and transip dns confs for existing users."}
- { date: "01.11.20:", desc: "Add support for netcup dns validation" } - {date: "09.02.21:", desc: "Rebasing to alpine 3.13. Add nginx mods brotli and dav-ext. Remove nginx mods lua and lua-upstream (due to regression over the last couple of years)."}
- { date: "29.10.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) ssl.conf - Add frame-ancestors to Content-Security-Policy." } - {date: "26.01.21:", desc: "Add support for hetzner dns validation."}
- { date: "04.10.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, proxy.conf, and ssl.conf - Minor cleanups and reordering." } - {date: "20.01.21:", desc: "Add check for ZeroSSL EAB retrieval."}
- { date: "20.09.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf - Added geoip2 configs. Added MAXMINDDB_LICENSE_KEY variable to readme."} - {date: "08.01.21:", desc: "Add support for getting certs from [ZeroSSL](https://zerossl.com/) via optional `CERTPROVIDER` env var. Update aliyun, domeneshop, inwx and transip dns plugins with the new plugin names. Hide `donoteditthisfile.conf` because users were editing it despite its name. Suppress harmless error when no proxy confs are enabled."}
- { date: "08.09.20:", desc: "Add php7-xsl." } - {date: "03.01.21:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) /config/nginx/site-confs/default.conf - Add helper pages to aid troubleshooting"}
- { date: "01.09.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, proxy.conf, and various proxy samples - Global websockets across all configs." } - {date: "10.12.20:", desc: "Add support for njalla dns validation"}
- { date: "03.08.20:", desc: "Initial release." } - {date: "09.12.20:", desc: "Check for template/conf updates and notify in the log. Add support for gehirn and sakuracloud dns validation."}
- {date: "01.11.20:", desc: "Add support for netcup dns validation"}
- {date: "29.10.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) ssl.conf - Add frame-ancestors to Content-Security-Policy."}
- {date: "04.10.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, proxy.conf, and ssl.conf - Minor cleanups and reordering."}
- {date: "20.09.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf - Added geoip2 configs. Added MAXMINDDB_LICENSE_KEY variable to readme."}
- {date: "08.09.20:", desc: "Add php7-xsl."}
- {date: "01.09.20:", desc: "[Existing users should update:](https://github.com/linuxserver/docker-swag/blob/master/README.md#updating-configs) nginx.conf, proxy.conf, and various proxy samples - Global websockets across all configs."}
- {date: "03.08.20:", desc: "Initial release."}

View File

@@ -0,0 +1,153 @@
import os
import json
import subprocess
from jinja2 import Environment, FileSystemLoader
# --- Configuration ---
# Directory containing the Jinja2 templates (proxy.conf.j2, default.conf.j2).
TEMPLATE_DIR = '/app/config-generator/templates'
# Where generated <name>.subdomain.conf files are written; nginx includes
# this directory via the env-proxy-confs glob in the site config.
PROXY_OUTPUT_DIR = '/config/nginx/env-proxy-confs'
# Destination of the rendered root-domain (default) site config.
DEFAULT_CONF_OUTPUT = '/config/nginx/site-confs/default.conf'
# Basic-auth credential file, created with the external 'htpasswd' tool
# when a service requests 'basic' auth.
HTPASSWD_FILE = '/config/nginx/.htpasswd'
# ---------------------
def process_service_config(service_name, service_config_json, global_auth_provider, auth_exclude_list):
    """Parse one service's JSON payload and resolve its auth provider.

    Args:
        service_name: lower-cased name derived from the PROXY_CONFIG_* env var.
        service_config_json: raw JSON string holding the service settings.
        global_auth_provider: provider applied to all services unless excluded.
        auth_exclude_list: service names exempt from the global provider.

    Returns:
        The parsed config dict with 'name' and 'auth_provider' keys filled in.

    Raises:
        ValueError: if the 'default' service omits the required 'name' key.
        json.JSONDecodeError: if the JSON payload is malformed.
    """
    config = json.loads(service_config_json)

    # The 'default' service targets the root domain, so the upstream container
    # name must be given explicitly; every other service is named after its
    # environment variable.
    if service_name.lower() != 'default':
        config['name'] = service_name
    elif 'name' not in config:
        raise ValueError("PROXY_CONFIG_DEFAULT must contain a 'name' key specifying the target container name.")

    # Resolution order: per-service override > global provider (minus the
    # exclude list) > no auth at all.
    if 'auth' in config:
        provider = config['auth']
        print(f" - Found per-service auth override: '{provider}'")
    elif global_auth_provider and service_name not in auth_exclude_list:
        provider = global_auth_provider
        print(f" - Applying global auth provider: '{provider}'")
    elif service_name in auth_exclude_list:
        provider = 'none'
        print(f" - Service is in global exclude list. No auth.")
    else:
        provider = 'none'
        print(f" - No auth provider specified.")

    config['auth_provider'] = provider
    return config
def generate_configs():
    """Render nginx configs from PROXY_CONFIG_* environment variables.

    Every PROXY_CONFIG_<NAME> env var is parsed as a JSON service definition
    and rendered through proxy.conf.j2 into PROXY_OUTPUT_DIR as
    <name>.subdomain.conf; the special PROXY_CONFIG_DEFAULT payload is
    rendered through default.conf.j2 into DEFAULT_CONF_OUTPUT instead.
    Basic-auth credentials (PROXY_AUTH_BASIC_USER / PROXY_AUTH_BASIC_PASS)
    are written to HTPASSWD_FILE at most once via the external 'htpasswd'
    tool. Failures in individual services are logged and skipped so one bad
    config does not block the rest.
    """
    print("--- Starting Nginx Config Generation from Environment Variables ---")
    # Ensure output directories exist before any rendering happens.
    os.makedirs(PROXY_OUTPUT_DIR, exist_ok=True)
    os.makedirs(os.path.dirname(DEFAULT_CONF_OUTPUT), exist_ok=True)
    print("Output directories are ready.")
    # Global auth settings from the environment.
    global_auth_provider = os.environ.get('PROXY_AUTH_PROVIDER')
    auth_exclude_list = os.environ.get('PROXY_AUTH_EXCLUDE', '').split(',')
    auth_exclude_list = [name.strip() for name in auth_exclude_list if name.strip()]
    # Basic auth credentials; the .htpasswd file is created lazily, once.
    basic_auth_user = os.environ.get('PROXY_AUTH_BASIC_USER')
    basic_auth_pass = os.environ.get('PROXY_AUTH_BASIC_PASS')
    basic_auth_configured = False
    print(f"Global Auth Provider: {global_auth_provider}")
    print(f"Auth Exclude List: {auth_exclude_list}")
    # Collect and process service configurations.
    subdomain_services = []
    default_service = None
    for key, value in os.environ.items():
        if key.startswith('PROXY_CONFIG_'):
            service_name = key.replace('PROXY_CONFIG_', '').lower()
            print(f" Processing service: {service_name}")
            print(value)
            try:
                service_config = process_service_config(service_name, value, global_auth_provider, auth_exclude_list)
                # Create the htpasswd file for the first service that wants
                # basic auth; later services reuse it.
                if service_config['auth_provider'] == 'basic' and not basic_auth_configured:
                    if basic_auth_user and basic_auth_pass:
                        print(f" - Configuring Basic Auth with user '{basic_auth_user}'.")
                        try:
                            os.makedirs(os.path.dirname(HTPASSWD_FILE), exist_ok=True)
                            # -b: password from argv, -c: (re)create the file.
                            command = ['htpasswd', '-bc', HTPASSWD_FILE, basic_auth_user, basic_auth_pass]
                            subprocess.run(command, check=True, capture_output=True, text=True)
                            print(f" - Successfully created '{HTPASSWD_FILE}'.")
                            basic_auth_configured = True
                        except subprocess.CalledProcessError as e:
                            print(f" [!!] ERROR: 'htpasswd' command failed: {e.stderr}. Basic auth will not be enabled.")
                            service_config['auth_provider'] = 'none'
                        except FileNotFoundError:
                            print(f" [!!] ERROR: 'htpasswd' command not found. Basic auth will not be enabled.")
                            service_config['auth_provider'] = 'none'
                    else:
                        print(f" [!!] WARNING: 'auth: basic' is set, but PROXY_AUTH_BASIC_USER or PROXY_AUTH_BASIC_PASS is missing. Skipping auth.")
                        service_config['auth_provider'] = 'none'
                if service_name == 'default':
                    default_service = service_config
                else:
                    subdomain_services.append(service_config)
            except (json.JSONDecodeError, ValueError) as e:
                print(f" [!!] ERROR: Could not parse or validate config for {service_name}: {e}. Skipping.")
            except Exception as e:
                print(f" [!!] ERROR: An unexpected error occurred processing {service_name}: {e}. Skipping.")
    # Set up the Jinja2 environment; bail out early if templates are missing.
    try:
        env = Environment(loader=FileSystemLoader(TEMPLATE_DIR), trim_blocks=True, lstrip_blocks=True)
        proxy_template = env.get_template('proxy.conf.j2')
        default_template = env.get_template('default.conf.j2')
        print("\nJinja2 templates loaded successfully.")
    except Exception as e:
        print(f"ERROR: Failed to load Jinja2 templates from '{TEMPLATE_DIR}': {e}. Exiting.")
        return
    # Generate the default site config if specified.
    if default_service:
        print("\n--- Generating Default Site Config ---")
        try:
            rendered_content = default_template.render(item=default_service)
            with open(DEFAULT_CONF_OUTPUT, 'w') as f:
                f.write(rendered_content)
            print(f" [OK] Generated {os.path.basename(DEFAULT_CONF_OUTPUT)}")
        except Exception as e:
            print(f" [!!] ERROR: Failed to render or write default config: {e}")
    else:
        print("\n--- PROXY_CONFIG_DEFAULT not set, default site config will not be generated. ---")
    # Generate subdomain proxy configs.
    print("\n--- Generating Subdomain Proxy Configs ---")
    if not subdomain_services:
        print("No subdomain services found to configure.")
    for service in subdomain_services:
        filename = f"{service['name']}.subdomain.conf"
        output_path = os.path.join(PROXY_OUTPUT_DIR, filename)
        try:
            rendered_content = proxy_template.render(item=service)
            with open(output_path, 'w') as f:
                f.write(rendered_content)
            # BUG FIX: previously printed the literal text "(unknown)" instead
            # of the generated filename, making the log useless for auditing.
            print(f" [OK] Generated {filename}")
        except Exception as e:
            print(f" [!!] ERROR: Failed to render or write config for {service['name']}: {e}")
    print("\n--- Generation Complete ---")


if __name__ == "__main__":
    generate_configs()

View File

@@ -0,0 +1,105 @@
## Version 2025/08/28
# THIS FILE IS AUTO-GENERATED BY THE CONTAINER. DO NOT EDIT.
#
# This is the default server block, handling requests to the root domain.
# redirect all traffic to https
server {
listen 80 default_server;
listen [::]:80 default_server;
location / {
return 301 https://$host$request_uri;
}
}
# main server block
server {
listen 443 ssl default_server;
{# QUIC listeners are only emitted when the service config sets "quic";
   the commented-out variants keep the rendered file readable either way #}
{% if item.quic %}
listen 443 quic reuseport default_server;
{% else %}
# listen 443 quic reuseport default_server;
{% endif %}
listen [::]:443 ssl default_server;
{% if item.quic %}
listen [::]:443 quic reuseport default_server;
{% else %}
# listen [::]:443 quic reuseport default_server;
{% endif %}
server_name _;
include /config/nginx/ssl.conf;
{# '0' disables nginx's request-body size limit unless overridden #}
client_max_body_size {{ item.client_max_body_size | default('0') }};
{% if item.proxy_redirect_off %}
proxy_redirect off;
{% endif %}
{% if item.buffering_off %}
proxy_buffering off;
{% endif %}
{# providers other than 'none'/'basic' pull in
   /config/nginx/<provider>-server.conf at server scope #}
{% if item.auth_provider and item.auth_provider not in ['none', 'basic'] %}
# enable for {{ item.auth_provider }}
include /config/nginx/{{ item.auth_provider }}-server.conf;
{% endif %}
location / {
{% if item.auth_provider == 'basic' %}
# enable for basic auth
auth_basic "Restricted";
auth_basic_user_file /config/nginx/.htpasswd;
{% elif item.auth_provider and item.auth_provider != 'none' %}
# enable for {{ item.auth_provider }}
include /config/nginx/{{ item.auth_provider }}-location.conf;
{% else %}
# No authentication enabled for this service.
{% endif %}
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_app {{ item.name }};
set $upstream_port {{ item.port }};
set $upstream_proto {% if item.https %}https{% else %}http{% endif %};
proxy_pass $upstream_proto://$upstream_app:$upstream_port;
{% if item.proxy_set_headers %}
{% for header in item.proxy_set_headers %}
proxy_set_header {{ header.key }} {{ header.value }};
{% endfor %}
{% endif %}
{# free-form directives from the service config, emitted verbatim with a ';' #}
{% if item.custom_directives %}
{% for directive in item.custom_directives %}
{{ directive }};
{% endfor %}
{% endif %}
}
{# extra_locations: per-location app/port/https fall back to the
   service-level values via the default() filters below #}
{% if item.extra_locations %}
{% for loc in item.extra_locations %}
location {{ loc.path }} {
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_app {{ loc.app | default(item.name) }};
set $upstream_port {{ loc.port | default(item.port) }};
set $upstream_proto {% if loc.https %}https{% elif item.https and loc.https is not defined %}https{% else %}http{% endif %};
proxy_pass $upstream_proto://$upstream_app:$upstream_port{% if loc.proxy_pass_path %}{{ loc.proxy_pass_path }}{% endif %};
{% if loc.proxy_set_headers %}
{% for header in loc.proxy_set_headers %}
proxy_set_header {{ header.key }} {{ header.value }};
{% endfor %}
{% endif %}
{% if loc.custom_directives %}
{% for directive in loc.custom_directives %}
{{ directive }};
{% endfor %}
{% endif %}
}
{% endfor %}
{% endif %}
}
# enable subdomain method reverse proxy confs
include /config/nginx/proxy-confs/*.subdomain.conf;
# enable env var subdomain method reverse proxy confs
include /config/nginx/env-proxy-confs/*.subdomain.conf;

View File

@@ -0,0 +1,141 @@
## Version 2025/08/28
# THIS FILE IS AUTO-GENERATED BY THE CONTAINER. DO NOT EDIT.
#
# make sure that your {{ item.name }} container is named {{ item.name }}
# make sure that your dns has a cname set for {{ item.name }}
server {
listen 443 ssl;
{# BUG FIX: no 'reuseport' on these listeners. nginx accepts the reuseport
   listen option at most once per address:port (it lives on the default
   server block); this template is rendered once per service, so repeating
   'reuseport' in every generated subdomain conf makes nginx fail to start
   with "duplicate listen options". The stock SWAG subdomain samples likewise
   use plain 'listen 443 quic;'. #}
{% if item.quic %}
listen 443 quic;
{% else %}
# listen 443 quic;
{% endif %}
listen [::]:443 ssl;
{% if item.quic %}
listen [::]:443 quic;
{% else %}
# listen [::]:443 quic;
{% endif %}
server_name {{ item.name }}.*;
include /config/nginx/ssl.conf;
{# '0' disables nginx's request-body size limit unless overridden #}
client_max_body_size {{ item.client_max_body_size | default('0') }};
{% if item.proxy_redirect_off %}
proxy_redirect off;
{% endif %}
{% if item.buffering_off %}
proxy_buffering off;
{% endif %}
{# providers other than 'none'/'basic' pull in
   /config/nginx/<provider>-server.conf at server scope #}
{% if item.auth_provider and item.auth_provider not in ['none', 'basic'] %}
# enable for {{ item.auth_provider }}
include /config/nginx/{{ item.auth_provider }}-server.conf;
{% endif %}
location / {
{% if item.auth_provider == 'basic' %}
# enable for basic auth
auth_basic "Restricted";
auth_basic_user_file /config/nginx/.htpasswd;
{% elif item.auth_provider and item.auth_provider != 'none' %}
# enable for {{ item.auth_provider }}
include /config/nginx/{{ item.auth_provider }}-location.conf;
{% else %}
# No authentication enabled for this service.
{% endif %}
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_app {{ item.name }};
set $upstream_port {{ item.port }};
set $upstream_proto {% if item.https %}https{% else %}http{% endif %};
proxy_pass $upstream_proto://$upstream_app:$upstream_port;
{% if item.hide_xframe %}
proxy_hide_header X-Frame-Options;
{% endif %}
{% if item.iframe_friendly %}
# Uncomment to allow loading in an iframe (i.e. Organizr)
#proxy_hide_header X-Frame-Options;
{% endif %}
{% if item.hide_x_forwarded_port %}
# Hide proxy port to prevent CSRF errors
proxy_hide_header X-Forwarded-Port;
{% endif %}
{% if item.set_x_scheme %}
proxy_set_header X-Scheme https;
{% endif %}
{# websocket support: disable buffering and forward the handshake headers #}
{% if item.websockets %}
proxy_buffering off;
proxy_socket_keepalive on;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Sec-WebSocket-Extensions $http_sec_websocket_extensions;
proxy_set_header Sec-WebSocket-Key $http_sec_websocket_key;
proxy_set_header Sec-WebSocket-Version $http_sec_websocket_version;
{% endif %}
{% if item.proxy_pass_headers %}
{% for header in item.proxy_pass_headers %}
proxy_pass_header {{ header }};
{% endfor %}
{% endif %}
{% if item.proxy_set_headers %}
{% for header in item.proxy_set_headers %}
proxy_set_header {{ header.key }} {{ header.value }};
{% endfor %}
{% endif %}
{% if item.proxy_hide_headers %}
{% for header in item.proxy_hide_headers %}
proxy_hide_header {{ header }};
{% endfor %}
{% endif %}
{% if item.add_headers %}
{% for header in item.add_headers %}
add_header {{ header.key }} "{{ header.value }}";
{% endfor %}
{% endif %}
{% if item.custom_directives %}
{% for directive in item.custom_directives %}
{{ directive }};
{% endfor %}
{% endif %}
}
{# optional /api location that bypasses the auth subrequest #}
{% if item.api %}
location ~ (?:/{{ item.name }})?/api {
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_app {{ item.name }};
set $upstream_port {{ item.port }};
set $upstream_proto {% if item.https %}https{% else %}http{% endif %};
proxy_pass $upstream_proto://$upstream_app:$upstream_port;
}
{% endif %}
{# extra_locations: per-location app/port/https fall back to the
   service-level values via the default() filters below #}
{% if item.extra_locations %}
{% for loc in item.extra_locations %}
location ~ (?:/{{ item.name }})?{{ loc.path }} {
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_app {{ loc.app | default(item.name) }};
set $upstream_port {{ loc.port | default(item.port) }};
set $upstream_proto {% if loc.https %}https{% elif item.https and loc.https is not defined %}https{% else %}http{% endif %};
proxy_pass $upstream_proto://$upstream_app:$upstream_port{% if loc.proxy_pass_path %}{{ loc.proxy_pass_path }}{% endif %};
{% if loc.proxy_set_headers %}
{% for header in loc.proxy_set_headers %}
proxy_set_header {{ header.key }} {{ header.value }};
{% endfor %}
{% endif %}
{% if loc.proxy_hide_headers %}
{% for header in loc.proxy_hide_headers %}
proxy_hide_header {{ header }};
{% endfor %}
{% endif %}
{% if loc.custom_directives %}
{% for directive in loc.custom_directives %}
{{ directive }};
{% endfor %}
{% endif %}
}
{% endfor %}
{% endif %}
}

2
root/app/le-renew.sh Normal file → Executable file
View File

@@ -6,4 +6,4 @@ echo
echo "<------------------------------------------------->" echo "<------------------------------------------------->"
echo "cronjob running on $(date)" echo "cronjob running on $(date)"
echo "Running certbot renew" echo "Running certbot renew"
certbot renew --non-interactive certbot renew --non-interactive --config-dir /config/etc/letsencrypt --logs-dir /config/log/letsencrypt --work-dir /tmp/letsencrypt --config /config/etc/letsencrypt/cli.ini

View File

View File

View File

View File

@@ -1,13 +1,8 @@
## Version 2024/03/14 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/authelia-location.conf.sample ## Version 2025/03/25 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/authelia-location.conf.sample
# Make sure that your authelia container is in the same user defined bridge network and is named authelia # Make sure that your authelia container is in the same user defined bridge network and is named authelia
# Rename /config/nginx/proxy-confs/authelia.subdomain.conf.sample to /config/nginx/proxy-confs/authelia.subdomain.conf # Rename /config/nginx/proxy-confs/authelia.subdomain.conf.sample to /config/nginx/proxy-confs/authelia.subdomain.conf
# For authelia 4.37 and below, make sure that the authelia configuration.yml has 'path: "authelia"' defined
# For authelia 4.38 and above, make sure that the authelia configuration.yml has 'address: "tcp://:9091/authelia"' defined
## Send a subrequest to Authelia to verify if the user is authenticated and has permission to access the resource ## Send a subrequest to Authelia to verify if the user is authenticated and has permission to access the resource
## For authelia 4.37 and below, use the following line
# auth_request /authelia/api/verify;
## For authelia 4.38 and above, use the following line
auth_request /authelia/api/authz/auth-request; auth_request /authelia/api/authz/auth-request;
## If the subrequest returns 200 pass to the backend, if the subrequest returns 401 redirect to the portal ## If the subrequest returns 200 pass to the backend, if the subrequest returns 401 redirect to the portal

View File

@@ -1,44 +1,15 @@
## Version 2024/03/16 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/authelia-server.conf.sample ## Version 2025/03/25 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/authelia-server.conf.sample
# Make sure that your authelia container is in the same user defined bridge network and is named authelia # Make sure that your authelia container is in the same user defined bridge network and is named authelia
# Rename /config/nginx/proxy-confs/authelia.subdomain.conf.sample to /config/nginx/proxy-confs/authelia.subdomain.conf # Rename /config/nginx/proxy-confs/authelia.subdomain.conf.sample to /config/nginx/proxy-confs/authelia.subdomain.conf
# For authelia 4.37 and below, make sure that the authelia configuration.yml has 'path: "authelia"' defined
# For authelia 4.38 and above, make sure that the authelia configuration.yml has 'address: "tcp://:9091/authelia"' defined
# location for authelia subfolder requests # location for authelia auth requests
location ^~ /authelia {
auth_request off; # requests to this subfolder must be accessible without authentication
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_authelia authelia;
proxy_pass http://$upstream_authelia:9091;
}
# location for authelia 4.37 and below auth requests
location = /authelia/api/verify {
internal;
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_authelia authelia;
proxy_pass http://$upstream_authelia:9091;
## Include the Set-Cookie header if present
auth_request_set $set_cookie $upstream_http_set_cookie;
add_header Set-Cookie $set_cookie;
proxy_pass_request_body off;
proxy_set_header Content-Length "";
}
# location for authelia 4.38 and above auth requests
location = /authelia/api/authz/auth-request { location = /authelia/api/authz/auth-request {
internal; internal;
include /config/nginx/proxy.conf; include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf; include /config/nginx/resolver.conf;
set $upstream_authelia authelia; set $upstream_authelia authelia;
proxy_pass http://$upstream_authelia:9091; proxy_pass http://$upstream_authelia:9091/api/authz/auth-request;
## Include the Set-Cookie header if present ## Include the Set-Cookie header if present
auth_request_set $set_cookie $upstream_http_set_cookie; auth_request_set $set_cookie $upstream_http_set_cookie;
@@ -62,11 +33,6 @@ location @authelia_proxy_signin {
## Translate the Location response header from the auth subrequest into a variable ## Translate the Location response header from the auth subrequest into a variable
auth_request_set $signin_url $upstream_http_location; auth_request_set $signin_url $upstream_http_location;
if ($signin_url = '') {
## Set the $signin_url variable
set $signin_url https://$http_host/authelia/?rd=$target_url;
}
## Redirect to login ## Redirect to login
return 302 $signin_url; return 302 $signin_url;
} }

View File

@@ -1,4 +1,4 @@
## Version 2023/04/27 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/authentik-server.conf.sample ## Version 2025/03/25 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/authentik-server.conf.sample
# Make sure that your authentik container is in the same user defined bridge network and is named authentik-server # Make sure that your authentik container is in the same user defined bridge network and is named authentik-server
# Rename /config/nginx/proxy-confs/authentik.subdomain.conf.sample to /config/nginx/proxy-confs/authentik.subdomain.conf # Rename /config/nginx/proxy-confs/authentik.subdomain.conf.sample to /config/nginx/proxy-confs/authentik.subdomain.conf
@@ -19,7 +19,7 @@ location = /outpost.goauthentik.io/auth/nginx {
include /config/nginx/proxy.conf; include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf; include /config/nginx/resolver.conf;
set $upstream_authentik authentik-server; set $upstream_authentik authentik-server;
proxy_pass http://$upstream_authentik:9000; proxy_pass http://$upstream_authentik:9000/outpost.goauthentik.io/auth/nginx;
## Include the Set-Cookie header if present ## Include the Set-Cookie header if present
auth_request_set $set_cookie $upstream_http_set_cookie; auth_request_set $set_cookie $upstream_http_set_cookie;

View File

@@ -1,4 +1,4 @@
## Version 2024/07/16 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/site-confs/default.conf.sample ## Version 2025/07/18 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/site-confs/default.conf.sample
# redirect all traffic to https # redirect all traffic to https
server { server {
@@ -13,7 +13,9 @@ server {
# main server block # main server block
server { server {
listen 443 ssl default_server; listen 443 ssl default_server;
# listen 443 quic reuseport default_server;
listen [::]:443 ssl default_server; listen [::]:443 ssl default_server;
# listen [::]:443 quic reuseport default_server;
server_name _; server_name _;
@@ -80,5 +82,5 @@ server {
# enable subdomain method reverse proxy confs # enable subdomain method reverse proxy confs
include /config/nginx/proxy-confs/*.subdomain.conf; include /config/nginx/proxy-confs/*.subdomain.conf;
# enable proxy cache for auth # enable env var subdomain method reverse proxy confs
proxy_cache_path cache/ keys_zone=auth_cache:10m; include /config/nginx/env-proxy-confs/*.subdomain.conf;

View File

@@ -0,0 +1,9 @@
## Version 2025/06/08 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/tinyauth-location.conf.sample
# Make sure that your tinyauth container is in the same user defined bridge network and is named tinyauth
# Rename /config/nginx/proxy-confs/tinyauth.subdomain.conf.sample to /config/nginx/proxy-confs/tinyauth.subdomain.conf
## Send a subrequest to tinyauth to verify if the user is authenticated and has permission to access the resource
auth_request /tinyauth;
## If the subrequest returns 200 pass to the backend, if the subrequest returns 401 redirect to the portal
error_page 401 = @tinyauth_login;

View File

@@ -0,0 +1,35 @@
## Version 2025/06/08 - Changelog: https://github.com/linuxserver/docker-swag/commits/master/root/defaults/nginx/tinyauth-server.conf.sample
# Make sure that your tinyauth container is in the same user defined bridge network and is named tinyauth
# Rename /config/nginx/proxy-confs/tinyauth.subdomain.conf.sample to /config/nginx/proxy-confs/tinyauth.subdomain.conf
# location for tinyauth auth requests
location /tinyauth {
internal;
include /config/nginx/proxy.conf;
include /config/nginx/resolver.conf;
set $upstream_tinyauth tinyauth;
proxy_pass http://$upstream_tinyauth:3000/api/auth/nginx;
proxy_set_header x-forwarded-proto $scheme;
proxy_set_header x-forwarded-host $http_host;
proxy_set_header x-forwarded-uri $request_uri;
}
# virtual location for tinyauth 401 redirects
location @tinyauth_login {
internal;
## Set the $target_url variable based on the original request
set_escape_uri $target_url $scheme://$http_host$request_uri;
## Set the $signin_url variable
set $domain $host;
if ($host ~* "^[^.]+\.([^.]+\..+)$") {
set $domain $1;
}
set $signin_url https://tinyauth.$domain/login?redirect_uri=$target_url;
## Redirect to login
return 302 $signin_url;
}

View File

@@ -5,4 +5,4 @@
0 3 * * 6 run-parts /etc/periodic/weekly 0 3 * * 6 run-parts /etc/periodic/weekly
0 5 1 * * run-parts /etc/periodic/monthly 0 5 1 * * run-parts /etc/periodic/monthly
8 2 * * * /app/le-renew.sh >> /config/log/letsencrypt/letsencrypt.log 2>&1 8 2 * * * /app/le-renew.sh >> /config/log/letsencrypt/renewal.log 2>&1

View File

@@ -38,12 +38,6 @@ if [[ "${VALIDATION}" = "dns" ]] && ! echo "${CERTBOT_DNS_AUTHENTICATORS}" | gre
sleep infinity sleep infinity
fi fi
# set owner of certbot's CONFIG_DIR, WORK_DIR, and LOGS_DIR to abc
lsiown -R abc:abc \
/etc/letsencrypt \
/var/lib/letsencrypt \
/var/log/letsencrypt
# set_ini_value logic: # set_ini_value logic:
# - if the name is not found in the file, append the name=value to the end of the file # - if the name is not found in the file, append the name=value to the end of the file
# - if the name is found in the file, replace the value # - if the name is found in the file, replace the value
@@ -62,12 +56,17 @@ touch /config/etc/letsencrypt/cli.ini
lsiown abc:abc /config/etc/letsencrypt/cli.ini lsiown abc:abc /config/etc/letsencrypt/cli.ini
grep -qF 'agree-tos' /config/etc/letsencrypt/cli.ini || echo 'agree-tos=true' >>/config/etc/letsencrypt/cli.ini grep -qF 'agree-tos' /config/etc/letsencrypt/cli.ini || echo 'agree-tos=true' >>/config/etc/letsencrypt/cli.ini
# Check for broken dns credentials value in cli.ini and remove
sed -i '/dns--credentials/d' /config/etc/letsencrypt/cli.ini
# Disable Certbot's built in log rotation
set_ini_value "max-log-backups" "0" /config/etc/letsencrypt/cli.ini
# copy dns default configs # copy dns default configs
cp -n /defaults/dns-conf/* /config/dns-conf/ 2> >(grep -v 'cp: not replacing') cp -n /defaults/dns-conf/* /config/dns-conf/ 2> >(grep -v 'cp: not replacing')
lsiown -R abc:abc /config/dns-conf lsiown -R abc:abc /config/dns-conf
# copy default renewal hooks # copy default renewal hooks
chmod -R +x /defaults/etc/letsencrypt/renewal-hooks
cp -nR /defaults/etc/letsencrypt/renewal-hooks/* /config/etc/letsencrypt/renewal-hooks/ 2> >(grep -v 'cp: not replacing') cp -nR /defaults/etc/letsencrypt/renewal-hooks/* /config/etc/letsencrypt/renewal-hooks/ 2> >(grep -v 'cp: not replacing')
lsiown -R abc:abc /config/etc/letsencrypt/renewal-hooks lsiown -R abc:abc /config/etc/letsencrypt/renewal-hooks
@@ -169,14 +168,14 @@ fi
rm -rf /config/keys/letsencrypt rm -rf /config/keys/letsencrypt
if [[ "${ONLY_SUBDOMAINS}" = "true" ]] && [[ ! "${SUBDOMAINS}" = "wildcard" ]]; then if [[ "${ONLY_SUBDOMAINS}" = "true" ]] && [[ ! "${SUBDOMAINS}" = "wildcard" ]]; then
DOMAIN="$(echo "${SUBDOMAINS}" | tr ',' ' ' | awk '{print $1}').${URL}" DOMAIN="$(echo "${SUBDOMAINS}" | tr ',' ' ' | awk '{print $1}').${URL}"
ln -s ../etc/letsencrypt/live/"${DOMAIN}" /config/keys/letsencrypt ln -s /config/etc/letsencrypt/live/"${DOMAIN}" /config/keys/letsencrypt
else else
ln -s ../etc/letsencrypt/live/"${URL}" /config/keys/letsencrypt ln -s /config/etc/letsencrypt/live/"${URL}" /config/keys/letsencrypt
fi fi
# cleanup unused csr and keys folders # cleanup unused csr and keys folders
rm -rf /etc/letsencrypt/csr rm -rf /config/etc/letsencrypt/csr
rm -rf /etc/letsencrypt/keys rm -rf /config/etc/letsencrypt/keys
# checking for changes in cert variables, revoking certs if necessary # checking for changes in cert variables, revoking certs if necessary
if [[ ! "${URL}" = "${ORIGURL}" ]] || if [[ ! "${URL}" = "${ORIGURL}" ]] ||
@@ -189,26 +188,17 @@ if [[ ! "${URL}" = "${ORIGURL}" ]] ||
[[ ! "${STAGING}" = "${ORIGSTAGING}" ]] || [[ ! "${STAGING}" = "${ORIGSTAGING}" ]] ||
[[ ! "${CERTPROVIDER}" = "${ORIGCERTPROVIDER}" ]]; then [[ ! "${CERTPROVIDER}" = "${ORIGCERTPROVIDER}" ]]; then
echo "Different validation parameters entered than what was used before. Revoking and deleting existing certificate, and an updated one will be created" echo "Different validation parameters entered than what was used before. Revoking and deleting existing certificate, and an updated one will be created"
if [[ "${ORIGCERTPROVIDER}" = "zerossl" ]] && [[ -n "${ORIGEMAIL}" ]]; then if [[ "${ORIGCERTPROVIDER}" = "zerossl" ]]; then
REV_ACMESERVER=("https://acme.zerossl.com/v2/DV90") REV_ACMESERVER=("https://acme.zerossl.com/v2/DV90")
REV_ZEROSSL_EAB_KID=$(awk -F "=" '/eab-kid/ {print $2}' "/config/etc/letsencrypt/renewal/${ORIGDOMAIN}.conf" | tr -d ' ')
REV_ZEROSSL_EAB_HMAC_KEY=$(awk -F "=" '/eab-hmac-key/ {print $2}' "/config/etc/letsencrypt/renewal/${ORIGDOMAIN}.conf" | tr -d ' ')
if [[ -z "${REV_ZEROSSL_EAB_KID}" ]] || [[ -z "${REV_ZEROSSL_EAB_HMAC_KEY}" ]]; then
REV_ZEROSSL_EAB_KID=$(awk -F "=" '/eab-kid/ {print $2}' /config/etc/letsencrypt/cli.ini | tr -d ' ')
REV_ZEROSSL_EAB_HMAC_KEY=$(awk -F "=" '/eab-hmac-key/ {print $2}' /config/etc/letsencrypt/cli.ini | tr -d ' ')
fi
if [[ -n "${REV_ZEROSSL_EAB_KID}" ]] && [[ -n "${REV_ZEROSSL_EAB_HMAC_KEY}" ]]; then
REV_ACMESERVER+=("--eab-kid" "${REV_ZEROSSL_EAB_KID}" "--eab-hmac-key" "${REV_ZEROSSL_EAB_HMAC_KEY}")
fi
elif [[ "${ORIGSTAGING}" = "true" ]]; then elif [[ "${ORIGSTAGING}" = "true" ]]; then
REV_ACMESERVER=("https://acme-staging-v02.api.letsencrypt.org/directory") REV_ACMESERVER=("https://acme-staging-v02.api.letsencrypt.org/directory")
else else
REV_ACMESERVER=("https://acme-v02.api.letsencrypt.org/directory") REV_ACMESERVER=("https://acme-v02.api.letsencrypt.org/directory")
fi fi
if [[ -f /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem ]]; then if [[ -f /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem ]]; then
certbot revoke --non-interactive --cert-path /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem --server "${REV_ACMESERVER[@]}" || true certbot revoke --config-dir /config/etc/letsencrypt --logs-dir /config/log/letsencrypt --work-dir /tmp/letsencrypt --config /config/etc/letsencrypt/cli.ini --non-interactive --cert-path /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem --key-path /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/privkey.pem --server "${REV_ACMESERVER[@]}" || true
else else
certbot revoke --non-interactive --cert-name "${ORIGDOMAIN}" --server "${REV_ACMESERVER[@]}" || true certbot revoke --config-dir /config/etc/letsencrypt --logs-dir /config/log/letsencrypt --work-dir /tmp/letsencrypt --config /config/etc/letsencrypt/cli.ini --non-interactive --cert-name "${ORIGDOMAIN}" --server "${REV_ACMESERVER[@]}" || true
fi fi
rm -rf /config/etc/letsencrypt/{accounts,archive,live,renewal} rm -rf /config/etc/letsencrypt/{accounts,archive,live,renewal}
fi fi
@@ -221,9 +211,9 @@ if [[ -f "/config/keys/letsencrypt/chain.pem" ]] && { [[ "${CERTPROVIDER}" == "l
echo "The cert seems to be using the old LE root cert, which is no longer valid. Deleting and revoking." echo "The cert seems to be using the old LE root cert, which is no longer valid. Deleting and revoking."
REV_ACMESERVER=("https://acme-v02.api.letsencrypt.org/directory") REV_ACMESERVER=("https://acme-v02.api.letsencrypt.org/directory")
if [[ -f /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem ]]; then if [[ -f /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem ]]; then
certbot revoke --non-interactive --cert-path /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem --server "${REV_ACMESERVER[@]}" || true certbot revoke --config-dir /config/etc/letsencrypt --logs-dir /config/log/letsencrypt --work-dir /tmp/letsencrypt --config /config/etc/letsencrypt/cli.ini --non-interactive --cert-path /config/etc/letsencrypt/live/"${ORIGDOMAIN}"/fullchain.pem --server "${REV_ACMESERVER[@]}" || true
else else
certbot revoke --non-interactive --cert-name "${ORIGDOMAIN}" --server "${REV_ACMESERVER[@]}" || true certbot revoke --config-dir /config/etc/letsencrypt --logs-dir /config/log/letsencrypt --work-dir /tmp/letsencrypt --config /config/etc/letsencrypt/cli.ini --non-interactive --cert-name "${ORIGDOMAIN}" --server "${REV_ACMESERVER[@]}" || true
fi fi
rm -rf /config/etc/letsencrypt/{accounts,archive,live,renewal} rm -rf /config/etc/letsencrypt/{accounts,archive,live,renewal}
fi fi
@@ -356,7 +346,7 @@ if [[ ! -f "/config/keys/letsencrypt/fullchain.pem" ]]; then
set_ini_value "eab-hmac-key" "${ZEROSSL_EAB_HMAC_KEY}" /config/etc/letsencrypt/cli.ini set_ini_value "eab-hmac-key" "${ZEROSSL_EAB_HMAC_KEY}" /config/etc/letsencrypt/cli.ini
fi fi
echo "Generating new certificate" echo "Generating new certificate"
certbot certonly --non-interactive --renew-by-default certbot certonly --config-dir /config/etc/letsencrypt --logs-dir /config/log/letsencrypt --work-dir /tmp/letsencrypt --config /config/etc/letsencrypt/cli.ini --non-interactive --renew-by-default
if [[ ! -d /config/keys/letsencrypt ]]; then if [[ ! -d /config/keys/letsencrypt ]]; then
if [[ "${VALIDATION}" = "dns" ]]; then if [[ "${VALIDATION}" = "dns" ]]; then
echo "ERROR: Cert does not exist! Please see the validation error above. Make sure you entered correct credentials into the ${DNSCREDENTIALFILE} file." echo "ERROR: Cert does not exist! Please see the validation error above. Make sure you entered correct credentials into the ${DNSCREDENTIALFILE} file."

View File

@@ -1,38 +1,40 @@
#!/usr/bin/with-contenv bash #!/usr/bin/with-contenv bash
# shellcheck shell=bash # shellcheck shell=bash
if ! iptables -L &> /dev/null; then if [[ -z ${LSIO_READ_ONLY_FS} ]] && [[ -z ${LSIO_NON_ROOT_USER} ]] && [[ "${DISABLE_F2B,,}" != "true" ]]; then
ln -sf /sbin/xtables-legacy-multi /sbin/iptables if ! iptables -L &> /dev/null; then
ln -sf /sbin/xtables-legacy-multi /sbin/iptables-save ln -sf /usr/sbin/xtables-legacy-multi /usr/sbin/iptables
ln -sf /sbin/xtables-legacy-multi /sbin/iptables-restore ln -sf /usr/sbin/xtables-legacy-multi /usr/sbin/iptables-save
ln -sf /sbin/xtables-legacy-multi /sbin/ip6tables ln -sf /usr/sbin/xtables-legacy-multi /usr/sbin/iptables-restore
ln -sf /sbin/xtables-legacy-multi /sbin/ip6tables-save ln -sf /usr/sbin/xtables-legacy-multi /usr/sbin/ip6tables
ln -sf /sbin/xtables-legacy-multi /sbin/ip6tables-restore ln -sf /usr/sbin/xtables-legacy-multi /usr/sbin/ip6tables-save
fi ln -sf /usr/sbin/xtables-legacy-multi /usr/sbin/ip6tables-restore
fi
# copy/update the fail2ban config defaults to/in /config # copy/update the fail2ban config defaults to/in /config
cp -R /defaults/fail2ban/filter.d /config/fail2ban/ cp -R /defaults/fail2ban/filter.d /config/fail2ban/
cp -R /defaults/fail2ban/action.d /config/fail2ban/ cp -R /defaults/fail2ban/action.d /config/fail2ban/
# if jail.local is missing in /config, copy default # if jail.local is missing in /config, copy default
if [[ ! -f /config/fail2ban/jail.local ]]; then if [[ ! -f /config/fail2ban/jail.local ]]; then
cp /defaults/fail2ban/jail.local /config/fail2ban/jail.local cp /defaults/fail2ban/jail.local /config/fail2ban/jail.local
fi fi
# Replace fail2ban config with user config # Replace fail2ban config with user config
if [[ -d /etc/fail2ban/filter.d ]]; then if [[ -d /etc/fail2ban/filter.d ]]; then
rm -rf /etc/fail2ban/filter.d rm -rf /etc/fail2ban/filter.d
fi fi
if [[ -d /etc/fail2ban/action.d ]]; then if [[ -d /etc/fail2ban/action.d ]]; then
rm -rf /etc/fail2ban/action.d rm -rf /etc/fail2ban/action.d
fi fi
cp -R /config/fail2ban/filter.d /etc/fail2ban/ cp -R /config/fail2ban/filter.d /etc/fail2ban/
cp -R /config/fail2ban/action.d /etc/fail2ban/ cp -R /config/fail2ban/action.d /etc/fail2ban/
cp /defaults/fail2ban/fail2ban.local /etc/fail2ban/ cp /defaults/fail2ban/fail2ban.local /etc/fail2ban/
cp /config/fail2ban/jail.local /etc/fail2ban/jail.local cp /config/fail2ban/jail.local /etc/fail2ban/jail.local
# logfiles needed by fail2ban # logfiles needed by fail2ban
if [[ ! -f /config/log/nginx/error.log ]]; then if [[ ! -f /config/log/nginx/error.log ]]; then
touch /config/log/nginx/error.log touch /config/log/nginx/error.log
fi fi
if [[ ! -f /config/log/nginx/access.log ]]; then if [[ ! -f /config/log/nginx/access.log ]]; then
touch /config/log/nginx/access.log touch /config/log/nginx/access.log
fi
fi fi

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-folders-config/run

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-nginx-config/run

View File

@@ -11,3 +11,9 @@ if [[ -f /config/nginx/ldap.conf ]]; then
Ensure your configs are updated and remove /config/nginx/ldap.conf Ensure your configs are updated and remove /config/nginx/ldap.conf
If you do not use this config, simply remove it." If you do not use this config, simply remove it."
fi fi
if grep -qrle ' /etc/letsencrypt' /config/nginx; then
echo " The following nginx confs are using certificates from the obsolete location
/etc/letsencrypt and should be updated to point to /config/etc/letsencrypt
"
echo -n " " && grep -rle ' /etc/letsencrypt' /config/nginx
fi

View File

@@ -2,8 +2,7 @@
# shellcheck shell=bash # shellcheck shell=bash
# permissions # permissions
find /config/log ! -path '/config/log/logrotate.status' -exec chmod +r {} \+
lsiown -R abc:abc \ lsiown -R abc:abc \
/config /config
chmod -R 0644 /etc/logrotate.d
chmod -R +r /config/log
chmod +x /app/le-renew.sh

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-samples-config/run

View File

@@ -22,6 +22,14 @@ if [[ ! -f /config/nginx/authentik-server.conf ]]; then
cp /defaults/nginx/authentik-server.conf.sample /config/nginx/authentik-server.conf cp /defaults/nginx/authentik-server.conf.sample /config/nginx/authentik-server.conf
fi fi
# copy tinyauth config files if they don't exist
if [[ ! -f /config/nginx/tinyauth-location.conf ]]; then
cp /defaults/nginx/tinyauth-location.conf.sample /config/nginx/tinyauth-location.conf
fi
if [[ ! -f /config/nginx/tinyauth-server.conf ]]; then
cp /defaults/nginx/tinyauth-server.conf.sample /config/nginx/tinyauth-server.conf
fi
# copy old ldap config file to new location # copy old ldap config file to new location
if [[ -f /config/nginx/ldap.conf ]] && [[ ! -f /config/nginx/ldap-server.conf ]]; then if [[ -f /config/nginx/ldap.conf ]] && [[ ! -f /config/nginx/ldap-server.conf ]]; then
cp /config/nginx/ldap.conf /config/nginx/ldap-server.conf cp /config/nginx/ldap.conf /config/nginx/ldap-server.conf
@@ -34,3 +42,19 @@ fi
if [[ ! -f /config/nginx/ldap-server.conf ]]; then if [[ ! -f /config/nginx/ldap-server.conf ]]; then
cp /defaults/nginx/ldap-server.conf.sample /config/nginx/ldap-server.conf cp /defaults/nginx/ldap-server.conf.sample /config/nginx/ldap-server.conf
fi fi
# clean the env target directory to ensure a fresh start
rm -f /config/nginx/env-proxy-confs/*
# check if any PROXY_CONFIG environment variables are set
if env | grep -q "^PROXY_CONFIG_"; then
echo "INFO: Found PROXY_CONFIG environment variables. Generating Nginx configs from environment..."
# run the Python generator script
echo "INFO: Running python config generator..."
if ! python3 /app/config-generator/generate_configs.py; then
echo "ERROR: The python config generator script failed. Please check the logs above. Container will not start."
exit 1
fi
echo "INFO: Config generation complete."
else
echo "INFO: No PROXY_CONFIG variables found. User is expected to manage /config/nginx/proxy-confs/ manually."
fi

View File

@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-swag-config/run

View File

@@ -6,7 +6,8 @@ mkdir -p \
/config/{fail2ban,dns-conf} \ /config/{fail2ban,dns-conf} \
/config/etc/letsencrypt/renewal-hooks \ /config/etc/letsencrypt/renewal-hooks \
/config/log/{fail2ban,letsencrypt,nginx} \ /config/log/{fail2ban,letsencrypt,nginx} \
/config/nginx/env-proxy-confs \
/config/nginx/proxy-confs \ /config/nginx/proxy-confs \
/run/fail2ban /run/fail2ban \
rm -rf /etc/letsencrypt /tmp/letsencrypt
ln -s /config/etc/letsencrypt /etc/letsencrypt

View File

@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-swag-folders/run

View File

@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-swag-samples/run

View File

@@ -1,7 +0,0 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
# Echo init finish for test runs
if [[ -n "${TEST_RUN}" ]]; then
echo '[services.d] done.'
fi

View File

@@ -1 +0,0 @@
oneshot

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-test-run/run

View File

@@ -1,5 +1,9 @@
#!/usr/bin/with-contenv bash #!/usr/bin/with-contenv bash
# shellcheck shell=bash # shellcheck shell=bash
exec \ if [[ -z ${LSIO_READ_ONLY_FS} ]] && [[ -z ${LSIO_NON_ROOT_USER} ]] && [[ "${DISABLE_F2B,,}" != "true" ]]; then
fail2ban-client -x -f start exec \
fail2ban-client -x -f start
else
sleep infinity
fi

View File

@@ -0,0 +1,41 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
if [[ ${SWAG_AUTORELOAD,,} == "true" ]]; then
if [[ -f "/etc/s6-overlay/s6-rc.d/svc-mod-swag-auto-reload/run" ]]; then
echo "ERROR: Legacy SWAG Auto Reload Mod detected, to use the built-in Auto Reload functionality please remove it from your container config."
sleep infinity
else
echo "Auto-reload: Watching the following folders for changes to .conf files:"
echo "/config/nginx"
ACTIVE_WATCH=("/config/nginx")
for i in $(echo "${SWAG_AUTORELOAD_WATCHLIST}" | tr "|" " "); do
if [ -f "${i}" ] || [ -d "${i}" ]; then
echo "${i}"
ACTIVE_WATCH+=("${i}")
fi
done
function wait_for_changes {
inotifywait -rq \
--event modify,move,create,delete \
--includei '\.conf$' \
"${ACTIVE_WATCH[@]}"
}
while wait_for_changes; do
NGINX_CONF=()
if ! grep -q "/config/nginx/nginx.conf" /etc/nginx/nginx.conf; then
NGINX_CONF=("-c" "/config/nginx/nginx.conf")
fi
if /usr/sbin/nginx "${NGINX_CONF[@]}" -t; then
echo "Changes to nginx config detected and the changes are valid, reloading nginx"
/usr/sbin/nginx "${NGINX_CONF[@]}" -s reload
else
echo "Changes to nginx config detected but the changes are not valid, skipping nginx reload. Please fix your config."
fi
done
fi
else
sleep infinity
fi

View File

@@ -0,0 +1 @@
longrun

View File

@@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
# Migrate existing renewal confs with old paths from /etc/letsencrypt to /config/etc/letsencrypt
if ls /config/etc/letsencrypt/renewal/*.conf >/dev/null 2>&1; then
sed -i 's| /etc/letsencrypt| /config/etc/letsencrypt|' /config/etc/letsencrypt/renewal/*.conf
fi