Compare commits

...

92 Commits

Author SHA1 Message Date
Michel Roegl-Brunner
bf99287624 Revert "tools.func: add GitLab release check/fetch/deploy helpers (#14133)"
This reverts commit 9503db319c.
2026-05-02 23:58:34 +02:00
community-scripts-pr-app[bot]
ec059f44ad Update CHANGELOG.md (#14197)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 21:44:59 +00:00
push-app-to-main[bot]
52bed128f0 Add protonmail-bridge (ct) (#14136) 2026-05-02 23:44:36 +02:00
community-scripts-pr-app[bot]
ca409fc06b Update CHANGELOG.md (#14196)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 21:43:50 +00:00
CanbiZ (MickLesk)
9503db319c tools.func: add GitLab release check/fetch/deploy helpers (#14133) 2026-05-02 23:43:26 +02:00
community-scripts-pr-app[bot]
d56fa7ab50 Update CHANGELOG.md (#14191)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 17:00:51 +00:00
community-scripts-pr-app[bot]
d4fd89931f Update CHANGELOG.md (#14190)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 17:00:42 +00:00
community-scripts-pr-app[bot]
5b7d65ce5c Update CHANGELOG.md (#14189)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 17:00:30 +00:00
Slaviša Arežina
7e3d3d2cf4 Nagios: Ping fix (#14186) 2026-05-02 19:00:19 +02:00
push-app-to-main[bot]
2714d9fae4 Tube Archivist (#14123)
Co-authored-by: Slaviša Arežina <58952836+tremor021@users.noreply.github.com>
Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
Co-authored-by: CanbiZ (MickLesk) <47820557+MickLesk@users.noreply.github.com>
2026-05-02 19:00:06 +02:00
community-scripts-pr-app[bot]
7af8e907e4 Update CHANGELOG.md (#14183)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 11:41:07 +00:00
CanbiZ (MickLesk)
0190f4e7f1 opnsense-vm: retry pvesm alloc on transient zfs 'got timeout' errors (#14157) 2026-05-02 13:40:49 +02:00
community-scripts-pr-app[bot]
87fa14afaf Update CHANGELOG.md (#14182)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 11:40:44 +00:00
community-scripts-pr-app[bot]
546de16ef6 Update CHANGELOG.md (#14181)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 11:40:34 +00:00
CanbiZ (MickLesk)
553925b8cc ImmichFrame: keep dotnet-sdk installed so update can run dotnet publish (#14158) 2026-05-02 13:40:14 +02:00
community-scripts-pr-app[bot]
7fd0b9f35b Update CHANGELOG.md (#14180)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 11:40:12 +00:00
Chris
6b8a606375 Use UV sync for shelfmark backend build; update to Python 3.14 (#14170) 2026-05-02 13:39:48 +02:00
community-scripts-pr-app[bot]
9033793a66 Update CHANGELOG.md (#14179)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 10:07:08 +00:00
community-scripts-pr-app[bot]
ccc0ff7a2f Update CHANGELOG.md (#14178)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 10:06:59 +00:00
community-scripts-pr-app[bot]
218fd9060e Update CHANGELOG.md (#14177)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 10:06:41 +00:00
CanbiZ (MickLesk)
a48d400da5 alpine: remove deb/ubuntu-only resource & storage checks from update-script (#14166) 2026-05-02 12:06:37 +02:00
community-scripts-pr-app[bot]
208d34d7a6 Update CHANGELOG.md (#14176)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 10:06:29 +00:00
CanbiZ (MickLesk)
9578c6fa91 core: prompt to also run installed addon update scripts (…/bin/update_*) after update_script (#14162) 2026-05-02 12:06:16 +02:00
community-scripts-pr-app[bot]
a7bcd44ae6 Update CHANGELOG.md (#14175)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-02 10:06:02 +00:00
CanbiZ (MickLesk)
289708cc10 Threadfin: use 'threadfin-app' as app name to avoid version-file clash (#14159) 2026-05-02 12:05:37 +02:00
community-scripts-pr-app[bot]
86293fda1b Update CHANGELOG.md (#14168)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-01 21:07:56 +00:00
Joerg Heinemann
cf391086e5 Step ca update (#14058)
* Patch for step-ca.sh

Patch for making $STD happy (/usr/bin/step is a symlink to /usr/bin/step-cli)

* Refactor step-ca installation script

Refactor step-ca installation script to improve configuration and template handling.

- Carve out step-ca-admin.sh
- Patch for making $STD happy (/usr/bin/step is a symlink to /usr/bin/step-cli)
- Define enhanced x509 CA and Certificate Templates
- Configure CA Provisioners, DB and CRL settings
- Generate Root CA Certificate and Key
   - Validity: 219168h (~25 Years)
   - maxPathLen: 1 (Root -> Intermediate -> Leaf) => Only one Intermediate CA allowed below Root CA
- Active revocation on Intermediate CA and Leaf Certificates by the usage of built-in Certificate Revocation List (CRL)
- Generate Intermediate CA Certificate Bundle and Key
   - Validity: 175368h (~20 Years)
   - maxPathLen: 0 (Root -> Intermediate -> Leaf) => Intermediate CA is only allowed to issue Leaf Certificates
   - Active revocation on Leaf Certificates by the usage of built-in Certificate Revocation List (CRL)
   - Bundle: Certificate Chain (including Root CA Certificate)

* Update source URL in step-ca.sh script
2026-05-01 23:07:30 +02:00
community-scripts-pr-app[bot]
bc72ce83ce Update CHANGELOG.md (#14167)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-01 20:48:14 +00:00
push-app-to-main[bot]
9eee1a7f95 SoulSync (#14124)
* Add soulsync (ct)

* Update pip install command to use requirements.txt

* Update soulsync.sh

---------

Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
Co-authored-by: Slaviša Arežina <58952836+tremor021@users.noreply.github.com>
2026-05-01 22:47:49 +02:00
community-scripts-pr-app[bot]
ecd1e29df5 Update CHANGELOG.md (#14165)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-01 20:44:53 +00:00
push-app-to-main[bot]
b556b5f8c6 Teable (#14125)
* Add teable (ct)

* Apply suggestion from @tremor021

---------

Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
Co-authored-by: Slaviša Arežina <58952836+tremor021@users.noreply.github.com>
2026-05-01 22:44:29 +02:00
community-scripts-pr-app[bot]
0dbee93410 Update CHANGELOG.md (#14156)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-01 19:00:34 +00:00
community-scripts-pr-app[bot]
1a7d1da029 Update CHANGELOG.md (#14155)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-05-01 19:00:08 +00:00
Kurt Anderson
f6ccf8be5e paperless-ngx: refresh NLTK data on update (#14144) 2026-05-01 21:00:05 +02:00
Letter N
ec81640d67 do not delete the public storage!!! (#14145) 2026-05-01 20:59:38 +02:00
Slaviša Arežina
03a301d736 update dependencies (#14152) 2026-05-01 20:59:10 +02:00
community-scripts-pr-app[bot]
be81d6255e Update CHANGELOG.md (#14139)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-30 17:17:23 +00:00
CanbiZ (MickLesk)
c9da2daec2 alpine-docker: install openssl as core dependency | alpine-komodo: check & install openssl if missing (#14134)
* fix(alpine-docker): install openssl as core dependency

* fix(komodo): ensure openssl is available on Alpine before generating secrets
2026-04-30 19:16:52 +02:00
community-scripts-pr-app[bot]
9015023e8c Update CHANGELOG.md (#14138)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-30 16:10:48 +00:00
CanbiZ (MickLesk)
e2a51d4941 Update source references to Codeberg in Endurain scripts (#14128) 2026-04-30 18:10:18 +02:00
community-scripts-pr-app[bot]
b4e0bb0686 Update CHANGELOG.md (#14135)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-30 12:17:04 +00:00
push-app-to-main[bot]
1109fe4b0d Nagios (#14126)
Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
Co-authored-by: Slaviša Arežina <58952836+tremor021@users.noreply.github.com>
Co-authored-by: CanbiZ (MickLesk) <47820557+MickLesk@users.noreply.github.com>
2026-04-30 14:16:39 +02:00
community-scripts-pr-app[bot]
a2daf7347f Update CHANGELOG.md (#14132)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-30 11:35:18 +00:00
Slaviša Arežina
564aaf5a9c tools.func: Manage minor versions for MongoDB 8.x (#14131) 2026-04-30 13:34:45 +02:00
community-scripts-pr-app[bot]
2edb231375 Update CHANGELOG.md (#14129)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-30 09:49:34 +00:00
push-app-to-main[bot]
e395e0d8ff Neko (#14121)
Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
2026-04-30 11:48:58 +02:00
community-scripts-pr-app[bot]
f10eef8243 Update CHANGELOG.md (#14118)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-29 21:55:41 +00:00
Slaviša Arežina
50b2256b1d MongoDB update (#14114) 2026-04-29 23:55:11 +02:00
community-scripts-pr-app[bot]
1bcc12af82 Update CHANGELOG.md (#14117)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-29 20:28:39 +00:00
community-scripts-pr-app[bot]
721667eaf3 Update CHANGELOG.md (#14116)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-29 20:28:30 +00:00
CanbiZ (MickLesk)
e9ae1bfde1 Add guidance when storage lacks rootdir support (#14108) 2026-04-29 22:28:08 +02:00
community-scripts-pr-app[bot]
84a3138e7e Update CHANGELOG.md (#14115)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-29 20:28:04 +00:00
Slaviša Arežina
e79d37b032 Correct the log (#14110) 2026-04-29 22:27:27 +02:00
community-scripts-pr-app[bot]
a47a425214 Update CHANGELOG.md (#14107)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-29 12:24:28 +00:00
CanbiZ (MickLesk)
6c96d992d6 Refactor: checkMK (#14105) 2026-04-29 14:23:59 +02:00
CanbiZ (MickLesk)
c5cbb46743 Enhance issue matching and redirect handling in close_issue_in_dev workflow 2026-04-29 14:04:04 +02:00
community-scripts-pr-app[bot]
d134fa200c Update CHANGELOG.md (#14100)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-29 03:55:15 +00:00
Slaviša Arežina
48774489f6 Unpin release (#14097) 2026-04-29 05:54:47 +02:00
community-scripts-pr-app[bot]
909e290d5e Update CHANGELOG.md (#14096)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-28 20:24:48 +00:00
iby
504ce22752 PatchMon Version 2.0.2 Script update (#14095)
Co-authored-by: Slaviša Arežina <58952836+tremor021@users.noreply.github.com>
2026-04-28 22:24:20 +02:00
Michel Roegl-Brunner
513e58b5d1 enhance pocketbase bot 2026-04-28 10:10:05 +02:00
community-scripts-pr-app[bot]
8da59d6133 Update CHANGELOG.md (#14086)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-28 07:19:06 +00:00
push-app-to-main[bot]
1f6303c918 StoryBook (#14081)
Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
Co-authored-by: Michel Roegl-Brunner <73236783+michelroegl-brunner@users.noreply.github.com>
2026-04-28 09:18:27 +02:00
community-scripts-pr-app[bot]
d05305d4c4 Update CHANGELOG.md (#14083)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-28 06:42:01 +00:00
push-app-to-main[bot]
ed7156b89c Add coredns (ct) (#14082)
Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
2026-04-28 08:41:36 +02:00
community-scripts-pr-app[bot]
4dc7418b3d Update CHANGELOG.md (#14080)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-28 04:29:34 +00:00
Jerry1098
608b77a662 Fix Dawarich Install/Update (#14078)
* [feat] adding envs

Add required envs to .env

https://github.com/Freika/dawarich/issues/2543

* Dawarich: add required envs to install script
2026-04-28 06:29:10 +02:00
community-scripts-pr-app[bot]
a7b8259022 Update CHANGELOG.md (#14077)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 21:14:53 +00:00
rellek
b226c87a00 Add pamUsername column to userOrgs table (#14075) 2026-04-27 23:14:24 +02:00
community-scripts-pr-app[bot]
ea296b59f4 Update CHANGELOG.md (#14066)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 11:31:28 +00:00
community-scripts-pr-app[bot]
6ab9737137 Update CHANGELOG.md (#14065)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 11:31:18 +00:00
CanbiZ (MickLesk)
6044637f12 Dawarich: run db:migrate before assets:precompile (#14051)
* fix(dawarich): run db:migrate before assets:precompile

In Rails production, eager loading during assets:precompile can execute
DB queries. Dawarich 1.7.0 adds new tables (monthly digest email
preferences, S3 storage settings). Running precompile before migrate
causes 'Operation not permitted' / exit code 1 when those tables do
not exist yet.

Reordered to: db:migrate -> assets:precompile -> data:migrate,
which is the correct Rails deployment sequence.

Fixes #14048

* add otp key
2026-04-27 13:31:02 +02:00
community-scripts-pr-app[bot]
93a53fe16e Update CHANGELOG.md (#14064)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 11:30:48 +00:00
community-scripts-pr-app[bot]
5cab784bcb Update CHANGELOG.md (#14063)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 11:30:22 +00:00
CanbiZ (MickLesk)
585de1ba0c fix(update-lxcs/apps): avoid pct exec on containers mid-shutdown (#14050)
Both update-lxcs.sh and update-apps.sh backgrounded pct shutdown and then
immediately called pct exec on the same container, causing 'Error: unexpected
status' which terminated the loop after the first container.

update-lxcs.sh: wrapped reboot-required and patchmon-agent checks in a
guard that only runs them when the container was already running (not
one that was started and is now being shut down).

update-apps.sh: moved pct set (resource reset) and the pct exec
reboot-required check to run before pct shutdown is issued.

Fixes #14027
2026-04-27 13:30:20 +02:00
CanbiZ (MickLesk)
c32ca537f1 fix(technitiumdns): always install .NET 10 if not already present (#14049)
Previously the update script only upgraded .NET when aspnetcore-runtime-8.0
or 9.0 was detected via is_package_installed. Containers where detection
failed would silently skip the upgrade block, leaving Technitium v15
(requires .NET 10) starting against .NET 8/9 and immediately failing.

Changed condition to: install .NET 10 unless it is already installed.
Old 8.0/9.0 packages are removed with || true to avoid errors on clean
installs.

Fixes #14045
2026-04-27 13:29:57 +02:00
community-scripts-pr-app[bot]
424575d8c1 Update CHANGELOG.md (#14060)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 10:06:41 +00:00
Chris
782420b4e4 PatchMon: v2.0.0 migration (#14015)
Co-authored-by: CanbiZ (MickLesk) <47820557+MickLesk@users.noreply.github.com>
2026-04-27 12:06:16 +02:00
community-scripts-pr-app[bot]
9b8129abd3 Update CHANGELOG.md (#14055)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 08:17:35 +00:00
Joerg Heinemann
1c169fc7e2 Add patchmon-agent report execution in update script (#14054) 2026-04-27 10:17:05 +02:00
community-scripts-pr-app[bot]
f985d84952 Update CHANGELOG.md (#14052)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-27 07:00:57 +00:00
Mike
88397b48dc Update build.func - fixed spelling mistake (#14047) 2026-04-27 09:00:29 +02:00
community-scripts-pr-app[bot]
91b03574e4 Update CHANGELOG.md (#14044)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-26 20:12:47 +00:00
CanbiZ (MickLesk)
ca915da8c1 Fix: Correct deb822 repository flat path detection (#14037)
The setup_deb822_repo function was only checking for the literal './'
suite value, but should reject any suite ending with '/', which
indicates a flat repository that must not include Components in the
DEB822 format.

This fix aligns ProxmoxVE with the correct behavior already present
in ProxmoxVED.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-26 22:12:20 +02:00
community-scripts-pr-app[bot]
95f2d24f53 Update CHANGELOG.md (#14042)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-26 19:22:27 +00:00
CanbiZ (MickLesk)
df9fa394b8 Increase Frigate default CPU cores from 4 to 8 (#14039)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-26 21:22:00 +02:00
community-scripts-pr-app[bot]
1e1e96b68e Update CHANGELOG.md (#14040)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-26 19:04:56 +00:00
Slaviša Arežina
13bd09532a Set up directories and enable Technitium DNS service (#14030)
Create necessary directories for Technitium DNS service and update systemd service file.
2026-04-26 21:04:28 +02:00
community-scripts-pr-app[bot]
b78cdb4008 Update CHANGELOG.md (#14029)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-26 06:52:00 +00:00
Ömer Taha Öztop
4963385bf9 fix(2fauth): make update_script idempotent and preserve ownership (#14018)
The update was failing with 'mv: cannot stat /opt/2fauth-backup/.env:
No such file or directory' on a system where a previous run had left
/opt/2fauth-backup behind. mv would then nest /opt/2fauth inside the
existing backup directory (as /opt/2fauth-backup/2fauth/), so the
restore step looked at the wrong path.

After fixing that, the app returned a 500 ('Key path
file:///opt/2fauth/storage/oauth-public.key does not exist or is not
readable') because chown/chmod ran before composer install and
php artisan 2fauth:install, leaving vendor/, bootstrap/cache/* and
the regenerated oauth keys owned by root and unreadable by www-data.

- Remove any stale /opt/2fauth-backup before creating the backup, and
  remove it again at the end so the next run starts clean
- Use cp instead of mv when restoring .env/storage so the backup is
  preserved until the update completes
- Move chown/chmod to AFTER composer + artisan, matching the order in
  install/2fauth-install.sh
- Restart php8.4-fpm in addition to nginx so opcache picks up the new
  cached config
- Drop redundant quotes around literal paths to match the rest of the
  codebase
2026-04-26 08:51:33 +02:00
community-scripts-pr-app[bot]
799f3bf0fb Update CHANGELOG.md (#14028)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2026-04-26 06:49:13 +00:00
push-app-to-main[bot]
2f6f0880ac TREK (#14017)
* Add trek (ct)

* Update success message in trek.sh

* Simplify TREK installation script

Removed initialization wait and health check for TREK.

---------

Co-authored-by: push-app-to-main[bot] <203845782+push-app-to-main[bot]@users.noreply.github.com>
Co-authored-by: CanbiZ (MickLesk) <47820557+MickLesk@users.noreply.github.com>
2026-04-26 08:48:46 +02:00
68 changed files with 2808 additions and 460 deletions

View File

@@ -62,10 +62,10 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
issues=$(gh issue list --repo community-scripts/ProxmoxVED --json number,title --jq '.[] | {number, title}')
best_match_score=0
best_match_number=0
for issue in $(echo "$issues" | jq -r '. | @base64'); do
_jq() {
echo ${issue} | base64 --decode | jq -r ${1}
@@ -113,7 +113,8 @@ jobs:
const http = require('http');
const url = require('url');
function request(fullUrl, opts) {
function request(fullUrl, opts, redirectsLeft) {
if (redirectsLeft === undefined) redirectsLeft = 5;
return new Promise(function(resolve, reject) {
const u = url.parse(fullUrl);
const isHttps = u.protocol === 'https:';
@@ -128,6 +129,19 @@ jobs:
if (body) options.headers['Content-Length'] = Buffer.byteLength(body);
const lib = isHttps ? https : http;
const req = lib.request(options, function(res) {
// Follow redirects (301/302/307/308)
if ([301, 302, 307, 308].indexOf(res.statusCode) !== -1 && res.headers.location && redirectsLeft > 0) {
res.resume();
const nextUrl = url.resolve(fullUrl, res.headers.location);
// For 301/302, browsers historically downgrade to GET; preserve method for 307/308.
const nextOpts = Object.assign({}, opts);
if (res.statusCode === 301 || res.statusCode === 302) {
nextOpts.method = 'GET';
delete nextOpts.body;
}
resolve(request(nextUrl, nextOpts, redirectsLeft - 1));
return;
}
let data = '';
res.on('data', function(chunk) { data += chunk; });
res.on('end', function() {

184
.github/workflows/pocketbase-bot.yml generated vendored
View File

@@ -7,7 +7,7 @@ on:
permissions:
issues: write
pull-requests: write
contents: read
contents: write
jobs:
pocketbase-bot:
@@ -95,6 +95,149 @@ jobs:
return request('https://api.github.com' + path, { method: method || 'GET', headers, body: bodyStr });
}
function encodeContentPath(filePath) {
return filePath.split('/').map(encodeURIComponent).join('/');
}
function decodeGitHubContent(content) {
return Buffer.from((content || '').replace(/\n/g, ''), 'base64').toString('utf8');
}
function sanitizeBranchPart(value) {
return (value || '')
.toLowerCase()
.replace(/[^a-z0-9._/-]+/g, '-')
.replace(/\/+/g, '/')
.replace(/^-+|-+$/g, '');
}
function applyCtDefaultChanges(scriptText, varChanges) {
let nextText = scriptText;
const updatedVars = [];
const unchangedVars = [];
for (const [varName, rawValue] of Object.entries(varChanges)) {
const newValue = String(rawValue);
const pattern = new RegExp('(^\\s*' + varName + '="\\$\\{' + varName + ':-)([^"}]*)(\\}"\\s*$)', 'm');
const match = nextText.match(pattern);
if (!match) continue;
if (match[2] === newValue) {
unchangedVars.push(varName);
continue;
}
nextText = nextText.replace(pattern, '$1' + newValue + '$3');
updatedVars.push(varName);
}
return { nextText, updatedVars, unchangedVars };
}
async function ensureBranch(defaultBranch, branchName) {
const branchRefRes = await ghRequest('/repos/' + owner + '/' + repo + '/git/ref/heads/' + encodeURIComponent(branchName));
if (branchRefRes.ok) return;
const defaultRefRes = await ghRequest('/repos/' + owner + '/' + repo + '/git/ref/heads/' + encodeURIComponent(defaultBranch));
if (!defaultRefRes.ok) {
throw new Error('Could not read default branch ref: ' + defaultRefRes.body);
}
const defaultRef = JSON.parse(defaultRefRes.body);
const createBranchRes = await ghRequest('/repos/' + owner + '/' + repo + '/git/refs', 'POST', {
ref: 'refs/heads/' + branchName,
sha: defaultRef.object.sha
});
if (!createBranchRes.ok) {
throw new Error('Could not create branch: ' + createBranchRes.body);
}
}
async function upsertCtDefaultsPr(slugValue, varChanges) {
const wantedEntries = Object.entries(varChanges || {}).filter(function ([, v]) {
return v !== undefined && v !== null && String(v) !== '';
});
if (wantedEntries.length === 0) {
return { status: 'skipped', reason: 'No mapped CT defaults changed.' };
}
const repoRes = await ghRequest('/repos/' + owner + '/' + repo);
if (!repoRes.ok) {
throw new Error('Could not read repository metadata: ' + repoRes.body);
}
const repoInfo = JSON.parse(repoRes.body);
const defaultBranch = repoInfo.default_branch;
const ctPath = 'ct/' + slugValue + '.sh';
const encodedCtPath = encodeContentPath(ctPath);
const defaultFileRes = await ghRequest('/repos/' + owner + '/' + repo + '/contents/' + encodedCtPath + '?ref=' + encodeURIComponent(defaultBranch));
if (defaultFileRes.statusCode === 404) {
return { status: 'skipped', reason: 'No matching CT file found at `' + ctPath + '`.' };
}
if (!defaultFileRes.ok) {
throw new Error('Could not read CT file from default branch: ' + defaultFileRes.body);
}
const branchName = 'pocketbase-sync/' + sanitizeBranchPart(slugValue || 'unknown');
await ensureBranch(defaultBranch, branchName);
const branchFileRes = await ghRequest('/repos/' + owner + '/' + repo + '/contents/' + encodedCtPath + '?ref=' + encodeURIComponent(branchName));
if (!branchFileRes.ok) {
throw new Error('Could not read CT file from sync branch: ' + branchFileRes.body);
}
const branchFile = JSON.parse(branchFileRes.body);
const currentBranchText = decodeGitHubContent(branchFile.content);
const updateResult = applyCtDefaultChanges(currentBranchText, Object.fromEntries(wantedEntries));
if (updateResult.updatedVars.length === 0) {
return { status: 'skipped', reason: 'CT defaults already up to date.', unchangedVars: updateResult.unchangedVars };
}
const commitMessage = 'chore(ct): sync ' + slugValue + ' defaults from PocketBase';
const putRes = await ghRequest('/repos/' + owner + '/' + repo + '/contents/' + encodedCtPath, 'PUT', {
message: commitMessage,
content: Buffer.from(updateResult.nextText, 'utf8').toString('base64'),
sha: branchFile.sha,
branch: branchName
});
if (!putRes.ok) {
throw new Error('Could not update CT file: ' + putRes.body);
}
const openPrRes = await ghRequest(
'/repos/' + owner + '/' + repo + '/pulls?state=open&head=' + encodeURIComponent(owner + ':' + branchName) + '&base=' + encodeURIComponent(defaultBranch)
);
if (!openPrRes.ok) {
throw new Error('Could not query existing PRs: ' + openPrRes.body);
}
const openPrs = JSON.parse(openPrRes.body);
if (openPrs.length > 0) {
return { status: 'updated', prUrl: openPrs[0].html_url, updatedVars: updateResult.updatedVars };
}
const prTitle = 'chore(ct): sync ' + slugValue + ' defaults with PocketBase';
const prBody =
'## Summary\n' +
'- Sync default CT variables for `' + slugValue + '` after `/pocketbase` update.\n' +
'- Updated vars: `' + updateResult.updatedVars.join('`, `') + '`.\n\n' +
'## Source\n' +
'- Triggered by @' + actor + ' via PocketBase bot.\n';
const createPrRes = await ghRequest('/repos/' + owner + '/' + repo + '/pulls', 'POST', {
title: prTitle,
body: prBody,
head: branchName,
base: defaultBranch
});
if (!createPrRes.ok) {
throw new Error('Could not create PR: ' + createPrRes.body);
}
const pr = JSON.parse(createPrRes.body);
return { status: 'created', prUrl: pr.html_url, updatedVars: updateResult.updatedVars };
}
function formatCtSyncResult(syncResult) {
if (!syncResult) return '';
if (syncResult.status === 'created') return '\n\n**CT sync PR:** ' + syncResult.prUrl;
if (syncResult.status === 'updated') return '\n\n**CT sync PR updated:** ' + syncResult.prUrl;
if (syncResult.status === 'skipped') return '\n\n**CT sync skipped:** ' + syncResult.reason;
return '';
}
async function addReaction(content) {
try {
await ghRequest(
@@ -510,6 +653,7 @@ jobs:
const RESOURCE_KEYS = { cpu: 'number', ram: 'number', hdd: 'number', os: 'string', version: 'string' };
const METHOD_KEYS = { config_path: 'string', script: 'string' };
const ALL_METHOD_KEYS = Object.assign({}, RESOURCE_KEYS, METHOD_KEYS);
const RESOURCE_TO_CT_VAR = { cpu: 'var_cpu', ram: 'var_ram', hdd: 'var_disk', os: 'var_os', version: 'var_version' };
function applyMethodChanges(method, parsed) {
if (!method.resources) method.resources = {};
@@ -550,6 +694,7 @@ jobs:
if (addMatch) {
// ── METHOD ADD ───────────────────────────────────────────────
const newType = addMatch[1];
const parsed = addMatch[2] ? parseKVPairs(addMatch[2]) : {};
if (methodsArr.some(function (im) { return (im.type || '').toLowerCase() === newType.toLowerCase(); })) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Install method `' + newType + '` already exists for `' + slug + '`.\n\nUse `/pocketbase ' + slug + ' method list` to see all methods.');
@@ -557,7 +702,6 @@ jobs:
}
const newMethod = { type: newType, resources: { cpu: 1, ram: 512, hdd: 4, os: 'debian', version: '13' } };
if (addMatch[2]) {
const parsed = parseKVPairs(addMatch[2]);
const unknown = Object.keys(parsed).filter(function (k) { return !ALL_METHOD_KEYS[k]; });
if (unknown.length > 0) {
await addReaction('-1');
@@ -569,10 +713,21 @@ jobs:
methodsArr.push(newMethod);
await patchMethods(methodsArr);
await revalidate(slug);
const addCtChanges = {};
for (const [k, v] of Object.entries(parsed)) {
if (RESOURCE_TO_CT_VAR[k]) addCtChanges[RESOURCE_TO_CT_VAR[k]] = v;
}
let addCtSync = null;
try {
addCtSync = await upsertCtDefaultsPr(slug, addCtChanges);
} catch (e) {
addCtSync = { status: 'skipped', reason: 'CT sync failed: ' + e.message };
}
await addReaction('+1');
await postComment(
'✅ **PocketBase Bot**: Added install method **`' + newType + '`** to **`' + slug + '`**\n\n' +
formatMethodsList([newMethod]) + '\n\n' +
formatCtSyncResult(addCtSync) + '\n\n' +
'*Executed by @' + actor + '*'
);
@@ -640,6 +795,16 @@ jobs:
applyMethodChanges(methodsArr[idx], parsed);
await patchMethods(methodsArr);
await revalidate(slug);
const editCtChanges = {};
for (const [k, v] of Object.entries(parsed)) {
if (RESOURCE_TO_CT_VAR[k]) editCtChanges[RESOURCE_TO_CT_VAR[k]] = v;
}
let editCtSync = null;
try {
editCtSync = await upsertCtDefaultsPr(slug, editCtChanges);
} catch (e) {
editCtSync = { status: 'skipped', reason: 'CT sync failed: ' + e.message };
}
const changesLines = Object.entries(parsed)
.map(function ([k, v]) {
@@ -650,6 +815,7 @@ jobs:
await postComment(
'✅ **PocketBase Bot**: Updated install method **`' + methodsArr[idx].type + '`** for **`' + slug + '`**\n\n' +
'**Changes applied:**\n' + changesLines + '\n\n' +
formatCtSyncResult(editCtSync) + '\n\n' +
'*Executed by @' + actor + '*'
);
}
@@ -712,9 +878,11 @@ jobs:
project_url: 'string',
github: 'string',
config_path: 'string',
tags: 'string',
port: 'number',
default_user: 'nullable_string',
default_passwd: 'nullable_string',
unprivileged: 'number',
updateable: 'boolean',
privileged: 'boolean',
has_arm: 'boolean',
@@ -781,6 +949,17 @@ jobs:
process.exit(1);
}
await revalidate(slug);
const FIELD_TO_CT_VAR = { tags: 'var_tags', unprivileged: 'var_unprivileged' };
const fieldCtChanges = {};
for (const [k, v] of Object.entries(payload)) {
if (FIELD_TO_CT_VAR[k]) fieldCtChanges[FIELD_TO_CT_VAR[k]] = v;
}
let fieldCtSync = null;
try {
fieldCtSync = await upsertCtDefaultsPr(slug, fieldCtChanges);
} catch (e) {
fieldCtSync = { status: 'skipped', reason: 'CT sync failed: ' + e.message };
}
await addReaction('+1');
const changesLines = Object.entries(payload)
.map(function ([k, v]) { return '- `' + k + '` → `' + JSON.stringify(v) + '`'; })
@@ -788,6 +967,7 @@ jobs:
await postComment(
'✅ **PocketBase Bot**: Updated **`' + slug + '`** successfully!\n\n' +
'**Changes applied:**\n' + changesLines + '\n\n' +
formatCtSyncResult(fieldCtSync) + '\n\n' +
'*Executed by @' + actor + '*'
);
}

View File

@@ -448,8 +448,157 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
</details>
## 2026-05-02
### 🆕 New Scripts
- protonmail-bridge ([#14136](https://github.com/community-scripts/ProxmoxVE/pull/14136))
- Tube Archivist ([#14123](https://github.com/community-scripts/ProxmoxVE/pull/14123))
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- Nagios: Ping fix [@tremor021](https://github.com/tremor021) ([#14186](https://github.com/community-scripts/ProxmoxVE/pull/14186))
- opnsense-vm: retry pvesm alloc on transient zfs 'got timeout' errors [@MickLesk](https://github.com/MickLesk) ([#14157](https://github.com/community-scripts/ProxmoxVE/pull/14157))
- ImmichFrame: fix update by reinstalling dotnet-sdk before publish [@MickLesk](https://github.com/MickLesk) ([#14158](https://github.com/community-scripts/ProxmoxVE/pull/14158))
- [FIX]ShelfMark: Use UV sync for shelfmark backend build; update to Python 3.14 [@vhsdream](https://github.com/vhsdream) ([#14170](https://github.com/community-scripts/ProxmoxVE/pull/14170))
- alpine: remove deb/ubuntu-only resource & storage checks from update-script [@MickLesk](https://github.com/MickLesk) ([#14166](https://github.com/community-scripts/ProxmoxVE/pull/14166))
- Threadfin: use 'threadfin-app' as app name to avoid version-file clash [@MickLesk](https://github.com/MickLesk) ([#14159](https://github.com/community-scripts/ProxmoxVE/pull/14159))
### 💾 Core
- #### ✨ New Features
- tools.func: add GitLab release check/fetch/deploy helpers [@MickLesk](https://github.com/MickLesk) ([#14133](https://github.com/community-scripts/ProxmoxVE/pull/14133))
- core: prompt to also run installed addon update scripts (…/bin/update_*) after update_script [@MickLesk](https://github.com/MickLesk) ([#14162](https://github.com/community-scripts/ProxmoxVE/pull/14162))
## 2026-05-01
### 🆕 New Scripts
- SoulSync ([#14124](https://github.com/community-scripts/ProxmoxVE/pull/14124))
- Teable ([#14125](https://github.com/community-scripts/ProxmoxVE/pull/14125))
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- Step ca update [@heinemannj](https://github.com/heinemannj) ([#14058](https://github.com/community-scripts/ProxmoxVE/pull/14058))
- paperless-ngx: refresh NLTK data on update [@kurtislanderson](https://github.com/kurtislanderson) ([#14144](https://github.com/community-scripts/ProxmoxVE/pull/14144))
- [Pelican Panel] stop deleting the public storage [@LetterN](https://github.com/LetterN) ([#14145](https://github.com/community-scripts/ProxmoxVE/pull/14145))
- #### 🔧 Refactor
- Mail-Archiver: update dependencies [@tremor021](https://github.com/tremor021) ([#14152](https://github.com/community-scripts/ProxmoxVE/pull/14152))
## 2026-04-30
### 🆕 New Scripts
- Nagios ([#14126](https://github.com/community-scripts/ProxmoxVE/pull/14126))
- Neko ([#14121](https://github.com/community-scripts/ProxmoxVE/pull/14121))
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- alpine-docker: install openssl as core dependency | alpine-komodo: check & install openssl if missing [@MickLesk](https://github.com/MickLesk) ([#14134](https://github.com/community-scripts/ProxmoxVE/pull/14134))
- endurain: update source references to Codeberg [@MickLesk](https://github.com/MickLesk) ([#14128](https://github.com/community-scripts/ProxmoxVE/pull/14128))
### 💾 Core
- #### 🔧 Refactor
- tools.func: Manage minor versions for MongoDB 8.x [@tremor021](https://github.com/tremor021) ([#14131](https://github.com/community-scripts/ProxmoxVE/pull/14131))
## 2026-04-29
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- GrayLog: MongoDB update to 8.2.x [@tremor021](https://github.com/tremor021) ([#14114](https://github.com/community-scripts/ProxmoxVE/pull/14114))
- Graylog: Better information in the log file [@tremor021](https://github.com/tremor021) ([#14110](https://github.com/community-scripts/ProxmoxVE/pull/14110))
- #### 🔧 Refactor
- Refactor: checkMK [@MickLesk](https://github.com/MickLesk) ([#14105](https://github.com/community-scripts/ProxmoxVE/pull/14105))
- PatchMon: Unpin release [@tremor021](https://github.com/tremor021) ([#14097](https://github.com/community-scripts/ProxmoxVE/pull/14097))
### 💾 Core
- #### 🔧 Refactor
- core: add guidance when storage lacks rootdir support [@MickLesk](https://github.com/MickLesk) ([#14108](https://github.com/community-scripts/ProxmoxVE/pull/14108))
## 2026-04-28
### 🆕 New Scripts
- StoryBook ([#14081](https://github.com/community-scripts/ProxmoxVE/pull/14081))
- CoreDNS ([#14082](https://github.com/community-scripts/ProxmoxVE/pull/14082))
### 🚀 Updated Scripts
- Fix Dawarich Install/Update [@Jerry1098](https://github.com/Jerry1098) ([#14078](https://github.com/community-scripts/ProxmoxVE/pull/14078))
- #### ✨ New Features
- PatchMon Version 2.0.2 Script update [@9technologygroup](https://github.com/9technologygroup) ([#14095](https://github.com/community-scripts/ProxmoxVE/pull/14095))
## 2026-04-27
### 🚀 Updated Scripts
- Add pamUsername column to userOrgs table [@JVKeller](https://github.com/JVKeller) ([#14075](https://github.com/community-scripts/ProxmoxVE/pull/14075))
- #### 🐞 Bug Fixes
- Dawarich: run db:migrate before assets:precompile [@MickLesk](https://github.com/MickLesk) ([#14051](https://github.com/community-scripts/ProxmoxVE/pull/14051))
- TechnitiumDNS: always install .NET 10 if not already present [@MickLesk](https://github.com/MickLesk) ([#14049](https://github.com/community-scripts/ProxmoxVE/pull/14049))
- #### 💥 Breaking Changes
- PatchMon: v2.0.0 migration [@vhsdream](https://github.com/vhsdream) ([#14015](https://github.com/community-scripts/ProxmoxVE/pull/14015))
### 💾 Core
- #### 🔧 Refactor
- Update build.func - fixed spelling mistake [@m1ckywill](https://github.com/m1ckywill) ([#14047](https://github.com/community-scripts/ProxmoxVE/pull/14047))
### 🧰 Tools
- #### 🐞 Bug Fixes
- update-lxcs/apps: avoid pct exec on containers mid-shutdown [@MickLesk](https://github.com/MickLesk) ([#14050](https://github.com/community-scripts/ProxmoxVE/pull/14050))
- #### ✨ New Features
- Add patchmon-agent report execution in update script [@heinemannj](https://github.com/heinemannj) ([#14054](https://github.com/community-scripts/ProxmoxVE/pull/14054))
## 2026-04-26
### 🆕 New Scripts
- TREK ([#14017](https://github.com/community-scripts/ProxmoxVE/pull/14017))
### 🚀 Updated Scripts
- fix(2fauth): handle stale backup directory on update [@omertahaoztop](https://github.com/omertahaoztop) ([#14018](https://github.com/community-scripts/ProxmoxVE/pull/14018))
- #### 🐞 Bug Fixes
- Increase Frigate default CPU cores from 4 to 8 [@MickLesk](https://github.com/MickLesk) ([#14039](https://github.com/community-scripts/ProxmoxVE/pull/14039))
- Technitium DNS: Ensure directories exist before running service [@tremor021](https://github.com/tremor021) ([#14030](https://github.com/community-scripts/ProxmoxVE/pull/14030))
### 💾 Core
- #### 🐞 Bug Fixes
- core: Correct deb822 repository flat path detection [@MickLesk](https://github.com/MickLesk) ([#14037](https://github.com/community-scripts/ProxmoxVE/pull/14037))
## 2026-04-25
### 🚀 Updated Scripts

View File

@@ -24,7 +24,7 @@ function update_script() {
check_container_storage
check_container_resources
if [[ ! -d "/opt/2fauth" ]]; then
if [[ ! -d /opt/2fauth ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
@@ -34,7 +34,8 @@ function update_script() {
$STD apt -y upgrade
msg_info "Creating Backup"
mv "/opt/2fauth" "/opt/2fauth-backup"
rm -rf /opt/2fauth-backup
mv /opt/2fauth /opt/2fauth-backup
if ! dpkg -l | grep -q 'php8.4'; then
cp /etc/nginx/conf.d/2fauth.conf /etc/nginx/conf.d/2fauth.conf.bak
fi
@@ -46,15 +47,17 @@ function update_script() {
fi
fetch_and_deploy_gh_release "2fauth" "Bubka/2FAuth" "tarball"
setup_composer
mv "/opt/2fauth-backup/.env" "/opt/2fauth/.env"
mv "/opt/2fauth-backup/storage" "/opt/2fauth/storage"
cd "/opt/2fauth" || return
chown -R www-data: "/opt/2fauth"
chmod -R 755 "/opt/2fauth"
cp /opt/2fauth-backup/.env /opt/2fauth/.env
cp -r /opt/2fauth-backup/storage /opt/2fauth/storage
cd /opt/2fauth || return
export COMPOSER_ALLOW_SUPERUSER=1
$STD composer install --no-dev --prefer-dist
php artisan 2fauth:install
chown -R www-data: /opt/2fauth
chmod -R 755 /opt/2fauth
$STD systemctl restart php8.4-fpm
$STD systemctl restart nginx
rm -rf /opt/2fauth-backup
msg_ok "Updated successfully!"
fi
exit

View File

@@ -21,8 +21,6 @@ catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /usr/local/bin/ironclaw ]]; then
msg_error "No ${APP} Installation Found!"

View File

@@ -22,8 +22,6 @@ catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /etc/ntfy ]]; then
msg_error "No ${APP} Installation Found!"
exit

View File

@@ -21,7 +21,6 @@ catch_errors
function update_script() {
header_info
check_container_resources
if [[ ! -d /opt/redlib ]]; then
msg_error "No ${APP} Installation Found!"

View File

@@ -21,8 +21,6 @@ catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if ! apk info -e rustypaste >/dev/null 2>&1; then
msg_error "No ${APP} Installation Found!"

View File

@@ -23,26 +23,25 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /opt/checkmk_version.txt ]]; then
if ! command -v omd &>/dev/null; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/checkmk/checkmk/tags | grep "name" | awk '{print substr($2, 3, length($2)-4) }' | tr ' ' '\n' | grep -Ev 'rc|b' | sort -V | tail -n 1)
RELEASE=$(curl_with_retry "https://api.github.com/repos/checkmk/checkmk/tags" "-" | grep "name" | awk '{print substr($2, 3, length($2)-4) }' | tr ' ' '\n' | grep -Ev 'rc|b' | sort -V | tail -n 1)
RELEASE="${RELEASE%%+*}"
msg_info "Updating ${APP} to v${RELEASE}"
msg_info "Updating checkmk"
$STD omd stop monitoring
$STD omd cp monitoring monitoringbackup
curl -fsSL "https://download.checkmk.com/checkmk/${RELEASE}/check-mk-raw-${RELEASE}_0.$(get_os_info codename)_amd64.deb" -o "/opt/checkmk.deb"
$STD apt-get install -y /opt/checkmk.deb
curl_with_retry "https://download.checkmk.com/checkmk/${RELEASE}/check-mk-community-${RELEASE}_0.$(get_os_info codename)_amd64.deb" "/opt/checkmk.deb"
$STD apt install -y /opt/checkmk.deb
$STD omd --force -V ${RELEASE}.cre update --conflict=install monitoring
$STD omd start monitoring
$STD omd -f rm monitoringbackup
$STD omd cleanup
rm -rf /opt/checkmk.deb
msg_ok "Updated ${APP}"
msg_ok "Updated checkmk"
msg_ok "Updated successfully!"
exit
}

56
ct/coredns.sh Normal file
View File

@@ -0,0 +1,56 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/coredns/coredns
# Container defaults; every var_* honors a value pre-set in the environment.
APP="CoreDNS"
var_tags="${var_tags:-dns;network}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-1}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Update an existing CoreDNS installation inside the container.
# Invoked by the sourced build.func framework; helpers used here
# (msg_*, check_*, check_for_gh_release, fetch_and_deploy_gh_release)
# all come from that file.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  # The install script places the binary at /usr/local/bin/coredns.
  if [[ ! -f /usr/local/bin/coredns ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  # check_for_gh_release succeeds only when a newer upstream tag exists.
  if check_for_gh_release "coredns" "coredns/coredns"; then
    msg_info "Stopping Service"
    systemctl stop coredns
    msg_ok "Stopped Service"
    # Deploy the matching prebuilt tarball for this container's architecture.
    fetch_and_deploy_gh_release "coredns" "coredns/coredns" "prebuild" "latest" "/usr/local/bin" \
      "coredns_*_linux_$(dpkg --print-architecture).tgz"
    chmod +x /usr/local/bin/coredns
    msg_info "Starting Service"
    systemctl start coredns
    msg_ok "Started Service"
    msg_ok "Updated successfully!"
  fi
  exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} CoreDNS is listening on port 53 (DNS)${CL}"
echo -e "${TAB}${GATEWAY}${BGN}dns://${IP}${CL}"

View File

@@ -53,6 +53,18 @@ function update_script() {
export PATH="/root/.rbenv/shims:/root/.rbenv/bin:$PATH"
eval "$(/root/.rbenv/bin/rbenv init - bash)"
if ! grep -q "OTP_ENCRYPTION_PRIMARY_KEY" /opt/dawarich/.env; then
echo "OTP_ENCRYPTION_PRIMARY_KEY=$(openssl rand -hex 64)" >>/opt/dawarich/.env
fi
if ! grep -q "OTP_ENCRYPTION_DETERMINISTIC_KEY" /opt/dawarich/.env; then
echo "OTP_ENCRYPTION_DETERMINISTIC_KEY=$(openssl rand -hex 64)" >>/opt/dawarich/.env
fi
if ! grep -q "OTP_ENCRYPTION_KEY_DERIVATION_SALT" /opt/dawarich/.env; then
echo "OTP_ENCRYPTION_KEY_DERIVATION_SALT=$(openssl rand -hex 64)" >>/opt/dawarich/.env
fi
set -a && source /opt/dawarich/.env && set +a
$STD bundle config set --local deployment 'true'
@@ -67,8 +79,8 @@ function update_script() {
$STD npm install
fi
$STD bundle exec rake assets:precompile
$STD bundle exec rails db:migrate
$STD bundle exec rake assets:precompile
$STD bundle exec rake data:migrate
msg_ok "Ran Migrations"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG
# Author: johanngrobe
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/joaovitoriasilva/endurain
# Source: https://codeberg.org/endurain-project/endurain
APP="Endurain"
var_tags="${var_tags:-sport;social-media}"
@@ -28,7 +28,7 @@ function update_script() {
msg_error "No ${APP} installation found!"
exit 233
fi
if check_for_gh_release "endurain" "endurain-project/endurain"; then
if check_for_codeberg_release "endurain" "endurain-project/endurain"; then
msg_info "Stopping Service"
systemctl stop endurain
msg_ok "Stopped Service"
@@ -38,7 +38,7 @@ function update_script() {
cp /opt/endurain/frontend/app/dist/env.js /opt/endurain.env.js
msg_ok "Created Backup"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
CLEAN_INSTALL=1 fetch_and_deploy_codeberg_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
msg_info "Preparing Update"
cd /opt/endurain

View File

@@ -7,7 +7,7 @@ source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxV
APP="Frigate"
var_tags="${var_tags:-nvr}"
var_cpu="${var_cpu:-4}"
var_cpu="${var_cpu:-8}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-20}"
var_os="${var_os:-debian}"

View File

@@ -37,7 +37,7 @@ function update_script() {
CURRENT_VERSION=$(apt list --installed 2>/dev/null | grep graylog-server | grep -oP '\d+\.\d+\.\d+')
if dpkg --compare-versions "$CURRENT_VERSION" lt "6.3"; then
MONGO_VERSION="8.0" setup_mongodb
MONGO_VERSION="8.2" setup_mongodb
msg_info "Updating Graylog"
$STD apt update

6
ct/headers/coredns Normal file
View File

@@ -0,0 +1,6 @@
______ ____ _ _______
/ ____/___ ________ / __ \/ | / / ___/
/ / / __ \/ ___/ _ \/ / / / |/ /\__ \
/ /___/ /_/ / / / __/ /_/ / /| /___/ /
\____/\____/_/ \___/_____/_/ |_//____/

6
ct/headers/nagios Normal file
View File

@@ -0,0 +1,6 @@
_ __ _
/ | / /___ _____ _(_)___ _____
/ |/ / __ `/ __ `/ / __ \/ ___/
/ /| / /_/ / /_/ / / /_/ (__ )
/_/ |_/\__,_/\__, /_/\____/____/
/____/

6
ct/headers/neko Normal file
View File

@@ -0,0 +1,6 @@
_ __ __
/ | / /__ / /______
/ |/ / _ \/ //_/ __ \
/ /| / __/ ,< / /_/ /
/_/ |_/\___/_/|_|\____/

View File

@@ -0,0 +1,6 @@
____ __ __ ___ _ __ ____ _ __
/ __ \_________ / /_____ ____ / |/ /___ _(_) / / __ )_____(_)___/ /___ ____
/ /_/ / ___/ __ \/ __/ __ \/ __ \/ /|_/ / __ `/ / /_____/ __ / ___/ / __ / __ `/ _ \
/ ____/ / / /_/ / /_/ /_/ / / / / / / / /_/ / / /_____/ /_/ / / / / /_/ / /_/ / __/
/_/ /_/ \____/\__/\____/_/ /_/_/ /_/\__,_/_/_/ /_____/_/ /_/\__,_/\__, /\___/
/____/

6
ct/headers/soulsync Normal file
View File

@@ -0,0 +1,6 @@
_____ _______
/ ___/____ __ __/ / ___/__ ______ _____
\__ \/ __ \/ / / / /\__ \/ / / / __ \/ ___/
___/ / /_/ / /_/ / /___/ / /_/ / / / / /__
/____/\____/\__,_/_//____/\__, /_/ /_/\___/
/____/

6
ct/headers/storybook Normal file
View File

@@ -0,0 +1,6 @@
_____ __ __ __
/ ___// /_____ _______ __/ /_ ____ ____ / /__
\__ \/ __/ __ \/ ___/ / / / __ \/ __ \/ __ \/ //_/
___/ / /_/ /_/ / / / /_/ / /_/ / /_/ / /_/ / ,<
/____/\__/\____/_/ \__, /_.___/\____/\____/_/|_|
/____/

6
ct/headers/teable Normal file
View File

@@ -0,0 +1,6 @@
______ __ __
/_ __/__ ____ _/ /_ / /__
/ / / _ \/ __ `/ __ \/ / _ \
/ / / __/ /_/ / /_/ / / __/
/_/ \___/\__,_/_.___/_/\___/

6
ct/headers/trek Normal file
View File

@@ -0,0 +1,6 @@
__________ ________ __
/_ __/ __ \/ ____/ //_/
/ / / /_/ / __/ / ,<
/ / / _, _/ /___/ /| |
/_/ /_/ |_/_____/_/ |_|

6
ct/headers/tubearchivist Normal file
View File

@@ -0,0 +1,6 @@
______ __ ___ __ _ _ __
/_ __/_ __/ /_ ___ / | __________/ /_ (_) __(_)____/ /_
/ / / / / / __ \/ _ \ / /| | / ___/ ___/ __ \/ / | / / / ___/ __/
/ / / /_/ / /_/ / __/ / ___ |/ / / /__/ / / / /| |/ / (__ ) /_
/_/ \__,_/_.___/\___/ /_/ |_/_/ \___/_/ /_/_/ |___/_/____/\__/

View File

@@ -28,6 +28,8 @@ function update_script() {
exit
fi
ensure_dependencies libgssapi-krb5-2
if check_for_gh_release "mail-archiver" "s1t5/mail-archiver"; then
msg_info "Stopping Mail-Archiver"
systemctl stop mail-archiver

91
ct/nagios.sh Normal file
View File

@@ -0,0 +1,91 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: CanbiZ (MickLesk)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/NagiosEnterprises/nagioscore
# Container defaults; every var_* honors a value pre-set in the environment.
APP="Nagios"
var_tags="${var_tags:-monitoring;alerts;infrastructure}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-20}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Update Nagios Core and the Nagios plugins from their GitHub releases.
# Helpers (msg_*, check_*, fetch_and_deploy_gh_release) come from build.func.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  # nagios.cfg marks an installation created by the install script.
  if [[ ! -f /usr/local/nagios/etc/nagios.cfg ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  # Preserve the live configuration: the 'make install' steps below would
  # otherwise overwrite the files under /usr/local/nagios/etc.
  msg_info "Backing up Configuration"
  cp -a /usr/local/nagios/etc /opt/nagios-etc-backup
  msg_ok "Backed up Configuration"
  if check_for_gh_release "nagios" "NagiosEnterprises/nagioscore"; then
    msg_info "Stopping Nagios"
    systemctl stop nagios
    msg_ok "Stopped Nagios"
    # CLEAN_INSTALL=1 wipes the target dir before unpacking the tarball.
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "nagios" "NagiosEnterprises/nagioscore" "tarball"
    msg_info "Building Nagios Core"
    cd /opt/nagios
    $STD ./configure --with-httpd-conf=/etc/apache2/sites-enabled
    $STD make all
    $STD make install-groups-users
    usermod -a -G nagios www-data
    $STD make install
    $STD make install-daemoninit
    $STD make install-commandmode
    $STD make install-webconf
    $STD a2enmod rewrite
    $STD a2enmod cgi
    # Give ping the CAP_NET_RAW capability so check_ping works without
    # root / setuid (the "Nagios: Ping fix" change).
    setcap cap_net_raw+p /bin/ping
    msg_ok "Built Nagios Core"
    msg_info "Starting Nagios"
    systemctl restart apache2
    systemctl start nagios
    msg_ok "Started Nagios"
  fi
  if check_for_gh_release "nagios-plugins" "nagios-plugins/nagios-plugins"; then
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "nagios-plugins" "nagios-plugins/nagios-plugins" "tarball"
    msg_info "Building Nagios Plugins"
    cd /opt/nagios-plugins
    $STD ./tools/setup
    $STD ./configure
    $STD make
    $STD make install
    msg_ok "Built Nagios Plugins"
  fi
  # NOTE(review): the configuration is restored and "Updated successfully!"
  # is printed even when neither release check found anything new — confirm
  # this no-op backup/restore round-trip is intended.
  msg_info "Restoring Configuration"
  rm -rf /usr/local/nagios/etc
  cp -a /opt/nagios-etc-backup /usr/local/nagios/etc
  rm -rf /opt/nagios-etc-backup
  msg_ok "Restored Configuration"
  msg_ok "Updated successfully!"
  exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}/nagios${CL}"

78
ct/neko.sh Normal file
View File

@@ -0,0 +1,78 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: CanbiZ (MickLesk)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://neko.m1k1o.net/
# Container defaults; every var_* honors a value pre-set in the environment.
APP="Neko"
var_tags="${var_tags:-virtual-browser;webrtc;streaming}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-12}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
# GPU passthrough requested by default (video encoding workload).
var_gpu="${var_gpu:-yes}"
header_info "$APP"
variables
color
catch_errors
# Update an existing Neko installation: rebuild client and server from the
# latest source tarball while preserving /etc/neko/neko.yaml.
# Helpers (msg_*, check_*, fetch_and_deploy_gh_release) come from build.func.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /opt/neko ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  if check_for_gh_release "neko" "m1k1o/neko"; then
    msg_info "Stopping Service"
    systemctl stop neko
    msg_ok "Stopped Service"
    # Keep the config outside /opt/neko: CLEAN_INSTALL wipes that tree.
    msg_info "Backing up Data"
    cp /etc/neko/neko.yaml /opt/neko.yaml.bak
    msg_ok "Backed up Data"
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "neko" "m1k1o/neko" "tarball"
    msg_info "Building Client"
    cd /opt/neko/client
    $STD npm install
    $STD npm run build
    cp -r /opt/neko/client/dist/* /var/www/
    msg_ok "Built Client"
    msg_info "Building Server"
    cd /opt/neko/server
    # Upstream repo ships its own ./build script for the server binary.
    $STD ./build
    cp /opt/neko/server/bin/neko /usr/bin/neko
    # Plugins are optional; ignore the copy failing when none were built.
    cp -r /opt/neko/server/bin/plugins/* /etc/neko/plugins/ 2>/dev/null || true
    msg_ok "Built Server"
    msg_info "Restoring Data"
    cp /opt/neko.yaml.bak /etc/neko/neko.yaml
    rm -f /opt/neko.yaml.bak
    msg_ok "Restored Data"
    msg_info "Starting Service"
    systemctl start neko
    msg_ok "Started Service"
    msg_ok "Updated successfully!"
  fi
  exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"

View File

@@ -76,6 +76,7 @@ function update_script() {
if [[ -f "$DB" ]]; then
sqlite3 "$DB" "ALTER TABLE 'orgs' ADD COLUMN 'settingsLogRetentionDaysConnection' integer DEFAULT 0 NOT NULL;" 2>/dev/null || true
sqlite3 "$DB" "ALTER TABLE 'clientSitesAssociationsCache' ADD COLUMN 'isJitMode' integer DEFAULT 0 NOT NULL;" 2>/dev/null || true
sqlite3 "$DB" "ALTER TABLE 'userOrgs' ADD COLUMN 'pamUsername' text;" 2>/dev/null || true
# Create new role-mapping tables and migrate data before drizzle-kit
# drops the roleId columns from userOrgs and userInvites.

View File

@@ -164,6 +164,14 @@ function update_script() {
fi
fi
msg_info "Updating NLTK Data"
cd /opt/paperless
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data snowball_data
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data stopwords
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt_tab ||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt
msg_ok "Updated NLTK Data"
msg_info "Starting all Paperless-ngx Services"
systemctl start paperless-consumer paperless-webserver paperless-scheduler paperless-task-queue
sleep 1

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG
# Author: vhsdream
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/PatcMmon/PatchMon
# Source: https://github.com/PatchMon/PatchMon
APP="PatchMon"
var_tags="${var_tags:-monitoring}"
@@ -29,63 +29,75 @@ function update_script() {
exit
fi
if ! grep -q "PORT=3001" /opt/patchmon/backend/.env; then
msg_warn "⚠️ The next PatchMon update will include breaking changes (port changes)."
msg_warn "See details here: https://github.com/community-scripts/ProxmoxVE/pull/11888"
msg_warn "Press Enter to continue with the update, or Ctrl+C to abort..."
read -r
fi
RELEASE="v1.4.2"
NODE_VERSION="24" setup_nodejs
if check_for_gh_release "PatchMon" "PatchMon/PatchMon" "${RELEASE}"; then
if check_for_gh_release "PatchMon" "PatchMon/PatchMon"; then
msg_info "Stopping Service"
systemctl stop patchmon-server
msg_ok "Stopped Service"
msg_info "Creating Backup"
cp /opt/patchmon/backend/.env /opt/backend.env
cp /opt/patchmon/frontend/.env /opt/frontend.env
msg_ok "Backup Created"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "PatchMon" "PatchMon/PatchMon" "tarball" "${RELEASE}" "/opt/patchmon"
msg_info "Updating PatchMon"
VERSION=$(get_latest_github_release "PatchMon/PatchMon")
SERVER_PORT="$(sed -n '/SERVER_PORT/s/[^=]*=//p' /opt/backend.env)"
sed -i 's/PORT=3399/PORT=3001/' /opt/backend.env
sed -i -e "s/VERSION=.*/VERSION=$VERSION/" \
-e '/^VITE_API_URL/d' /opt/frontend.env
export NODE_ENV=production
cd /opt/patchmon
$STD npm install --no-audit --no-fund --no-save --ignore-scripts
cd /opt/patchmon/frontend
mv /opt/frontend.env /opt/patchmon/frontend/.env
$STD npm install --no-audit --no-fund --no-save --ignore-scripts --include=dev
$STD npm run build
cd /opt/patchmon/backend
mv /opt/backend.env /opt/patchmon/backend/.env
$STD npm run db:generate
$STD npx prisma migrate deploy
cp /opt/patchmon/docker/nginx.conf.template /etc/nginx/sites-available/patchmon.conf
sed -i -e 's|proxy_pass .*|proxy_pass http://127.0.0.1:3001;|' \
-e '\|try_files |i\ root /opt/patchmon/frontend/dist;' \
-e 's|alias.*|alias /opt/patchmon/frontend/dist/assets;|' \
-e '\|expires 1y|i\ root /opt/patchmon/frontend/dist;' /etc/nginx/sites-available/patchmon.conf
if [[ -n "$SERVER_PORT" ]] && [[ "$SERVER_PORT" != "443" ]]; then
sed -i "s/listen [[:digit:]].*/listen ${SERVER_PORT};/" /etc/nginx/sites-available/patchmon.conf
if [[ -d /opt/patchmon/backend ]]; then
msg_info "Legacy install detected - creating full backup, please wait..."
$STD tar czf ~/patchmon_legacy.tar.gz /opt/patchmon
cp /opt/patchmon/backend/.env /opt/legacy.env
msg_ok "Full backup saved in /root"
msg_info "Starting migration to PatchMon v2.x.x"
systemctl disable -q --now nginx
$STD npm cache clean --force
$STD apt autoremove --purge -y {nginx,nodejs}
if [[ -f /etc/apt/sources.list.d/nodesource.sources ]]; then
cp /etc/apt/sources.list.d/nodesource.sources /etc/apt/sources.list.d/nodesource.sources.bak
rm -f /etc/apt/sources.list.d/nodesource.sources
elif [[ -f /etc/apt/sources.list.d/nodesource.list ]]; then
cp /etc/apt/sources.list.d/nodesource.list /etc/apt/sources.list.d/nodesource.list.bak
rm -f /etc/apt/sources.list.d/nodesource.list
fi
rm -rf /opt/patchmon
mkdir -p /opt/patchmon/agents
cp /opt/legacy.env /opt/patchmon/.env
sed -i -e 's/^PORT=.*/PORT=3000/' \
-e 's/^NODE_/APP_/' \
-e '/^SERVER_*/d' \
-e '/^# API*/,+2d' /opt/patchmon/.env
{
echo ""
echo "SESSION_SECRET=$(openssl rand -hex 64)"
echo "AI_ENCRYPTION_KEY=$(openssl rand -hex 64)"
echo "AGENT_BINARIES_DIR=/opt/patchmon/agents"
} >>/opt/patchmon/.env
sed -i -e '\|Directory|s|/backend||' \
-e 's|^ExecStart=.*|ExecStart=/opt/patchmon/patchmon-server|' \
-e 's|^Environment=NODE_.*|EnvironmentFile=/opt/patchmon/.env|' \
/etc/systemd/system/patchmon-server.service
systemctl daemon-reload
rm /opt/legacy.env
msg_ok "Migration complete!"
fi
ln -sf /etc/nginx/sites-available/patchmon.conf /etc/nginx/sites-enabled/
rm -f /etc/nginx/sites-enabled/default
$STD nginx -t
systemctl restart nginx
msg_ok "Updated PatchMon"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "PatchMon" "PatchMon/PatchMon" "singlefile" "latest" "/opt/patchmon" "patchmon-server-linux-amd64"
mv /opt/patchmon/PatchMon /opt/patchmon/patchmon-server
msg_info "Fetching PatchMon agent binaries"
RELEASE=$(get_latest_github_release "PatchMon/PatchMon")
[[ ! -d /opt/patchmon/agents ]] && mkdir -p /opt/patchmon/agents
FILE_URL="https://github.com/PatchMon/PatchMon/releases/download/v${RELEASE}/patchmon-agent-"
AGENT_NAME=(
"linux-amd64"
"linux-arm64"
"linux-arm"
"linux-386"
"freebsd-amd64"
"freebsd-arm64"
"freebsd-arm"
"freebsd-386"
"windows-amd64.exe"
"windows-arm64.exe"
)
for arch in "${AGENT_NAME[@]}"; do
curl_with_retry "${FILE_URL}${arch}" "/opt/patchmon/agents/patchmon-agent-${arch}"
[[ "${arch}" != *.exe ]] && chmod 755 "/opt/patchmon/agents/patchmon-agent-${arch}"
done
msg_ok "Fetched PatchMon agent binaries"
msg_info "Starting Service"
if grep -q '/usr/bin/node' /etc/systemd/system/patchmon-server.service; then
sed -i 's|ExecStart=.*|ExecStart=/usr/bin/npm run start|' /etc/systemd/system/patchmon-server.service
systemctl daemon-reload
fi
systemctl start patchmon-server
msg_ok "Started Service"
msg_ok "Updated successfully!"

View File

@@ -45,15 +45,21 @@ function update_script() {
$STD php artisan down
msg_ok "Stopped Service"
cp -r /opt/pelican-panel/.env /opt/
cp -a /opt/pelican-panel/.env /opt/backup
cp -a /opt/pelican-panel/storage/app/public /opt/backup/storage/app/
SQLITE_INSTALL=$(ls /opt/pelican-panel/database/*.sqlite 1>/dev/null 2>&1 && echo "true" || echo "false")
$SQLITE_INSTALL && cp -r /opt/pelican-panel/database/*.sqlite /opt/
rm -rf * .*
$SQLITE_INSTALL && cp -r /opt/pelican-panel/database/*.sqlite /opt/backup
find /opt/pelican-panel -mindepth 1 -maxdepth 1 ! -name 'backup' ! -name 'plugins' -exec rm -rf {} +
fetch_and_deploy_gh_release "pelican-panel" "pelican-dev/panel" "prebuild" "latest" "/opt/pelican-panel" "panel.tar.gz"
msg_info "Updating Pelican Panel"
mv /opt/.env /opt/pelican-panel/
$SQLITE_INSTALL && mv /opt/*.sqlite /opt/pelican-panel/database/
cp -a /opt/backup/.env /opt/pelican-panel/
$SQLITE_INSTALL && mv /opt/backup/*.sqlite /opt/pelican-panel/database/
cp -a /opt/backup/storage/app/public /opt/pelican-panel/storage/app/
$STD composer install --no-dev --optimize-autoloader --no-interaction
$STD php artisan p:environment:setup
$STD php artisan view:clear

79
ct/protonmail-bridge.sh Normal file
View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Stephen Chin (steveonjava)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/ProtonMail/proton-bridge
# Container defaults; every var_* honors a value pre-set in the environment.
APP="ProtonMail-Bridge"
var_tags="${var_tags:-mail;proton}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-8}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Update an existing ProtonMail Bridge installation.
# Records which systemd units were running before the upgrade and restarts
# only those afterwards — and only if the one-time initialization marker
# file exists. Helpers (msg_*, check_*, fetch_and_deploy_gh_release) come
# from the sourced build.func.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -x /usr/bin/protonmail-bridge ]]; then
    msg_error "No ${APP} Installation Found!"
    exit 1
  fi
  if check_for_gh_release "protonmail-bridge" "ProtonMail/proton-bridge"; then
    # All units that make up a Bridge deployment.
    local -a units=(
      protonmail-bridge
      protonmail-bridge-imap.socket
      protonmail-bridge-smtp.socket
      protonmail-bridge-imap-proxy
      protonmail-bridge-smtp-proxy
    )
    local svc state
    declare -A prev_state
    # Snapshot each unit's active state (1 = running) before stopping.
    for svc in "${units[@]}"; do
      state=0
      if systemctl is-active --quiet "$svc" 2>/dev/null; then state=1; fi
      prev_state["$svc"]=$state
    done
    msg_info "Stopping Services"
    systemctl stop protonmail-bridge-imap.socket protonmail-bridge-smtp.socket protonmail-bridge-imap-proxy protonmail-bridge-smtp-proxy protonmail-bridge
    msg_ok "Stopped Services"
    fetch_and_deploy_gh_release "protonmail-bridge" "ProtonMail/proton-bridge" "binary"
    if [[ -f /home/protonbridge/.protonmailbridge-initialized ]]; then
      msg_info "Starting Services"
      # Bring back only the units that were active before the update.
      for svc in "${units[@]}"; do
        [[ "${prev_state[$svc]:-0}" == "1" ]] || continue
        systemctl start "$svc"
      done
      msg_ok "Started Services"
    else
      msg_ok "Initialization not completed. Services remain disabled."
    fi
    msg_ok "Updated successfully!"
  fi
  exit
}
# Entry points provided by build.func: create/start the container, run the
# install script, then print connection hints.
start
build_container
description
msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW}One-time configuration is required before Bridge services are enabled.${CL}"
echo -e "${INFO}${YW}Run this command in the container: protonmailbridge-configure${CL}"

View File

@@ -30,7 +30,7 @@ function update_script() {
fi
NODE_VERSION="24" setup_nodejs
PYTHON_VERSION="3.12" setup_uv
PYTHON_VERSION="3.14" setup_uv
if check_for_gh_release "shelfmark" "calibrain/shelfmark"; then
msg_info "Stopping Service(s)"
@@ -59,6 +59,7 @@ function update_script() {
RELEASE_VERSION=$(cat "$HOME/.shelfmark")
msg_info "Updating Shelfmark"
export VIRTUAL_ENV=/opt/shelfmark/venv
sed -i "s/^RELEASE_VERSION=.*/RELEASE_VERSION=$RELEASE_VERSION/" /etc/shelfmark/.env
cd /opt/shelfmark/src/frontend
$STD npm ci
@@ -67,9 +68,10 @@ function update_script() {
cd /opt/shelfmark
$STD uv venv -c ./venv
$STD source ./venv/bin/activate
$STD uv pip install -r ./requirements-base.txt
if [[ $(sed -n '/_BYPASS=/s/[^=]*=//p' /etc/shelfmark/.env) == "true" ]] && [[ $(sed -n '/BYPASSER=/s/[^=]*=//p' /etc/shelfmark/.env) == "false" ]]; then
$STD uv pip install -r ./requirements-shelfmark.txt
$STD uv sync --active --locked --no-default-groups --extra browser
else
$STD uv sync --active --locked --no-default-groups
fi
mv /opt/start.sh.bak /opt/shelfmark/start.sh
msg_ok "Updated Shelfmark"

68
ct/soulsync.sh Normal file
View File

@@ -0,0 +1,68 @@
#!/usr/bin/env bash
# Pull shared build helpers (header_info, variables, color, catch_errors, ...).
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/Nezreka/SoulSync
APP="SoulSync"
# Default LXC resources/tags; each var_* honors a pre-set env override.
var_tags="${var_tags:-music;automation;media}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-8}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Upgrade an existing SoulSync install: stop the service, preserve config/data
# across a clean tarball re-deploy, rebuild the venv, then restore and restart.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -f ~/.soulsync ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  if check_for_gh_release "soulsync" "Nezreka/SoulSync"; then
    msg_info "Stopping Service"
    systemctl stop soulsync
    msg_ok "Stopped Service"
    # Move user state out of /opt/soulsync before CLEAN_INSTALL wipes the tree.
    msg_info "Backing up Data"
    mv /opt/soulsync/config /opt/soulsync-config.bak
    mv /opt/soulsync/data /opt/soulsync-data.bak
    msg_ok "Backed up Data"
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "soulsync" "Nezreka/SoulSync" "tarball"
    msg_info "Updating Python Dependencies"
    cd /opt/soulsync
    $STD uv venv --clear /opt/soulsync/.venv --python 3.11
    # uv-created venvs ship without pip; install via uv against the venv's
    # interpreter instead of calling .venv/bin/pip (which does not exist).
    $STD uv pip install --python /opt/soulsync/.venv/bin/python -r requirements.txt
    msg_ok "Updated Python Dependencies"
    mv /opt/soulsync-config.bak /opt/soulsync/config
    mv /opt/soulsync-data.bak /opt/soulsync/data
    msg_info "Starting Service"
    systemctl start soulsync
    msg_ok "Started Service"
    msg_ok "Updated ${APP}"
  fi
  exit
}
# Standard build.func flow: interactive setup, container creation, description.
start
build_container
description
msg_ok "Completed Successfully!\n"
# Post-install access hint (SoulSync web UI on port 8008).
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8008${CL}"

View File

@@ -30,6 +30,12 @@ function update_script() {
msg_info "Updating step-ca and step-cli"
$STD apt update
$STD apt upgrade -y step-ca step-cli
# Patch for making $STD happy (/usr/bin/step is a symlink to /usr/bin/step-cli)
STEPBIN="$(which step)"
rm -f "$STEPBIN"
cp -f "$(which step-cli)" "$STEPBIN"
$STD systemctl restart step-ca
msg_ok "Updated step-ca and step-cli"

54
ct/storybook.sh Normal file
View File

@@ -0,0 +1,54 @@
#!/usr/bin/env bash
# Pull shared build helpers (header_info, variables, color, catch_errors, ...).
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/storybookjs/storybook
APP="Storybook"
# Default LXC resources/tags; each var_* honors a pre-set env override.
var_tags="${var_tags:-dev-tools;frontend;ui}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-8}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Upgrade the Storybook installation inside the project directory recorded at
# install time, using Storybook's own upgrade tool.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  # The install step records the scaffolded project's location in this file.
  if [[ ! -f /opt/storybook/.projectpath ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  PROJECT_PATH=$(</opt/storybook/.projectpath)
  if [[ ! -d "$PROJECT_PATH" ]]; then
    msg_error "Project directory not found: $PROJECT_PATH"
    exit
  fi
  msg_info "Updating Storybook"
  cd "$PROJECT_PATH"
  # Let the official CLI migrate config and bump all storybook packages.
  $STD npx storybook@latest upgrade --yes
  msg_ok "Updated Storybook"
  exit
}
# Standard build.func flow: interactive setup, container creation, description.
start
build_container
description
msg_ok "Completed Successfully!\n"
# Post-install access hint (Storybook dev server on port 6006).
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:6006${CL}"

82
ct/teable.sh Normal file
View File

@@ -0,0 +1,82 @@
#!/usr/bin/env bash
# Pull shared build helpers (header_info, variables, color, catch_errors, ...).
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/teableio/teable
APP="Teable"
# Default LXC resources/tags; the pnpm monorepo build needs generous RAM.
var_tags="${var_tags:-database;no-code;spreadsheet}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-10240}"
var_disk="${var_disk:-25}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Upgrade an existing Teable install: preserve .env across a clean tarball
# re-deploy, rebuild the pnpm workspace, run Prisma migrations, restart.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /opt/teable ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  if check_for_gh_release "teable" "teableio/teable"; then
    local schema=./prisma/postgres/schema.prisma
    msg_info "Stopping Service"
    systemctl stop teable
    msg_ok "Stopped Service"
    # Keep .env outside /opt/teable while the tree is wiped and re-fetched.
    msg_info "Backing up Configuration"
    cp /opt/teable/.env /opt/teable.env.bak
    msg_ok "Backed up Configuration"
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "teable" "teableio/teable" "tarball"
    msg_info "Restoring Configuration"
    mv /opt/teable.env.bak /opt/teable/.env
    msg_ok "Restored Configuration"
    msg_info "Rebuilding Teable"
    cd /opt/teable
    # Stamp the deployed version (written by fetch_and_deploy) into the UI build.
    TEABLE_VERSION=$(cat ~/.teable)
    printf 'NEXT_PUBLIC_BUILD_VERSION="%s"\n' "${TEABLE_VERSION}" >>apps/nextjs-app/.env
    export HUSKY=0
    export NODE_OPTIONS="--max-old-space-size=8192"
    $STD pnpm install --frozen-lockfile
    $STD pnpm -F @teable/db-main-prisma prisma-generate --schema "$schema"
    NODE_ENV=production NEXT_BUILD_ENV_TYPECHECK=false \
      $STD pnpm -r --filter '!playground' run build
    msg_ok "Rebuilt Teable"
    msg_info "Running Database Migrations"
    # .env provides the database connection for prisma-migrate.
    source /opt/teable/.env
    $STD pnpm -F @teable/db-main-prisma prisma-migrate deploy --schema "$schema"
    msg_ok "Ran Database Migrations"
    msg_info "Starting Service"
    systemctl start teable
    msg_ok "Started Service"
    msg_ok "Updated successfully!"
  else
    msg_ok "No update available."
  fi
  exit
}
# Standard build.func flow: interactive setup, container creation, description.
start
build_container
description
msg_ok "Completed Successfully!\n"
# Post-install access hint (Teable web UI on port 3000).
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"

View File

@@ -32,8 +32,8 @@ function update_script() {
systemctl daemon-reload
systemctl enable -q --now technitium
fi
if is_package_installed "aspnetcore-runtime-8.0" || is_package_installed "aspnetcore-runtime-9.0"; then
$STD apt remove -y aspnetcore-runtime-*
if ! is_package_installed "aspnetcore-runtime-10.0"; then
$STD apt remove -y aspnetcore-runtime-8.0 aspnetcore-runtime-9.0 2>/dev/null || true
[ -f /etc/apt/sources.list.d/microsoft-prod.list ] && rm -f /etc/apt/sources.list.d/microsoft-prod.list
[ -f /usr/share/keyrings/microsoft-prod.gpg ] && rm -f /usr/share/keyrings/microsoft-prod.gpg
setup_deb822_repo \

View File

@@ -29,12 +29,12 @@ function update_script() {
exit
fi
if check_for_gh_release "threadfin" "threadfin/threadfin"; then
if check_for_gh_release "threadfin-app" "threadfin/threadfin"; then
msg_info "Stopping Service"
systemctl stop threadfin
msg_ok "Stopped Service"
fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
fetch_and_deploy_gh_release "threadfin-app" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
msg_info "Starting Service"
systemctl start threadfin

84
ct/trek.sh Normal file
View File

@@ -0,0 +1,84 @@
#!/usr/bin/env bash
# Pull shared build helpers (header_info, variables, color, catch_errors, ...).
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/mauriceboe/TREK
APP="TREK"
# Default LXC resources/tags; each var_* honors a pre-set env override.
var_tags="${var_tags:-travel;planning;collaboration}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-8}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Upgrade an existing TREK install: preserve server .env plus the data and
# uploads directories across a clean tarball re-deploy, rebuild client and
# server, relink user data into the server tree, then restart.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /opt/trek ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  if check_for_gh_release "trek" "mauriceboe/TREK"; then
    local d
    msg_info "Stopping Service"
    systemctl stop trek
    msg_ok "Stopped Service"
    # Move user state out of /opt/trek before CLEAN_INSTALL wipes the tree.
    msg_info "Backing up Data"
    cp /opt/trek/server/.env /opt/trek.env.bak
    for d in data uploads; do
      mv "/opt/trek/$d" "/opt/trek-$d.bak"
    done
    msg_ok "Backed up Data"
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "trek" "mauriceboe/TREK" "tarball"
    msg_info "Building Client"
    cd /opt/trek/client
    $STD npm ci
    $STD npm run build
    mkdir -p /opt/trek/server/public
    cp -r /opt/trek/client/dist/* /opt/trek/server/public/
    # Fonts are optional in upstream releases; ignore if absent.
    cp -r /opt/trek/client/public/fonts /opt/trek/server/public/fonts 2>/dev/null || true
    msg_ok "Built Client"
    msg_info "Installing Server Dependencies"
    cd /opt/trek/server
    $STD npm ci
    msg_ok "Installed Server Dependencies"
    msg_info "Restoring Data"
    # Put data/uploads back and expose them to the server via symlinks.
    for d in data uploads; do
      mv "/opt/trek-$d.bak" "/opt/trek/$d"
      rm -rf "/opt/trek/server/$d"
      ln -s "/opt/trek/$d" "/opt/trek/server/$d"
    done
    cp /opt/trek.env.bak /opt/trek/server/.env
    rm -f /opt/trek.env.bak
    msg_ok "Restored Data"
    msg_info "Starting Service"
    systemctl start trek
    msg_ok "Started Service"
    msg_ok "Updated Successfully!"
  fi
  exit
}
# Standard build.func flow: interactive setup, container creation, description.
start
build_container
description
msg_ok "Completed Successfully!\n"
# Post-install access hint (TREK web UI on port 3000).
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"

83
ct/tubearchivist.sh Normal file
View File

@@ -0,0 +1,83 @@
#!/usr/bin/env bash
# Pull shared build helpers (header_info, variables, color, catch_errors, ...).
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/tubearchivist/tubearchivist
APP="Tube Archivist"
# Default LXC resources/tags; sized for Elasticsearch + media archiving.
var_tags="${var_tags:-media;youtube;archiving}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-6144}"
var_disk="${var_disk:-30}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Upgrade an existing Tube Archivist install: preserve .env across a clean
# tarball re-deploy, rebuild frontend/backend, re-point the container-style
# /cache and /youtube symlinks, then restart all services.
function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /opt/tubearchivist ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  if check_for_gh_release "tubearchivist" "tubearchivist/tubearchivist"; then
    msg_info "Stopping Services"
    systemctl stop tubearchivist tubearchivist-celery tubearchivist-beat
    msg_ok "Stopped Services"
    # Keep .env outside /opt/tubearchivist while the tree is wiped and re-fetched.
    msg_info "Backing up Data"
    cp /opt/tubearchivist/.env /opt/tubearchivist_env.bak
    msg_ok "Backed up Data"
    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "tubearchivist" "tubearchivist/tubearchivist" "tarball"
    msg_info "Rebuilding Tube Archivist"
    cd /opt/tubearchivist/frontend
    $STD npm install
    $STD npm run build:deploy
    mkdir -p /opt/tubearchivist/backend/static
    cp -r /opt/tubearchivist/frontend/dist/* /opt/tubearchivist/backend/static/
    cp /opt/tubearchivist/docker_assets/backend_start.py /opt/tubearchivist/backend/
    $STD uv pip install --python /opt/tubearchivist/.venv/bin/python -r /opt/tubearchivist/backend/requirements.txt
    # Optional yt-dlp plugin requirements (present in newer releases only).
    if [[ -f /opt/tubearchivist/backend/requirements.plugins.txt ]]; then
      mkdir -p /opt/yt_plugins/bgutil
      $STD uv pip install --python /opt/tubearchivist/.venv/bin/python --target /opt/yt_plugins/bgutil -r /opt/tubearchivist/backend/requirements.plugins.txt
    fi
    msg_ok "Rebuilt Tube Archivist"
    msg_info "Restoring Configuration"
    mv /opt/tubearchivist_env.bak /opt/tubearchivist/.env
    # Rewrite docker-style paths in .env to match this bare-metal layout.
    sed -i 's|^TA_APP_DIR=/opt/tubearchivist$|TA_APP_DIR=/opt/tubearchivist/backend|' /opt/tubearchivist/.env
    sed -i 's|^TA_CACHE_DIR=/opt/tubearchivist/cache$|TA_CACHE_DIR=/cache|' /opt/tubearchivist/.env
    sed -i 's|^TA_MEDIA_DIR=/opt/tubearchivist/media$|TA_MEDIA_DIR=/youtube|' /opt/tubearchivist/.env
    # -n (--no-dereference): on re-runs /cache and /youtube already exist as
    # symlinks to directories; plain -sf would follow them and create a nested
    # link INSIDE the target directory instead of replacing the link itself.
    ln -sfn /opt/tubearchivist/cache /cache
    ln -sfn /opt/tubearchivist/media /youtube
    ln -sfn /opt/tubearchivist/.env /opt/tubearchivist/backend/.env
    msg_ok "Restored Configuration"
    msg_info "Starting Services"
    systemctl start tubearchivist tubearchivist-celery tubearchivist-beat
    systemctl reload nginx
    msg_ok "Started Services"
    msg_ok "Updated successfully!"
  fi
  exit
}
# Standard build.func flow: interactive setup, container creation, description.
start
build_container
description
msg_ok "Completed Successfully!\n"
# Post-install access hint (Tube Archivist web UI on port 8000).
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8000${CL}"

View File

@@ -14,7 +14,7 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apk add tzdata
$STD apk add tzdata openssl
msg_ok "Installed Dependencies"
msg_info "Installing Docker"

View File

@@ -14,10 +14,10 @@ network_check
update_os
msg_info "Install Checkmk"
RELEASE=$(curl -fsSL https://api.github.com/repos/checkmk/checkmk/tags | grep "name" | awk '{print substr($2, 3, length($2)-4) }' | tr ' ' '\n' | grep -Ev 'rc|b' | sort -V | tail -n 1)
RELEASE=$(curl_with_retry "https://api.github.com/repos/checkmk/checkmk/tags" "-" | grep "name" | awk '{print substr($2, 3, length($2)-4) }' | tr ' ' '\n' | grep -Ev 'rc|b' | sort -V | tail -n 1)
RELEASE="${RELEASE%%+*}"
curl -fsSL "https://download.checkmk.com/checkmk/${RELEASE}/check-mk-raw-${RELEASE}_0.$(get_os_info codename)_amd64.deb" -o "/opt/checkmk.deb"
$STD apt-get install -y /opt/checkmk.deb
curl_with_retry "https://download.checkmk.com/checkmk/${RELEASE}/check-mk-community-${RELEASE}_0.$(get_os_info codename)_amd64.deb" "/opt/checkmk.deb"
$STD apt install -y /opt/checkmk.deb
rm -rf /opt/checkmk.deb
echo "${RELEASE}" >"/opt/checkmk_version.txt"
msg_ok "Installed Checkmk"
@@ -29,14 +29,12 @@ MKPASSWORD=$(openssl rand -base64 18 | tr -d '/+=' | cut -c1-16)
echo -e "$MKPASSWORD\n$MKPASSWORD" | su - "$SITE_NAME" -c "cmk-passwd cmkadmin --stdin"
$STD omd start "$SITE_NAME"
{
echo "Application-Credentials"
echo "Username: cmkadmin"
echo "Password: $MKPASSWORD"
echo "Site: $SITE_NAME"
} >>~/checkmk.creds
msg_ok "Created Service"
cleanup_lxc

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/coredns/coredns
# Install script (runs inside the container): deploy the CoreDNS prebuilt
# binary, write a minimal forwarding Corefile, and register a systemd unit.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
# Deploy the architecture-matching prebuilt tarball into /usr/local/bin.
fetch_and_deploy_gh_release "coredns" "coredns/coredns" "prebuild" "latest" "/usr/local/bin" \
"coredns_*_linux_$(dpkg --print-architecture).tgz"
chmod +x /usr/local/bin/coredns
msg_info "Configuring CoreDNS"
mkdir -p /etc/coredns
# Default Corefile: forward everything to Cloudflare, short cache, plus
# health (:8080) and readiness (:8181) endpoints.
cat <<EOF >/etc/coredns/Corefile
. {
forward . 1.1.1.1 1.0.0.1
cache 30
log
errors
health :8080
ready :8181
}
EOF
msg_ok "Configured CoreDNS"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/coredns.service
[Unit]
Description=CoreDNS DNS Server
After=network.target
[Service]
Type=simple
ExecStart=/usr/local/bin/coredns -conf /etc/coredns/Corefile
Restart=on-failure
RestartSec=5
LimitNOFILE=1048576
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now coredns
msg_ok "Created Service"
motd_ssh
customize
cleanup_lxc

View File

@@ -46,10 +46,16 @@ msg_ok "Set up Directories"
msg_info "Configuring Environment"
SECRET_KEY_BASE=$(openssl rand -hex 64)
OTP_ENCRYPTION_PRIMARY_KEY=$(openssl rand -hex 64)
OTP_ENCRYPTION_DETERMINISTIC_KEY=$(openssl rand -hex 64)
OTP_ENCRYPTION_KEY_DERIVATION_SALT=$(openssl rand -hex 64)
RELEASE=$(get_latest_github_release "Freika/dawarich")
cat <<EOF >/opt/dawarich/.env
RAILS_ENV=production
SECRET_KEY_BASE=${SECRET_KEY_BASE}
OTP_ENCRYPTION_PRIMARY_KEY=${OTP_ENCRYPTION_PRIMARY_KEY}
OTP_ENCRYPTION_DETERMINISTIC_KEY=${OTP_ENCRYPTION_DETERMINISTIC_KEY}
OTP_ENCRYPTION_KEY_DERIVATION_SALT=${OTP_ENCRYPTION_KEY_DERIVATION_SALT}
DATABASE_HOST=localhost
DATABASE_USERNAME=${PG_DB_USER}
DATABASE_PASSWORD=${PG_DB_PASS}

View File

@@ -3,7 +3,7 @@
# Copyright (c) 2021-2026 community-scripts ORG
# Author: johanngrobe
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/joaovitoriasilva/endurain
# Source: https://codeberg.org/endurain-project/endurain
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
@@ -21,7 +21,7 @@ PYTHON_VERSION="3.13" setup_uv
NODE_VERSION="24" setup_nodejs
PG_VERSION="17" PG_MODULES="postgis" setup_postgresql
PG_DB_NAME="enduraindb" PG_DB_USER="endurain" setup_postgresql_db
fetch_and_deploy_gh_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
fetch_and_deploy_codeberg_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
msg_info "Setting up Endurain"
cd /opt/endurain

View File

@@ -13,7 +13,7 @@ setting_up_container
network_check
update_os
MONGO_VERSION="8.0" setup_mongodb
MONGO_VERSION="8.2" setup_mongodb
msg_info "Setup Graylog Data Node"
PASSWORD_SECRET=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c16)
@@ -38,6 +38,8 @@ sed -i "s/password_secret =/password_secret = $PASSWORD_SECRET/g" /etc/graylog/s
sed -i "s/root_password_sha2 =/root_password_sha2 = $ROOT_PASSWORD/g" /etc/graylog/server/server.conf
sed -i 's/#http_bind_address = 127.0.0.1.*/http_bind_address = 0.0.0.0:9000/g' /etc/graylog/server/server.conf
systemctl enable -q --now graylog-server
sleep 5
sed -i "s/0\.0\.0\.0:9000/$LOCAL_IP:9000/g" /var/log/graylog-server/server.log
msg_ok "Setup ${APPLICATION}"
motd_ssh

View File

@@ -43,8 +43,6 @@ cd /tmp/immichframe/immichFrame.Web
$STD npm ci
$STD npm run build
cp -r build/* /opt/immichframe/wwwroot
$STD apt remove -y dotnet-sdk-8.0
$STD apt autoremove -y
rm -rf /tmp/immichframe
mkdir -p /opt/immichframe/Config
curl -fsSL "https://raw.githubusercontent.com/immichFrame/ImmichFrame/main/docker/Settings.example.yml" -o /opt/immichframe/Config/Settings.yml

View File

@@ -22,7 +22,8 @@ setup_deb822_repo \
"main"
$STD apt install -y \
dotnet-sdk-10.0 \
aspnetcore-runtime-8.0
aspnetcore-runtime-8.0 \
libgssapi-krb5-2
msg_ok "Installed Dependencies"
PG_VERSION="17" setup_postgresql

79
install/nagios-install.sh Normal file
View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: CanbiZ (MickLesk)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/NagiosEnterprises/nagioscore
# Install script (runs inside the container): build Nagios Core and the
# official plugins from source, wire up Apache + PHP, start everything.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y \
autoconf \
automake \
build-essential \
bc \
dc \
gawk \
gettext \
gperf \
libgd-dev \
libmcrypt-dev \
libnet-snmp-perl \
libssl-dev \
snmp \
apache2 \
apache2-utils
msg_ok "Installed Dependencies"
PHP_APACHE="YES" setup_php
fetch_and_deploy_gh_release "nagios" "NagiosEnterprises/nagioscore" "tarball"
msg_info "Building Nagios Core"
cd /opt/nagios
$STD ./configure --with-httpd-conf=/etc/apache2/sites-enabled
$STD make all
$STD make install-groups-users
# Apache needs group membership to serve the CGI interface.
usermod -a -G nagios www-data
$STD make install
$STD make install-daemoninit
$STD make install-commandmode
$STD make install-config
$STD make install-webconf
$STD a2enmod rewrite
$STD a2enmod cgi
msg_ok "Built Nagios Core"
fetch_and_deploy_gh_release "nagios-plugins" "nagios-plugins/nagios-plugins" "tarball"
msg_info "Building Nagios Plugins"
cd /opt/nagios-plugins
$STD ./tools/setup
$STD ./configure
$STD make
$STD make install
# Allow check_ping to open raw sockets without running as root.
setcap cap_net_raw+p /bin/ping
msg_ok "Built Nagios Plugins"
msg_info "Configuring Web Authentication"
# Default web credentials: nagiosadmin / nagiosadmin (user should change).
$STD htpasswd -bc /usr/local/nagios/etc/htpasswd.users nagiosadmin nagiosadmin
chown root:www-data /usr/local/nagios/etc/htpasswd.users
chmod 640 /usr/local/nagios/etc/htpasswd.users
msg_ok "Configured Web Authentication"
msg_info "Starting Services"
systemctl enable -q apache2
systemctl restart apache2
systemctl enable -q --now nagios
msg_ok "Started Services"
motd_ssh
customize
cleanup_lxc

255
install/neko-install.sh Normal file
View File

@@ -0,0 +1,255 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: CanbiZ (MickLesk)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://neko.m1k1o.net/
# Install script (runs inside the container): build Neko (virtual browser)
# from source and run Xorg/pulse/Firefox under supervisord.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
# Runtime stack: supervisord process manager, dummy X server, audio,
# GStreamer for WebRTC streaming, openbox WM, and Firefox ESR as the browser.
msg_info "Installing Dependencies"
$STD apt install -y \
supervisor \
pulseaudio \
dbus-x11 \
xserver-xorg-video-dummy \
xdotool \
xclip \
libgtk-3-0 \
gstreamer1.0-plugins-base \
gstreamer1.0-plugins-good \
gstreamer1.0-plugins-bad \
gstreamer1.0-plugins-ugly \
gstreamer1.0-pulseaudio \
openbox \
firefox-esr \
fonts-noto-color-emoji \
fonts-wqy-zenhei
msg_ok "Installed Dependencies"
# supervisord is launched by the neko unit below, not as a system service.
systemctl disable -q --now supervisor
msg_info "Installing Build Dependencies"
$STD apt install -y \
build-essential \
pkg-config \
libx11-dev \
libxrandr-dev \
libxtst-dev \
libgtk-3-dev \
libxcvt-dev \
libgstreamer1.0-dev \
libgstreamer-plugins-base1.0-dev
msg_ok "Installed Build Dependencies"
NODE_VERSION="22" setup_nodejs
setup_go
fetch_and_deploy_gh_release "neko" "m1k1o/neko" "tarball"
msg_info "Building Client"
cd /opt/neko/client
$STD npm install
$STD npm run build
# Client bundle is served as static files by the neko server.
mkdir -p /var/www
cp -r /opt/neko/client/dist/* /var/www/
msg_ok "Built Client"
msg_info "Building Server"
cd /opt/neko/server
$STD ./build
cp /opt/neko/server/bin/neko /usr/bin/neko
mkdir -p /etc/neko/plugins
# Plugins are optional build artifacts; ignore if none were produced.
cp -r /opt/neko/server/bin/plugins/* /etc/neko/plugins/ 2>/dev/null || true
msg_ok "Built Server"
msg_info "Setting up Runtime"
# Dedicated unprivileged user; needs audio/video groups for pulse and X.
useradd -m -s /bin/bash neko
usermod -aG audio,video neko
mkdir -p /etc/neko/supervisord /var/www /var/log/neko /tmp/.X11-unix /tmp/runtime-neko /home/neko/.config/pulse /home/neko/.local/share/xorg
chmod 1777 /tmp/.X11-unix
chmod 1777 /var/log/neko
chmod 0700 /tmp/runtime-neko
chown neko /tmp/.X11-unix /var/log/neko /tmp/runtime-neko
chown -R neko:neko /home/neko
cp /opt/neko/runtime/xorg.conf /etc/neko/xorg.conf
# Remove the dummy_touchscreen InputDevice section (requires custom "neko" Xorg driver not available bare-metal)
sed -i '/Section "InputDevice"/{N;/dummy_touchscreen/{:l;N;/EndSection/!bl;d}}' /etc/neko/xorg.conf
sed -i '/dummy_touchscreen/d' /etc/neko/xorg.conf
sed -i 's/InputDevice "dummy_mouse"/InputDevice "dummy_mouse" "CorePointer"/' /etc/neko/xorg.conf
cp /opt/neko/runtime/default.pa /etc/pulse/default.pa
# supervisord config: X server, pulseaudio, and the neko server itself, all
# running as the "neko" user on display :99.0.
cat <<EOF >/etc/neko/supervisord.conf
[supervisord]
nodaemon=true
user=root
pidfile=/var/run/supervisord.pid
logfile=/dev/null
logfile_maxbytes=0
loglevel=debug
[include]
files=/etc/neko/supervisord/*.conf
[program:x-server]
environment=HOME="/home/neko",USER="neko"
command=/usr/bin/X :99.0 -config /etc/neko/xorg.conf -noreset -nolisten tcp
autorestart=true
priority=300
user=neko
stdout_logfile=/var/log/neko/xorg.log
stdout_logfile_maxbytes=100MB
stdout_logfile_backups=10
redirect_stderr=true
[program:pulseaudio]
environment=HOME="/home/neko",USER="neko",DISPLAY=":99.0"
command=/usr/bin/pulseaudio --log-level=error --disallow-module-loading --disallow-exit --exit-idle-time=-1
autorestart=true
priority=300
user=neko
stdout_logfile=/var/log/neko/pulseaudio.log
stdout_logfile_maxbytes=100MB
stdout_logfile_backups=10
redirect_stderr=true
[program:neko]
environment=HOME="/home/neko",USER="neko",DISPLAY=":99.0"
command=/usr/bin/neko serve --server.static "/var/www"
stopsignal=INT
stopwaitsecs=3
autorestart=true
priority=800
user=neko
stdout_logfile=/var/log/neko/neko.log
stdout_logfile_maxbytes=100MB
stdout_logfile_backups=10
redirect_stderr=true
[unix_http_server]
file=/var/run/supervisor.sock
chmod=0770
chown=root:neko
[supervisorctl]
serverurl=unix:///var/run/supervisor.sock
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
EOF
# Browser + window manager run as a drop-in include so the browser can be
# swapped without touching the main supervisord config.
cat <<EOF >/etc/neko/supervisord/firefox.conf
[program:firefox]
environment=HOME="/home/neko",USER="neko",DISPLAY=":99.0"
command=/usr/bin/firefox-esr --no-remote --display=:99.0 -width 1280 -height 720
stopsignal=INT
autorestart=true
priority=800
user=neko
stdout_logfile=/var/log/neko/firefox.log
stdout_logfile_maxbytes=100MB
stdout_logfile_backups=10
redirect_stderr=true
[program:openbox]
environment=HOME="/home/neko",USER="neko",DISPLAY=":99.0"
command=/usr/bin/openbox --config-file /etc/neko/openbox.xml
autorestart=true
priority=300
user=neko
stdout_logfile=/var/log/neko/openbox.log
stdout_logfile_maxbytes=100MB
stdout_logfile_backups=10
redirect_stderr=true
EOF
# Quoted delimiter: openbox XML is written literally, no shell expansion.
cat <<'EOF' >/etc/neko/openbox.xml
<?xml version="1.0" encoding="UTF-8"?>
<openbox_config xmlns="http://openbox.org/3.4/rc" xmlns:xi="http://www.w3.org/2001/XInclude">
<applications>
<application class="firefox" name="Navigator" role="browser">
<decor>no</decor>
<maximized>true</maximized>
<focus>yes</focus>
<layer>normal</layer>
</application>
</applications>
<focus>
<focusNew>yes</focusNew>
<followMouse>no</followMouse>
<focusLast>yes</focusLast>
<underMouse>no</underMouse>
<focusDelay>200</focusDelay>
<raiseOnFocus>no</raiseOnFocus>
</focus>
<placement>
<policy>Smart</policy>
<center>yes</center>
</placement>
<desktops>
<number>1</number>
<firstdesk>1</firstdesk>
<popupTime>0</popupTime>
</desktops>
</openbox_config>
EOF
# Neko server config; ${LOCAL_IP} is expanded here at install time.
cat <<EOF >/etc/neko/neko.yaml
server:
bind: "0.0.0.0:8080"
static: "/var/www"
session:
cookie:
enabled: false
webrtc:
icelite: true
nat1to1:
- "${LOCAL_IP}"
epr: "59000-59100"
desktop:
input:
enabled: false
member:
provider: "multiuser"
multiuser:
admin_password: "admin"
user_password: "neko"
EOF
msg_ok "Set up Runtime"
msg_info "Creating Service"
# systemd unit launches supervisord, which in turn manages X/pulse/neko.
cat <<EOF >/etc/systemd/system/neko.service
[Unit]
Description=Neko Virtual Browser
After=network.target
[Service]
Type=simple
User=root
Environment=USER=neko
Environment=DISPLAY=:99.0
Environment=PULSE_SERVER=unix:/tmp/pulseaudio.socket
Environment=XDG_RUNTIME_DIR=/tmp/runtime-neko
Environment=NEKO_PLUGINS_ENABLED=true
Environment=NEKO_PLUGINS_DIR=/etc/neko/plugins/
Environment=NEKO_CONFIG=/etc/neko/neko.yaml
ExecStart=/usr/bin/supervisord -c /etc/neko/supervisord.conf -n
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now neko
msg_ok "Created Service"
motd_ssh
customize
cleanup_lxc

View File

@@ -14,74 +14,91 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y \
build-essential \
nginx \
redis-server
$STD apt install -y redis-server
msg_ok "Installed Dependencies"
NODE_VERSION="24" setup_nodejs
PG_VERSION="17" setup_postgresql
PG_DB_NAME="patchmon_db" PG_DB_USER="patchmon_usr" setup_postgresql_db
fetch_and_deploy_gh_release "PatchMon" "PatchMon/PatchMon" "tarball" "v1.4.2" "/opt/patchmon"
RELEASE="v2.0.2"
fetch_and_deploy_gh_release "PatchMon" "PatchMon/PatchMon" "singlefile" "latest" "/opt/patchmon" "patchmon-server-linux-amd64"
mv /opt/patchmon/PatchMon /opt/patchmon/patchmon-server
msg_info "Configuring PatchMon"
VERSION=$(get_latest_github_release "PatchMon/PatchMon")
export NODE_ENV=production
cd /opt/patchmon
$STD npm install --no-audit --no-fund --no-save --ignore-scripts
cd /opt/patchmon/frontend
cat <<EOF >./.env
VITE_APP_NAME=PatchMon
VITE_APP_VERSION=${VERSION}
EOF
$STD npm install --no-audit --no-fund --no-save --ignore-scripts --include=dev
$STD npm run build
cat <<EOF >/opt/patchmon/.env
DATABASE_URL="postgresql://$PG_DB_USER:$PG_DB_PASS@localhost:5432/$PG_DB_NAME"
JWT_SECRET="$(openssl rand -hex 64)"
mv /opt/patchmon/backend/env.example /opt/patchmon/backend/.env
sed -i -e "s|DATABASE_URL=.*|DATABASE_URL=\"postgresql://$PG_DB_USER:$PG_DB_PASS@localhost:5432/$PG_DB_NAME\"|" \
-e "/JWT_SECRET/s/[=$].*/=$JWT_SECRET/" \
-e "\|CORS_ORIGIN|s|localhost|$LOCAL_IP|" \
-e "/PORT=3001/aSERVER_PROTOCOL=http \\
SERVER_HOST=$LOCAL_IP \\
SERVER_PORT=3000" \
-e '/_ENV=production/aTRUST_PROXY=1' \
-e '/REDIS_USER=.*/,+1d' /opt/patchmon/backend/.env
SESSION_SECRET="$(openssl rand -hex 64)"
AI_ENCRYPTION_KEY="$(openssl rand -hex 64)"
CORS_ORIGIN=http://${LOCAL_IP}:3000
PORT=3000
APP_ENV=production
cd /opt/patchmon/backend
$STD npm run db:generate
$STD npx prisma migrate deploy
# Redis
REDIS_HOST=localhost
REDIS_PORT=6379
## OIDC / SSO (when OIDC_ENABLED=true, issuer/client/secret/redirect required)
# OIDC_ENABLED=false
# OIDC_ISSUER_URL=
# OIDC_CLIENT_ID=
# OIDC_CLIENT_SECRET=
# OIDC_REDIRECT_URI=
# OIDC_SCOPES=openid email profile groups
# OIDC_AUTO_CREATE_USERS=false
# OIDC_DEFAULT_ROLE=user
# OIDC_DISABLE_LOCAL_AUTH=false
# OIDC_BUTTON_TEXT=Login with SSO
# OIDC_SESSION_TTL=600
# OIDC_POST_LOGOUT_URI=
# OIDC_SYNC_ROLES=false
# OIDC_ADMIN_GROUP=
# OIDC_SUPERADMIN_GROUP=
# OIDC_HOST_MANAGER_GROUP=
# OIDC_READONLY_GROUP=
# OIDC_USER_GROUP=
# OIDC_ENFORCE_HTTPS=true
AGENT_BINARIES_DIR=/opt/patchmon/agents
EOF
msg_ok "Configured PatchMon"
msg_info "Configuring Nginx"
cp /opt/patchmon/docker/nginx.conf.template /etc/nginx/sites-available/patchmon.conf
sed -i -e 's|proxy_pass .*|proxy_pass http://127.0.0.1:3001;|' \
-e '\|try_files |i\ root /opt/patchmon/frontend/dist;' \
-e 's|alias.*|alias /opt/patchmon/frontend/dist/assets;|' \
-e '\|expires 1y|i\ root /opt/patchmon/frontend/dist;' /etc/nginx/sites-available/patchmon.conf
ln -sf /etc/nginx/sites-available/patchmon.conf /etc/nginx/sites-enabled/
rm -f /etc/nginx/sites-enabled/default
$STD nginx -t
systemctl restart nginx
msg_ok "Configured Nginx"
msg_info "Fetching PatchMon agent binaries"
RELEASE=$(get_latest_github_release "PatchMon/PatchMon")
mkdir -p /opt/patchmon/agents
FILE_URL="https://github.com/PatchMon/PatchMon/releases/download/v${RELEASE}/patchmon-agent-"
AGENT_NAME=(
"linux-amd64"
"linux-arm64"
"linux-arm"
"linux-386"
"freebsd-amd64"
"freebsd-arm64"
"freebsd-arm"
"freebsd-386"
"windows-amd64.exe"
"windows-arm64.exe"
)
for arch in "${AGENT_NAME[@]}"; do
curl_with_retry "${FILE_URL}${arch}" "/opt/patchmon/agents/patchmon-agent-${arch}"
[[ "${arch}" != *.exe ]] && chmod 755 "/opt/patchmon/agents/patchmon-agent-${arch}"
done
msg_ok "Fetched PatchMon agent binaries"
msg_info "Creating service"
cat <<EOF >/etc/systemd/system/patchmon-server.service
[Unit]
Description=PatchMon Service
Description=PatchMon Server
After=network.target postgresql.service
[Service]
Type=simple
WorkingDirectory=/opt/patchmon/backend
ExecStart=/usr/bin/npm run start
WorkingDirectory=/opt/patchmon
ExecStart=/opt/patchmon/patchmon-server
Restart=always
RestartSec=10
Environment=NODE_ENV=production
Environment=PATH=/usr/bin:/usr/local/bin
EnvironmentFile=/opt/patchmon/.env
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=strict

View File

@@ -0,0 +1,192 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Stephen Chin (steveonjava)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/ProtonMail/proton-bridge
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
# 'pass' is the only extra dependency: the bridge stores account credentials
# in a pass keychain (initialized later by protonmailbridge-configure)
$STD apt install -y pass
msg_ok "Installed Dependencies"
msg_info "Creating Service User"
# Dedicated system account with a real home directory (the bridge keeps its
# keychain/GPG state under $HOME) and no interactive login shell
useradd -r -m -d /home/protonbridge -s /usr/sbin/nologin protonbridge
# Re-assert ownership and tighten the home directory to 0750
install -d -m 0750 -o protonbridge -g protonbridge /home/protonbridge
msg_ok "Created Service User"
fetch_and_deploy_gh_release "protonmail-bridge" "ProtonMail/proton-bridge" "binary"
msg_info "Creating Services"
# Main bridge daemon. ConditionPathExists gates startup on the marker file
# written by protonmailbridge-configure, so the service only runs after the
# interactive first-time setup has completed.
cat <<EOF >/etc/systemd/system/protonmail-bridge.service
[Unit]
Description=Proton Mail Bridge (noninteractive)
After=network-online.target
Wants=network-online.target
ConditionPathExists=/home/protonbridge/.protonmailbridge-initialized
[Service]
Type=simple
User=protonbridge
Group=protonbridge
WorkingDirectory=/home/protonbridge
Environment=HOME=/home/protonbridge
ExecStart=/usr/bin/protonmail-bridge --noninteractive
Restart=always
RestartSec=3
NoNewPrivileges=yes
PrivateTmp=yes
ProtectSystem=full
ProtectKernelTunables=yes
ProtectKernelModules=yes
ProtectControlGroups=yes
[Install]
WantedBy=multi-user.target
EOF
# The bridge itself listens on loopback only (1143/1025 per the proxy units
# below); these socket units publish the standard IMAP port 143 externally
# and hand connections to a systemd-socket-proxyd forwarder.
cat <<'EOF' >/etc/systemd/system/protonmail-bridge-imap.socket
[Unit]
Description=Proton Mail Bridge IMAP Socket (143)
ConditionPathExists=/home/protonbridge/.protonmailbridge-initialized
[Socket]
ListenStream=143
Accept=no
Service=protonmail-bridge-imap-proxy.service
[Install]
WantedBy=sockets.target
EOF
cat <<'EOF' >/etc/systemd/system/protonmail-bridge-imap-proxy.service
[Unit]
Description=Proton Mail Bridge IMAP Proxy (143 -> 127.0.0.1:1143)
After=protonmail-bridge.service
Requires=protonmail-bridge.service
ConditionPathExists=/home/protonbridge/.protonmailbridge-initialized
[Service]
Type=simple
Sockets=protonmail-bridge-imap.socket
ExecStart=/usr/lib/systemd/systemd-socket-proxyd 127.0.0.1:1143
NoNewPrivileges=yes
PrivateTmp=yes
EOF
# Same pattern for SMTP: external port 587 proxied to the bridge's 1025.
cat <<'EOF' >/etc/systemd/system/protonmail-bridge-smtp.socket
[Unit]
Description=Proton Mail Bridge SMTP Socket (587)
ConditionPathExists=/home/protonbridge/.protonmailbridge-initialized
[Socket]
ListenStream=587
Accept=no
Service=protonmail-bridge-smtp-proxy.service
[Install]
WantedBy=sockets.target
EOF
cat <<'EOF' >/etc/systemd/system/protonmail-bridge-smtp-proxy.service
[Unit]
Description=Proton Mail Bridge SMTP Proxy (587 -> 127.0.0.1:1025)
After=protonmail-bridge.service
Requires=protonmail-bridge.service
ConditionPathExists=/home/protonbridge/.protonmailbridge-initialized
[Service]
Type=simple
Sockets=protonmail-bridge-smtp.socket
ExecStart=/usr/lib/systemd/systemd-socket-proxyd 127.0.0.1:1025
NoNewPrivileges=yes
PrivateTmp=yes
EOF
msg_ok "Created Services"
msg_info "Creating Helper Commands"
cat <<'EOF' >/usr/local/bin/protonmailbridge-configure
#!/usr/bin/env bash
set -euo pipefail
BRIDGE_USER="protonbridge"
BRIDGE_HOME="/home/${BRIDGE_USER}"
GNUPG_HOME="${BRIDGE_HOME}/.gnupg"
MARKER="${BRIDGE_HOME}/.protonmailbridge-initialized"
FIRST_TIME=0
if [[ ! -f "${MARKER}" ]]; then
FIRST_TIME=1
fi
# Stop sockets/proxies/bridge daemon before configuration
systemctl stop protonmail-bridge-imap.socket protonmail-bridge-smtp.socket
systemctl stop protonmail-bridge-imap-proxy protonmail-bridge-smtp-proxy protonmail-bridge
if [[ "${FIRST_TIME}" == "1" ]]; then
echo "First-time setup: initializing pass keychain for ${BRIDGE_USER} (required by Proton Mail Bridge on Linux)."
install -d -m 0700 -o "${BRIDGE_USER}" -g "${BRIDGE_USER}" "${GNUPG_HOME}"
FPR="$(runuser -u "${BRIDGE_USER}" -- env HOME="${BRIDGE_HOME}" GNUPGHOME="${GNUPG_HOME}" \
gpg --list-secret-keys --with-colons 2>/dev/null | awk -F: '$1=="fpr"{print $10; exit}')"
if [[ -z "${FPR}" ]]; then
runuser -u "${BRIDGE_USER}" -- env HOME="${BRIDGE_HOME}" GNUPGHOME="${GNUPG_HOME}" \
gpg --batch --pinentry-mode loopback --passphrase '' \
--quick-gen-key 'ProtonMail Bridge' default default never
FPR="$(runuser -u "${BRIDGE_USER}" -- env HOME="${BRIDGE_HOME}" GNUPGHOME="${GNUPG_HOME}" \
gpg --list-secret-keys --with-colons 2>/dev/null | awk -F: '$1=="fpr"{print $10; exit}')"
fi
if [[ -z "${FPR}" ]]; then
echo "Failed to detect a GPG key fingerprint for ${BRIDGE_USER}." >&2
exit 1
fi
runuser -u "${BRIDGE_USER}" -- env HOME="${BRIDGE_HOME}" GNUPGHOME="${GNUPG_HOME}" \
pass init "${FPR}"
echo
echo "To do initial configuration of the Proton Mail Bridge:"
echo "Run: login"
echo "Run: info"
echo "Run: exit"
echo
else
echo
echo "Launching Proton Mail Bridge CLI for configuration."
echo "External access is disabled until you exit."
echo "Run: exit"
echo
fi
runuser -u "${BRIDGE_USER}" -- env HOME="${BRIDGE_HOME}" \
protonmail-bridge -c
if [[ "${FIRST_TIME}" == "1" ]]; then
touch "${MARKER}"
chown "${BRIDGE_USER}:${BRIDGE_USER}" "${MARKER}"
chmod 0644 "${MARKER}"
fi
systemctl enable -q --now protonmail-bridge.service protonmail-bridge-imap.socket protonmail-bridge-smtp.socket
if [[ "${FIRST_TIME}" == "1" ]]; then
echo "Initialization complete. Services enabled and started."
else
echo "Configuration complete. Services enabled and started."
fi
EOF
chmod +x /usr/local/bin/protonmailbridge-configure
ln -sf /usr/local/bin/protonmailbridge-configure /usr/bin/protonmailbridge-configure
msg_ok "Created Helper Commands"
motd_ssh
customize
cleanup_lxc

View File

@@ -116,7 +116,7 @@ else
fi
NODE_VERSION="24" setup_nodejs
PYTHON_VERSION="3.12" setup_uv
PYTHON_VERSION="3.14" setup_uv
fetch_and_deploy_gh_release "shelfmark" "calibrain/shelfmark" "tarball" "latest" "/opt/shelfmark"
RELEASE_VERSION=$(cat "$HOME/.shelfmark")
@@ -130,11 +130,15 @@ mv /opt/shelfmark/src/frontend/dist /opt/shelfmark/frontend-dist
msg_ok "Built Shelfmark frontend"
msg_info "Configuring Shelfmark"
export VIRTUAL_ENV=/opt/shelfmark/venv
cd /opt/shelfmark
$STD uv venv --clear ./venv
$STD source ./venv/bin/activate
$STD uv pip install -r ./requirements-base.txt
[[ "$DEPLOYMENT_TYPE" == "1" ]] && $STD uv pip install -r ./requirements-shelfmark.txt
if [[ "$DEPLOYMENT_TYPE" == "1" ]]; then
$STD uv sync --active --locked --no-default-groups --extra browser
else
$STD uv sync --active --locked --no-default-groups
fi
mkdir -p {/var/log/shelfmark,/tmp/shelfmark}
msg_ok "Configured Shelfmark"

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/Nezreka/SoulSync
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
# gcc/libffi/libssl: build requirements for Python packages;
# libchromaprint-tools + ffmpeg: audio tooling used by the application
$STD apt install -y \
gcc \
libffi-dev \
libssl-dev \
libchromaprint-tools \
ffmpeg
msg_ok "Installed Dependencies"
UV_PYTHON="3.11" setup_uv
fetch_and_deploy_gh_release "soulsync" "Nezreka/SoulSync" "tarball"
msg_info "Setting up Application"
cd /opt/soulsync
# NOTE: Python version is pinned in two places (UV_PYTHON above and --python
# here) — keep them in sync when bumping
$STD uv venv /opt/soulsync/.venv --python 3.11
$STD uv pip install -r requirements.txt --python /opt/soulsync/.venv/bin/python
# Runtime directories; DATABASE_PATH below points into data/
mkdir -p /opt/soulsync/{config,data,logs}
msg_ok "Set up Application"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/soulsync.service
[Unit]
Description=SoulSync Music Discovery
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/soulsync
ExecStart=/opt/soulsync/.venv/bin/python web_server.py
Environment=PYTHONPATH=/opt/soulsync PYTHONUNBUFFERED=1 DATABASE_PATH=/opt/soulsync/data/music_library.db
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now soulsync
msg_ok "Created Service"
motd_ssh
customize
cleanup_lxc

View File

@@ -23,21 +23,34 @@ setup_deb822_repo \
msg_info "Installing step-ca and step-cli"
$STD apt install -y step-ca step-cli
STEPHOME="/root/.step"
export STEPPATH=/etc/step-ca
STEPPATH="/etc/step-ca"
STEPHOME="/etc/step"
export STEPPATH=$STEPPATH
echo "export STEPPATH=${STEPPATH}" >> /etc/profile
export STEPHOME=$STEPHOME
echo "export STEPHOME=${STEPHOME}" >> /etc/profile
sed -i '1i export STEPPATH=/etc/step-ca' /etc/profile
sed -i '1i export STEPHOME=/root/.step' /etc/profile
mkdir -p "$STEPHOME"
setcap CAP_NET_BIND_SERVICE=+eip $(which step-ca)
# Patch for making $STD happy (/usr/bin/step is a symlink to /usr/bin/step-cli)
STEPBIN="$(which step)"
rm -f "$STEPBIN"
cp -f "$(which step-cli)" "$STEPBIN"
$STD useradd --user-group --system --home $(step path) --shell /bin/false step
# Low port-binding capabilities (ports < 1024)
# - Default step-ca listener port: 443
setcap CAP_NET_BIND_SERVICE=+eip "$(which step-ca)"
# Service User used by systemd step-ca.service
$STD useradd --user-group --system --home "$(step path)" --shell /bin/false step
msg_ok "Installed step-ca and step-cli"
DomainName="$(hostname -d)"
PKIName="$(prompt_input "Enter PKIName" "MyHomePKI" 30)"
PKICountry="$(prompt_input "Enter PKICountry" "DE" 30)"
PKIOrganizationalUnit="$(prompt_input "Enter PKIOrganizationalUnit" "MyHomeLab" 30)"
PKIProvisioner="$(prompt_input "Enter PKIProvisioner" "pki@$DomainName" 30)"
AcmeProvisioner="$(prompt_input "Enter AcmeProvisioner" "acme@$DomainName" 30)"
X509MinDur="$(prompt_input "Enter X509MinDur" "48h" 30)"
@@ -45,11 +58,15 @@ X509MaxDur="$(prompt_input "Enter X509MaxDur" "87600h" 30)"
X509DefaultDur="$(prompt_input "Enter X509DefaultDur" "168h" 30)"
msg_info "Initializing step-ca"
# Initialize step-ca
DeploymentType="standalone"
FQDN="$(hostname -f)"
IP="${LOCAL_IP}"
LISTENER=":443"
LISTENER_INSECURE=":80"
# Set different signing CA and Provisioner Passwords
EncryptionPwdDir="$(step path)/encryption"
PwdFile="$EncryptionPwdDir/ca.pwd"
ProvisionerPwdFile="$EncryptionPwdDir/provisioner.pwd"
@@ -57,19 +74,208 @@ mkdir -p "$EncryptionPwdDir"
gpg -q --gen-random --armor 2 32 >"$PwdFile"
gpg -q --gen-random --armor 2 32 >"$ProvisionerPwdFile"
$STD step ca init --deployment-type="$DeploymentType" --ssh --name="$PKIName" --dns="$FQDN" --dns="$IP" --address="$LISTENER" --provisioner="$PKIProvisioner" --password-file="$PwdFile" --provisioner-password-file="$ProvisionerPwdFile"
# Used by systemd step-ca.service
ln -s "$PwdFile" "$(step path)/password.txt"
chown -R step:step $(step path)
chmod -R 700 $(step path)
$STD step ca provisioner add "$AcmeProvisioner" --type ACME --admin-name "$AcmeProvisioner"
$STD step ca provisioner update "$PKIProvisioner" --x509-min-dur="$X509MinDur" --x509-max-dur="$X509MaxDur" --x509-default-dur="$X509DefaultDur" --allow-renewal-after-expiry
$STD step ca provisioner update "$AcmeProvisioner" --x509-min-dur="$X509MinDur" --x509-max-dur="$X509MaxDur" --x509-default-dur="$X509DefaultDur" --allow-renewal-after-expiry
$STD step certificate install --all $(step path)/certs/root_ca.crt
# Usage of:
# - SSH feature of step-ca
# - BadgerDB (badgerv2) => Default DB backend of step-ca
# - badgerFileLoadingMode: FileIO (instead of MemoryMap) for LXC with low RAM
$STD step ca init \
--deployment-type="$DeploymentType" \
--ssh \
--name="$PKIName" \
--dns="$FQDN" \
--dns="$IP" \
--address="$LISTENER" \
--provisioner="$PKIProvisioner" \
--password-file="$PwdFile" \
--provisioner-password-file="$ProvisionerPwdFile"
# Define enhanced x509 CA and Certificate Templates
mkdir -p "$(step path)/templates/ca"
mkdir -p "$(step path)/templates/x509"
CARootTemplate="$(step path)/templates/ca/root.tpl"
CAIntermediateTemplate="$(step path)/templates/ca/intermediate.tpl"
X509LeafTemplate="$(step path)/templates/x509/leaf.tpl"
X509LeafTemplateData="$(step path)/templates/x509/leaf_data.tpl"
cat <<'EOF' >"$CARootTemplate"
{
"subject": {
"country": {{ toJson .Insecure.User.country }},
"organization": {{ toJson .Insecure.User.organization }},
"organizationalUnit": {{ toJson .Insecure.User.organizationalUnit }},
"commonName": {{ toJson .Subject.CommonName }}
},
"issuer": {{ toJson .Subject }},
"keyUsage": ["certSign", "crlSign"],
"basicConstraints": {
"isCA": true,
"maxPathLen": 1
},
"issuingCertificateURL": [{{ toJson .Insecure.User.issuingCertificateURL }}],
"crlDistributionPoints": [{{ toJson .Insecure.User.crlDistributionPoints }}]
}
EOF
cat <<'EOF' >"$CAIntermediateTemplate"
{
"subject": {
"country": {{ toJson .Insecure.User.country }},
"organization": {{ toJson .Insecure.User.organization }},
"organizationalUnit": {{ toJson .Insecure.User.organizationalUnit }},
"commonName": {{ toJson .Subject.CommonName }}
},
"keyUsage": ["certSign", "crlSign"],
"basicConstraints": {
"isCA": true,
"maxPathLen": 0
},
"issuingCertificateURL": [{{ toJson .Insecure.User.issuingCertificateURL }}],
"crlDistributionPoints": [{{ toJson .Insecure.User.crlDistributionPoints }}]
}
EOF
# x509 leaf template: prefer per-provisioner template data (.Insecure.User.*)
# and fall back to the context defaults. NOTE: the template-data keys are
# lowercase ("country" — see the leaf_data.tpl written below) and Go template
# map lookups are case-sensitive, so the guard must test .country, not
# .Country (the capitalized form never matched and the user-supplied country
# was silently skipped).
cat <<'EOF' >"$X509LeafTemplate"
{
  "subject": {
{{- if .Insecure.User.country }}
    "country": {{ toJson .Insecure.User.country }},
{{- else }}
    "country": {{ toJson .country }},
{{- end }}
{{- if .Insecure.User.organization }}
    "organization": {{ toJson .Insecure.User.organization }},
{{- else }}
    "organization": {{ toJson .organization }},
{{- end }}
{{- if .Insecure.User.organizationalUnit }}
    "organizationalUnit": {{ toJson .Insecure.User.organizationalUnit }},
{{- else }}
    "organizationalUnit": {{ toJson .organizationalUnit }},
{{- end }}
    "commonName": {{ toJson .Subject.CommonName }}
  },
  "sans": {{ toJson .SANs }},
{{- if typeIs "*rsa.PublicKey" .Insecure.CR.PublicKey }}
  "keyUsage": ["keyEncipherment", "digitalSignature"],
{{- else }}
  "keyUsage": ["digitalSignature"],
{{- end }}
  "extKeyUsage": ["serverAuth", "clientAuth"],
{{- if .Insecure.User.issuingCertificateURL }}
  "issuingCertificateURL": [{{ toJson .Insecure.User.issuingCertificateURL }}],
{{- else }}
  "issuingCertificateURL": [{{ toJson .issuingCertificateURL }}],
{{- end }}
{{- if .Insecure.User.crlDistributionPoints }}
  "crlDistributionPoints": [{{ toJson .Insecure.User.crlDistributionPoints }}]
{{- else }}
  "crlDistributionPoints": [{{ toJson .crlDistributionPoints }}]
{{- end }}
}
EOF
cat <<EOF >"$X509LeafTemplateData"
{
"country": "${PKICountry}",
"organization": "${PKIName}",
"organizationalUnit": "${PKIOrganizationalUnit}",
"issuingCertificateURL": ["https://${FQDN}${LISTENER}/intermediates.pem"],
"crlDistributionPoints": ["https://${FQDN}${LISTENER}/crl"]
}
EOF
# Configure CA Provisioners, DB and CRL settings
$STD step ca provisioner add "$AcmeProvisioner" \
--type ACME \
--admin-name "$AcmeProvisioner"
$STD step ca provisioner update "$PKIProvisioner" \
--x509-min-dur="$X509MinDur" \
--x509-max-dur="$X509MaxDur" \
--x509-default-dur="$X509DefaultDur" \
--x509-template="$X509LeafTemplate" \
--x509-template-data="$X509LeafTemplateData" \
--allow-renewal-after-expiry
$STD step ca provisioner update "$AcmeProvisioner" \
--x509-min-dur="$X509MinDur" \
--x509-max-dur="$X509MaxDur" \
--x509-default-dur="$X509DefaultDur" \
--x509-template="$X509LeafTemplate" \
--x509-template-data="$X509LeafTemplateData" \
--allow-renewal-after-expiry
CAConfig="$(step path)/config/ca.json"
jq --arg a "${PKICountry}" '.country = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq --arg a "${PKIName}" '.organization = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq --arg a "${PKIOrganizationalUnit}" '.organizationalUnit = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq --arg a "${PKIName} Online CA" '.commonName = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq '.db.badgerFileLoadingMode = "FileIO"' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq '.crl.enabled = true' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq '.crl.generateOnRevoke = true' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq '.crl.cacheDuration = "24h0m0s"' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq '.crl.renewPeriod = "16h0m0s"' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq --arg a "https://${FQDN}${LISTENER}/crl" '.crl.idpURL = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
jq --arg a "$LISTENER_INSECURE" '.insecureAddress = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
# Generate Root CA Certificate and Key
# - Validity: 219168h (~25 Years)
# - maxPathLen: 1 (Root -> Intermediate -> Leaf) => Only one Intermediate CA allowed below Root CA
# - Active revocation on Intermediate CA and Leaf Certificates by the usage of build-in Certificate Revocation List (CRL)
FLAGS=(--force
--template="${CARootTemplate}"
--not-after="219168h"
--password-file="${PwdFile}"
--set country="${PKICountry}"
--set organization="${PKIName}"
--set organizationalUnit="${PKIOrganizationalUnit}"
--set issuingCertificateURL="https://${FQDN}${LISTENER}/roots.pem"
--set crlDistributionPoints="https://${FQDN}${LISTENER}/crl")
$STD step certificate create "${PKIName} Root CA" \
"$(step path)/certs/root_ca.crt" \
"$(step path)/secrets/root_ca_key" \
"${FLAGS[@]}"
# Generate Intermediate CA Certificate Bundle and Key
# - Validity: 175368h (~20 Years)
# - maxPathLen: 0 (Root -> Intermediate -> Leaf) => Intermediate CA is only allowed to issue Leaf Certificates
# - Active revocation on Leaf Certificates by the usage of build-in Certificate Revocation List (CRL)
# - Bundle: Certificate Chain (including Root CA Certificate)
FLAGS=(--force
--template="${CAIntermediateTemplate}"
--ca="$(step path)/certs/root_ca.crt"
--ca-key="$(step path)/secrets/root_ca_key"
--not-after="175368h"
--ca-password-file="${PwdFile}"
--password-file="${PwdFile}"
--bundle
--set country="${PKICountry}"
--set organization="${PKIName}"
--set organizationalUnit="${PKIOrganizationalUnit}"
--set issuingCertificateURL="https://${FQDN}${LISTENER}/roots.pem"
--set crlDistributionPoints="https://${FQDN}${LISTENER}/crl")
$STD step certificate create "${PKIName} Intermediate CA" \
"$(step path)/certs/intermediate_ca.crt" \
"$(step path)/secrets/intermediate_ca_key" \
"${FLAGS[@]}"
# Install Root CA Certificate to System Trust Store
$STD step certificate install --all "$(step path)/certs/root_ca.crt"
$STD update-ca-certificates
chown -R step:step "$(step path)"
chmod -R 700 "$(step path)"
msg_ok "Initialized step-ca"
msg_info "Start step-ca as a Daemon"
# https://smallstep.com/docs/step-ca/certificate-authority-server-production/#running-step-ca-as-a-daemon
cat <<'EOF' >/etc/systemd/system/step-ca.service
[Unit]
Description=step-ca service
@@ -130,271 +336,6 @@ msg_ok "Started step-ca as a Daemon"
fetch_and_deploy_gh_release "step-badger" "lukasz-lobocki/step-badger" "prebuild" "latest" "/opt/step-badger" "step-badger_Linux_x86_64.tar.gz"
ln -s /opt/step-badger/step-badger /usr/local/bin/step-badger
msg_info "Install step-ca Admin script"
mkdir -p "$STEPHOME"
cat <<'ADDON_EOF' >"$STEPHOME/step-ca-admin.sh"
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Joerg Heinemann (heinemannj)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Clear the terminal and print the step-ca-admin ASCII banner.
function header_info() {
clear
cat <<"EOF"
 __ ___ __ _
_____/ /____ ____ _________ _ / | ____/ /___ ___ (_)___
/ ___/ __/ _ \/ __ \______/ ___/ __ `/ / /| |/ __ / __ `__ \/ / __ \
(__ ) /_/ __/ /_/ /_____/ /__/ /_/ / / ___ / /_/ / / / / / / / / / /
/____/\__/\___/ .___/ \___/\__,_/ /_/ |_\__,_/_/ /_/ /_/_/_/ /_/
/_/
EOF
}
# Print an error message to stdout and terminate the script with failure.
# A bare 'exit' here would return the status of the preceding echo (0),
# making every fatal error look like success to callers — exit 1 explicitly.
function die() {
  echo -e "\n${BL}[ERROR]${GN} ${RD}${1}${CL}\n"
  exit 1
}
# Report successful completion and terminate the script with status 0.
function success() {
  local message=$1
  printf '%b\n' "${BL}[SUCCESS]${GN} ${message}${CL}\n"
  exit 0
}
# Build a whiptail checklist argument array from lines of "TAG ITEM..." text.
# Arguments: $1 - newline-separated list; first word is the tag, the rest the item
# Globals (written): MENU_ARRAY      - ("tag" "item " "OFF") triples
#                    MSG_MAX_LENGTH  - width of the longest item plus padding
# The original assigned MSG_MAX_LENGTH=${#ITEM}+OFFSET, which stores the
# literal string "N+OFFSET" and only worked because callers later expanded it
# inside $(( )) with the global OFFSET still set. Use real arithmetic so the
# variable always holds a plain integer.
function whiptail_menu() {
  local OFFSET=2
  MENU_ARRAY=()
  MSG_MAX_LENGTH=0
  while read -r TAG ITEM; do
    ((${#ITEM} + OFFSET > MSG_MAX_LENGTH)) && MSG_MAX_LENGTH=$((${#ITEM} + OFFSET))
    MENU_ARRAY+=("$TAG" "$ITEM " "OFF")
  done < <(echo "$1")
}
# Populate CERT_LIST (global) with step-ca issued x509 certificates.
# step-badger reads a *copy* of the BadgerDB so the live CA database is never
# opened by a second process. The original called step-badger twice (once to
# test, once to capture) and left the second path unquoted; call it once,
# quoted, with an explicit fallback so 'set -e' is not tripped on failure.
function x509_list() {
  CERT_LIST=""
  cp --recursive --force "$(step path)/db/"* "$STEPHOME/db-copy/"
  cp --recursive --force "$(step path)/certs/"* "$STEPHOME/certs/ca/"
  CERT_LIST="$(step-badger x509Certs "${STEPHOME}/db-copy" 2>/dev/null)" || CERT_LIST=""
}
# Populate CERT_LIST (global) with step-ca issued SSH certificates, mirroring
# x509_list. BUG FIX: the original capture line ran 'step-badgersshCerts'
# (missing space between the binary and the subcommand), a nonexistent
# command — CERT_LIST therefore always stayed empty. Also quote the db path.
function ssh_list() {
  CERT_LIST=""
  cp --recursive --force "$(step path)/db/"* "$STEPHOME/db-copy/"
  cp --recursive --force "$(step path)/certs/"* "$STEPHOME/certs/ca/"
  CERT_LIST="$(step-badger sshCerts "${STEPHOME}/db-copy" 2>/dev/null)" || CERT_LIST=""
}
# Resolve the selected serial number to its certificate/key file paths.
# Globals (read):    SERIAL_NUMBER, STEPHOME
# Globals (written): CN, CRT, KEY, CERT_LIST (via x509_list)
# Dies if the expected files under $STEPHOME/certs/x509/ do not exist
# (i.e. the certificate was not requested through this admin script).
function x509_serial_to_cn() {
x509_list
# Grep the step-badger listing for the serial, take the CN=... column
CN="$(echo "${CERT_LIST}" | grep "${SERIAL_NUMBER}" | awk '{print $2}' | sed 's/CN=//g')"
CRT="$STEPHOME/certs/x509/$CN.crt"
KEY="$STEPHOME/certs/x509/$CN.key"
if ! [[ -f ${CRT} ]]; then
die "Certificate ${CRT} not found!"
elif ! [[ -f ${KEY} ]]; then
die "Private Key ${KEY} not found!"
fi
}
# Revoke each selected x509 certificate by serial number.
# Globals (read): CERT_SERIAL_NUMBERS (whitespace-separated serials),
#                 PROVISIONER, PROVISIONER_PASSWORD
# For every serial a one-time revocation token is obtained from the JWK
# provisioner, then 'step ca revoke' is called with it. Exits via success().
function x509_revoke() {
# Intentional word-splitting: CERT_SERIAL_NUMBERS is a space-separated string
# shellcheck disable=SC2206
SERIAL_NUMBER_ARRAY=(${CERT_SERIAL_NUMBERS})
for SERIAL_NUMBER in "${SERIAL_NUMBER_ARRAY[@]}"; do
echo -e "${BL}[Info]${GN} Revoke x509 Certificate with Serial Number ${BL}${SERIAL_NUMBER}${GN}:${CL}"
echo
TOKEN=$(step ca token --provisioner="$PROVISIONER" --provisioner-password-file="$PROVISIONER_PASSWORD" --revoke "${SERIAL_NUMBER}")
step ca revoke --token "$TOKEN" "${SERIAL_NUMBER}" || die "Failed to revoke certificate!"
echo
done
success "Finished."
}
# Renew each selected x509 certificate in place.
# Globals (read): CERT_SERIAL_NUMBERS (whitespace-separated serials)
# Each serial is mapped to its cert/key files via x509_serial_to_cn, then
# 'step ca renew --force' overwrites the certificate. Exits via success().
function x509_renew() {
# Intentional word-splitting: CERT_SERIAL_NUMBERS is a space-separated string
# shellcheck disable=SC2206
SERIAL_NUMBER_ARRAY=(${CERT_SERIAL_NUMBERS})
for SERIAL_NUMBER in "${SERIAL_NUMBER_ARRAY[@]}"; do
echo -e "${BL}[Info]${GN} Renew x509 Certificate with Serial Number ${BL}${SERIAL_NUMBER}${GN}:${CL}"
echo
x509_serial_to_cn
step ca renew "${CRT}" "${KEY}" --force || die "Failed to renew certificate!"
echo
done
success "Finished."
}
# Inspect each selected x509 certificate and print its public/private key
# material to the terminal.
# Globals (read): CERT_SERIAL_NUMBERS (whitespace-separated serials)
# After inspecting, cross-checks that the serial number actually appears in
# the inspected certificate and dies on mismatch. Exits via success().
function x509_inspect() {
# Intentional word-splitting: CERT_SERIAL_NUMBERS is a space-separated string
# shellcheck disable=SC2206
SERIAL_NUMBER_ARRAY=(${CERT_SERIAL_NUMBERS})
for SERIAL_NUMBER in "${SERIAL_NUMBER_ARRAY[@]}"; do
echo -e "${BL}[Info]${GN} Inspect x509 Certificate with Serial Number ${BL}${SERIAL_NUMBER}${GN}:${CL}\n"
x509_serial_to_cn
step certificate inspect "${CRT}" || die "Failed to inspect certificate!"
# Sanity check: the file on disk must contain the serial we selected
if ! [[ $(step certificate inspect "${CRT}" | grep "${SERIAL_NUMBER}") ]]; then
die "Serial Number ${SERIAL_NUMBER} mismatch!"
fi
echo -e "\n${BL}[Info]${GN} Public Key:${CL}\n"
cat "${CRT}"
echo -e "\n${BL}[Info]${GN} Private Key:${CL}\n"
cat "${KEY}"
echo
done
success "Finished."
}
# Interactively request a new x509 leaf certificate.
# Prompts (whiptail) for FQDN, IP, hostname, extra SANs and validity; loops
# until the user confirms the summary. The FQDN must resolve via dig, or the
# script dies. Issues the certificate with the JWK provisioner and prints the
# resulting key material. Exits via success().
# Globals (read): PROVISIONER, PROVISIONER_PASSWORD, STEPHOME
function x509_request() {
FQDN=""
SAN=""
while true; do
FQDN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nFQDN (e.g. MyLXC.example.com)' 10 50 "$FQDN" 3>&1 1>&2 2>&3)
# DNS resolution doubles as input validation; also pre-fills the IP prompt
IP=$(dig +short "$FQDN")
if [[ -z "$IP" ]]; then
die "Resolution failed for $FQDN!"
fi
HOST=$(echo "$FQDN" | awk -F'.' '{print $1}')
IP=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nIP Address (e.g. x.x.x.x)' 10 50 "$IP" 3>&1 1>&2 2>&3)
HOST=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nHostname (e.g. MyHostName)' 10 50 "$HOST" 3>&1 1>&2 2>&3)
SAN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nSubject Alternative Name(s) (SAN) (e.g. myapp-1.example.com, myapp-2.example.com)' 10 50 "$SAN" 3>&1 1>&2 2>&3)
VALID_TO=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nValidity (e.g. 2034-01-31T00:00:00Z)' 10 50 "2034-01-31T00:00:00Z" 3>&1 1>&2 2>&3)
# shellcheck disable=SC2034
if whiptail_yesno=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --yesno "Continue with below?\n
FQDN: $FQDN
Hostname: $HOST
IP Address: $IP
Subject Alternative Name(s) (SAN): $SAN
Validity: $VALID_TO" --no-button "Change" --yes-button "Continue" 15 70 3>&1 1>&2 2>&3); then
break
fi
done
echo -e "${BL}[Info]${GN} Request x509 Certificate with subject ${BL}${FQDN}${GN}:${CL}"
echo
CRT="$STEPHOME/certs/x509/$FQDN.crt"
KEY="$STEPHOME/certs/x509/$FQDN.key"
# Aggregate FQDN, bare hostname, IP and any user-entered SANs into --san args
SAN="$FQDN, $HOST, $IP, $SAN"
IFS=', ' read -r -a array <<< "$SAN"
for element in "${array[@]}"
do
SAN_ARRAY+=(--san "$element")
done
step ca certificate "$FQDN" "$CRT" "$KEY" \
--provisioner="$PROVISIONER" \
--provisioner-password-file="$PROVISIONER_PASSWORD" \
--not-after="$VALID_TO" \
"${SAN_ARRAY[@]}" \
|| die "Failed to request certificate!"
echo -e "\n${BL}[Info]${GN} Inspect Certificate:${CL}\n"
step certificate inspect "${CRT}" || die "Failed to inspect certificate!"
echo -e "\n${BL}[Info]${GN} Public Key:${CL}\n"
cat "${CRT}"
echo -e "\n${BL}[Info]${GN} Private Key:${CL}\n"
cat "${KEY}"
echo
success "Finished."
}
set -eEuo pipefail
# shellcheck disable=SC2034
# shellcheck disable=SC2116
# shellcheck disable=SC2028
YW=$(echo "\033[33m")
# shellcheck disable=SC2116
# shellcheck disable=SC2028
BL=$(echo "\033[36m")
# shellcheck disable=SC2116
# shellcheck disable=SC2028
RD=$(echo "\033[01;31m")
# shellcheck disable=SC2034
CM='\xE2\x9C\x94\033'
# shellcheck disable=SC2116
# shellcheck disable=SC2028
GN=$(echo "\033[1;92m")
# shellcheck disable=SC2116
# shellcheck disable=SC2028
CL=$(echo "\033[m")
# Telemetry
# shellcheck disable=SC1090
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/api.func) 2>/dev/null || true
declare -f init_tool_telemetry &>/dev/null && init_tool_telemetry "step-ca-admin" "step-ca"
header_info
mkdir --parents "$STEPHOME/db-copy/"
mkdir --parents "$STEPHOME/certs/ca/_archive/"
mkdir --parents "$STEPHOME/certs/ssh/_archive/"
mkdir --parents "$STEPHOME/certs/x509/_archive/"
PROVISIONER=$(jq '.authority.provisioners.[] | select(.type=="JWK") | .name' "$(step path)"/config/ca.json)
PROVISIONER="${PROVISIONER#\"}"
PROVISIONER="${PROVISIONER%\"}"
PROVISIONER_PASSWORD=$(step path)/encryption/provisioner.pwd
whiptail --backtitle "Proxmox VE Helper Scripts" --title "step-ca Admin" --yesno "This will maintain step-ca issued x509 and ssh Certificates. Proceed?" 10 58
MENU_ARRAY=("x509" "Maintain x509 Certificates." "ON")
MENU_ARRAY+=("ssh" "Maintain ssh Certificates." "OFF")
CERT_TYPE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "step-ca Admin" --radiolist "\nSelect Certificate Type:" 16 48 6 "${MENU_ARRAY[@]}" 3>&1 1>&2 2>&3 | tr -d '"')
[[ -z ${CERT_TYPE} ]] && die "No Certificate Type selected!"
case ${CERT_TYPE} in
("x509")
x509_list
CERT_LIST=$(echo "$CERT_LIST" | awk 'NR>1 {print $1 " " $2 "|" $3 "|" $4 "|" $5}')
if [[ $CERT_LIST ]]; then
whiptail_menu "$CERT_LIST"
else
MENU_ARRAY=()
MSG_MAX_LENGTH=2
fi
MENU_ARRAY+=("" "Create a new Certificate" "OFF")
CERT_SERIAL_NUMBERS=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificates on $(hostname)" --checklist "\nSelect Certificate(s) to maintain:\n" 16 $((MSG_MAX_LENGTH + 55)) 6 "${MENU_ARRAY[@]}" 3>&1 1>&2 2>&3 | tr -d '"')
[[ -z ${CERT_SERIAL_NUMBERS} ]] && x509_request
MENU_ARRAY=("Renew" "Renew x509 Certificates." "ON")
MENU_ARRAY+=("Revoke" "Revoke x509 Certificates." "OFF")
MENU_ARRAY+=("Inspect" "Inspect x509 Certificates." "OFF")
CERT_MAINTENANCE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "step-ca Admin" --radiolist "\nSelect Maintenance Type:" 16 48 6 "${MENU_ARRAY[@]}" 3>&1 1>&2 2>&3 | tr -d '"')
case ${CERT_MAINTENANCE} in
("Renew")
x509_renew "${CERT_SERIAL_NUMBERS[@]}"
;;
("Revoke")
x509_revoke "${CERT_SERIAL_NUMBERS[@]}"
;;
("Inspect")
x509_inspect "${CERT_SERIAL_NUMBERS[@]}"
;;
*)
die "Unsupported CERT_MAINTENANCE Option!"
;;
esac
;;
("ssh")
die "Maintain ssh Certificates - To be implemented in future"
;;
*)
die "Unsupported CERT_TYPE Option!"
;;
esac
ADDON_EOF
chmod 700 "$STEPHOME/step-ca-admin.sh"
msg_ok "Installed step-ca Admin script"
motd_ssh
customize
cleanup_lxc

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/storybookjs/storybook
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
msg_info "Preparing Storybook"
mkdir -p /opt/storybook
cd /opt/storybook
msg_ok "Important: Interactive configuration will start now."
# Deliberately NOT wrapped in $STD: 'storybook init' is interactive and must
# talk to the user's terminal. --no-dev skips launching the dev server here.
npx -y storybook@latest init --yes --no-dev
# The init wizard may scaffold into a subdirectory; locate the project by its
# .storybook directory, falling back to /opt/storybook if none is found.
PROJECT_PATH=$(find /opt/storybook -maxdepth 2 -name ".storybook" -type d 2>/dev/null | head -n1 | xargs dirname)
if [[ -z "$PROJECT_PATH" ]]; then
PROJECT_PATH="/opt/storybook"
fi
cd "$PROJECT_PATH"
# Persist the detected path (presumably consumed by the update script — TODO confirm)
echo "$PROJECT_PATH" >/opt/storybook/.projectpath
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/storybook.service
[Unit]
Description=Storybook Dev Server
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=${PROJECT_PATH}
ExecStart=/usr/bin/npx storybook dev --host 0.0.0.0 --port 6006 --no-open
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now storybook
msg_ok "Created Service"
motd_ssh
customize
cleanup_lxc

94
install/teable-install.sh Normal file
View File

@@ -0,0 +1,94 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/teableio/teable
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y \
build-essential \
python3 \
git
msg_ok "Installed Dependencies"
NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
PG_VERSION="16" setup_postgresql
# Creates the database/role; presumably also sets PG_DB_PASS, which the
# migration and .env steps below consume — TODO confirm against the helper
PG_DB_NAME="teable" PG_DB_USER="teable" setup_postgresql_db
fetch_and_deploy_gh_release "teable" "teableio/teable" "tarball"
msg_info "Setting up Teable"
cd /opt/teable
# fetch_and_deploy_gh_release records the deployed version in ~/.teable;
# surface it to the Next.js frontend build
TEABLE_VERSION=$(cat ~/.teable)
echo "NEXT_PUBLIC_BUILD_VERSION=\"${TEABLE_VERSION}\"" >>apps/nextjs-app/.env
# Disable git hooks during install; raise the Node heap for the monorepo build
export HUSKY=0
export NODE_OPTIONS="--max-old-space-size=8192"
$STD pnpm install --frozen-lockfile
$STD pnpm -F @teable/db-main-prisma prisma-generate --schema ./prisma/postgres/schema.prisma
msg_ok "Set up Teable"
msg_info "Building Teable"
NODE_ENV=production NEXT_BUILD_ENV_TYPECHECK=false \
$STD pnpm -r --filter '!playground' run build
msg_ok "Built Teable"
msg_info "Running Database Migrations"
PRISMA_DATABASE_URL="postgresql://teable:${PG_DB_PASS}@localhost:5432/teable?schema=public" \
$STD pnpm -F @teable/db-main-prisma prisma-migrate deploy --schema ./prisma/postgres/schema.prisma
msg_ok "Ran Database Migrations"
msg_info "Configuring Teable"
mkdir -p /opt/teable/.assets /opt/teable/.temporary
SECRET_KEY=$(openssl rand -base64 32)
cat <<EOF >/opt/teable/.env
PRISMA_DATABASE_URL=postgresql://teable:${PG_DB_PASS}@localhost:5432/teable?schema=public&statement_cache_size=1
PUBLIC_ORIGIN=http://${LOCAL_IP}:3000
SECRET_KEY=${SECRET_KEY}
PORT=3000
NODE_ENV=production
NEXT_TELEMETRY_DISABLED=1
BACKEND_CACHE_PROVIDER=sqlite
BACKEND_CACHE_SQLITE_URI=sqlite:///opt/teable/.assets/.cache.db
NEXTJS_DIR=apps/nextjs-app
EOF
# /app symlink mirrors the upstream Docker layout expected by the backend
ln -sf /opt/teable /app
# Point /opt/teable/static at whichever static layout this release ships
# (some releases nest static/static, others a flat static/)
rm -rf /opt/teable/static
if [ -d "/opt/teable/apps/nestjs-backend/static/static" ]; then
ln -sf /opt/teable/apps/nestjs-backend/static/static /opt/teable/static
else
ln -sf /opt/teable/apps/nestjs-backend/static /opt/teable/static
fi
msg_ok "Configured Teable"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/teable.service
[Unit]
Description=Teable
After=network.target postgresql.service
[Service]
Type=simple
WorkingDirectory=/opt/teable
EnvironmentFile=/opt/teable/.env
ExecStart=/usr/bin/node apps/nestjs-backend/dist/index.js
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now teable
msg_ok "Created Service"
motd_ssh
customize
cleanup_lxc

View File

@@ -28,6 +28,7 @@ fetch_and_deploy_from_url "https://download.technitium.com/dns/DnsServerPortable
echo "${RELEASE}" >~/.technitium
msg_info "Creating service"
mkdir -p /etc/dns /var/log/technitium/dns
sed -i '/^User=/d;/^Group=/d' /opt/technitium/dns/systemd.service
cp /opt/technitium/dns/systemd.service /etc/systemd/system/technitium.service
systemctl enable -q --now technitium

View File

@@ -20,9 +20,7 @@ $STD apt install -y \
vlc
msg_ok "Installed Dependencies"
fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
mv /root/.threadfin /root/.threadfin_version
mkdir -p /root/.threadfin
fetch_and_deploy_gh_release "threadfin-app" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/threadfin.service

79
install/trek-install.sh Normal file
View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/mauriceboe/TREK
# Installs the TREK travel planner inside a fresh LXC container:
# builds the client, wires it into the Node server, and creates a
# systemd service listening on port 3000.
# Load the shared helper functions (msg_*, setup_*, fetch_*, ...) that the
# build pipeline passes in via FUNCTIONS_FILE_PATH.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y build-essential
msg_ok "Installed Dependencies"
# Node 22 runtime; TREK is deployed from a source tarball into /opt/trek.
NODE_VERSION="22" setup_nodejs
fetch_and_deploy_gh_release "trek" "mauriceboe/TREK" "tarball"
msg_info "Building Client"
cd /opt/trek/client
$STD npm ci
$STD npm run build
msg_ok "Built Client"
msg_info "Setting up Server"
cd /opt/trek/server
$STD npm ci
# Serve the built client from the server's public/ directory.
mkdir -p /opt/trek/server/public
cp -r /opt/trek/client/dist/* /opt/trek/server/public/
# Fonts are optional in some releases; copy is best-effort by design.
cp -r /opt/trek/client/public/fonts /opt/trek/server/public/fonts 2>/dev/null || true
# Keep mutable state (data/uploads) outside the server tree and symlink it in,
# so a later app update can replace /opt/trek/server without losing user data.
mkdir -p /opt/trek/{data/logs,uploads/{files,covers,avatars,photos}}
rm -rf /opt/trek/server/data /opt/trek/server/uploads
ln -s /opt/trek/data /opt/trek/server/data
ln -s /opt/trek/uploads /opt/trek/server/uploads
# Generated credentials end up only in the .env below (chmod 600); the admin
# password can be read there after install.
ENCRYPTION_KEY=$(openssl rand -hex 32)
ADMIN_EMAIL="admin@trek.local"
ADMIN_PASSWORD=$(openssl rand -base64 18 | tr -dc 'A-Za-z0-9' | head -c 16)
# Unquoted heredoc: the generated values above are expanded into the env file.
cat <<EOF >/opt/trek/server/.env
NODE_ENV=production
PORT=3000
ENCRYPTION_KEY=${ENCRYPTION_KEY}
ADMIN_EMAIL=${ADMIN_EMAIL}
ADMIN_PASSWORD=${ADMIN_PASSWORD}
COOKIE_SECURE=false
FORCE_HTTPS=false
LOG_LEVEL=info
TZ=UTC
EOF
chmod 600 /opt/trek/server/.env
msg_ok "Set up Server"
msg_info "Creating Service"
# NOTE(review): ExecStart relies on the tsx loader being resolvable from
# /opt/trek/server/node_modules — confirm `npm ci` installs it.
cat <<EOF >/etc/systemd/system/trek.service
[Unit]
Description=TREK Travel Planner
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/trek/server
EnvironmentFile=/opt/trek/server/.env
ExecStart=/usr/bin/node --import tsx src/index.ts
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now trek
msg_ok "Created Service"
motd_ssh
customize
cleanup_lxc

View File

@@ -0,0 +1,294 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/tubearchivist/tubearchivist
# Load the shared helper functions (msg_*, setup_*, fetch_*, ...) passed in
# via FUNCTIONS_FILE_PATH by the build pipeline.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y \
build-essential \
git \
nginx \
redis-server \
atomicparsley \
python3-dev \
libldap2-dev \
libsasl2-dev \
libssl-dev \
sqlite3 \
ffmpeg
msg_ok "Installed Dependencies"
# Python 3.13 via uv, Node 24 for the frontend build, and a deno prebuild
# dropped into /usr/local/bin.
UV_PYTHON="3.13" setup_uv
NODE_VERSION="24" setup_nodejs
fetch_and_deploy_gh_release "deno" "denoland/deno" "prebuild" "latest" "/usr/local/bin" "deno-x86_64-unknown-linux-gnu.zip"
msg_info "Installing ElasticSearch"
# deb822-style APT repo for the Elastic 8.x packages.
setup_deb822_repo \
"elastic-8.x" \
"https://artifacts.elastic.co/GPG-KEY-elasticsearch" \
"https://artifacts.elastic.co/packages/8.x/apt" \
"stable" \
"main"
ES_JAVA_OPTS="-Xms1g -Xmx1g" $STD apt install -y elasticsearch
msg_ok "Installed ElasticSearch"
msg_info "Configuring ElasticSearch"
# Single-node, localhost-only, security disabled; snapshots under path.repo.
cat <<EOF >/etc/elasticsearch/elasticsearch.yml
cluster.name: tubearchivist
path.data: /var/lib/elasticsearch
path.logs: /var/log/elasticsearch
path.repo: ["/var/lib/elasticsearch/snapshot"]
network.host: 127.0.0.1
xpack.security.enabled: false
xpack.security.transport.ssl.enabled: false
xpack.security.http.ssl.enabled: false
EOF
mkdir -p /var/lib/elasticsearch/snapshot
chown -R elasticsearch:elasticsearch /var/lib/elasticsearch/snapshot
# Pin the JVM heap to 1 GiB for the running service.
cat <<EOF >/etc/elasticsearch/jvm.options.d/heap.options
-Xms1g
-Xmx1g
EOF
# Best-effort live change (may not be permitted inside a container); the
# sysctl.d drop-in persists the value for boots where it is applicable.
sysctl -w vm.max_map_count=262144 2>/dev/null || true
cat <<EOF >/etc/sysctl.d/99-elasticsearch.conf
vm.max_map_count=262144
EOF
systemctl enable -q --now elasticsearch
msg_ok "Configured ElasticSearch"
fetch_and_deploy_gh_release "tubearchivist" "tubearchivist/tubearchivist" "tarball"
msg_info "Building Frontend"
cd /opt/tubearchivist/frontend
$STD npm install
$STD npm run build:deploy
# Ship the built frontend from the backend's static directory.
mkdir -p /opt/tubearchivist/backend/static
cp -r /opt/tubearchivist/frontend/dist/* /opt/tubearchivist/backend/static/
msg_ok "Built Frontend"
msg_info "Setting up Tube Archivist"
# Reuse the upstream docker entrypoint helper as the backend start script.
cp /opt/tubearchivist/docker_assets/backend_start.py /opt/tubearchivist/backend/
$STD uv venv /opt/tubearchivist/.venv
$STD uv pip install --python /opt/tubearchivist/.venv/bin/python -r /opt/tubearchivist/backend/requirements.txt
# Optional yt-dlp plugins (only present in some releases) are installed into
# a dedicated target dir referenced by YTDLP_PLUGIN_DIRS below.
if [[ -f /opt/tubearchivist/backend/requirements.plugins.txt ]]; then
mkdir -p /opt/yt_plugins/bgutil
$STD uv pip install --python /opt/tubearchivist/.venv/bin/python --target /opt/yt_plugins/bgutil -r /opt/tubearchivist/backend/requirements.plugins.txt
fi
# Random 13-char alphanumeric credentials; saved to ~/tubearchivist.creds below.
TA_PASSWORD=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
ES_PASSWORD=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
# Mimic the docker layout: /cache and /youtube are symlinks into /opt.
mkdir -p /opt/tubearchivist/{cache,media}
ln -sf /opt/tubearchivist/cache /cache
ln -sf /opt/tubearchivist/media /youtube
# Unquoted heredoc: ${LOCAL_IP} and the generated passwords expand now.
cat <<EOF >/opt/tubearchivist/.env
TA_HOST=http://${LOCAL_IP}:8000
TA_USERNAME=admin
TA_PASSWORD=${TA_PASSWORD}
TA_BACKEND_PORT=8080
TA_APP_DIR=/opt/tubearchivist/backend
TA_CACHE_DIR=/cache
TA_MEDIA_DIR=/youtube
ES_SNAPSHOT_DIR=/var/lib/elasticsearch/snapshot
ELASTIC_PASSWORD=${ES_PASSWORD}
REDIS_CON=redis://localhost:6379
ES_URL=http://localhost:9200
TZ=UTC
PYTHONUNBUFFERED=1
YTDLP_PLUGIN_DIRS=/opt/yt_plugins
EOF
# Persist the generated credentials for the user.
{
echo "Tube Archivist Credentials"
echo "=========================="
echo "Username: admin"
echo "Password: ${TA_PASSWORD}"
echo "Elasticsearch Password: ${ES_PASSWORD}"
} >~/tubearchivist.creds
systemctl enable -q --now redis-server
msg_ok "Set up Tube Archivist"
msg_info "Configuring Nginx"
# Run the nginx workers as root — presumably so they can read the root-owned
# /cache and /youtube trees; verify before changing.
sed -i 's/^user www-data;$/user root;/' /etc/nginx/nginx.conf
# Quoted heredoc ('EOF'): $http_host, $uri etc. stay literal for nginx.
# Port 8000 fronts the backend on 8080; media/cache paths are gated by an
# auth_request subcall to the backend's /api/ping/ endpoint.
cat <<'EOF' >/etc/nginx/sites-available/default
server {
listen 8000;
location = /_auth {
internal;
proxy_pass http://localhost:8080/api/ping/;
proxy_pass_request_body off;
proxy_set_header Content-Length "";
proxy_set_header Host $http_host;
proxy_set_header Cookie $http_cookie;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
location /cache/videos/ {
auth_request /_auth;
alias /cache/videos/;
}
location /cache/channels/ {
auth_request /_auth;
alias /cache/channels/;
}
location /cache/playlists/ {
auth_request /_auth;
alias /cache/playlists/;
}
location /media/ {
auth_request /_auth;
alias /youtube/;
types {
text/vtt vtt;
}
}
location /youtube/ {
auth_request /_auth;
alias /youtube/;
types {
video/mp4 mp4;
}
}
location /api {
include proxy_params;
proxy_pass http://localhost:8080;
}
location /admin {
include proxy_params;
proxy_pass http://localhost:8080;
}
location /static/ {
alias /opt/tubearchivist/backend/staticfiles/;
}
root /opt/tubearchivist/backend/static;
index index.html;
location ~* ^/(?!static/|cache/).*\.(?:css|js|png|jpg|jpeg|gif|ico|svg|woff2?)$ {
try_files $uri $uri/ /index.html =404;
}
location = /index.html {
add_header Cache-Control "no-store, no-cache, must-revalidate";
add_header Pragma "no-cache";
expires 0;
}
location / {
add_header Cache-Control "no-store, no-cache, must-revalidate";
add_header Pragma "no-cache";
expires 0;
try_files $uri $uri/ /index.html =404;
}
}
EOF
systemctl enable -q nginx
# Restart (not reload) so the changed 'user' directive takes effect.
systemctl restart nginx
msg_ok "Configured Nginx"
msg_info "Creating Services"
# Quoted heredoc ('RUNEOF'): written literally; all expansion happens when
# run.sh executes at service start. The script waits for ElasticSearch
# (up to 60s), runs Django migrations/checks, then execs the backend.
cat <<'RUNEOF' >/opt/tubearchivist/backend/run.sh
#!/bin/bash
set -e
cd /opt/tubearchivist/backend
set -a
source /opt/tubearchivist/.env
set +a
PYTHON=/opt/tubearchivist/.venv/bin/python
echo "Waiting for ElasticSearch..."
for i in $(seq 1 30); do
if curl -sf http://localhost:9200/_cluster/health >/dev/null 2>&1; then
break
fi
sleep 2
done
$PYTHON manage.py migrate
$PYTHON manage.py collectstatic --noinput -c
$PYTHON manage.py ta_envcheck
$PYTHON manage.py ta_connection
$PYTHON manage.py ta_startup
exec $PYTHON backend_start.py
RUNEOF
chmod +x /opt/tubearchivist/backend/run.sh
# The backend loads .env relative to its working directory; link it in.
ln -sf /opt/tubearchivist/.env /opt/tubearchivist/backend/.env
cat <<EOF >/etc/systemd/system/tubearchivist.service
[Unit]
Description=Tube Archivist Backend
After=network.target elasticsearch.service redis-server.service
[Service]
Type=simple
User=root
WorkingDirectory=/opt/tubearchivist/backend
EnvironmentFile=/opt/tubearchivist/.env
Environment=PATH=/opt/tubearchivist/.venv/bin:/usr/local/bin:/usr/bin:/bin
ExecStart=/opt/tubearchivist/backend/run.sh
Restart=on-failure
RestartSec=10
[Install]
WantedBy=multi-user.target
EOF
cat <<EOF >/etc/systemd/system/tubearchivist-celery.service
[Unit]
Description=Tube Archivist Celery Worker
After=tubearchivist.service redis-server.service elasticsearch.service
[Service]
Type=simple
User=root
WorkingDirectory=/opt/tubearchivist/backend
EnvironmentFile=/opt/tubearchivist/.env
Environment=PATH=/opt/tubearchivist/.venv/bin:/usr/local/bin:/usr/bin:/bin
ExecStart=/opt/tubearchivist/.venv/bin/celery -A task worker --loglevel=error --concurrency=4 --max-tasks-per-child=5 --max-memory-per-child=150000
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
# Beat unit: the escaped \$(seq ...) keeps the loop literal for systemd;
# ExecStartPre blocks (up to ~120s) until the Django beat tables exist so the
# DatabaseScheduler doesn't start before migrations have run.
cat <<EOF >/etc/systemd/system/tubearchivist-beat.service
[Unit]
Description=Tube Archivist Celery Beat
After=tubearchivist.service redis-server.service
[Service]
Type=simple
User=root
WorkingDirectory=/opt/tubearchivist/backend
EnvironmentFile=/opt/tubearchivist/.env
Environment=PATH=/opt/tubearchivist/.venv/bin:/usr/local/bin:/usr/bin:/bin
ExecStartPre=/bin/bash -c 'for i in \$(seq 1 60); do sqlite3 /cache/db.sqlite3 "SELECT 1 FROM django_celery_beat_crontabschedule LIMIT 1" 2>/dev/null && exit 0; sleep 2; done; exit 1'
ExecStart=/opt/tubearchivist/.venv/bin/celery -A task beat --loglevel=error --scheduler django_celery_beat.schedulers:DatabaseScheduler
Restart=always
RestartSec=5
RuntimeMaxSec=3600
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now tubearchivist tubearchivist-celery tubearchivist-beat
msg_ok "Created Services"
motd_ssh
customize
cleanup_lxc

View File

@@ -3504,6 +3504,52 @@ msg_menu() {
return 0
}
# ------------------------------------------------------------------------------
# run_addon_updates()
#
# - Scans /usr/local/bin/update_* for addon update scripts installed alongside
#   the main application (e.g. by tools/addon/*.sh)
# - For each found addon, prompts the user (60s timeout, default no) whether
#   it should be updated as well
# - Skipped entirely when PHS_SILENT=1 to keep unattended updates predictable
# ------------------------------------------------------------------------------
run_addon_updates() {
  # Enable nullglob only for the glob expansion, then restore the caller's
  # previous setting instead of unconditionally switching it off (the old
  # `shopt -u nullglob` clobbered a pre-existing nullglob state).
  local had_nullglob=0
  shopt -q nullglob && had_nullglob=1
  shopt -s nullglob
  local addons=(/usr/local/bin/update_*)
  ((had_nullglob)) || shopt -u nullglob

  # Nothing installed -> nothing to do.
  ((${#addons[@]} == 0)) && return 0

  # Unattended mode must never block on an interactive prompt.
  if [[ "${PHS_SILENT:-0}" == "1" ]]; then
    msg_info "Detected ${#addons[@]} addon update script(s) - skipping (PHS_SILENT)"
    return 0
  fi

  echo
  echo -e "${INFO}${YW} Detected installed addon update script(s):${CL}"
  local a name
  for a in "${addons[@]}"; do
    echo -e "${TAB}- ${a##*/update_}"
  done
  echo

  local ans
  for a in "${addons[@]}"; do
    name="${a##*/update_}"
    printf 'Do you also want to update addon "%s"? (y/N) [60s]: ' "$name"
    ans=""
    # read fails on timeout; emit a newline so the prompt line is terminated.
    if read -r -t 60 ans; then :; else echo; fi
    case "${ans,,}" in
    y | yes)
      # $? in the message expands to the exit status of the failed script.
      bash "$a" || msg_warn "Addon update for $name failed (rc=$?)"
      ;;
    *)
      msg_info "Skipped addon: $name"
      ;;
    esac
  done
}
# ------------------------------------------------------------------------------
# start()
#
@@ -3523,6 +3569,7 @@ start() {
ensure_profile_loaded
get_lxc_ip
update_script
run_addon_updates
update_motd_ip
cleanup_lxc
else
@@ -3551,6 +3598,7 @@ start() {
ensure_profile_loaded
get_lxc_ip
update_script
run_addon_updates
update_motd_ip
cleanup_lxc
fi
@@ -5456,14 +5504,14 @@ create_lxc_container() {
local _has_fallback_option=false
if [[ "$do_retry" == "yes" ]] && has_previous_os_version_template; then
_has_fallback_option=true
echo " [1] Run host upgrade now (recommended). WARNING: this runs apt upgrade and updates all Packeages on your host!"
echo " [1] Run host upgrade now (recommended). WARNING: this runs apt upgrade and updates all Packages on your host!"
echo " [2] Use an older ${PCT_OSTYPE} template instead (may not work with all scripts)"
echo " [3] Ignore"
echo " [4] Cancel"
echo
read -rp "Select option [1/2/3/4]: " _ans </dev/tty
else
echo " [1] Run host upgrade now (recommended). WARNING: this runs apt upgrade and updates all Packeages on your host!"
echo " [1] Run host upgrade now (recommended). WARNING: this runs apt upgrade and updates all Packages on your host!"
echo " [2] Ignore"
echo " [3] Cancel"
echo
@@ -5678,6 +5726,10 @@ create_lxc_container() {
if ! pvesm status -content rootdir 2>/dev/null | awk 'NR>1{print $1}' | grep -qx "$CONTAINER_STORAGE"; then
msg_error "Storage '$CONTAINER_STORAGE' ($STORAGE_TYPE) does not support 'rootdir' content."
msg_custom "💡" "${YW}" "Enable 'Disk image' (rootdir) for storage '${CONTAINER_STORAGE}' in:"
msg_custom " " "${YW}" "Datacenter → Storage → ${CONTAINER_STORAGE} → Edit → Content"
msg_custom "📖" "${YW}" "See: https://pve.proxmox.com/wiki/Storage"
msg_custom "🔗" "${YW}" "Help: https://github.com/community-scripts/ProxmoxVE/discussions"
exit 213
fi
msg_ok "Storage '$CONTAINER_STORAGE' ($STORAGE_TYPE) validated"

View File

@@ -1924,8 +1924,8 @@ setup_deb822_repo() {
echo "Types: deb"
echo "URIs: $repo_url"
echo "Suites: $suite"
# Flat repositories (suite="./" or absolute path) must not have Components
if [[ "$suite" != "./" && -n "$component" ]]; then
# Flat repositories (suite ending with "/" or "./") must not have Components
if [[ "$suite" != *"/" && -n "$component" ]]; then
echo "Components: $component"
fi
[[ -n "$architectures" ]] && echo "Architectures: $architectures"
@@ -5964,14 +5964,14 @@ function setup_mariadb_db() {
}
# ------------------------------------------------------------------------------
# Installs or updates MongoDB to specified major version.
# Installs or updates MongoDB to specified version.
#
# Description:
# - Preserves data across installations
# - Adds official MongoDB repo
#
# Variables:
# MONGO_VERSION - MongoDB major version to install (e.g. 7.0, 8.0)
# MONGO_VERSION - MongoDB version to install (e.g. 7.0, 8.2)
# ------------------------------------------------------------------------------
function setup_mongodb() {
@@ -6044,8 +6044,11 @@ function setup_mongodb() {
}
# Setup repository
# MongoDB 8.x versions beyond 8.0 reuse the server-8.0.asc PGP key
local MONGO_KEY_VERSION="${MONGO_VERSION}"
[[ "${MONGO_VERSION}" == 8.[1-9]* ]] && MONGO_KEY_VERSION="8.0"
manage_tool_repository "mongodb" "$MONGO_VERSION" "$MONGO_BASE_URL" \
"https://www.mongodb.org/static/pgp/server-${MONGO_VERSION}.asc" || {
"https://www.mongodb.org/static/pgp/server-${MONGO_KEY_VERSION}.asc" || {
msg_error "Failed to setup MongoDB repository"
return 100
}

View File

@@ -151,6 +151,23 @@ function check_proxmox_host() {
# ==============================================================================
# CHECK / INSTALL DOCKER
# ==============================================================================
# Guarantee the openssl CLI is present; install it when missing.
# Supports Alpine (apk) and Debian-family (apt-get); aborts with exit 10
# when neither package manager is available.
function ensure_openssl() {
  # Already on PATH -> nothing to do.
  command -v openssl &>/dev/null && return

  msg_info "Installing openssl"
  if [[ -f /etc/alpine-release ]]; then
    # Alpine container.
    $STD apk add openssl
  elif command -v apt-get &>/dev/null; then
    # Debian/Ubuntu: refresh package indexes first.
    $STD apt-get update
    $STD apt-get install -y openssl
  else
    msg_error "openssl is required but could not be installed automatically."
    exit 10
  fi
  msg_ok "Installed openssl"
}
function check_or_install_docker() {
if command -v docker &>/dev/null; then
msg_ok "Docker $(docker --version | cut -d' ' -f3 | tr -d ',') is available"
@@ -160,6 +177,7 @@ function check_or_install_docker() {
msg_error "Docker Compose plugin is not available. Please install it."
exit 10
fi
ensure_openssl
return
fi
@@ -183,6 +201,8 @@ function check_or_install_docker() {
$STD sh <(curl -fsSL https://get.docker.com)
fi
msg_ok "Installed Docker"
ensure_openssl
}
# ==============================================================================

View File

@@ -405,11 +405,6 @@ for container in $CHOICE; do
esac
exit_code=$?
if [ "$template" == "false" ] && [ "$status" == "status: stopped" ]; then
echo -e "${BL}[Info]${GN} Shutting down${BL} $container ${CL} \n"
pct shutdown $container &
fi
#5) if build resources are different than run resources, then:
if [ "$UPDATE_BUILD_RESOURCES" -eq "1" ]; then
pct set "$container" --cores "$run_cpu" --memory "$run_ram"
@@ -421,6 +416,11 @@ for container in $CHOICE; do
containers_needing_reboot+=("$container ($container_hostname)")
fi
if [ "$template" == "false" ] && [ "$status" == "status: stopped" ]; then
echo -e "${BL}[Info]${GN} Shutting down${BL} $container ${CL} \n"
pct shutdown $container &>/dev/null &
fi
if [ $exit_code -eq 0 ]; then
msg_ok "Updated container $container"
elif [ $exit_code -eq 75 ]; then

View File

@@ -66,10 +66,20 @@ for container in $(pct list | awk '{if(NR>1) print $1}'); do
pct start "$container"
sleep 5
update_container "$container" || echo " [Error] Update failed for $container"
# check if patchmon agent is present in container and run a report if found
if pct exec "$container" -- [ -e "/usr/local/bin/patchmon-agent" ]; then
echo -e "${BL}[Info]${GN} patchmon-agent found in ${BL} $container ${CL}, triggering report. \n"
pct exec "$container" -- "/usr/local/bin/patchmon-agent" "report"
fi
echo -e "[Info] Shutting down $container"
pct shutdown "$container" --timeout 60 &
elif [ "$status" == "status: running" ]; then
update_container "$container" || echo " [Error] Update failed for $container"
# check if patchmon agent is present in container and run a report if found
if pct exec "$container" -- [ -e "/usr/local/bin/patchmon-agent" ]; then
echo -e "${BL}[Info]${GN} patchmon-agent found in ${BL} $container ${CL}, triggering report. \n"
pct exec "$container" -- "/usr/local/bin/patchmon-agent" "report"
fi
fi
fi
done

View File

@@ -110,15 +110,17 @@ for container in $(pct list | awk '{if(NR>1) print $1}'); do
elif [ "$status" == "status: running" ]; then
update_container $container
fi
if pct exec "$container" -- [ -e "/var/run/reboot-required" ]; then
# Get the container's hostname and add it to the list
container_hostname=$(pct exec "$container" hostname)
containers_needing_reboot+=("$container ($container_hostname)")
fi
# check if patchmon agent is present in container and run a report if found
if pct exec "$container" -- [ -e "/usr/local/bin/patchmon-agent" ]; then
echo -e "${BL}[Info]${GN} patchmon-agent found in ${BL} $container ${CL}, triggering report. \n"
pct exec "$container" -- "/usr/local/bin/patchmon-agent" "report"
if [ "$status" == "status: running" ]; then
if pct exec "$container" -- [ -e "/var/run/reboot-required" ]; then
# Get the container's hostname and add it to the list
container_hostname=$(pct exec "$container" hostname)
containers_needing_reboot+=("$container ($container_hostname)")
fi
# check if patchmon agent is present in container and run a report if found
if pct exec "$container" -- [ -e "/usr/local/bin/patchmon-agent" ]; then
echo -e "${BL}[Info]${GN} patchmon-agent found in ${BL} $container ${CL}, triggering report. \n"
pct exec "$container" -- "/usr/local/bin/patchmon-agent" "report"
fi
fi
fi
done

View File

@@ -738,7 +738,24 @@ done
msg_info "Creating a OPNsense VM"
qm create $VMID -agent 1${MACHINE} -tablet 0 -localtime 1 -bios ovmf${CPU_TYPE} -cores $CORE_COUNT -memory $RAM_SIZE \
-name $HN -tags community-script -net0 virtio,bridge=$BRG,macaddr=$MAC$VLAN$MTU -onboot 1 -ostype l26 -scsihw virtio-scsi-pci
pvesm alloc $STORAGE $VMID $DISK0 4M &>/dev/null
# Retry pvesm alloc on transient zfs_request "got timeout" errors (#14127)
# Up to 4 attempts with exponential backoff (5s, 10s, 20s).
alloc_attempt=1
alloc_max=4
alloc_delay=5
while :; do
# Capture stderr only (stdout is discarded); success breaks out of the loop.
alloc_err=$(pvesm alloc $STORAGE $VMID $DISK0 4M 2>&1 >/dev/null) && break
if [[ "$alloc_err" == *"got timeout"* && $alloc_attempt -lt $alloc_max ]]; then
msg_warn "pvesm alloc hit zfs timeout (attempt $alloc_attempt/$alloc_max), retrying in ${alloc_delay}s..."
# Best-effort cleanup of a possibly half-created volume before retrying.
# NOTE(review): assumes DISK0_REF was set by earlier (off-screen) code.
pvesm free "${DISK0_REF}" &>/dev/null || true
sleep "$alloc_delay"
alloc_attempt=$((alloc_attempt + 1))
alloc_delay=$((alloc_delay * 2))
continue
fi
# Non-timeout error, or retries exhausted: surface the error and abort.
echo -e "$alloc_err" >&2
exit 220
done
qm importdisk $VMID ${FILE} $STORAGE ${DISK_IMPORT:-} &>/dev/null
qm set $VMID \
-efidisk0 ${DISK0_REF}${FORMAT} \