mirror of
https://github.com/community-scripts/ProxmoxVE.git
synced 2026-05-15 13:04:56 +02:00
Compare commits
98 Commits
2026-05-03
...
2026-05-11
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7b1c510b53 | ||
|
|
0a38dabb59 | ||
|
|
e332d74536 | ||
|
|
c0c34cc4c8 | ||
|
|
a30c5f9ed4 | ||
|
|
41590d777e | ||
|
|
a04983674a | ||
|
|
07abb105d0 | ||
|
|
b15e84e2ba | ||
|
|
2fddbd9c67 | ||
|
|
b6c78b7ff0 | ||
|
|
1abf0f8b6d | ||
|
|
e05f2d94b4 | ||
|
|
8b57c42cb4 | ||
|
|
ebaa526560 | ||
|
|
bd6117eb6a | ||
|
|
26c94d8613 | ||
|
|
9dddf45a89 | ||
|
|
e3b7f01056 | ||
|
|
8b387313cf | ||
|
|
806c68df91 | ||
|
|
3352c8a63e | ||
|
|
5c9facd1f1 | ||
|
|
5be86d4fdf | ||
|
|
719cbca50f | ||
|
|
d7b0c2f4c6 | ||
|
|
86280881b4 | ||
|
|
a2c2c0ff09 | ||
|
|
2b11d05e7d | ||
|
|
28311987ae | ||
|
|
ae3b3eb3de | ||
|
|
6217669d66 | ||
|
|
a13b28e714 | ||
|
|
e0c7fa3295 | ||
|
|
f07c463c88 | ||
|
|
7b7aecef02 | ||
|
|
24d7b943e9 | ||
|
|
36f8e1f320 | ||
|
|
90a8b450ed | ||
|
|
409c0aad1b | ||
|
|
d3cdf27a77 | ||
|
|
c09ac3fb31 | ||
|
|
e39ce3285f | ||
|
|
d996b5a719 | ||
|
|
3f445acf9a | ||
|
|
90bc1ae1e5 | ||
|
|
83575e5972 | ||
|
|
6329ad7fa2 | ||
|
|
a450266925 | ||
|
|
02eaf288bf | ||
|
|
24fbf24c6d | ||
|
|
001bd8bf93 | ||
|
|
03bd701926 | ||
|
|
61f3e32827 | ||
|
|
a3844707ad | ||
|
|
91fe10ec4b | ||
|
|
cd21d98854 | ||
|
|
cb7d9037fb | ||
|
|
cd314ddb3f | ||
|
|
45b9103657 | ||
|
|
8a6655b7d1 | ||
|
|
32a4239f28 | ||
|
|
1fc7368ff9 | ||
|
|
9d9d763e63 | ||
|
|
28ae38e502 | ||
|
|
be2e3a4a3a | ||
|
|
3c02868add | ||
|
|
07cd4c0c3e | ||
|
|
f11edd21c9 | ||
|
|
9fc822d936 | ||
|
|
bf1c32ace8 | ||
|
|
611021ae8c | ||
|
|
12eb19ae4c | ||
|
|
c8dc0cffe0 | ||
|
|
95c7628362 | ||
|
|
50b3c3ae7f | ||
|
|
752fff3c8f | ||
|
|
129e221664 | ||
|
|
21c064464a | ||
|
|
6317e7a867 | ||
|
|
739e0aa41e | ||
|
|
a3e147cf20 | ||
|
|
4e9352572f | ||
|
|
686657e8ec | ||
|
|
9b8302cba0 | ||
|
|
5a6392d95f | ||
|
|
160c198731 | ||
|
|
b91ec6f7bc | ||
|
|
a7ddc3502b | ||
|
|
9bf64f60b9 | ||
|
|
559cfff56a | ||
|
|
b353063720 | ||
|
|
26b41d74ee | ||
|
|
812f8ed1c7 | ||
|
|
75c5aa3d5d | ||
|
|
12e7cb1777 | ||
|
|
7f9e1ce4d8 | ||
|
|
d118f101d8 |
131
.github/changelogs/2026/05.md
generated
vendored
131
.github/changelogs/2026/05.md
generated
vendored
@@ -1,3 +1,134 @@
|
||||
## 2026-05-09
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- FlowiseAI: Migrate to pnpm [@MickLesk](https://github.com/MickLesk) ([#14344](https://github.com/community-scripts/ProxmoxVE/pull/14344))
|
||||
- Purge openresty [@lucacome](https://github.com/lucacome) ([#14353](https://github.com/community-scripts/ProxmoxVE/pull/14353))
|
||||
- Check for release for Sonarr [@lucacome](https://github.com/lucacome) ([#14354](https://github.com/community-scripts/ProxmoxVE/pull/14354))
|
||||
- fix(termix-install.sh): add tmpfiles.d persistence and systemd PIDFile path [@runnylogan](https://github.com/runnylogan) ([#14350](https://github.com/community-scripts/ProxmoxVE/pull/14350))
|
||||
- ERPNext: start bench Redis services before bench new-site [@MickLesk](https://github.com/MickLesk) ([#14343](https://github.com/community-scripts/ProxmoxVE/pull/14343))
|
||||
- [Hotfix]Jotty: use absolute path when creating data dir [@vhsdream](https://github.com/vhsdream) ([#14355](https://github.com/community-scripts/ProxmoxVE/pull/14355))
|
||||
|
||||
## 2026-05-08
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- wishlist: pin pnpm to v10 to match engine requirements [@MickLesk](https://github.com/MickLesk) ([#14342](https://github.com/community-scripts/ProxmoxVE/pull/14342))
|
||||
- [pelican] fix env copy regression [@LetterN](https://github.com/LetterN) ([#14328](https://github.com/community-scripts/ProxmoxVE/pull/14328))
|
||||
- fix(homepage): fix ERR_PNPM_IGNORED_BUILDS error [@Sergih28](https://github.com/Sergih28) ([#14315](https://github.com/community-scripts/ProxmoxVE/pull/14315))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- tools.func: add setup_nltk as new function [@MickLesk](https://github.com/MickLesk) ([#14314](https://github.com/community-scripts/ProxmoxVE/pull/14314))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- tools.func: fix meilisearch import-dump background process handling [@MickLesk](https://github.com/MickLesk) ([#14341](https://github.com/community-scripts/ProxmoxVE/pull/14341))
|
||||
|
||||
## 2026-05-07
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- termix: create /tmp/nginx before nginx -t [@MickLesk](https://github.com/MickLesk) ([#14312](https://github.com/community-scripts/ProxmoxVE/pull/14312))
|
||||
- The Lounge: Fix service not starting automaticaly [@tremor021](https://github.com/tremor021) ([#14311](https://github.com/community-scripts/ProxmoxVE/pull/14311))
|
||||
- netbird-lxc: fix installation check [@MickLesk](https://github.com/MickLesk) ([#14309](https://github.com/community-scripts/ProxmoxVE/pull/14309))
|
||||
- databasus: Backup and secure configuration file [@MickLesk](https://github.com/MickLesk) ([#14308](https://github.com/community-scripts/ProxmoxVE/pull/14308))
|
||||
- vm: update disk image URL for Ubuntu 25.04 [@MickLesk](https://github.com/MickLesk) ([#14290](https://github.com/community-scripts/ProxmoxVE/pull/14290))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- pangolin: bump version to 1.18.3 [@MickLesk](https://github.com/MickLesk) ([#14297](https://github.com/community-scripts/ProxmoxVE/pull/14297))
|
||||
|
||||
### 🗑️ Deleted Scripts
|
||||
|
||||
- Remove: LiteLLM [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#14294](https://github.com/community-scripts/ProxmoxVE/pull/14294))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- update-apps: some improvements [@MickLesk](https://github.com/MickLesk) ([#14275](https://github.com/community-scripts/ProxmoxVE/pull/14275))
|
||||
|
||||
## 2026-05-06
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- Hoodik ([#14279](https://github.com/community-scripts/ProxmoxVE/pull/14279))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Pelican-Panel: create backup subdirectory before copying storage [@MickLesk](https://github.com/MickLesk) ([#14274](https://github.com/community-scripts/ProxmoxVE/pull/14274))
|
||||
- Rustdeskserver: remove redundant else with undefined RELEASE var [@MickLesk](https://github.com/MickLesk) ([#14272](https://github.com/community-scripts/ProxmoxVE/pull/14272))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- AdguardHome-Sync replace ifconfig with hostname -I for IP detection [@MickLesk](https://github.com/MickLesk) ([#14273](https://github.com/community-scripts/ProxmoxVE/pull/14273))
|
||||
|
||||
## 2026-05-05
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- LibreChat ([#14247](https://github.com/community-scripts/ProxmoxVE/pull/14247))
|
||||
- Matomo ([#14248](https://github.com/community-scripts/ProxmoxVE/pull/14248))
|
||||
- Storyteller ([#14122](https://github.com/community-scripts/ProxmoxVE/pull/14122))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- Fix container count message in update-apps.sh [@Quotacious](https://github.com/Quotacious) ([#14265](https://github.com/community-scripts/ProxmoxVE/pull/14265))
|
||||
|
||||
## 2026-05-04
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Databasus: move .env to filesystem root so service starts correctly [@Copilot](https://github.com/Copilot) ([#14252](https://github.com/community-scripts/ProxmoxVE/pull/14252))
|
||||
- Databasus: update mongo-tools fallback to 100.16.1 and use now pnpm instead of npm ci [@MickLesk](https://github.com/MickLesk) ([#14240](https://github.com/community-scripts/ProxmoxVE/pull/14240))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- tools.func get_latest_gh_tag - add pagination to find prefixed tags beyond first 50 [@MickLesk](https://github.com/MickLesk) ([#14241](https://github.com/community-scripts/ProxmoxVE/pull/14241))
|
||||
- tools.func: add GitLab release check/fetch/deploy helpers [@MickLesk](https://github.com/MickLesk) ([#14242](https://github.com/community-scripts/ProxmoxVE/pull/14242))
|
||||
|
||||
## 2026-05-03
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Hortusfox: fix update issues [@tomfrenzel](https://github.com/tomfrenzel) ([#14214](https://github.com/community-scripts/ProxmoxVE/pull/14214))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- Refactor: PeaNUT for v6 [@MickLesk](https://github.com/MickLesk) ([#14224](https://github.com/community-scripts/ProxmoxVE/pull/14224))
|
||||
- pangolin: pin version, drop manual SQL, use upstream migrator [@MickLesk](https://github.com/MickLesk) ([#14223](https://github.com/community-scripts/ProxmoxVE/pull/14223))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- core: fix validate_bridge function [@MichaelOultram](https://github.com/MichaelOultram) ([#14206](https://github.com/community-scripts/ProxmoxVE/pull/14206))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- pve/pbs scripts: guard sed against missing /etc/apt/sources.list [@MickLesk](https://github.com/MickLesk) ([#14222](https://github.com/community-scripts/ProxmoxVE/pull/14222))
|
||||
|
||||
## 2026-05-02
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
282
CHANGELOG.md
282
CHANGELOG.md
@@ -47,6 +47,9 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -60,7 +63,7 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
||||
|
||||
|
||||
<details>
|
||||
<summary><h4>May (2 entries)</h4></summary>
|
||||
<summary><h4>May (9 entries)</h4></summary>
|
||||
|
||||
[View May 2026 Changelog](.github/changelogs/2026/05.md)
|
||||
|
||||
@@ -458,6 +461,155 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
||||
|
||||
</details>
|
||||
|
||||
## 2026-05-11
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- Lychee ([#14424](https://github.com/community-scripts/ProxmoxVE/pull/14424))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Termix: fix nginx pid path and log paths on update (#) [@MickLesk](https://github.com/MickLesk) ([#14419](https://github.com/community-scripts/ProxmoxVE/pull/14419))
|
||||
- Nginxproxymanager: restore NPM nginx.conf after OpenResty rebuid [@MickLesk](https://github.com/MickLesk) ([#14421](https://github.com/community-scripts/ProxmoxVE/pull/14421))
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- InvestBrain: add commented reverse proxy config hints to .env [@MickLesk](https://github.com/MickLesk) ([#14422](https://github.com/community-scripts/ProxmoxVE/pull/14422))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Cronmaster: fix unexpected EOF in update_cronmaster script [@MickLesk](https://github.com/MickLesk) ([#14420](https://github.com/community-scripts/ProxmoxVE/pull/14420))
|
||||
|
||||
## 2026-05-10
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Save Beszel version [@lucacome](https://github.com/lucacome) ([#14389](https://github.com/community-scripts/ProxmoxVE/pull/14389))
|
||||
- karakeep: Fix SERVER_VERSION update [@MickLesk](https://github.com/MickLesk) ([#14378](https://github.com/community-scripts/ProxmoxVE/pull/14378))
|
||||
- inspIRCd: Fix service not autostarting [@tremor021](https://github.com/tremor021) ([#14368](https://github.com/community-scripts/ProxmoxVE/pull/14368))
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- refactor: webcheck [@CrazyWolf13](https://github.com/CrazyWolf13) ([#14391](https://github.com/community-scripts/ProxmoxVE/pull/14391))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- [tools.func]: Pin `pnpm` version [@tremor021](https://github.com/tremor021) ([#14386](https://github.com/community-scripts/ProxmoxVE/pull/14386))
|
||||
|
||||
## 2026-05-09
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- FlowiseAI: Migrate to pnpm [@MickLesk](https://github.com/MickLesk) ([#14344](https://github.com/community-scripts/ProxmoxVE/pull/14344))
|
||||
- Purge openresty [@lucacome](https://github.com/lucacome) ([#14353](https://github.com/community-scripts/ProxmoxVE/pull/14353))
|
||||
- Check for release for Sonarr [@lucacome](https://github.com/lucacome) ([#14354](https://github.com/community-scripts/ProxmoxVE/pull/14354))
|
||||
- fix(termix-install.sh): add tmpfiles.d persistence and systemd PIDFile path [@runnylogan](https://github.com/runnylogan) ([#14350](https://github.com/community-scripts/ProxmoxVE/pull/14350))
|
||||
- ERPNext: start bench Redis services before bench new-site [@MickLesk](https://github.com/MickLesk) ([#14343](https://github.com/community-scripts/ProxmoxVE/pull/14343))
|
||||
- [Hotfix]Jotty: use absolute path when creating data dir [@vhsdream](https://github.com/vhsdream) ([#14355](https://github.com/community-scripts/ProxmoxVE/pull/14355))
|
||||
|
||||
## 2026-05-08
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- wishlist: pin pnpm to v10 to match engine requirements [@MickLesk](https://github.com/MickLesk) ([#14342](https://github.com/community-scripts/ProxmoxVE/pull/14342))
|
||||
- [pelican] fix env copy regression [@LetterN](https://github.com/LetterN) ([#14328](https://github.com/community-scripts/ProxmoxVE/pull/14328))
|
||||
- fix(homepage): fix ERR_PNPM_IGNORED_BUILDS error [@Sergih28](https://github.com/Sergih28) ([#14315](https://github.com/community-scripts/ProxmoxVE/pull/14315))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- tools.func: add setup_nltk as new function [@MickLesk](https://github.com/MickLesk) ([#14314](https://github.com/community-scripts/ProxmoxVE/pull/14314))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- tools.func: fix meilisearch import-dump background process handling [@MickLesk](https://github.com/MickLesk) ([#14341](https://github.com/community-scripts/ProxmoxVE/pull/14341))
|
||||
|
||||
## 2026-05-07
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- termix: create /tmp/nginx before nginx -t [@MickLesk](https://github.com/MickLesk) ([#14312](https://github.com/community-scripts/ProxmoxVE/pull/14312))
|
||||
- The Lounge: Fix service not starting automaticaly [@tremor021](https://github.com/tremor021) ([#14311](https://github.com/community-scripts/ProxmoxVE/pull/14311))
|
||||
- netbird-lxc: fix installation check [@MickLesk](https://github.com/MickLesk) ([#14309](https://github.com/community-scripts/ProxmoxVE/pull/14309))
|
||||
- databasus: Backup and secure configuration file [@MickLesk](https://github.com/MickLesk) ([#14308](https://github.com/community-scripts/ProxmoxVE/pull/14308))
|
||||
- vm: update disk image URL for Ubuntu 25.04 [@MickLesk](https://github.com/MickLesk) ([#14290](https://github.com/community-scripts/ProxmoxVE/pull/14290))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- pangolin: bump version to 1.18.3 [@MickLesk](https://github.com/MickLesk) ([#14297](https://github.com/community-scripts/ProxmoxVE/pull/14297))
|
||||
|
||||
### 🗑️ Deleted Scripts
|
||||
|
||||
- Remove: LiteLLM [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#14294](https://github.com/community-scripts/ProxmoxVE/pull/14294))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- update-apps: some improvements [@MickLesk](https://github.com/MickLesk) ([#14275](https://github.com/community-scripts/ProxmoxVE/pull/14275))
|
||||
|
||||
## 2026-05-06
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- Hoodik ([#14279](https://github.com/community-scripts/ProxmoxVE/pull/14279))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Pelican-Panel: create backup subdirectory before copying storage [@MickLesk](https://github.com/MickLesk) ([#14274](https://github.com/community-scripts/ProxmoxVE/pull/14274))
|
||||
- Rustdeskserver: remove redundant else with undefined RELEASE var [@MickLesk](https://github.com/MickLesk) ([#14272](https://github.com/community-scripts/ProxmoxVE/pull/14272))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- AdguardHome-Sync replace ifconfig with hostname -I for IP detection [@MickLesk](https://github.com/MickLesk) ([#14273](https://github.com/community-scripts/ProxmoxVE/pull/14273))
|
||||
|
||||
## 2026-05-05
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- LibreChat ([#14247](https://github.com/community-scripts/ProxmoxVE/pull/14247))
|
||||
- Matomo ([#14248](https://github.com/community-scripts/ProxmoxVE/pull/14248))
|
||||
- Storyteller ([#14122](https://github.com/community-scripts/ProxmoxVE/pull/14122))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- Fix container count message in update-apps.sh [@Quotacious](https://github.com/Quotacious) ([#14265](https://github.com/community-scripts/ProxmoxVE/pull/14265))
|
||||
|
||||
## 2026-05-04
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Databasus: move .env to filesystem root so service starts correctly [@Copilot](https://github.com/Copilot) ([#14252](https://github.com/community-scripts/ProxmoxVE/pull/14252))
|
||||
- Databasus: update mongo-tools fallback to 100.16.1 and use now pnpm instead of npm ci [@MickLesk](https://github.com/MickLesk) ([#14240](https://github.com/community-scripts/ProxmoxVE/pull/14240))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- tools.func get_latest_gh_tag - add pagination to find prefixed tags beyond first 50 [@MickLesk](https://github.com/MickLesk) ([#14241](https://github.com/community-scripts/ProxmoxVE/pull/14241))
|
||||
- tools.func: add GitLab release check/fetch/deploy helpers [@MickLesk](https://github.com/MickLesk) ([#14242](https://github.com/community-scripts/ProxmoxVE/pull/14242))
|
||||
|
||||
## 2026-05-03
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
@@ -962,130 +1114,4 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- addons: Filebrowser & Filebrowser-Quantum get warning if host install [@MickLesk](https://github.com/MickLesk) ([#13639](https://github.com/community-scripts/ProxmoxVE/pull/13639))
|
||||
|
||||
## 2026-04-09
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- boostack: add: git [@CrazyWolf13](https://github.com/CrazyWolf13) ([#13620](https://github.com/community-scripts/ProxmoxVE/pull/13620))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- Update OPNsense version from 25.7 to 26.1 [@tdn131](https://github.com/tdn131) ([#13626](https://github.com/community-scripts/ProxmoxVE/pull/13626))
|
||||
- CheckMK: Bump Default OS to 13 (trixie) + dynamic codename + fix RELEASE-Tag Fetching [@MickLesk](https://github.com/MickLesk) ([#13610](https://github.com/community-scripts/ProxmoxVE/pull/13610))
|
||||
|
||||
## 2026-04-08
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- IronClaw | Alpine-IronClaw ([#13591](https://github.com/community-scripts/ProxmoxVE/pull/13591))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- immich: disable upgrade-insecure-requests CSP directive [@MickLesk](https://github.com/MickLesk) ([#13600](https://github.com/community-scripts/ProxmoxVE/pull/13600))
|
||||
- Immich: v2.7.2 [@vhsdream](https://github.com/vhsdream) ([#13579](https://github.com/community-scripts/ProxmoxVE/pull/13579))
|
||||
- Update flaresolverr-install.sh [@maztheman](https://github.com/maztheman) ([#13584](https://github.com/community-scripts/ProxmoxVE/pull/13584))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- bambuddy: add mkdir before data restore & add ffmpeg dependency [@MickLesk](https://github.com/MickLesk) ([#13601](https://github.com/community-scripts/ProxmoxVE/pull/13601))
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- feat: update UHF Server script to use setup_ffmpeg [@zackwithak13](https://github.com/zackwithak13) ([#13564](https://github.com/community-scripts/ProxmoxVE/pull/13564))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- core: add script page badges to descriptions | change donate URL [@MickLesk](https://github.com/MickLesk) ([#13596](https://github.com/community-scripts/ProxmoxVE/pull/13596))
|
||||
|
||||
## 2026-04-07
|
||||
|
||||
### 🗑️ Deleted Scripts
|
||||
|
||||
- Remove low-install-count CT scripts and installers [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#13570](https://github.com/community-scripts/ProxmoxVE/pull/13570))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- core: improve resilience for top Proxmox error codes (209, 215, 118, 206) [@MickLesk](https://github.com/MickLesk) ([#13575](https://github.com/community-scripts/ProxmoxVE/pull/13575))
|
||||
|
||||
## 2026-04-06
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- OpenThread Border Router ([#13536](https://github.com/community-scripts/ProxmoxVE/pull/13536))
|
||||
- Homelable ([#13539](https://github.com/community-scripts/ProxmoxVE/pull/13539))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Papra: check env before copy [@MickLesk](https://github.com/MickLesk) ([#13553](https://github.com/community-scripts/ProxmoxVE/pull/13553))
|
||||
- changedetection: fix: typing_extensions error [@CrazyWolf13](https://github.com/CrazyWolf13) ([#13548](https://github.com/community-scripts/ProxmoxVE/pull/13548))
|
||||
- kasm: fix: fetch latest version [@CrazyWolf13](https://github.com/CrazyWolf13) ([#13547](https://github.com/community-scripts/ProxmoxVE/pull/13547))
|
||||
|
||||
## 2026-04-05
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Grist: remove install:ee step (private repo, not needed for grist-core) [@MickLesk](https://github.com/MickLesk) ([#13526](https://github.com/community-scripts/ProxmoxVE/pull/13526))
|
||||
- Nginx Proxy Manager: ensure /tmp/nginx/body exists via openresty service [@MickLesk](https://github.com/MickLesk) ([#13528](https://github.com/community-scripts/ProxmoxVE/pull/13528))
|
||||
- MotionEye: run as root to enable SMB share support [@MickLesk](https://github.com/MickLesk) ([#13527](https://github.com/community-scripts/ProxmoxVE/pull/13527))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- core: silent() function - use return instead of exit to allow || true error handling [@MickLesk](https://github.com/MickLesk) ([#13529](https://github.com/community-scripts/ProxmoxVE/pull/13529))
|
||||
|
||||
## 2026-04-04
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- komodo: set `PERIPHERY_CORE_PUBLIC_KEYS` to default value if absent [@4ndv](https://github.com/4ndv) ([#13519](https://github.com/community-scripts/ProxmoxVE/pull/13519))
|
||||
|
||||
## 2026-04-03
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- netboot.xyz ([#13480](https://github.com/community-scripts/ProxmoxVE/pull/13480))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- OpenWRT-VM: use poweroff instead of halt to properly stop VM [@MickLesk](https://github.com/MickLesk) ([#13504](https://github.com/community-scripts/ProxmoxVE/pull/13504))
|
||||
- NginxProxyManager: fix openresty restart by setting user root before reload [@MickLesk](https://github.com/MickLesk) ([#13500](https://github.com/community-scripts/ProxmoxVE/pull/13500))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- Crafty Controller: add Java 25 for Minecraft 1.26.1+ [@MickLesk](https://github.com/MickLesk) ([#13502](https://github.com/community-scripts/ProxmoxVE/pull/13502))
|
||||
- Wealthfolio: update to v3.2.1 and Node.js 24 [@afadil](https://github.com/afadil) ([#13486](https://github.com/community-scripts/ProxmoxVE/pull/13486))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- core.func: prevent profile.d scripts from aborting on non-zero exit [@MickLesk](https://github.com/MickLesk) ([#13503](https://github.com/community-scripts/ProxmoxVE/pull/13503))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- APT Proxy: Support full URLs (http/https with custom ports) [@MickLesk](https://github.com/MickLesk) ([#13474](https://github.com/community-scripts/ProxmoxVE/pull/13474))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- PVE LXC-Updater: pipe apt list through cat to prevent pager hang [@MickLesk](https://github.com/MickLesk) ([#13501](https://github.com/community-scripts/ProxmoxVE/pull/13501))
|
||||
- addons: Filebrowser & Filebrowser-Quantum get warning if host install [@MickLesk](https://github.com/MickLesk) ([#13639](https://github.com/community-scripts/ProxmoxVE/pull/13639))
|
||||
@@ -36,7 +36,9 @@ function update_script() {
|
||||
msg_info "Updating Beszel"
|
||||
$STD /opt/beszel/beszel update
|
||||
sleep 2 && chmod +x /opt/beszel/beszel
|
||||
msg_ok "Updated Beszel"
|
||||
VERSION=$(/opt/beszel/beszel -v | awk '{print $3}')
|
||||
echo "${VERSION}" >$HOME/.beszel
|
||||
msg_ok "Updated Beszel to ${VERSION}"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start beszel-hub
|
||||
|
||||
@@ -35,7 +35,10 @@ function update_script() {
|
||||
msg_ok "Stopped Databasus"
|
||||
|
||||
msg_info "Backing up Configuration"
|
||||
cp /opt/databasus/.env /opt/databasus.env.bak
|
||||
[[ ! -f /.env && -f /opt/databasus/.env ]] && cp /opt/databasus/.env /.env
|
||||
chmod 600 /.env
|
||||
cp /.env /opt/databasus.env.bak
|
||||
chmod 600 /opt/databasus.env.bak
|
||||
msg_ok "Backed up Configuration"
|
||||
|
||||
msg_info "Ensuring Database Clients"
|
||||
@@ -46,7 +49,7 @@ function update_script() {
|
||||
# Install MongoDB Database Tools via direct .deb (no APT repo for Debian 13)
|
||||
if ! command -v mongodump &>/dev/null; then
|
||||
[[ "$(get_os_info id)" == "ubuntu" ]] && MONGO_DIST="ubuntu2204" || MONGO_DIST="debian12"
|
||||
fetch_and_deploy_from_url "https://fastdl.mongodb.org/tools/db/mongodb-database-tools-${MONGO_DIST}-x86_64-100.14.1.deb"
|
||||
fetch_and_deploy_from_url "https://fastdl.mongodb.org/tools/db/mongodb-database-tools-${MONGO_DIST}-x86_64-100.16.1.deb"
|
||||
fi
|
||||
[[ -f /usr/bin/mongodump ]] && ln -sf /usr/bin/mongodump /usr/local/mongodb-database-tools/bin/mongodump
|
||||
[[ -f /usr/bin/mongorestore ]] && ln -sf /usr/bin/mongorestore /usr/local/mongodb-database-tools/bin/mongorestore
|
||||
@@ -66,9 +69,12 @@ function update_script() {
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "databasus" "databasus/databasus" "tarball" "latest" "/opt/databasus"
|
||||
|
||||
msg_info "Updating Databasus"
|
||||
export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
|
||||
cd /opt/databasus/frontend
|
||||
$STD npm ci
|
||||
$STD npm run build
|
||||
$STD corepack enable
|
||||
$STD corepack prepare pnpm@latest --activate
|
||||
$STD pnpm install --frozen-lockfile
|
||||
$STD pnpm run build
|
||||
cd /opt/databasus/backend
|
||||
$STD go mod download
|
||||
$STD /root/go/bin/swag init -g cmd/main.go -o swagger
|
||||
@@ -81,11 +87,18 @@ function update_script() {
|
||||
msg_ok "Updated Databasus"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
cp /opt/databasus.env.bak /opt/databasus/.env
|
||||
cp /opt/databasus.env.bak /.env
|
||||
rm -f /opt/databasus.env.bak
|
||||
chown postgres:postgres /opt/databasus/.env
|
||||
chmod 600 /.env
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
if ! grep -q "EnvironmentFile=/.env" /etc/systemd/system/databasus.service; then
|
||||
msg_info "Updating Service"
|
||||
sed -i 's|EnvironmentFile=.*|EnvironmentFile=/.env|' /etc/systemd/system/databasus.service
|
||||
$STD systemctl daemon-reload
|
||||
msg_ok "Updated Service"
|
||||
fi
|
||||
|
||||
msg_info "Starting Databasus"
|
||||
$STD systemctl start databasus
|
||||
msg_ok "Started Databasus"
|
||||
|
||||
@@ -27,9 +27,16 @@ function update_script() {
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
NODE_VERSION="20" NODE_MODULE="pnpm" setup_nodejs
|
||||
|
||||
msg_info "Updating FlowiseAI (this may take some time)"
|
||||
systemctl stop flowise
|
||||
$STD npm install -g flowise --upgrade
|
||||
$STD pnpm add -g flowise
|
||||
if grep -q 'ExecStart=npx flowise start' /etc/systemd/system/flowise.service; then
|
||||
sed -i 's|ExecStart=npx flowise start|ExecStart=flowise start|' /etc/systemd/system/flowise.service
|
||||
systemctl daemon-reload
|
||||
fi
|
||||
systemctl start flowise
|
||||
msg_ok "Updated FlowiseAI"
|
||||
msg_ok "Updated successfully!"
|
||||
|
||||
6
ct/headers/hoodik
Normal file
6
ct/headers/hoodik
Normal file
@@ -0,0 +1,6 @@
|
||||
__ __ ___ __
|
||||
/ / / /___ ____ ____/ (_) /__
|
||||
/ /_/ / __ \/ __ \/ __ / / //_/
|
||||
/ __ / /_/ / /_/ / /_/ / / ,<
|
||||
/_/ /_/\____/\____/\__,_/_/_/|_|
|
||||
|
||||
6
ct/headers/librechat
Normal file
6
ct/headers/librechat
Normal file
@@ -0,0 +1,6 @@
|
||||
__ _ __ ________ __
|
||||
/ / (_) /_ ________ / ____/ /_ ____ _/ /_
|
||||
/ / / / __ \/ ___/ _ \/ / / __ \/ __ `/ __/
|
||||
/ /___/ / /_/ / / / __/ /___/ / / / /_/ / /_
|
||||
/_____/_/_.___/_/ \___/\____/_/ /_/\__,_/\__/
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
__ _ __ __ __ __ ___
|
||||
/ / (_) /____ / / / / / |/ /
|
||||
/ / / / __/ _ \/ / / / / /|_/ /
|
||||
/ /___/ / /_/ __/ /___/ /___/ / / /
|
||||
/_____/_/\__/\___/_____/_____/_/ /_/
|
||||
|
||||
6
ct/headers/lychee
Normal file
6
ct/headers/lychee
Normal file
@@ -0,0 +1,6 @@
|
||||
__ __
|
||||
/ / __ _______/ /_ ___ ___
|
||||
/ / / / / / ___/ __ \/ _ \/ _ \
|
||||
/ /___/ /_/ / /__/ / / / __/ __/
|
||||
/_____/\__, /\___/_/ /_/\___/\___/
|
||||
/____/
|
||||
6
ct/headers/matomo
Normal file
6
ct/headers/matomo
Normal file
@@ -0,0 +1,6 @@
|
||||
__ ___ __
|
||||
/ |/ /___ _/ /_____ ____ ___ ____
|
||||
/ /|_/ / __ `/ __/ __ \/ __ `__ \/ __ \
|
||||
/ / / / /_/ / /_/ /_/ / / / / / / /_/ /
|
||||
/_/ /_/\__,_/\__/\____/_/ /_/ /_/\____/
|
||||
|
||||
6
ct/headers/shlink
Normal file
6
ct/headers/shlink
Normal file
@@ -0,0 +1,6 @@
|
||||
_____ __ ___ __
|
||||
/ ___// /_ / (_)___ / /__
|
||||
\__ \/ __ \/ / / __ \/ //_/
|
||||
___/ / / / / / / / / / ,<
|
||||
/____/_/ /_/_/_/_/ /_/_/|_|
|
||||
|
||||
6
ct/headers/solidtime
Normal file
6
ct/headers/solidtime
Normal file
@@ -0,0 +1,6 @@
|
||||
_____ ___ _________
|
||||
/ ___/____ / (_)___/ /_ __(_)___ ___ ___
|
||||
\__ \/ __ \/ / / __ / / / / / __ `__ \/ _ \
|
||||
___/ / /_/ / / / /_/ / / / / / / / / / / __/
|
||||
/____/\____/_/_/\__,_/ /_/ /_/_/ /_/ /_/\___/
|
||||
|
||||
6
ct/headers/storyteller
Normal file
6
ct/headers/storyteller
Normal file
@@ -0,0 +1,6 @@
|
||||
_____ __ __ ____
|
||||
/ ___// /_____ _______ __/ /____ / / /__ _____
|
||||
\__ \/ __/ __ \/ ___/ / / / __/ _ \/ / / _ \/ ___/
|
||||
___/ / /_/ /_/ / / / /_/ / /_/ __/ / / __/ /
|
||||
/____/\__/\____/_/ \__, /\__/\___/_/_/\___/_/
|
||||
/____/
|
||||
@@ -54,6 +54,7 @@ function update_script() {
|
||||
msg_info "Updating Homepage (Patience)"
|
||||
RELEASE=$(get_latest_github_release "gethomepage/homepage")
|
||||
cd /opt/homepage
|
||||
echo 'onlyBuiltDependencies=*' >> .npmrc
|
||||
$STD pnpm install
|
||||
$STD pnpm update --no-save caniuse-lite
|
||||
export NEXT_PUBLIC_VERSION="v$RELEASE"
|
||||
|
||||
64
ct/hoodik.sh
Normal file
64
ct/hoodik.sh
Normal file
@@ -0,0 +1,64 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/hudikhq/hoodik
|
||||
|
||||
APP="Hoodik"
|
||||
var_tags="${var_tags:-cloud;storage}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-5}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -f /opt/hoodik/hoodik ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "hoodik" "hudikhq/hoodik"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop hoodik
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Backing up Configuration"
|
||||
cp /opt/hoodik/.env /opt/hoodik.env.bak
|
||||
msg_ok "Backed up Configuration"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "hoodik" "hudikhq/hoodik" "prebuild" "latest" "/opt/hoodik" "*x86_64.tar.gz"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
cp /opt/hoodik.env.bak /opt/hoodik/.env
|
||||
rm -f /opt/hoodik.env.bak
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start hoodik
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:5443/auth/register${CL}"
|
||||
@@ -59,6 +59,7 @@ function update_script() {
|
||||
if command -v corepack >/dev/null; then
|
||||
$STD corepack disable
|
||||
fi
|
||||
sed -i "s/^SERVER_VERSION=.*$/SERVER_VERSION=${CHECK_UPDATE_RELEASE#v}/" /etc/karakeep/karakeep.env
|
||||
MODULE_VERSION="$(jq -r '.packageManager | split("@")[1]' /opt/karakeep/package.json)"
|
||||
NODE_VERSION="24" NODE_MODULE="pnpm@${MODULE_VERSION}" setup_nodejs
|
||||
setup_meilisearch
|
||||
@@ -83,7 +84,6 @@ function update_script() {
|
||||
cd /opt/karakeep/packages/db
|
||||
$STD pnpm migrate
|
||||
$STD pnpm store prune
|
||||
sed -i "s/^SERVER_VERSION=.*$/SERVER_VERSION=${CHECK_UPDATE_RELEASE#v}/" /etc/karakeep/karakeep.env
|
||||
msg_ok "Updated Karakeep"
|
||||
|
||||
msg_info "Starting Services"
|
||||
|
||||
101
ct/librechat.sh
Normal file
101
ct/librechat.sh
Normal file
@@ -0,0 +1,101 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/danny-avila/LibreChat
|
||||
|
||||
APP="LibreChat"
|
||||
var_tags="${var_tags:-ai;chat}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-6144}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/librechat ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_tag "librechat" "danny-avila/LibreChat" "v"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop librechat rag-api
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Backing up Configuration"
|
||||
cp /opt/librechat/.env /opt/librechat.env.bak
|
||||
msg_ok "Backed up Configuration"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_tag "librechat" "danny-avila/LibreChat"
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
cd /opt/librechat
|
||||
$STD npm ci
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
msg_info "Building Frontend"
|
||||
$STD npm run frontend
|
||||
$STD npm prune --production
|
||||
$STD npm cache clean --force
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
cp /opt/librechat.env.bak /opt/librechat/.env
|
||||
rm -f /opt/librechat.env.bak
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start rag-api librechat
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated LibreChat Successfully!"
|
||||
fi
|
||||
|
||||
if check_for_gh_release "rag-api" "danny-avila/rag_api"; then
|
||||
msg_info "Stopping RAG API"
|
||||
systemctl stop rag-api
|
||||
msg_ok "Stopped RAG API"
|
||||
|
||||
msg_info "Backing up RAG API Configuration"
|
||||
cp /opt/rag-api/.env /opt/rag-api.env.bak
|
||||
msg_ok "Backed up RAG API Configuration"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "rag-api" "danny-avila/rag_api" "tarball"
|
||||
|
||||
msg_info "Updating RAG API Dependencies"
|
||||
cd /opt/rag-api
|
||||
$STD .venv/bin/pip install -r requirements.lite.txt
|
||||
msg_ok "Updated RAG API Dependencies"
|
||||
|
||||
msg_info "Restoring RAG API Configuration"
|
||||
cp /opt/rag-api.env.bak /opt/rag-api/.env
|
||||
rm -f /opt/rag-api.env.bak
|
||||
msg_ok "Restored RAG API Configuration"
|
||||
|
||||
msg_info "Starting RAG API"
|
||||
systemctl start rag-api
|
||||
msg_ok "Started RAG API"
|
||||
msg_ok "Updated RAG API Successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3080${CL}"
|
||||
@@ -1,67 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: stout01
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/BerriAI/litellm
|
||||
|
||||
APP="LiteLLM"
|
||||
var_tags="${var_tags:-ai;interface}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -f /etc/systemd/system/litellm.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop litellm
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
VENV_PATH="/opt/litellm/.venv"
|
||||
PYTHON_VERSION="3.13" USE_UVX="YES" setup_uv
|
||||
|
||||
msg_info "Updating LiteLLM"
|
||||
$STD "$VENV_PATH/bin/python" -m pip install --upgrade litellm[proxy] prisma
|
||||
$STD "$VENV_PATH/bin/prisma" generate
|
||||
msg_ok "LiteLLM updated"
|
||||
|
||||
msg_info "Updating DB Schema"
|
||||
$STD /opt/litellm/.venv/bin/litellm --config /opt/litellm/litellm.yaml --use_prisma_db_push --skip_server_startup
|
||||
msg_ok "DB Schema Updated"
|
||||
|
||||
msg_info "Updating Service"
|
||||
sed -i 's|ExecStart=uv --directory=/opt/litellm run litellm|ExecStart=/opt/litellm/.venv/bin/litellm|' /etc/systemd/system/litellm.service
|
||||
systemctl daemon-reload
|
||||
msg_ok "Updated Service"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start litellm
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:4000${CL}"
|
||||
73
ct/lychee.sh
Normal file
73
ct/lychee.sh
Normal file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/LycheeOrg/Lychee
|
||||
|
||||
APP="Lychee"
|
||||
var_tags="${var_tags:-media;photos;gallery}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-8}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/lychee ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "lychee" "LycheeOrg/Lychee"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop caddy
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp /opt/lychee/.env /opt/lychee.env.bak
|
||||
cp -r /opt/lychee/storage /opt/lychee_storage_backup
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "lychee" "LycheeOrg/Lychee" "prebuild" "latest" "/opt/lychee" "Lychee.zip"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
cp /opt/lychee.env.bak /opt/lychee/.env
|
||||
rm -f /opt/lychee.env.bak
|
||||
cp -r /opt/lychee_storage_backup/. /opt/lychee/storage
|
||||
rm -rf /opt/lychee_storage_backup
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Updating Application"
|
||||
cd /opt/lychee
|
||||
$STD php artisan migrate --force
|
||||
$STD php artisan optimize:clear
|
||||
chmod -R 775 /opt/lychee/storage /opt/lychee/bootstrap/cache
|
||||
msg_ok "Updated Application"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start caddy
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
|
||||
75
ct/matomo.sh
Normal file
75
ct/matomo.sh
Normal file
@@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://matomo.org/
|
||||
|
||||
APP="Matomo"
|
||||
var_tags="${var_tags:-analytics;tracking;privacy}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-16}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/matomo ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "matomo" "matomo-org/matomo"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop caddy
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
[[ -f /opt/matomo/config/config.ini.php ]] && cp /opt/matomo/config/config.ini.php /opt/matomo_config.bak
|
||||
[[ -d /opt/matomo/misc/user ]] && cp -r /opt/matomo/misc/user /opt/matomo_user_backup
|
||||
[[ -f /root/matomo.creds ]] && cp /root/matomo.creds /opt/matomo_db_creds.bak
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "matomo" "matomo-org/matomo" "prebuild" "latest" "/opt/matomo" "matomo-*.zip"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
if [[ -f /opt/matomo_config.bak ]]; then
|
||||
mkdir -p /opt/matomo/config
|
||||
cp /opt/matomo_config.bak /opt/matomo/config/config.ini.php
|
||||
fi
|
||||
if [[ -d /opt/matomo_user_backup ]]; then
|
||||
mkdir -p /opt/matomo/misc/user
|
||||
cp -r /opt/matomo_user_backup/. /opt/matomo/misc/user
|
||||
fi
|
||||
[[ -f /opt/matomo_db_creds.bak ]] && cp /opt/matomo_db_creds.bak /root/matomo.creds
|
||||
rm -f /opt/matomo_config.bak /opt/matomo_db_creds.bak
|
||||
rm -rf /opt/matomo_user_backup
|
||||
chown -R www-data:www-data /opt/matomo
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start caddy
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
|
||||
@@ -81,11 +81,7 @@ STARTEOF
|
||||
cp -r /opt/mealie/frontend/dist/* /opt/mealie/mealie/frontend/
|
||||
msg_ok "Copied Frontend"
|
||||
|
||||
msg_info "Updating NLTK Data"
|
||||
mkdir -p /nltk_data/
|
||||
cd /opt/mealie
|
||||
$STD uv run python -m nltk.downloader -d /nltk_data averaged_perceptron_tagger_eng
|
||||
msg_ok "Updated NLTK Data"
|
||||
setup_nltk "averaged_perceptron_tagger_eng" "/nltk_data"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start mealie
|
||||
|
||||
@@ -25,7 +25,7 @@ function update_script() {
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -f /etc/netbird/config.json ]]; then
|
||||
if [[ ! -d /var/lib/netbird/ ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
@@ -28,7 +28,6 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
|
||||
if command -v node &>/dev/null; then
|
||||
CURRENT_NODE_VERSION=$(node --version | cut -d'v' -f2 | cut -d'.' -f1)
|
||||
if [[ "$CURRENT_NODE_VERSION" != "22" ]]; then
|
||||
@@ -48,7 +47,7 @@ function update_script() {
|
||||
msg_info "Migrating from packaged OpenResty to source"
|
||||
rm -f /etc/apt/trusted.gpg.d/openresty-archive-keyring.gpg /etc/apt/trusted.gpg.d/openresty.gpg
|
||||
rm -f /etc/apt/sources.list.d/openresty.list /etc/apt/sources.list.d/openresty.sources
|
||||
$STD apt remove -y openresty
|
||||
$STD apt purge -y openresty
|
||||
$STD apt autoremove -y
|
||||
rm -f ~/.openresty
|
||||
msg_ok "Migrated from packaged OpenResty to source"
|
||||
@@ -93,6 +92,11 @@ ExecStart=/usr/local/openresty/nginx/sbin/nginx -g 'daemon off;'
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
if [ -f /opt/nginxproxymanager/docker/rootfs/etc/nginx/nginx.conf ]; then
|
||||
cp /opt/nginxproxymanager/docker/rootfs/etc/nginx/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
|
||||
sed -i 's+^daemon+#daemon+g' /usr/local/openresty/nginx/conf/nginx.conf
|
||||
sed -i 's+include conf.d+include /etc/nginx/conf.d+g' /usr/local/openresty/nginx/conf/nginx.conf
|
||||
fi
|
||||
sed -i 's/user npm/user root/g; s/^pid/#pid/g' /usr/local/openresty/nginx/conf/nginx.conf
|
||||
systemctl daemon-reload
|
||||
systemctl unmask openresty 2>/dev/null || true
|
||||
|
||||
@@ -6,7 +6,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
|
||||
# Source: https://pangolin.net/ | Github: https://github.com/fosrl/pangolin
|
||||
|
||||
APP="Pangolin"
|
||||
PANGOLIN_VERSION="${PANGOLIN_VERSION:-1.18.2}"
|
||||
PANGOLIN_VERSION="${PANGOLIN_VERSION:-1.18.3}"
|
||||
var_tags="${var_tags:-proxy}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
|
||||
@@ -164,13 +164,7 @@ function update_script() {
|
||||
fi
|
||||
fi
|
||||
|
||||
msg_info "Updating NLTK Data"
|
||||
cd /opt/paperless
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data snowball_data
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data stopwords
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt_tab ||
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt
|
||||
msg_ok "Updated NLTK Data"
|
||||
setup_nltk "snowball_data stopwords punkt_tab" "/usr/share/nltk_data"
|
||||
|
||||
msg_info "Starting all Paperless-ngx Services"
|
||||
systemctl start paperless-consumer paperless-webserver paperless-scheduler paperless-task-queue
|
||||
|
||||
@@ -45,7 +45,9 @@ function update_script() {
|
||||
$STD php artisan down
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
mkdir -p /opt/backup
|
||||
cp -a /opt/pelican-panel/.env /opt/backup
|
||||
mkdir -p /opt/backup/storage/app/
|
||||
cp -a /opt/pelican-panel/storage/app/public /opt/backup/storage/app/
|
||||
|
||||
SQLITE_INSTALL=$(ls /opt/pelican-panel/database/*.sqlite 1>/dev/null 2>&1 && echo "true" || echo "false")
|
||||
|
||||
@@ -48,8 +48,6 @@ function update_script() {
|
||||
msg_ok "Services started"
|
||||
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
85
ct/shlink.sh
Normal file
85
ct/shlink.sh
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://shlink.io/
|
||||
|
||||
APP="Shlink"
|
||||
var_tags="${var_tags:-url-shortener;analytics;php}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/shlink ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "shlink" "shlinkio/shlink"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop shlink
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp /opt/shlink/.env /opt/shlink.env.bak
|
||||
cp -r /opt/shlink/data /opt/shlink_data_backup
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "shlink" "shlinkio/shlink" "prebuild" "latest" "/opt/shlink" "shlink*_php8.5_dist.zip"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
cp /opt/shlink.env.bak /opt/shlink/.env
|
||||
rm -f /opt/shlink.env.bak
|
||||
cp -r /opt/shlink_data_backup/. /opt/shlink/data
|
||||
rm -rf /opt/shlink_data_backup
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Updating Application"
|
||||
cd /opt/shlink
|
||||
$STD php ./vendor/bin/rr get --no-interaction --location bin/
|
||||
chmod +x bin/rr
|
||||
set -a
|
||||
source /opt/shlink/.env
|
||||
set +a
|
||||
$STD php vendor/bin/shlink-installer init --no-interaction --clear-db-cache --skip-download-geolite
|
||||
msg_ok "Updated Application"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start shlink
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
|
||||
if [[ -d /opt/shlink-web-client ]]; then
|
||||
if check_for_gh_release "shlink-web-client" "shlinkio/shlink-web-client"; then
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "shlink-web-client" "shlinkio/shlink-web-client" "prebuild" "latest" "/opt/shlink-web-client" "shlink-web-client_*_dist.zip"
|
||||
msg_ok "Updated Web Client"
|
||||
fi
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access Shlink Web Client using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
|
||||
echo -e "${INFO}${YW} Shlink HTTP API:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
|
||||
77
ct/solidtime.sh
Normal file
77
ct/solidtime.sh
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://www.solidtime.io/
|
||||
|
||||
APP="SolidTime"
|
||||
var_tags="${var_tags:-time-tracking;productivity;business}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-8}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/solidtime ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "solidtime" "solidtime-io/solidtime"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop caddy
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp /opt/solidtime/.env /opt/solidtime.env.bak
|
||||
cp -r /opt/solidtime/storage /opt/solidtime_storage_backup
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "solidtime" "solidtime-io/solidtime" "tarball"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
cp /opt/solidtime.env.bak /opt/solidtime/.env
|
||||
rm -f /opt/solidtime.env.bak
|
||||
cp -r /opt/solidtime_storage_backup/. /opt/solidtime/storage
|
||||
rm -rf /opt/solidtime_storage_backup
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Updating Application"
|
||||
cd /opt/solidtime
|
||||
$STD composer install --no-dev --optimize-autoloader
|
||||
$STD npm install
|
||||
$STD npm run build
|
||||
$STD php artisan migrate --force
|
||||
$STD php artisan optimize:clear
|
||||
chown -R www-data:www-data /opt/solidtime
|
||||
msg_ok "Updated Application"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start caddy
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
|
||||
echo -e "${INFO}${YW}HTTPS is not enabled by default (use domain + reverse proxy/TLS if needed).${CL}"
|
||||
19
ct/sonarr.sh
19
ct/sonarr.sh
@@ -23,21 +23,24 @@ function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /var/lib/sonarr/ ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop sonarr
|
||||
msg_ok "Stopped Service"
|
||||
if check_for_gh_release "Sonarr" "Sonarr/Sonarr"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop sonarr
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Sonarr" "Sonarr/Sonarr" "prebuild" "latest" "/opt/Sonarr" "Sonarr.main.*.linux-x64.tar.gz"
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Sonarr" "Sonarr/Sonarr" "prebuild" "latest" "/opt/Sonarr" "Sonarr.main.*.linux-x64.tar.gz"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start sonarr
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
msg_info "Starting Service"
|
||||
systemctl start sonarr
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
|
||||
85
ct/storyteller.sh
Normal file
85
ct/storyteller.sh
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://gitlab.com/storyteller-platform/storyteller
|
||||
|
||||
APP="Storyteller"
|
||||
var_tags="${var_tags:-media;ebook;audiobook}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-10240}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/storyteller ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gl_release "storyteller" "storyteller-platform/storyteller"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop storyteller
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp /opt/storyteller/.env /opt/storyteller_env.bak
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gl_release "storyteller" "storyteller-platform/storyteller" "tarball" "latest" "/opt/storyteller"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
mv /opt/storyteller_env.bak /opt/storyteller/.env
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
msg_info "Rebuilding Storyteller"
|
||||
cd /opt/storyteller
|
||||
export NODE_OPTIONS="--max-old-space-size=4096"
|
||||
$STD yarn install --network-timeout 600000
|
||||
$STD gcc -g -fPIC -rdynamic -shared web/sqlite/uuid.c -o web/sqlite/uuid.c.so
|
||||
export CI=1
|
||||
export NODE_ENV=production
|
||||
export NEXT_TELEMETRY_DISABLED=1
|
||||
export SQLITE_NATIVE_BINDING=/opt/storyteller/node_modules/better-sqlite3/build/Release/better_sqlite3.node
|
||||
$STD yarn workspaces foreach -Rpt --from @storyteller-platform/web --exclude @storyteller-platform/eslint run build
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/.next/static
|
||||
cp -rT /opt/storyteller/web/.next/static /opt/storyteller/web/.next/standalone/web/.next/static
|
||||
if [[ -d /opt/storyteller/web/public ]]; then
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/public
|
||||
cp -rT /opt/storyteller/web/public /opt/storyteller/web/.next/standalone/web/public
|
||||
fi
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/migrations
|
||||
cp -rT /opt/storyteller/web/migrations /opt/storyteller/web/.next/standalone/web/migrations
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/sqlite
|
||||
cp -rT /opt/storyteller/web/sqlite /opt/storyteller/web/.next/standalone/web/sqlite
|
||||
ln -sf /opt/storyteller/.env /opt/storyteller/web/.next/standalone/web/.env
|
||||
msg_ok "Rebuilt Storyteller"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start storyteller
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8001${CL}"
|
||||
@@ -199,7 +199,9 @@ EOF
|
||||
cp /etc/nginx/nginx.conf /etc/nginx/nginx.conf.bak
|
||||
curl -fsSL "https://raw.githubusercontent.com/Termix-SSH/Termix/main/docker/nginx.conf" -o /etc/nginx/nginx.conf
|
||||
sed -i '/^master_process/d' /etc/nginx/nginx.conf
|
||||
sed -i '/^pid \/app\/nginx/d' /etc/nginx/nginx.conf
|
||||
sed -i 's|pid /tmp/nginx/nginx.pid;|pid /run/nginx.pid;|' /etc/nginx/nginx.conf
|
||||
sed -i 's|error_log /tmp/nginx/error.log|error_log /var/log/nginx/error.log|' /etc/nginx/nginx.conf
|
||||
sed -i 's|access_log /tmp/nginx/access.log|access_log /var/log/nginx/access.log|' /etc/nginx/nginx.conf
|
||||
sed -i 's|/app/html|/opt/termix/html|g' /etc/nginx/nginx.conf
|
||||
sed -i 's|/app/nginx|/opt/termix/nginx|g' /etc/nginx/nginx.conf
|
||||
sed -i 's|listen ${PORT};|listen 80;|g' /etc/nginx/nginx.conf
|
||||
|
||||
@@ -35,6 +35,7 @@ function update_script() {
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
fetch_and_deploy_gh_release "threadfin-app" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
|
||||
mv /opt/threadfin/threadfin-app /opt/threadfin/threadfin
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start threadfin
|
||||
|
||||
@@ -28,7 +28,7 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "web-check" "CrazyWolf13/web-check"; then
|
||||
if check_for_gh_release "web-check" "Lissy93/web-check"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop web-check
|
||||
msg_ok "Stopped Service"
|
||||
@@ -38,7 +38,7 @@ function update_script() {
|
||||
msg_ok "Created backup"
|
||||
|
||||
NODE_VERSION="22" NODE_MODULE="yarn" setup_nodejs
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "web-check" "CrazyWolf13/web-check" "tarball"
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "web-check" "Lissy93/web-check" "tarball"
|
||||
|
||||
msg_info "Restoring backup"
|
||||
mv /opt/.env /opt/web-check
|
||||
|
||||
@@ -32,7 +32,7 @@ for v in 12 13 14 15 16 18; do
|
||||
done
|
||||
# Install MongoDB Database Tools via direct .deb (no APT repo for Debian 13)
|
||||
[[ "$(get_os_info id)" == "ubuntu" ]] && MONGO_DIST="ubuntu2204" || MONGO_DIST="debian12"
|
||||
MONGO_VERSION=$(get_latest_gh_tag "mongodb/mongo-tools" "100." || echo "100.14.1")
|
||||
MONGO_VERSION=$(get_latest_gh_tag "mongodb/mongo-tools" "100." || echo "100.16.1")
|
||||
fetch_and_deploy_from_url "https://fastdl.mongodb.org/tools/db/mongodb-database-tools-${MONGO_DIST}-x86_64-${MONGO_VERSION}.deb" ""
|
||||
mkdir -p /usr/local/mongodb-database-tools/bin
|
||||
[[ -f /usr/bin/mongodump ]] && ln -sf /usr/bin/mongodump /usr/local/mongodb-database-tools/bin/mongodump
|
||||
@@ -52,9 +52,12 @@ msg_ok "Installed Database Clients"
|
||||
fetch_and_deploy_gh_release "databasus" "databasus/databasus" "tarball" "latest" "/opt/databasus"
|
||||
|
||||
msg_info "Building Databasus (Patience)"
|
||||
export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
|
||||
cd /opt/databasus/frontend
|
||||
$STD npm ci
|
||||
$STD npm run build
|
||||
$STD corepack enable
|
||||
$STD corepack prepare pnpm@latest --activate
|
||||
$STD pnpm install --frozen-lockfile
|
||||
$STD pnpm run build
|
||||
cd /opt/databasus/backend
|
||||
$STD go mod tidy
|
||||
$STD go mod download
|
||||
@@ -76,7 +79,7 @@ ENCRYPTION_KEY=$(openssl rand -hex 32)
|
||||
# Install goose for migrations
|
||||
$STD go install github.com/pressly/goose/v3/cmd/goose@latest
|
||||
ln -sf /root/go/bin/goose /usr/local/bin/goose
|
||||
cat <<EOF >/opt/databasus/.env
|
||||
cat <<EOF >/.env
|
||||
# Environment
|
||||
ENV_MODE=production
|
||||
|
||||
@@ -106,8 +109,7 @@ DATA_DIR=/databasus-data/data
|
||||
BACKUP_DIR=/databasus-data/backups
|
||||
LOG_DIR=/databasus-data/logs
|
||||
EOF
|
||||
chown postgres:postgres /opt/databasus/.env
|
||||
chmod 600 /opt/databasus/.env
|
||||
chmod 600 /.env
|
||||
msg_ok "Configured Databasus"
|
||||
|
||||
msg_info "Configuring Valkey"
|
||||
@@ -145,7 +147,7 @@ Requires=postgresql.service valkey.service
|
||||
[Service]
|
||||
Type=simple
|
||||
WorkingDirectory=/opt/databasus
|
||||
EnvironmentFile=/opt/databasus/.env
|
||||
EnvironmentFile=/.env
|
||||
ExecStart=/opt/databasus/databasus
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
@@ -69,6 +69,13 @@ DB_ROOT_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
|
||||
mysql -u root -e "ALTER USER 'root'@'localhost' IDENTIFIED BY '${DB_ROOT_PASS}'; FLUSH PRIVILEGES;"
|
||||
$STD sudo -u frappe bash -c 'export PATH="$HOME/.local/bin:$PATH"; cd /opt && bench init --frappe-branch version-15 frappe-bench'
|
||||
$STD sudo -u frappe bash -c 'export PATH="$HOME/.local/bin:$PATH"; cd /opt/frappe-bench && bench get-app erpnext --branch version-15'
|
||||
|
||||
msg_info "Starting Redis Services for Site Setup"
|
||||
$STD sudo -u frappe bash -c 'redis-server /opt/frappe-bench/config/redis_queue.conf --daemonize yes'
|
||||
$STD sudo -u frappe bash -c 'redis-server /opt/frappe-bench/config/redis_cache.conf --daemonize yes'
|
||||
sleep 3
|
||||
msg_ok "Started Redis Services for Site Setup"
|
||||
|
||||
$STD sudo -u frappe bash -c "export PATH=\"\$HOME/.local/bin:\$PATH\"; cd /opt/frappe-bench && bench new-site site1.local --db-root-username root --db-root-password \"$DB_ROOT_PASS\" --admin-password \"$ADMIN_PASS\" --install-app erpnext --set-default"
|
||||
msg_ok "Initialized Frappe Bench"
|
||||
|
||||
|
||||
@@ -13,10 +13,10 @@ setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
NODE_VERSION="20" setup_nodejs
|
||||
NODE_VERSION="20" NODE_MODULE="pnpm" setup_nodejs
|
||||
|
||||
msg_info "Installing FlowiseAI (Patience)"
|
||||
$STD npm install -g flowise \
|
||||
$STD pnpm add -g flowise \
|
||||
@opentelemetry/exporter-trace-otlp-grpc \
|
||||
@opentelemetry/exporter-trace-otlp-proto \
|
||||
@opentelemetry/sdk-trace-node \
|
||||
@@ -33,7 +33,7 @@ After=network.target
|
||||
|
||||
[Service]
|
||||
EnvironmentFile=/opt/flowiseai/.env
|
||||
ExecStart=npx flowise start
|
||||
ExecStart=flowise start
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
|
||||
@@ -25,6 +25,7 @@ msg_info "Installing Homepage (Patience)"
|
||||
mkdir -p /opt/homepage/config
|
||||
cd /opt/homepage
|
||||
cp /opt/homepage/src/skeleton/* /opt/homepage/config
|
||||
echo 'onlyBuiltDependencies=*' >> .npmrc
|
||||
$STD pnpm install
|
||||
export NEXT_PUBLIC_VERSION="v$RELEASE"
|
||||
export NEXT_PUBLIC_REVISION="source"
|
||||
|
||||
58
install/hoodik-install.sh
Normal file
58
install/hoodik-install.sh
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/hudikhq/hoodik
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
fetch_and_deploy_gh_release "hoodik" "hudikhq/hoodik" "prebuild" "latest" "/opt/hoodik" "*x86_64.tar.gz"
|
||||
|
||||
msg_info "Configuring Hoodik"
|
||||
mkdir -p /opt/hoodik_data
|
||||
JWT_SECRET=$(openssl rand -base64 32 | tr -dc 'a-zA-Z0-9' | cut -c1-32)
|
||||
cat <<EOF >/opt/hoodik/.env
|
||||
DATA_DIR=/opt/hoodik_data
|
||||
HTTP_PORT=5443
|
||||
HTTP_ADDRESS=0.0.0.0
|
||||
JWT_SECRET=${JWT_SECRET}
|
||||
APP_URL=http://${LOCAL_IP}:5443
|
||||
SSL_DISABLED=true
|
||||
COOKIE_SECURE=false
|
||||
COOKIE_HTTP_ONLY=false
|
||||
MAILER_TYPE=none
|
||||
RUST_LOG=hoodik=info,error=info
|
||||
EOF
|
||||
msg_ok "Configured Hoodik"
|
||||
|
||||
msg_info "Creating Service"
|
||||
cat <<EOF >/etc/systemd/system/hoodik.service
|
||||
[Unit]
|
||||
Description=Hoodik - Encrypted File Storage
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/hoodik_data
|
||||
EnvironmentFile=/opt/hoodik/.env
|
||||
ExecStart=/opt/hoodik/hoodik
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable -q --now hoodik
|
||||
msg_ok "Created Service"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
@@ -30,6 +30,7 @@ cat <<EOF >/etc/inspircd/inspircd.conf
|
||||
email="irc@&networkDomain;">
|
||||
<bind address="" port="6667" type="clients">
|
||||
EOF
|
||||
systemctl enable -q --now inspircd
|
||||
msg_ok "Installed InspIRCd"
|
||||
|
||||
motd_ssh
|
||||
|
||||
@@ -90,6 +90,11 @@ MAIL_PORT=2525
|
||||
MAIL_FROM_ADDRESS="investbrain@${LOCAL_IP}"
|
||||
|
||||
VITE_APP_NAME=Investbrain
|
||||
|
||||
# Reverse Proxy Support (uncomment and set APP_URL/ASSET_URL to your domain when using a reverse proxy)
|
||||
# APP_URL=https://your-domain.com
|
||||
# ASSET_URL=https://your-domain.com
|
||||
# TRUSTED_PROXIES=*
|
||||
EOF
|
||||
export COMPOSER_ALLOW_SUPERUSER=1
|
||||
$STD /usr/local/bin/composer install --no-interaction --no-dev --optimize-autoloader
|
||||
|
||||
@@ -17,7 +17,7 @@ NODE_VERSION="22" NODE_MODULE="yarn" setup_nodejs
|
||||
fetch_and_deploy_gh_release "jotty" "fccview/jotty" "prebuild" "latest" "/opt/jotty" "jotty_*_prebuild.tar.gz"
|
||||
|
||||
msg_info "Setup jotty"
|
||||
mkdir -p data/{users,checklists,notes}
|
||||
mkdir -p /opt/jotty/data/{users,checklists,notes}
|
||||
|
||||
cat <<EOF >/opt/jotty/.env
|
||||
NODE_ENV=production
|
||||
|
||||
@@ -47,8 +47,7 @@ msg_info "Setting up KitchenOwl"
|
||||
cd /opt/kitchenowl/backend
|
||||
$STD uv sync --no-dev
|
||||
sed -i 's/default=True/default=False/' /opt/kitchenowl/backend/wsgi.py
|
||||
mkdir -p /nltk_data
|
||||
$STD uv run python -m nltk.downloader -d /nltk_data averaged_perceptron_tagger_eng
|
||||
setup_nltk "averaged_perceptron_tagger_eng" "/nltk_data"
|
||||
JWT_SECRET=$(openssl rand -hex 32)
|
||||
mkdir -p /opt/kitchenowl/data
|
||||
cat <<EOF >/opt/kitchenowl/kitchenowl.env
|
||||
|
||||
139
install/librechat-install.sh
Normal file
139
install/librechat-install.sh
Normal file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/danny-avila/LibreChat
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
MONGO_VERSION="8.0" setup_mongodb
|
||||
setup_meilisearch
|
||||
PG_VERSION="17" PG_MODULES="pgvector" setup_postgresql
|
||||
PG_DB_NAME="ragapi" PG_DB_USER="ragapi" PG_DB_EXTENSIONS="vector" setup_postgresql_db
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
UV_PYTHON="3.12" setup_uv
|
||||
|
||||
fetch_and_deploy_gh_tag "librechat" "danny-avila/LibreChat"
|
||||
fetch_and_deploy_gh_release "rag-api" "danny-avila/rag_api" "tarball"
|
||||
|
||||
msg_info "Installing LibreChat Dependencies"
|
||||
cd /opt/librechat
|
||||
$STD npm ci
|
||||
msg_ok "Installed LibreChat Dependencies"
|
||||
|
||||
msg_info "Building Frontend"
|
||||
$STD npm run frontend
|
||||
$STD npm prune --production
|
||||
$STD npm cache clean --force
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Installing RAG API Dependencies"
|
||||
cd /opt/rag-api
|
||||
$STD uv venv --python 3.12 --seed .venv
|
||||
$STD .venv/bin/pip install -r requirements.lite.txt
|
||||
mkdir -p /opt/rag-api/uploads
|
||||
msg_ok "Installed RAG API Dependencies"
|
||||
|
||||
msg_info "Configuring LibreChat"
|
||||
JWT_SECRET=$(openssl rand -hex 32)
|
||||
JWT_REFRESH_SECRET=$(openssl rand -hex 32)
|
||||
CREDS_KEY=$(openssl rand -hex 32)
|
||||
CREDS_IV=$(openssl rand -hex 16)
|
||||
cat <<EOF >/opt/librechat/.env
|
||||
HOST=0.0.0.0
|
||||
PORT=3080
|
||||
MONGO_URI=mongodb://127.0.0.1:27017/LibreChat
|
||||
DOMAIN_CLIENT=http://${LOCAL_IP}:3080
|
||||
DOMAIN_SERVER=http://${LOCAL_IP}:3080
|
||||
NO_INDEX=true
|
||||
TRUST_PROXY=1
|
||||
JWT_SECRET=${JWT_SECRET}
|
||||
JWT_REFRESH_SECRET=${JWT_REFRESH_SECRET}
|
||||
SESSION_EXPIRY=1000 * 60 * 15
|
||||
REFRESH_TOKEN_EXPIRY=(1000 * 60 * 60 * 24) * 7
|
||||
CREDS_KEY=${CREDS_KEY}
|
||||
CREDS_IV=${CREDS_IV}
|
||||
ALLOW_EMAIL_LOGIN=true
|
||||
ALLOW_REGISTRATION=true
|
||||
ALLOW_SOCIAL_LOGIN=false
|
||||
ALLOW_SOCIAL_REGISTRATION=false
|
||||
ALLOW_PASSWORD_RESET=false
|
||||
ALLOW_UNVERIFIED_EMAIL_LOGIN=true
|
||||
SEARCH=true
|
||||
MEILI_NO_ANALYTICS=true
|
||||
MEILI_HOST=http://127.0.0.1:7700
|
||||
MEILI_MASTER_KEY=${MEILISEARCH_MASTER_KEY}
|
||||
RAG_PORT=8000
|
||||
RAG_API_URL=http://127.0.0.1:8000
|
||||
APP_TITLE=LibreChat
|
||||
ENDPOINTS=openAI,agents,assistants,anthropic,google
|
||||
# OPENAI_API_KEY=your-key-here
|
||||
# OPENAI_MODELS=
|
||||
# ANTHROPIC_API_KEY=your-key-here
|
||||
# GOOGLE_KEY=your-key-here
|
||||
EOF
|
||||
msg_ok "Configured LibreChat"
|
||||
|
||||
msg_info "Configuring RAG API"
|
||||
cat <<EOF >/opt/rag-api/.env
|
||||
VECTOR_DB_TYPE=pgvector
|
||||
DB_HOST=127.0.0.1
|
||||
DB_PORT=5432
|
||||
POSTGRES_DB=${PG_DB_NAME}
|
||||
POSTGRES_USER=${PG_DB_USER}
|
||||
POSTGRES_PASSWORD=${PG_DB_PASS}
|
||||
RAG_HOST=0.0.0.0
|
||||
RAG_PORT=8000
|
||||
JWT_SECRET=${JWT_SECRET}
|
||||
RAG_UPLOAD_DIR=/opt/rag-api/uploads/
|
||||
EOF
|
||||
msg_ok "Configured RAG API"
|
||||
|
||||
msg_info "Creating Services"
|
||||
cat <<EOF >/etc/systemd/system/librechat.service
|
||||
[Unit]
|
||||
Description=LibreChat
|
||||
After=network.target mongod.service meilisearch.service rag-api.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/librechat
|
||||
EnvironmentFile=/opt/librechat/.env
|
||||
ExecStart=/usr/bin/npm run backend
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
cat <<EOF >/etc/systemd/system/rag-api.service
|
||||
[Unit]
|
||||
Description=LibreChat RAG API
|
||||
After=network.target postgresql.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/rag-api
|
||||
EnvironmentFile=/opt/rag-api/.env
|
||||
ExecStart=/opt/rag-api/.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable -q --now rag-api librechat
|
||||
msg_ok "Created Services"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
@@ -1,65 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: stout01
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/BerriAI/litellm
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y \
|
||||
build-essential \
|
||||
python3-dev
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
PG_VERSION="17" setup_postgresql
|
||||
PG_DB_NAME="litellm_db" PG_DB_USER="litellm" setup_postgresql_db
|
||||
PYTHON_VERSION="3.13" USE_UVX="YES" setup_uv
|
||||
|
||||
msg_info "Setting up Virtual Environment"
|
||||
mkdir -p /opt/litellm
|
||||
cd /opt/litellm
|
||||
$STD uv venv --clear /opt/litellm/.venv
|
||||
$STD /opt/litellm/.venv/bin/python -m ensurepip --upgrade
|
||||
$STD /opt/litellm/.venv/bin/python -m pip install --upgrade pip
|
||||
$STD /opt/litellm/.venv/bin/python -m pip install litellm[proxy] prisma
|
||||
$STD /opt/litellm/.venv/bin/prisma generate
|
||||
msg_ok "Installed LiteLLM"
|
||||
|
||||
msg_info "Configuring LiteLLM"
|
||||
mkdir -p /opt
|
||||
cat <<EOF >/opt/litellm/litellm.yaml
|
||||
general_settings:
|
||||
master_key: sk-1234
|
||||
database_url: postgresql://$PG_DB_USER:$PG_DB_PASS@127.0.0.1:5432/$PG_DB_NAME
|
||||
store_model_in_db: true
|
||||
EOF
|
||||
$STD /opt/litellm/.venv/bin/litellm --config /opt/litellm/litellm.yaml --use_prisma_db_push --skip_server_startup
|
||||
msg_ok "Configured LiteLLM"
|
||||
|
||||
msg_info "Creating Service"
|
||||
cat <<EOF >/etc/systemd/system/litellm.service
|
||||
[Unit]
|
||||
Description=LiteLLM
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/opt/litellm/.venv/bin/litellm --config /opt/litellm/litellm.yaml
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable -q --now litellm
|
||||
msg_ok "Created Service"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
75
install/lychee-install.sh
Normal file
75
install/lychee-install.sh
Normal file
@@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/LycheeOrg/Lychee
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y \
|
||||
caddy \
|
||||
libimage-exiftool-perl \
|
||||
jpegoptim
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
PHP_VERSION="8.4" PHP_FPM="YES" PHP_MODULE="bcmath,ldap,exif,gd,intl,imagick,redis,zip,pdo_pgsql,pcntl" setup_php
|
||||
PG_VERSION="16" setup_postgresql
|
||||
PG_DB_NAME="lychee" PG_DB_USER="lychee" setup_postgresql_db
|
||||
setup_ffmpeg
|
||||
setup_imagemagick
|
||||
|
||||
fetch_and_deploy_gh_release "lychee" "LycheeOrg/Lychee" "prebuild" "latest" "/opt/lychee" "Lychee.zip"
|
||||
|
||||
msg_info "Configuring Application"
|
||||
cd /opt/lychee
|
||||
cp .env.example .env
|
||||
APP_KEY=$($STD php artisan key:generate --show)
|
||||
sed -i "s|^APP_KEY=.*|APP_KEY=${APP_KEY}|" .env
|
||||
sed -i "s|^APP_ENV=.*|APP_ENV=production|" .env
|
||||
sed -i "s|^APP_DEBUG=.*|APP_DEBUG=false|" .env
|
||||
sed -i "s|^APP_URL=.*|APP_URL=http://${LOCAL_IP}|" .env
|
||||
sed -i "s|^DB_CONNECTION=.*|DB_CONNECTION=pgsql|" .env
|
||||
sed -i "s|^DB_HOST=.*|DB_HOST=127.0.0.1|" .env
|
||||
sed -i "s|^DB_PORT=.*|DB_PORT=5432|" .env
|
||||
sed -i "s|^#\?DB_DATABASE=.*|DB_DATABASE=${PG_DB_NAME}|" .env
|
||||
sed -i "s|^DB_USERNAME=.*|DB_USERNAME=${PG_DB_USER}|" .env
|
||||
sed -i "s|^DB_PASSWORD=.*|DB_PASSWORD=${PG_DB_PASS}|" .env
|
||||
mkdir -p storage/framework/{cache,sessions,views} storage/logs bootstrap/cache public/dist public/uploads public/sym
|
||||
touch public/dist/user.css public/dist/custom.js
|
||||
chmod -R 775 storage bootstrap/cache public/dist public/uploads public/sym
|
||||
msg_ok "Configured Application"
|
||||
|
||||
msg_info "Running Database Migrations"
|
||||
cd /opt/lychee
|
||||
$STD php artisan migrate --force
|
||||
msg_ok "Ran Database Migrations"
|
||||
|
||||
chown -R www-data:www-data /opt/lychee
|
||||
|
||||
msg_info "Configuring Caddy"
|
||||
PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION . "." . PHP_MINOR_VERSION;')
|
||||
cat <<EOF >/etc/caddy/Caddyfile
|
||||
:80 {
|
||||
root * /opt/lychee/public
|
||||
php_fastcgi unix//run/php/php${PHP_VER}-fpm.sock
|
||||
file_server
|
||||
encode gzip
|
||||
}
|
||||
EOF
|
||||
usermod -aG www-data caddy
|
||||
msg_ok "Configured Caddy"
|
||||
|
||||
systemctl enable -q --now php${PHP_VER}-fpm
|
||||
systemctl restart caddy
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
66
install/matomo-install.sh
Normal file
66
install/matomo-install.sh
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://matomo.org/
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y caddy
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
mkdir -p /opt/matomo
|
||||
|
||||
PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULES="pdo_mysql,gd,mbstring,xml,curl,intl,zip,ldap" setup_php
|
||||
setup_mariadb
|
||||
MARIADB_DB_NAME="matomo" MARIADB_DB_USER="matomo" setup_mariadb_db
|
||||
|
||||
msg_info "Allowing Local TCP Database Access"
|
||||
$STD mariadb -u root -e "CREATE USER IF NOT EXISTS '$MARIADB_DB_USER'@'127.0.0.1' IDENTIFIED BY '$MARIADB_DB_PASS';"
|
||||
$STD mariadb -u root -e "ALTER USER '$MARIADB_DB_USER'@'127.0.0.1' IDENTIFIED BY '$MARIADB_DB_PASS';"
|
||||
$STD mariadb -u root -e "GRANT ALL ON \`$MARIADB_DB_NAME\`.* TO '$MARIADB_DB_USER'@'127.0.0.1';"
|
||||
$STD mariadb -u root -e "FLUSH PRIVILEGES;"
|
||||
msg_ok "Allowed Local TCP Database Access"
|
||||
|
||||
fetch_and_deploy_gh_release "matomo" "matomo-org/matomo" "prebuild" "latest" "/opt/matomo" "matomo-*.zip"
|
||||
|
||||
msg_info "Setting up Matomo"
|
||||
if [[ -d /opt/matomo/matomo ]]; then
|
||||
rm -rf /opt/matomo/tmp "/opt/matomo/How to install Matomo.html"
|
||||
find /opt/matomo/matomo -mindepth 1 -maxdepth 1 -exec mv -t /opt/matomo {} +
|
||||
rm -rf /opt/matomo/matomo
|
||||
fi
|
||||
mkdir -p /opt/matomo/tmp
|
||||
chown -R www-data:www-data /opt/matomo
|
||||
chmod -R 755 /opt/matomo/tmp
|
||||
msg_ok "Set up Matomo"
|
||||
|
||||
msg_info "Configuring Caddy"
|
||||
PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION . "." . PHP_MINOR_VERSION;')
|
||||
cat <<EOF >/etc/caddy/Caddyfile
|
||||
:80 {
|
||||
root * /opt/matomo
|
||||
@blocked path /config /config/* /tmp /tmp/* /.* /.*/*
|
||||
respond @blocked 403
|
||||
php_fastcgi unix//run/php/php${PHP_VER}-fpm.sock
|
||||
file_server
|
||||
encode gzip
|
||||
}
|
||||
EOF
|
||||
usermod -aG www-data caddy
|
||||
msg_ok "Configured Caddy"
|
||||
|
||||
systemctl enable -q --now php${PHP_VER}-fpm
|
||||
systemctl restart caddy
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
@@ -55,11 +55,7 @@ mkdir -p /opt/mealie/mealie/frontend
|
||||
cp -r /opt/mealie/frontend/dist/* /opt/mealie/mealie/frontend/
|
||||
msg_ok "Copied Frontend"
|
||||
|
||||
msg_info "Downloading NLTK Data"
|
||||
mkdir -p /nltk_data/
|
||||
cd /opt/mealie
|
||||
$STD uv run python -m nltk.downloader -d /nltk_data averaged_perceptron_tagger_eng
|
||||
msg_ok "Downloaded NLTK Data"
|
||||
setup_nltk "averaged_perceptron_tagger_eng" "/nltk_data"
|
||||
|
||||
msg_info "Writing Environment File"
|
||||
SECRET=$(openssl rand -hex 32)
|
||||
|
||||
@@ -22,7 +22,7 @@ $STD apt install -y \
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
PANGOLIN_VERSION="${PANGOLIN_VERSION:-1.18.2}"
|
||||
PANGOLIN_VERSION="${PANGOLIN_VERSION:-1.18.3}"
|
||||
fetch_and_deploy_gh_release "pangolin" "fosrl/pangolin" "tarball" "$PANGOLIN_VERSION"
|
||||
fetch_and_deploy_gh_release "gerbil" "fosrl/gerbil" "singlefile" "latest" "/usr/bin" "gerbil_linux_amd64"
|
||||
fetch_and_deploy_gh_release "traefik" "traefik/traefik" "prebuild" "latest" "/usr/bin" "traefik_v*_linux_amd64.tar.gz"
|
||||
|
||||
@@ -94,18 +94,12 @@ user.save()
|
||||
EOF
|
||||
msg_ok "Set up admin Paperless-ngx User & Password"
|
||||
|
||||
msg_info "Installing Natural Language Toolkit (Patience)"
|
||||
cd /opt/paperless
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data snowball_data
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data stopwords
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt_tab ||
|
||||
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt
|
||||
setup_nltk "snowball_data stopwords punkt_tab" "/usr/share/nltk_data"
|
||||
for policy_file in /etc/ImageMagick-6/policy.xml /etc/ImageMagick-7/policy.xml; do
|
||||
if [[ -f "$policy_file" ]]; then
|
||||
sed -i -e 's/rights="none" pattern="PDF"/rights="read|write" pattern="PDF"/' "$policy_file"
|
||||
fi
|
||||
done
|
||||
msg_ok "Installed Natural Language Toolkit"
|
||||
|
||||
msg_info "Creating Services"
|
||||
cat <<EOF >/etc/systemd/system/paperless-scheduler.service
|
||||
|
||||
126
install/shlink-install.sh
Normal file
126
install/shlink-install.sh
Normal file
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://shlink.io/
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
PHP_VERSION="8.5" setup_php
|
||||
setup_mariadb
|
||||
MARIADB_DB_NAME="shlink" MARIADB_DB_USER="shlink" setup_mariadb_db
|
||||
|
||||
fetch_and_deploy_gh_release "shlink" "shlinkio/shlink" "prebuild" "latest" "/opt/shlink" "shlink*_php8.5_dist.zip"
|
||||
|
||||
msg_info "Setting up Application"
|
||||
cd /opt/shlink
|
||||
$STD php ./vendor/bin/rr get --no-interaction --location bin/
|
||||
chmod +x bin/rr
|
||||
mkdir -p data/cache data/locks data/log data/proxies data/temp-geolite
|
||||
chmod -R 775 data
|
||||
cat <<EOF >/opt/shlink/.env
|
||||
DEFAULT_DOMAIN=${LOCAL_IP}:8080
|
||||
IS_HTTPS_ENABLED=false
|
||||
DB_DRIVER=maria
|
||||
DB_NAME=${MARIADB_DB_NAME}
|
||||
DB_USER=${MARIADB_DB_USER}
|
||||
DB_PASSWORD=${MARIADB_DB_PASS}
|
||||
DB_HOST=127.0.0.1
|
||||
DB_PORT=3306
|
||||
EOF
|
||||
set -a
|
||||
source /opt/shlink/.env
|
||||
set +a
|
||||
$STD php vendor/bin/shlink-installer init --no-interaction --clear-db-cache --skip-download-geolite
|
||||
API_OUTPUT=$(php bin/cli api-key:generate --name=default 2>&1)
|
||||
INITIAL_API_KEY=$(echo "$API_OUTPUT" | sed -n 's/.*Generated API key: "\([^"]*\)".*/\1/p')
|
||||
if [[ -n "$INITIAL_API_KEY" ]]; then
|
||||
echo "INITIAL_API_KEY=${INITIAL_API_KEY}" >>/opt/shlink/.env
|
||||
fi
|
||||
msg_ok "Set up Application"
|
||||
|
||||
if prompt_confirm "Install Shlink Web Client?" "y" 60; then
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y nginx
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
fetch_and_deploy_gh_release "shlink-web-client" "shlinkio/shlink-web-client" "prebuild" "latest" "/opt/shlink-web-client" "shlink-web-client_*_dist.zip"
|
||||
|
||||
msg_info "Setting up Web Client"
|
||||
cat <<EOF >/opt/shlink-web-client/servers.json
|
||||
[
|
||||
{
|
||||
"name": "Shlink",
|
||||
"url": "http://${LOCAL_IP}:8080",
|
||||
"apiKey": "${INITIAL_API_KEY}"
|
||||
}
|
||||
]
|
||||
EOF
|
||||
cat <<'EOF' >/etc/nginx/sites-available/shlink-web-client
|
||||
server {
|
||||
listen 3000 default_server;
|
||||
charset utf-8;
|
||||
root /opt/shlink-web-client;
|
||||
index index.html;
|
||||
|
||||
location ~* \.(?:manifest|appcache|html?|xml|json)$ {
|
||||
expires -1;
|
||||
}
|
||||
|
||||
location ~* \.(?:jpg|jpeg|gif|png|ico|cur|gz|svg|svgz|mp4|ogg|ogv|webm|htc)$ {
|
||||
expires 1M;
|
||||
add_header Cache-Control "public";
|
||||
}
|
||||
|
||||
location ~* \.(?:css|js)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public";
|
||||
}
|
||||
|
||||
location = /servers.json {
|
||||
try_files /servers.json /conf.d/servers.json;
|
||||
}
|
||||
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html$is_args$args;
|
||||
}
|
||||
}
|
||||
EOF
|
||||
ln -sf /etc/nginx/sites-available/shlink-web-client /etc/nginx/sites-enabled/shlink-web-client
|
||||
rm -f /etc/nginx/sites-enabled/default
|
||||
systemctl enable -q nginx
|
||||
$STD systemctl restart nginx
|
||||
msg_ok "Set up Web Client"
|
||||
fi
|
||||
|
||||
msg_info "Creating Service"
|
||||
cat <<EOF >/etc/systemd/system/shlink.service
|
||||
[Unit]
|
||||
Description=Shlink URL Shortener
|
||||
After=network.target mariadb.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/shlink
|
||||
EnvironmentFile=/opt/shlink/.env
|
||||
ExecStart=/opt/shlink/bin/rr serve -c config/roadrunner/.rr.yml
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable -q --now shlink
|
||||
msg_ok "Created Service"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
86
install/solidtime-install.sh
Normal file
86
install/solidtime-install.sh
Normal file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://www.solidtime.io/
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y caddy
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULES="bcmath,gd,intl,xml,zip,pdo_pgsql,redis,mbstring,curl" setup_php
|
||||
setup_composer
|
||||
NODE_VERSION="22" setup_nodejs
|
||||
PG_VERSION="16" setup_postgresql
|
||||
PG_DB_NAME="solidtime" PG_DB_USER="solidtime" setup_postgresql_db
|
||||
|
||||
fetch_and_deploy_gh_release "solidtime" "solidtime-io/solidtime" "tarball"
|
||||
|
||||
msg_info "Setting up SolidTime"
|
||||
cd /opt/solidtime
|
||||
cp .env.example .env
|
||||
sed -i "s|^APP_ENV=.*|APP_ENV=production|" .env
|
||||
sed -i "s|^APP_DEBUG=.*|APP_DEBUG=false|" .env
|
||||
sed -i "s|^APP_URL=.*|APP_URL=http://${LOCAL_IP}|" .env
|
||||
sed -i "s|^APP_ENABLE_REGISTRATION=.*|APP_ENABLE_REGISTRATION=true|" .env
|
||||
sed -i "s|^DB_CONNECTION=.*|DB_CONNECTION=pgsql|" .env
|
||||
sed -i "s|^DB_HOST=.*|DB_HOST=127.0.0.1|" .env
|
||||
sed -i "s|^DB_PORT=.*|DB_PORT=5432|" .env
|
||||
sed -i "s|^DB_DATABASE=.*|DB_DATABASE=${PG_DB_NAME}|" .env
|
||||
sed -i "s|^DB_USERNAME=.*|DB_USERNAME=${PG_DB_USER}|" .env
|
||||
sed -i "s|^DB_PASSWORD=.*|DB_PASSWORD=${PG_DB_PASS}|" .env
|
||||
sed -i "s|^FILESYSTEM_DISK=.*|FILESYSTEM_DISK=local|" .env
|
||||
sed -i "s|^PUBLIC_FILESYSTEM_DISK=.*|PUBLIC_FILESYSTEM_DISK=public|" .env
|
||||
sed -i "s|^MAIL_MAILER=.*|MAIL_MAILER=log|" .env
|
||||
sed -i "s|^SESSION_SECURE_COOKIE=.*|SESSION_SECURE_COOKIE=false|" .env
|
||||
grep -q "^SESSION_SECURE_COOKIE=" .env || echo "SESSION_SECURE_COOKIE=false" >>.env
|
||||
sed -i "s|^APP_FORCE_HTTPS=.*|APP_FORCE_HTTPS=false|" .env
|
||||
grep -q "^APP_FORCE_HTTPS=" .env || echo "APP_FORCE_HTTPS=false" >>.env
|
||||
$STD composer install --no-dev --optimize-autoloader
|
||||
php artisan self-host:generate-keys >/tmp/solidtime.keys 2>/dev/null
|
||||
while IFS= read -r line; do
|
||||
KEY="${line%%=*}"
|
||||
[[ -z "$KEY" || "${KEY:0:1}" == "#" ]] && continue
|
||||
sed -i "/^${KEY}=/d" .env
|
||||
echo "$line" >>.env
|
||||
done </tmp/solidtime.keys
|
||||
rm -f /tmp/solidtime.keys
|
||||
$STD npm install
|
||||
$STD npm run build
|
||||
rm -rf node_modules
|
||||
mkdir -p storage/framework/{cache,sessions,views} storage/logs bootstrap/cache
|
||||
chown -R www-data:www-data /opt/solidtime
|
||||
chmod -R 775 storage bootstrap/cache
|
||||
$STD php artisan storage:link
|
||||
$STD php artisan migrate --force
|
||||
$STD php artisan passport:client --personal --name="API" -n
|
||||
$STD php artisan optimize:clear
|
||||
msg_ok "Set up SolidTime"
|
||||
|
||||
msg_info "Configuring Caddy"
|
||||
PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION . "." . PHP_MINOR_VERSION;')
|
||||
cat <<EOF >/etc/caddy/Caddyfile
|
||||
:80 {
|
||||
root * /opt/solidtime/public
|
||||
php_fastcgi unix//run/php/php${PHP_VER}-fpm.sock
|
||||
file_server
|
||||
encode gzip
|
||||
}
|
||||
EOF
|
||||
usermod -aG www-data caddy
|
||||
systemctl enable -q --now php${PHP_VER}-fpm
|
||||
systemctl restart caddy
|
||||
msg_ok "Configured Caddy"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
98
install/storyteller-install.sh
Normal file
98
install/storyteller-install.sh
Normal file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://gitlab.com/storyteller-platform/storyteller

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing Dependencies"
$STD apt install -y \
  build-essential \
  git \
  pkg-config \
  libsqlite3-dev \
  sqlite3 \
  python3-setuptools \
  ffmpeg
msg_ok "Installed Dependencies"

NODE_VERSION="22" NODE_MODULE="yarn" setup_nodejs

# Readium CLI (prebuilt binary from GitHub) + Storyteller sources (GitLab tarball)
fetch_and_deploy_gh_release "readium" "readium/cli" "prebuild" "latest" "/opt/readium" "readium_linux_x86_64.tar.gz"
ln -sf /opt/readium/readium /usr/local/bin/readium
fetch_and_deploy_gl_release "storyteller" "storyteller-platform/storyteller" "tarball" "latest" "/opt/storyteller"

msg_info "Setting up Storyteller"
cd /opt/storyteller
$STD yarn install --network-timeout 600000
# Compile the sqlite uuid extension loaded by the web app
$STD gcc -g -fPIC -rdynamic -shared web/sqlite/uuid.c -o web/sqlite/uuid.c.so
STORYTELLER_SECRET_KEY=$(openssl rand -base64 32)
cat <<EOF >/opt/storyteller/.env
STORYTELLER_SECRET_KEY=${STORYTELLER_SECRET_KEY}
STORYTELLER_DATA_DIR=/opt/storyteller/data
PORT=8001
HOSTNAME=0.0.0.0
READIUM_PORT=9000
NODE_ENV=production
NEXT_TELEMETRY_DISABLED=1
EOF
mkdir -p /opt/storyteller/data
{
  echo "Storyteller Credentials"
  echo "======================="
  echo "Secret Key: ${STORYTELLER_SECRET_KEY}"
} >~/storyteller.creds
msg_ok "Set up Storyteller"

msg_info "Building Storyteller"
cd /opt/storyteller
export CI=1
export NODE_ENV=production
export NEXT_TELEMETRY_DISABLED=1
export SQLITE_NATIVE_BINDING=/opt/storyteller/node_modules/better-sqlite3/build/Release/better_sqlite3.node
$STD yarn workspaces foreach -Rpt --from @storyteller-platform/web --exclude @storyteller-platform/eslint run build

# Next.js standalone output needs static assets, migrations and the sqlite
# extension copied in next to server.js before it can be run directly.
STANDALONE=/opt/storyteller/web/.next/standalone/web
mkdir -p "$STANDALONE/.next/static"
cp -rT /opt/storyteller/web/.next/static "$STANDALONE/.next/static"
if [[ -d /opt/storyteller/web/public ]]; then
  mkdir -p "$STANDALONE/public"
  cp -rT /opt/storyteller/web/public "$STANDALONE/public"
fi
mkdir -p "$STANDALONE/migrations"
cp -rT /opt/storyteller/web/migrations "$STANDALONE/migrations"
mkdir -p "$STANDALONE/sqlite"
cp -rT /opt/storyteller/web/sqlite "$STANDALONE/sqlite"
ln -sf /opt/storyteller/.env "$STANDALONE/.env"
msg_ok "Built Storyteller"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/storyteller.service
[Unit]
Description=Storyteller
After=network.target

[Service]
Type=simple
User=root
WorkingDirectory=/opt/storyteller/web/.next/standalone/web
EnvironmentFile=/opt/storyteller/.env
ExecStart=/usr/bin/node --enable-source-maps server.js
Restart=on-failure
RestartSec=5

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now storyteller
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
|
||||
@@ -100,6 +100,14 @@ sed -i 's|/app/html|/opt/termix/html|g' /etc/nginx/nginx.conf
|
||||
sed -i 's|/app/nginx|/opt/termix/nginx|g' /etc/nginx/nginx.conf
|
||||
sed -i 's|listen ${PORT};|listen 80;|g' /etc/nginx/nginx.conf
|
||||
|
||||
mkdir -p /tmp/nginx
|
||||
echo "d /tmp/nginx 0755 nobody nobody -" > /etc/tmpfiles.d/nginx-termix.conf
|
||||
mkdir -p /etc/systemd/system/nginx.service.d/
|
||||
cat > /etc/systemd/system/nginx.service.d/pidfile.conf << EOF
|
||||
[Service]
|
||||
PIDFile=/tmp/nginx/nginx.pid
|
||||
EOF
|
||||
systemctl daemon-reload
|
||||
rm -f /etc/nginx/sites-enabled/default
|
||||
nginx -t
|
||||
systemctl reload nginx
|
||||
|
||||
@@ -14,6 +14,7 @@ network_check
|
||||
update_os
|
||||
|
||||
fetch_and_deploy_gh_release "thelounge" "thelounge/thelounge-deb" "binary"
|
||||
systemctl enable -q --now thelounge
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
|
||||
@@ -21,6 +21,7 @@ $STD apt install -y \
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
fetch_and_deploy_gh_release "threadfin-app" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
|
||||
mv /opt/threadfin/threadfin-app /opt/threadfin/threadfin
|
||||
|
||||
msg_info "Creating Service"
|
||||
cat <<EOF >/etc/systemd/system/threadfin.service
|
||||
|
||||
@@ -18,13 +18,10 @@ export DEBIAN_FRONTEND=noninteractive
|
||||
$STD apt -y install --no-install-recommends \
|
||||
git \
|
||||
traceroute \
|
||||
make \
|
||||
g++ \
|
||||
traceroute \
|
||||
build-essential \
|
||||
xvfb \
|
||||
dbus \
|
||||
xorg \
|
||||
xvfb \
|
||||
gtk2-engines-pixbuf \
|
||||
dbus-x11 \
|
||||
xfonts-base \
|
||||
@@ -43,16 +40,13 @@ rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
|
||||
msg_ok "Setup Python3"
|
||||
|
||||
msg_info "Installing Chromium"
|
||||
curl -fsSL https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor -o /usr/share/keyrings/google-chrome-keyring.gpg
|
||||
cat <<EOF | sudo tee /etc/apt/sources.list.d/google-chrome.sources >/dev/null
|
||||
Types: deb
|
||||
URIs: http://dl.google.com/linux/chrome/deb/
|
||||
Suites: stable
|
||||
Components: main
|
||||
Architectures: amd64
|
||||
Signed-By: /usr/share/keyrings/google-chrome-keyring.gpg
|
||||
EOF
|
||||
$STD apt update
|
||||
setup_deb822_repo \
|
||||
"google-chrome" \
|
||||
"https://dl-ssl.google.com/linux/linux_signing_key.pub" \
|
||||
"http://dl.google.com/linux/chrome/deb/" \
|
||||
"stable" \
|
||||
"main" \
|
||||
"amd64"
|
||||
$STD apt -y install \
|
||||
chromium \
|
||||
libxss1 \
|
||||
@@ -64,13 +58,14 @@ msg_info "Setting up Chromium"
|
||||
chmod 755 /usr/bin/chromium
|
||||
msg_ok "Setup Chromium"
|
||||
|
||||
fetch_and_deploy_gh_release "web-check" "CrazyWolf13/web-check" "tarball"
|
||||
fetch_and_deploy_gh_release "web-check" "Lissy93/web-check" "tarball"
|
||||
|
||||
msg_info "Installing Web-Check (Patience)"
|
||||
cd /opt/web-check
|
||||
cat <<'EOF' >/opt/web-check/.env
|
||||
CHROME_PATH=/usr/bin/chromium
|
||||
PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
|
||||
PUPPETEER_SKIP_DOWNLOAD='true'
|
||||
HEADLESS=true
|
||||
GOOGLE_CLOUD_API_KEY=''
|
||||
REACT_APP_SHODAN_API_KEY=''
|
||||
|
||||
@@ -20,7 +20,7 @@ $STD apt install -y \
|
||||
caddy
|
||||
msg_ok "Installed dependencies"
|
||||
|
||||
NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
|
||||
NODE_VERSION="24" NODE_MODULE="pnpm@10" setup_nodejs
|
||||
fetch_and_deploy_gh_release "wishlist" "cmintey/wishlist" "tarball"
|
||||
LATEST_APP_VERSION=$(get_latest_github_release "cmintey/wishlist")
|
||||
|
||||
|
||||
117
misc/build.func
117
misc/build.func
@@ -1062,6 +1062,7 @@ load_vars_file() {
|
||||
var_gateway var_hostname var_ipv6_method var_mac var_mknod var_mount_fs var_mtu
|
||||
var_net var_nesting var_ns var_os var_protection var_pw var_ram var_tags var_timezone var_tun var_unprivileged
|
||||
var_verbose var_version var_vlan var_ssh var_ssh_authorized_key var_container_storage var_template_storage var_searchdomain
|
||||
var_post_install
|
||||
)
|
||||
|
||||
# Whitelist check helper
|
||||
@@ -1279,6 +1280,7 @@ default_var_settings() {
|
||||
var_gateway var_hostname var_ipv6_method var_mac var_mknod var_mount_fs var_mtu
|
||||
var_net var_nesting var_ns var_os var_protection var_pw var_ram var_tags var_timezone var_tun var_unprivileged
|
||||
var_verbose var_version var_vlan var_ssh var_ssh_authorized_key var_container_storage var_template_storage
|
||||
var_post_install
|
||||
)
|
||||
|
||||
# Snapshot: environment variables (highest precedence)
|
||||
@@ -1374,6 +1376,11 @@ var_verbose=no
|
||||
# GitHub Personal Access Token (optional – avoids API rate limits during installs)
|
||||
# Create at https://github.com/settings/tokens – read-only public access is sufficient
|
||||
# var_github_token=ghp_your_token_here
|
||||
|
||||
# Optional post-install script (host-side path to a *.sh on the Proxmox host)
|
||||
# Runs ON THE HOST after the container is fully provisioned.
|
||||
# Available env vars: APP, NSAPP, CTID, IP, HN, STORAGE, BRG
|
||||
# var_post_install=/opt/post-install/myhook.sh
|
||||
EOF
|
||||
|
||||
# Now choose storages (always prompt unless just one exists)
|
||||
@@ -1452,6 +1459,7 @@ if ! declare -p VAR_WHITELIST >/dev/null 2>&1; then
|
||||
var_gateway var_hostname var_ipv6_method var_mac var_mknod var_mount_fs var_mtu
|
||||
var_net var_nesting var_ns var_os var_protection var_pw var_ram var_tags var_timezone var_tun var_unprivileged
|
||||
var_verbose var_version var_vlan var_ssh var_ssh_authorized_key var_container_storage var_template_storage var_searchdomain
|
||||
var_post_install
|
||||
)
|
||||
fi
|
||||
|
||||
@@ -1664,6 +1672,7 @@ _build_current_app_vars_tmp() {
|
||||
|
||||
[ -n "$_tpl_storage" ] && echo "var_template_storage=$(_sanitize_value "$_tpl_storage")"
|
||||
[ -n "$_ct_storage" ] && echo "var_container_storage=$(_sanitize_value "$_ct_storage")"
|
||||
[ -n "${var_post_install:-}" ] && echo "var_post_install=$(_sanitize_value "${var_post_install}")"
|
||||
} >"$tmpf"
|
||||
|
||||
echo "$tmpf"
|
||||
@@ -1808,7 +1817,7 @@ advanced_settings() {
|
||||
TAGS="community-script${var_tags:+;${var_tags}}"
|
||||
fi
|
||||
local STEP=1
|
||||
local MAX_STEP=28
|
||||
local MAX_STEP=29
|
||||
|
||||
# Store values for back navigation - inherit from var_* app defaults
|
||||
local _ct_type="${var_unprivileged:-1}"
|
||||
@@ -1842,6 +1851,7 @@ advanced_settings() {
|
||||
local _enable_mknod="${var_mknod:-0}"
|
||||
local _mount_fs="${var_mount_fs:-}"
|
||||
local _protect_ct="${var_protection:-no}"
|
||||
local _post_install="${var_post_install:-}"
|
||||
|
||||
# Detect host timezone for default (if not set via var_timezone)
|
||||
local _host_timezone=""
|
||||
@@ -2699,9 +2709,61 @@ advanced_settings() {
|
||||
;;
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# STEP 28: Verbose Mode & Confirmation
|
||||
# STEP 28: Optional host-side post-install hook (path on the Proxmox HOST)
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
28)
|
||||
local _hook_prompt="Optional: absolute path to a *.sh file ON THE PROXMOX HOST.
|
||||
|
||||
It runs as root on the HOST (NOT in the LXC) after the container
|
||||
is fully provisioned and started.
|
||||
|
||||
Available env vars: APP, NSAPP, CTID, IP, HN, STORAGE, BRG.
|
||||
|
||||
Leave empty to skip."
|
||||
while true; do
|
||||
if result=$(whiptail --backtitle "Proxmox VE Helper Scripts [Step $STEP/$MAX_STEP]" \
|
||||
--title "POST-INSTALL HOOK (HOST)" \
|
||||
--ok-button "Next" --cancel-button "Back" \
|
||||
--inputbox "$_hook_prompt" 16 70 "${_post_install}" \
|
||||
3>&1 1>&2 2>&3); then
|
||||
# Normalize: strip surrounding whitespace
|
||||
result="$(printf '%s' "$result" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')"
|
||||
if [[ -z "$result" ]]; then
|
||||
_post_install=""
|
||||
((STEP++))
|
||||
break
|
||||
fi
|
||||
# Reject obvious shell-meta sneaking through
|
||||
if [[ "$result" == *';'* || "$result" == *'$('* || "$result" == *'`'* || "$result" == *'&&'* || "$result" == *'||'* ]]; then
|
||||
whiptail --backtitle "Proxmox VE Helper Scripts" --title "INVALID PATH" \
|
||||
--msgbox "Path contains shell metacharacters. Please provide a plain absolute file path." 10 70
|
||||
continue
|
||||
fi
|
||||
if [[ "$result" != /* ]]; then
|
||||
whiptail --backtitle "Proxmox VE Helper Scripts" --title "INVALID PATH" \
|
||||
--msgbox "Path must be absolute (start with /).\n\nGot: $result" 10 70
|
||||
continue
|
||||
fi
|
||||
if [[ ! -f "$result" ]]; then
|
||||
if ! whiptail --backtitle "Proxmox VE Helper Scripts" --title "FILE NOT FOUND" \
|
||||
--yesno "File does not exist on host:\n\n$result\n\nKeep this path anyway?" 12 70; then
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
_post_install="$result"
|
||||
((STEP++))
|
||||
break
|
||||
else
|
||||
((STEP--))
|
||||
break
|
||||
fi
|
||||
done
|
||||
;;
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# STEP 29: Verbose Mode & Confirmation
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
29)
|
||||
local verbose_default_flag="--defaultno"
|
||||
[[ "$_verbose" == "yes" ]] && verbose_default_flag=""
|
||||
|
||||
@@ -2730,6 +2792,11 @@ advanced_settings() {
|
||||
local apt_display="${_apt_cacher:-no}"
|
||||
[[ "$_apt_cacher" == "yes" && -n "$_apt_cacher_ip" ]] && apt_display="$_apt_cacher_ip"
|
||||
|
||||
local post_install_display="${_post_install:-(none)}"
|
||||
local post_install_warn=""
|
||||
[[ -n "$_post_install" ]] && post_install_warn="
|
||||
⚠ Hook runs as root on Proxmox HOST (not in LXC)"
|
||||
|
||||
local summary="Container Type: $ct_type_desc
|
||||
Container ID: $_ct_id
|
||||
Hostname: $_hostname
|
||||
@@ -2753,7 +2820,8 @@ Features:
|
||||
Advanced:
|
||||
Timezone: $tz_display
|
||||
APT Cacher: $apt_display
|
||||
Verbose: $_verbose"
|
||||
Verbose: $_verbose
|
||||
Post-Install Script: ${post_install_display}${post_install_warn}"
|
||||
|
||||
if whiptail --backtitle "Proxmox VE Helper Scripts [Step $STEP/$MAX_STEP]" \
|
||||
--title "CONFIRM SETTINGS" \
|
||||
@@ -2796,6 +2864,7 @@ Advanced:
|
||||
APT_CACHER="$_apt_cacher"
|
||||
APT_CACHER_IP="$_apt_cacher_ip"
|
||||
VERBOSE="$_verbose"
|
||||
var_post_install="$_post_install"
|
||||
|
||||
# Update var_* based on user choice (for functions that check these)
|
||||
var_gpu="$_enable_gpu"
|
||||
@@ -3230,6 +3299,10 @@ check_container_resources() {
|
||||
if [[ "$current_ram" -lt "$var_ram" ]] || [[ "$current_cpu" -lt "$var_cpu" ]]; then
|
||||
msg_warn "Under-provisioned: Required ${var_cpu} CPU/${var_ram}MB RAM, Current ${current_cpu} CPU/${current_ram}MB RAM"
|
||||
echo -e "${YWB}Please ensure that the ${APP} LXC is configured with at least ${var_cpu} vCPU and ${var_ram} MB RAM for the build process.${CL}\n"
|
||||
if is_unattended; then
|
||||
msg_error "Aborted: under-provisioned LXC in unattended mode (${current_cpu} CPU/${current_ram}MB RAM < ${var_cpu} CPU/${var_ram}MB RAM)"
|
||||
exit 113
|
||||
fi
|
||||
echo -ne "${INFO}${HOLD} May cause data loss! ${INFO} Continue update with under-provisioned LXC? <yes/No> "
|
||||
read -r prompt </dev/tty
|
||||
if [[ ! ${prompt,,} =~ ^(yes)$ ]]; then
|
||||
@@ -3253,6 +3326,10 @@ check_container_storage() {
|
||||
usage=$((100 * used_size / total_size))
|
||||
if ((usage > 80)); then
|
||||
msg_warn "Storage is dangerously low (${usage}% used on /boot)"
|
||||
if is_unattended; then
|
||||
msg_error "Aborted: storage too low in unattended mode (${usage}% used on /boot)"
|
||||
exit 114
|
||||
fi
|
||||
echo -ne "Continue anyway? <y/N> "
|
||||
read -r prompt </dev/tty
|
||||
if [[ ! ${prompt,,} =~ ^(y|yes)$ ]]; then
|
||||
@@ -6353,6 +6430,40 @@ EOF
|
||||
systemctl start ping-instances.service
|
||||
fi
|
||||
|
||||
# Optional host-side post-install hook
|
||||
# Path comes from var_post_install (default.vars / app.vars / advanced settings).
|
||||
# Runs ON THE PROXMOX HOST after the container is up and configured.
|
||||
# Exposed env vars: APP, NSAPP, CTID, IP, HN, STORAGE, BRG.
|
||||
# Output (stdout/stderr) is captured to /var/log/community-scripts/post-install-<CTID>.log
|
||||
if [[ -n "${var_post_install:-}" ]]; then
|
||||
local _hook_log_dir="/var/log/community-scripts"
|
||||
local _hook_log="${_hook_log_dir}/post-install-${CTID}.log"
|
||||
mkdir -p "$_hook_log_dir" 2>/dev/null || true
|
||||
|
||||
if [[ ! -f "${var_post_install}" ]]; then
|
||||
msg_error "Post-install hook not found on host: ${var_post_install}"
|
||||
whiptail --backtitle "Proxmox VE Helper Scripts" \
|
||||
--title "POST-INSTALL HOOK FAILED" \
|
||||
--msgbox "The configured post-install hook was not found on the Proxmox host:\n\n${var_post_install}\n\nThe LXC was created successfully, but the hook did NOT run." 14 72 || true
|
||||
else
|
||||
msg_info "Running post-install hook: ${var_post_install}"
|
||||
local _hook_rc=0
|
||||
APP="$APP" NSAPP="${NSAPP:-}" CTID="$CTID" IP="$IP" HN="${HN:-}" \
|
||||
STORAGE="${STORAGE:-}" BRG="${BRG:-}" \
|
||||
bash "${var_post_install}" >"${_hook_log}" 2>&1 || _hook_rc=$?
|
||||
if [[ $_hook_rc -eq 0 ]]; then
|
||||
msg_ok "Post-install hook completed (log: ${_hook_log})"
|
||||
else
|
||||
msg_error "Post-install hook failed (rc=${_hook_rc}) – see ${_hook_log}"
|
||||
local _hook_tail=""
|
||||
_hook_tail="$(tail -n 15 "${_hook_log}" 2>/dev/null || true)"
|
||||
whiptail --backtitle "Proxmox VE Helper Scripts" \
|
||||
--title "POST-INSTALL HOOK FAILED" \
|
||||
--msgbox "Hook exited with code ${_hook_rc}.\n\nScript: ${var_post_install}\nLog: ${_hook_log}\n\n--- Last log lines ---\n${_hook_tail}\n\nThe LXC itself was created successfully." 22 78 || true
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
INSTALL_COMPLETE=true
|
||||
post_update_to_api "done" "none"
|
||||
}
|
||||
|
||||
@@ -868,6 +868,12 @@ get_header() {
|
||||
# - Returns silently if header not available
|
||||
# ------------------------------------------------------------------------------
|
||||
header_info() {
|
||||
# Guard against printing the header twice in the same session (e.g. when
|
||||
# the ct script calls header_info at global scope AND again inside
|
||||
# update_script()).
|
||||
[[ "${_HEADER_SHOWN:-0}" == "1" ]] && return 0
|
||||
_HEADER_SHOWN=1
|
||||
|
||||
local app_name=$(echo "${APP,,}" | tr -d ' ')
|
||||
local header_content
|
||||
|
||||
|
||||
867
misc/tools.func
867
misc/tools.func
@@ -2079,15 +2079,33 @@ get_latest_gh_tag() {
|
||||
local temp_file
|
||||
temp_file=$(mktemp)
|
||||
|
||||
if ! github_api_call "https://api.github.com/repos/${repo}/tags?per_page=50" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 22
|
||||
fi
|
||||
|
||||
local tag=""
|
||||
|
||||
if [[ -n "$prefix" ]]; then
|
||||
tag=$(jq -r --arg p "$prefix" '[.[] | select(.name | startswith($p))][0].name // empty' "$temp_file")
|
||||
# Use git/matching-refs API for server-side prefix filtering. This avoids
|
||||
# paging through unrelated tags (e.g. mongodb/mongo-tools where 100.x tags
|
||||
# only appear after page 4 of /tags). Returns ALL tags matching the prefix
|
||||
# in a single call, sorted lexicographically ascending; we pick the
|
||||
# highest version using `sort -V`.
|
||||
if ! github_api_call "https://api.github.com/repos/${repo}/git/matching-refs/tags/${prefix}" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 22
|
||||
fi
|
||||
|
||||
local count
|
||||
count=$(jq 'length' "$temp_file" 2>/dev/null || echo 0)
|
||||
if [[ "$count" -gt 0 ]]; then
|
||||
tag=$(jq -r '.[].ref' "$temp_file" |
|
||||
sed 's|^refs/tags/||' |
|
||||
sort -V |
|
||||
tail -n1)
|
||||
fi
|
||||
else
|
||||
# No prefix: just take the first (newest) tag from /tags
|
||||
if ! github_api_call "https://api.github.com/repos/${repo}/tags?per_page=1" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 22
|
||||
fi
|
||||
tag=$(jq -r '.[0].name // empty' "$temp_file")
|
||||
fi
|
||||
|
||||
@@ -6475,6 +6493,14 @@ function setup_nodejs() {
|
||||
# Install global Node modules
|
||||
if [[ -n "$NODE_MODULE" ]]; then
|
||||
IFS=',' read -ra MODULES <<<"$NODE_MODULE"
|
||||
|
||||
# Pin pnpm to v10 to avoid breaking changes from newer major versions
|
||||
for i in "${!MODULES[@]}"; do
|
||||
if [[ "${MODULES[$i]}" =~ ^pnpm(@.*)?$ ]]; then
|
||||
MODULES[$i]="pnpm@^10"
|
||||
fi
|
||||
done
|
||||
|
||||
local failed_modules=0
|
||||
for mod in "${MODULES[@]}"; do
|
||||
local MODULE_NAME MODULE_REQ_VERSION MODULE_INSTALLED_VERSION
|
||||
@@ -7570,7 +7596,7 @@ function setup_meilisearch() {
|
||||
|
||||
# Start meilisearch with --import-dump flag
|
||||
# This is a one-time import that happens during startup
|
||||
/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml --import-dump "$DUMP_FILE" &
|
||||
/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml --import-dump "$DUMP_FILE" >/dev/null 2>&1 &
|
||||
local MEILI_PID=$!
|
||||
|
||||
# Wait for meilisearch to become healthy (import happens during startup)
|
||||
@@ -7593,6 +7619,7 @@ function setup_meilisearch() {
|
||||
|
||||
# Stop the manual process
|
||||
kill $MEILI_PID 2>/dev/null || true
|
||||
wait $MEILI_PID 2>/dev/null || true
|
||||
sleep 2
|
||||
|
||||
# Start via systemd for proper management
|
||||
@@ -8665,3 +8692,829 @@ EOF
|
||||
$STD apt update
|
||||
return 0
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
# Get latest GitLab release version.
# Usage: get_latest_gitlab_release "owner/repo" [strip_v]
#
# Prints the newest release tag on stdout. When strip_v (default "true") is
# enabled, a leading "v" is removed only when followed by a digit (v1.2.3).
# Returns 22 on API failure, 250 when no tag could be determined.
# ------------------------------------------------------------------------------
get_latest_gitlab_release() {
  local repo="$1"
  local strip_v="${2:-true}"

  # GitLab expects the project path URL-encoded ("owner%2Frepo")
  local encoded="${repo//\//%2F}"

  # Optional authentication for private / rate-limited projects
  local auth=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && auth=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  local tmp
  tmp=$(mktemp)

  # Single call for the most recently published release
  local code
  code=$(curl --connect-timeout 10 --max-time 30 -sSL \
    -w "%{http_code}" -o "$tmp" \
    "${auth[@]}" \
    "https://gitlab.com/api/v4/projects/$encoded/releases?per_page=1&order_by=released_at&sort=desc" 2>/dev/null) || true

  if [[ "$code" != "200" ]]; then
    rm -f "$tmp"
    msg_warn "GitLab API call failed for ${repo} (HTTP ${code})"
    return 22
  fi

  local tag
  tag=$(jq -r '.[0].tag_name // empty' "$tmp")
  rm -f "$tmp"

  if [[ -z "$tag" ]]; then
    msg_error "Could not determine latest version for ${repo}"
    return 250
  fi

  # Strip leading 'v' only when it prefixes a digit (keeps tags like "vega-1.0")
  if [[ "$strip_v" == "true" && "$tag" =~ ^v[0-9] ]]; then
    tag="${tag:1}"
  fi

  echo "$tag"
}
|
||||
|
||||
# ------------------------------------------------------------------------------
# Checks for new GitLab release (latest tag).
#
# Description:
#   - Queries the GitLab API for the latest release tag
#   - Compares it to a local cached version (~/.<app>)
#   - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
#
# Usage:
#   if check_for_gl_release "myapp" "owner/repo" [optional] "v1.2.3"; then
#     # trigger update...
#   fi
#
# Arguments:
#   $1 - app name (cache file is ~/.<app lowercased>)
#   $2 - "owner/repo" GitLab project path
#   $3 - optional pinned version (update target held at this tag)
#   $4 - optional reason shown to the user when the pin suppresses an update
#
# Notes:
#   - Requires `jq` (auto-installed if missing)
#   - Supports GITLAB_TOKEN env var for private/rate-limited repos
#   - Does not modify anything, only checks version state
# ------------------------------------------------------------------------------
check_for_gl_release() {
  local app="$1"
  local source="$2"
  local pinned_version_in="${3:-}" # optional
  local pin_reason="${4:-}"        # optional reason shown to user
  local app_lc="${app,,}"
  local current_file="$HOME/.${app_lc}"

  msg_info "Checking for update: ${app}"

  # DNS check
  if ! getent hosts gitlab.com >/dev/null 2>&1; then
    msg_error "Network error: cannot resolve gitlab.com"
    return 6
  fi

  ensure_dependencies jq

  local repo_encoded
  repo_encoded=$(printf '%s' "$source" | sed 's|/|%2F|g')

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  # Use a private mktemp file instead of the fixed /tmp/gl_check.json:
  # a predictable name is clobbered by concurrent runs and is vulnerable
  # to /tmp symlink attacks.
  local tmp_json
  tmp_json=$(mktemp)

  local releases_json="" http_code=""

  # For pinned versions, try to fetch the specific release tag first
  if [[ -n "$pinned_version_in" ]]; then
    local pinned_encoded="${pinned_version_in//\//%2F}"
    http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o "$tmp_json" \
      "${header[@]}" \
      "https://gitlab.com/api/v4/projects/$repo_encoded/releases/$pinned_encoded" 2>/dev/null) || true
    if [[ "$http_code" == "200" ]] && [[ -s "$tmp_json" ]]; then
      # Wrap the single-release object into an array for uniform parsing below
      releases_json="[$(<"$tmp_json")]"
    fi
    rm -f "$tmp_json"
  fi

  # Fetch full releases list if needed
  if [[ -z "$releases_json" ]]; then
    http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o "$tmp_json" \
      "${header[@]}" \
      "https://gitlab.com/api/v4/projects/$repo_encoded/releases?per_page=100&order_by=released_at&sort=desc" 2>/dev/null) || true

    if [[ "$http_code" == "200" ]] && [[ -s "$tmp_json" ]]; then
      releases_json=$(<"$tmp_json")
    elif [[ "$http_code" == "401" ]]; then
      msg_error "GitLab API authentication failed (HTTP 401)."
      if [[ -n "${GITLAB_TOKEN:-}" ]]; then
        msg_error "Your GITLAB_TOKEN appears to be invalid or expired."
      else
        msg_error "The repository may require authentication. Try: export GITLAB_TOKEN=\"glpat-your_token\""
      fi
      rm -f "$tmp_json"
      return 22
    elif [[ "$http_code" == "404" ]]; then
      msg_error "GitLab project not found (HTTP 404). Ensure '${source}' is correct and publicly accessible."
      rm -f "$tmp_json"
      return 22
    elif [[ "$http_code" == "429" ]]; then
      msg_error "GitLab API rate limit exceeded (HTTP 429)."
      msg_error "To increase the limit, export a GitLab token: export GITLAB_TOKEN=\"glpat-your_token_here\""
      rm -f "$tmp_json"
      return 22
    elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
      msg_error "GitLab API connection failed (no response)."
      msg_error "Check your network/DNS: curl -sSL https://gitlab.com/api/v4/version"
      rm -f "$tmp_json"
      return 7
    else
      msg_error "Unable to fetch releases for ${app} (HTTP ${http_code})"
      rm -f "$tmp_json"
      return 22
    fi
    rm -f "$tmp_json"
  fi

  mapfile -t raw_tags < <(jq -r '.[] | .tag_name' <<<"$releases_json")
  if ((${#raw_tags[@]} == 0)); then
    msg_error "No releases found for ${app} on GitLab"
    return 250
  fi

  local clean_tags=()
  local t
  for t in "${raw_tags[@]}"; do
    # Only strip leading 'v' when followed by a digit (e.g. v1.2.3)
    if [[ "$t" =~ ^v[0-9] ]]; then
      clean_tags+=("${t:1}")
    else
      clean_tags+=("$t")
    fi
  done

  local latest_raw="${raw_tags[0]}"
  local latest_clean="${clean_tags[0]}"

  # current installed (stored without v)
  local current=""
  if [[ -f "$current_file" ]]; then
    current="$(<"$current_file")"
  else
    # Migration: search for any /opt/*_version.txt
    local legacy_files
    mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null)
    if ((${#legacy_files[@]} == 1)); then
      current="$(<"${legacy_files[0]}")"
      echo "${current#v}" >"$current_file"
      rm -f "${legacy_files[0]}"
    fi
  fi
  if [[ "$current" =~ ^v[0-9] ]]; then
    current="${current:1}"
  fi

  # Pinned version handling
  if [[ -n "$pinned_version_in" ]]; then
    local pin_clean
    if [[ "$pinned_version_in" =~ ^v[0-9] ]]; then
      pin_clean="${pinned_version_in:1}"
    else
      pin_clean="$pinned_version_in"
    fi
    local match_raw=""
    local i
    for i in "${!clean_tags[@]}"; do
      if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then
        match_raw="${raw_tags[$i]}"
        break
      fi
    done

    if [[ -z "$match_raw" ]]; then
      msg_error "Pinned version ${pinned_version_in} not found upstream"
      return 250
    fi

    if [[ "$current" != "$pin_clean" ]]; then
      CHECK_UPDATE_RELEASE="$match_raw"
      msg_ok "Update available: ${app} ${current:-not installed} → ${pin_clean}"
      return 0
    fi

    if [[ -n "$pin_reason" ]]; then
      msg_ok "No update available: ${app} (${current}) - update held back: ${pin_reason}"
    else
      msg_ok "No update available: ${app} (${current}) - update temporarily held back due to issues with newer releases"
    fi
    return 1
  fi

  # No pinning → use latest
  if [[ -z "$current" || "$current" != "$latest_clean" ]]; then
    CHECK_UPDATE_RELEASE="$latest_raw"
    msg_ok "Update available: ${app} ${current:-not installed} → ${latest_clean}"
    return 0
  fi

  msg_ok "No update available: ${app} (${latest_clean})"
  return 1
}
|
||||
|
||||
# ------------------------------------------------------------------------------
# Scan older GitLab releases for a matching asset (fallback helper).
#
# Description:
#   When the latest release does not contain the expected asset
#   (e.g. .deb for the current arch, or a custom pattern), walks back
#   through up to 15 recent releases and returns the first release JSON
#   that has a matching asset. Used internally by fetch_and_deploy_gl_release.
#
# Usage (internal):
#   _gl_scan_older_releases "owner/repo" "owner%2Frepo" "https://gitlab.com" \
#       "binary|prebuild|singlefile" "$asset_pattern" "$skip_tag"
#
# Returns:
#   - stdout: JSON of the matching release (single object) on success
#   - 0 on success, 22 on API error, 250 if no match found
# ------------------------------------------------------------------------------
_gl_scan_older_releases() {
  local repo="$1"
  local repo_encoded="$2"
  local base_url="${3:-https://gitlab.com}"
  local mode="$4"
  local asset_pattern="$5"
  local skip_tag="$6"

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  local releases_list
  releases_list=$(curl --connect-timeout 10 --max-time 30 -fsSL \
    "${header[@]}" \
    "${base_url}/api/v4/projects/${repo_encoded}/releases?per_page=15&order_by=released_at&sort=desc" 2>/dev/null) || {
    msg_warn "Failed to fetch older releases for ${repo}"
    return 22
  }

  local count
  count=$(echo "$releases_list" | jq 'length' 2>/dev/null || echo 0)
  [[ "$count" -eq 0 ]] && return 250

  # Declare loop variables local so they do not leak into the caller's scope
  # (the original left `i` and `u` global).
  local i u
  for ((i = 0; i < count; i++)); do
    local rel_tag
    rel_tag=$(echo "$releases_list" | jq -r ".[$i].tag_name")

    # Skip the tag we already checked
    [[ "$rel_tag" == "$skip_tag" ]] && continue

    # Asset URLs for this release (direct_asset_url preferred, fallback to url)
    local asset_urls
    asset_urls=$(echo "$releases_list" | jq -r ".[$i].assets.links // [] | .[] | .direct_asset_url // .url")
    [[ -z "$asset_urls" ]] && continue

    local has_match=false

    if [[ "$mode" == "binary" ]]; then
      local arch
      arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
      [[ "$arch" == "x86_64" ]] && arch="amd64"
      [[ "$arch" == "aarch64" ]] && arch="arm64"

      # Check with explicit pattern first, then arch heuristic, then any .deb
      if [[ -n "$asset_pattern" ]]; then
        while read -r u; do
          # Match the filename component against the caller's glob pattern
          case "${u##*/}" in $asset_pattern)
            has_match=true
            break
            ;;
          esac
        done <<<"$asset_urls"
      fi
      if [[ "$has_match" != "true" ]]; then
        echo "$asset_urls" | grep -qE "($arch|amd64|x86_64|aarch64|arm64).*\.deb$" && has_match=true
      fi
      if [[ "$has_match" != "true" ]]; then
        echo "$asset_urls" | grep -qE '\.deb$' && has_match=true
      fi

    elif [[ "$mode" == "prebuild" || "$mode" == "singlefile" ]]; then
      while read -r u; do
        case "${u##*/}" in $asset_pattern)
          has_match=true
          break
          ;;
        esac
      done <<<"$asset_urls"
    fi

    if [[ "$has_match" == "true" ]]; then
      # Interactive sessions get a confirmation prompt (auto-yes after 60s);
      # non-interactive runs accept the fallback release silently.
      local use_fallback="y"
      if [[ -t 0 ]]; then
        msg_warn "Release ${skip_tag} has no matching asset. Previous release ${rel_tag} has a compatible asset."
        read -rp "Use version ${rel_tag} instead? [Y/n] (auto-yes in 60s): " -t 60 use_fallback || use_fallback="y"
        use_fallback="${use_fallback:-y}"
      fi

      if [[ "${use_fallback,,}" == "y" || "${use_fallback,,}" == "yes" ]]; then
        echo "$releases_list" | jq ".[$i]"
        return 0
      else
        return 250
      fi
    fi
  done

  return 250
}
|
||||
|
||||
# ------------------------------------------------------------------------------
# Fetch a release from GitLab.com and deploy it.
#
# Parameters:
#   $1 - app name (falls back to the repo basename when empty)
#   $2 - GitLab project path, e.g. "group/project"
#   $3 - mode: tarball|source|binary|prebuild|singlefile (default: tarball)
#   $4 - version tag or "latest" (overridden by $var_appversion when set)
#   $5 - target directory (default: /opt/$app)
#   $6 - asset filename glob (required for prebuild/singlefile)
#
# Environment:
#   GITLAB_TOKEN          - optional PRIVATE-TOKEN for authenticated API calls
#   CLEAN_INSTALL=1       - wipe $target before deploying (tarball/prebuild)
#   DPKG_FORCE_CONFOLD/CONFNEW=1 - dpkg conffile policy (binary mode)
#   USE_ORIGINAL_FILENAME=true   - keep asset filename (singlefile mode)
#
# Side effects: writes the deployed version to ~/.<app>; relies on helpers
# defined elsewhere in this file (msg_*, ensure_dependencies, $STD,
# _gl_scan_older_releases).
# Returns: 0 on success (or already up-to-date), 1 on any failure.
# ------------------------------------------------------------------------------
function fetch_and_deploy_gl_release() {
  local app="$1"
  local repo="$2"
  local mode="${3:-tarball}"
  local version="${var_appversion:-${4:-latest}}"
  local target="${5:-/opt/$app}"
  local asset_pattern="${6:-}"

  # Derive the app name from the repo basename when the caller omitted it.
  if [[ -z "$app" ]]; then
    app="${repo##*/}"
    if [[ -z "$app" ]]; then
      msg_error "fetch_and_deploy_gl_release requires app name or valid repo"
      return 1
    fi
  fi

  # Lowercase, space-free slug used for the version marker file and tarball name.
  local app_lc=$(echo "${app,,}" | tr -d ' ')
  local version_file="$HOME/.${app_lc}"

  # Separate (shorter) timeouts for metadata calls vs. large asset downloads.
  local api_timeout="--connect-timeout 10 --max-time 60"
  local download_timeout="--connect-timeout 15 --max-time 900"

  local current_version=""
  [[ -f "$version_file" ]] && current_version=$(<"$version_file")

  ensure_dependencies jq

  # GitLab API addresses projects by URL-encoded path ("group%2Fproject").
  local repo_encoded
  repo_encoded=$(printf '%s' "$repo" | sed 's|/|%2F|g')

  local api_base="https://gitlab.com/api/v4/projects/$repo_encoded/releases"
  local api_url
  if [[ "$version" != "latest" ]]; then
    api_url="$api_base/$version"
  else
    # Single most recently released entry; response is a JSON array.
    api_url="$api_base?per_page=1&order_by=released_at&sort=desc"
  fi

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  local max_retries=3 retry_delay=2 attempt=1 success=false http_code

  # Retry loop: exponential backoff on 429, flat delay on other failures.
  # NOTE(review): /tmp/gl_rel.json is a fixed path — concurrent invocations
  # would clobber each other's metadata; confirm single-run usage is guaranteed.
  while ((attempt <= max_retries)); do
    http_code=$(curl $api_timeout -sSL -w "%{http_code}" -o /tmp/gl_rel.json "${header[@]}" "$api_url" 2>/dev/null) || true
    if [[ "$http_code" == "200" ]]; then
      success=true
      break
    elif [[ "$http_code" == "429" ]]; then
      if ((attempt < max_retries)); then
        msg_warn "GitLab API rate limit hit, retrying in ${retry_delay}s... (attempt $attempt/$max_retries)"
        sleep "$retry_delay"
        retry_delay=$((retry_delay * 2))
      fi
    else
      sleep "$retry_delay"
    fi
    ((attempt++))
  done

  # Map the final HTTP status to an actionable error message.
  if ! $success; then
    if [[ "$http_code" == "401" ]]; then
      msg_error "GitLab API authentication failed (HTTP 401)."
      if [[ -n "${GITLAB_TOKEN:-}" ]]; then
        msg_error "Your GITLAB_TOKEN appears to be invalid or expired."
      else
        msg_error "The repository may require authentication. Try: export GITLAB_TOKEN=\"glpat-your_token\""
      fi
    elif [[ "$http_code" == "404" ]]; then
      msg_error "GitLab project or release not found (HTTP 404)."
      msg_error "Ensure '$repo' is correct and the project is accessible."
    elif [[ "$http_code" == "429" ]]; then
      msg_error "GitLab API rate limit exceeded (HTTP 429)."
      msg_error "To increase the limit, export a GitLab token before running the script:"
      msg_error " export GITLAB_TOKEN=\"glpat-your_token_here\""
    elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
      msg_error "GitLab API connection failed (no response)."
      msg_error "Check your network/DNS: curl -sSL https://gitlab.com/api/v4/version"
    else
      msg_error "Failed to fetch release metadata (HTTP $http_code)"
    fi
    return 1
  fi

  local json tag_name
  json=$(</tmp/gl_rel.json)

  # "latest" queries return an array — unwrap the first (newest) element.
  if [[ "$version" == "latest" ]]; then
    json=$(echo "$json" | jq '.[0] // empty')
    if [[ -z "$json" || "$json" == "null" ]]; then
      msg_error "No releases found for $repo on GitLab"
      return 1
    fi
  fi

  tag_name=$(echo "$json" | jq -r '.tag_name // empty')
  if [[ -z "$tag_name" ]]; then
    msg_error "Could not determine tag name from release metadata"
    return 1
  fi
  # Strip a leading "v" from tags like v1.2.3; "/" in tags would break paths.
  [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
  local version_safe="${version//\//-}"

  # Short-circuit when the recorded version already matches.
  # NOTE(review): $STD in front of msg_ok presumably routes/suppresses output
  # per the project's verbosity convention — confirm this is intentional here.
  if [[ "$current_version" == "$version" ]]; then
    $STD msg_ok "$app is already up-to-date (v$version)"
    return 0
  fi

  local tmpdir
  tmpdir=$(mktemp -d) || return 1
  local filename=""

  msg_info "Fetching GitLab release: $app ($version)"

  # Helper (redefined per call): print one release-asset URL per line,
  # preferring direct_asset_url over url.
  _gl_asset_urls() {
    local release_json="$1"
    echo "$release_json" | jq -r '
      (.assets.links // [])[] | .direct_asset_url // .url
    '
  }

  ### Tarball Mode ###
  if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
    # GitLab serves source archives directly at a predictable /-/archive/ URL.
    local direct_tarball_url="https://gitlab.com/$repo/-/archive/$tag_name/${app_lc}-${version_safe}.tar.gz"
    filename="${app_lc}-${version_safe}.tar.gz"

    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$direct_tarball_url" || {
      msg_error "Download failed: $direct_tarball_url"
      rm -rf "$tmpdir"
      return 1
    }

    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
      msg_error "Failed to extract tarball"
      rm -rf "$tmpdir"
      return 1
    }
    # Archives contain a single top-level directory; copy its contents flat.
    local unpack_dir
    unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

    shopt -s dotglob nullglob
    cp -r "$unpack_dir"/* "$target/"
    shopt -u dotglob nullglob

  ### Binary Mode ###
  elif [[ "$mode" == "binary" ]]; then
    # Normalize architecture to Debian naming for .deb asset matching.
    local arch
    arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
    [[ "$arch" == "x86_64" ]] && arch="amd64"
    [[ "$arch" == "aarch64" ]] && arch="arm64"

    local assets url_match=""
    assets=$(_gl_asset_urls "$json")

    # Selection order: explicit pattern → arch-specific .deb → any .deb.
    if [[ -n "$asset_pattern" ]]; then
      for u in $assets; do
        case "${u##*/}" in
        $asset_pattern)
          url_match="$u"
          break
          ;;
        esac
      done
    fi

    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
          url_match="$u"
          break
        fi
      done
    fi

    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
      done
    fi

    # Nothing in this release — scan older releases for a usable .deb
    # (same pattern → arch → any-.deb cascade against the fallback release).
    if [[ -z "$url_match" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "binary" "$asset_pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        assets=$(_gl_asset_urls "$json")
        if [[ -n "$asset_pattern" ]]; then
          for u in $assets; do
            case "${u##*/}" in $asset_pattern)
              url_match="$u"
              break
              ;;
            esac
          done
        fi
        if [[ -z "$url_match" ]]; then
          for u in $assets; do
            [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]] && url_match="$u" && break
          done
        fi
        if [[ -z "$url_match" ]]; then
          for u in $assets; do
            [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
          done
        fi
      fi
    fi

    if [[ -z "$url_match" ]]; then
      msg_error "No suitable .deb asset found for $app"
      rm -rf "$tmpdir"
      return 1
    fi

    filename="${url_match##*/}"
    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$url_match" || {
      msg_error "Download failed: $url_match"
      rm -rf "$tmpdir"
      return 1
    }

    # apt resolves dependencies; fall back to raw dpkg if apt refuses.
    chmod 644 "$tmpdir/$filename"
    local dpkg_opts=""
    [[ "${DPKG_FORCE_CONFOLD:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confold"
    [[ "${DPKG_FORCE_CONFNEW:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confnew"
    DEBIAN_FRONTEND=noninteractive SYSTEMD_OFFLINE=1 $STD apt install -y $dpkg_opts "$tmpdir/$filename" || {
      SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
        msg_error "Both apt and dpkg installation failed"
        rm -rf "$tmpdir"
        return 1
      }
    }

  ### Prebuild Mode ###
  elif [[ "$mode" == "prebuild" ]]; then
    # Strip accidental surrounding double quotes from the pattern argument.
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    # Glob-match the pattern against each asset's basename.
    local asset_url=""
    for u in $(_gl_asset_urls "$json"); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    # Fallback: look for the pattern in older releases.
    if [[ -z "$asset_url" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "prebuild" "$pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        for u in $(_gl_asset_urls "$json"); do
          filename_candidate="${u##*/}"
          case "$filename_candidate" in $pattern)
            asset_url="$u"
            break
            ;;
          esac
        done
      fi
    fi

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    local unpack_tmp
    unpack_tmp=$(mktemp -d)
    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    # Extract by extension: zip or any tar flavor.
    if [[ "$filename" == *.zip ]]; then
      ensure_dependencies unzip
      unzip -q "$tmpdir/$filename" -d "$unpack_tmp" || {
        msg_error "Failed to extract ZIP archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    elif [[ "$filename" == *.tar.* || "$filename" == *.tgz || "$filename" == *.txz ]]; then
      tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
        msg_error "Failed to extract TAR archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    else
      msg_error "Unsupported archive format: $filename"
      rm -rf "$tmpdir" "$unpack_tmp"
      return 1
    fi

    # Flatten a single wrapping directory, if the archive has one.
    # NOTE(review): the error-return paths inside these shopt sections exit
    # before 'shopt -u', leaking dotglob/nullglob into the caller's shell —
    # confirm whether callers tolerate that.
    local top_entries inner_dir
    top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1)
    if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then
      inner_dir="$top_entries"
      shopt -s dotglob nullglob
      if compgen -G "$inner_dir/*" >/dev/null; then
        cp -r "$inner_dir"/* "$target/" || {
          msg_error "Failed to copy contents from $inner_dir to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Inner directory is empty: $inner_dir"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    else
      shopt -s dotglob nullglob
      if compgen -G "$unpack_tmp/*" >/dev/null; then
        cp -r "$unpack_tmp"/* "$target/" || {
          msg_error "Failed to copy contents to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Unpacked archive is empty"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    fi

  ### Singlefile Mode ###
  elif [[ "$mode" == "singlefile" ]]; then
    # Strip accidental surrounding double quotes from the pattern argument.
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    local asset_url=""
    for u in $(_gl_asset_urls "$json"); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    # Fallback: look for the pattern in older releases.
    if [[ -z "$asset_url" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "singlefile" "$pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        for u in $(_gl_asset_urls "$json"); do
          filename_candidate="${u##*/}"
          case "$filename_candidate" in $pattern)
            asset_url="$u"
            break
            ;;
          esac
        done
      fi
    fi

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    mkdir -p "$target"

    # Download straight to $target; name it after the app unless the caller
    # opted into keeping the original asset filename.
    local use_filename="${USE_ORIGINAL_FILENAME:-false}"
    local target_file="$app"
    [[ "$use_filename" == "true" ]] && target_file="$filename"

    curl $download_timeout -fsSL "${header[@]}" -o "$target/$target_file" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    # Binaries get the executable bit; .jar files are run via java instead.
    if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
      chmod +x "$target/$target_file"
    fi

  else
    msg_error "Unknown mode: $mode"
    rm -rf "$tmpdir"
    return 1
  fi

  # Record the deployed version for the next up-to-date check.
  echo "$version" >"$version_file"
  msg_ok "Deployed: $app ($version)"
  rm -rf "$tmpdir"
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Download NLTK data packages directly from GitHub, bypassing Python.
|
||||
# Avoids CPU-instruction failures (SIGILL) on older hardware lacking AVX.
|
||||
#
|
||||
# Usage:
|
||||
# setup_nltk "averaged_perceptron_tagger_eng" "/nltk_data"
|
||||
# setup_nltk "snowball_data stopwords punkt_tab" "/usr/share/nltk_data"
|
||||
#
|
||||
# Parameters:
|
||||
# $1 - Space-separated list of NLTK package IDs
|
||||
# $2 - Target directory (default: /usr/share/nltk_data)
|
||||
#
|
||||
# Returns: 0 on success, non-zero if any package failed
|
||||
# ------------------------------------------------------------------------------
|
||||
function setup_nltk() {
  # Download NLTK data packages straight from the nltk_data GitHub index,
  # without invoking Python. Each package lands in the subdir the index
  # declares for it, unzipped when the index says so.
  #   $1 - space-separated NLTK package IDs (required)
  #   $2 - destination root (default: /usr/share/nltk_data)
  # Returns 0 only when every requested package succeeded.
  local packages="${1:?setup_nltk requires at least one package name}"
  local target_dir="${2:-/usr/share/nltk_data}"
  local NLTK_INDEX_URL="https://raw.githubusercontent.com/nltk/nltk_data/gh-pages/index.xml"
  local index_xml rc=0

  ensure_dependencies unzip

  # Fetch the package index once up front; all lookups happen against it.
  index_xml=$(curl_with_retry "$NLTK_INDEX_URL" "-") || {
    msg_error "Failed to fetch NLTK package index"
    return 1
  }

  local pkg
  for pkg in $packages; do
    msg_info "Downloading NLTK: $pkg"
    local entry data_subdir data_url unzip_flag archive

    # Locate this package's <package .../> line in the index.
    entry=$(echo "$index_xml" | grep "id=\"${pkg}\"" | head -1)
    if [[ -z "$entry" ]]; then
      msg_error "NLTK package not found in index: $pkg"
      rc=1
      continue
    fi

    # Pull the relevant attributes out of the matched line.
    data_subdir=$(echo "$entry" | grep -oP 'subdir="\K[^"]+')
    data_url=$(echo "$entry" | grep -oP 'url="\K[^"]+')
    unzip_flag=$(echo "$entry" | grep -oP 'unzip="\K[^"]+')

    if [[ -z "$data_subdir" || -z "$data_url" ]]; then
      msg_error "Could not parse NLTK index entry for: $pkg"
      rc=1
      continue
    fi

    mkdir -p "${target_dir}/${data_subdir}"
    archive=$(mktemp --suffix=.zip)

    # Download with a generous timeout; on failure keep going so the caller
    # learns about every broken package, not just the first.
    if ! CURL_TIMEOUT=120 curl_with_retry "$data_url" "$archive"; then
      msg_error "Failed to download NLTK package: $pkg"
      rm -f "$archive"
      rc=1
      continue
    fi

    if [[ "$unzip_flag" == "1" ]]; then
      # Index says the archive must be unpacked in place.
      $STD unzip -q -o "$archive" -d "${target_dir}/${data_subdir}/"
      rm -f "$archive"
    else
      # Otherwise NLTK expects the raw zip under its subdir.
      mv "$archive" "${target_dir}/${data_subdir}/${pkg}.zip"
    fi
    msg_ok "Downloaded NLTK: $pkg"
  done

  return $rc
}
|
||||
|
||||
@@ -55,7 +55,7 @@ EOF
|
||||
# HELPER FUNCTIONS
|
||||
# ==============================================================================
|
||||
# Print the host's primary IPv4 address.
# Tries `hostname -I` first, then `ip -4 addr`, and finally falls back to
# 127.0.0.1 so callers always receive a printable address.
# Fix 1: drop the superseded `ifconfig` line (deprecated tool; it also made the
# function print two addresses). Fix 2: the old chain keyed the `||` fallbacks
# on the exit status of `hostname -I | awk`, which is awk's status (0) even
# when `hostname -I` fails — so the fallbacks never ran without pipefail.
# Selecting on empty OUTPUT instead works with or without pipefail.
get_ip() {
  local ip_addr
  ip_addr=$(hostname -I 2>/dev/null | awk '{print $1}')
  if [[ -z "$ip_addr" ]]; then
    ip_addr=$(ip -4 addr show scope global 2>/dev/null | awk '/inet / {print $2}' | cut -d/ -f1 | head -n1)
  fi
  printf '%s\n' "${ip_addr:-127.0.0.1}"
}
|
||||
|
||||
# ==============================================================================
|
||||
|
||||
@@ -147,7 +147,7 @@ EOF
|
||||
# Create update script
|
||||
msg_info "Creating update script"
|
||||
ensure_usr_local_bin_persist
|
||||
cat <<EOF >/usr/local/bin/update_cronmaster
|
||||
cat <<'EOF' >/usr/local/bin/update_cronmaster
|
||||
#!/usr/bin/env bash
|
||||
# CronMaster Update Script
|
||||
type=update bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/tools/addon/cronmaster.sh)"
|
||||
|
||||
@@ -68,6 +68,24 @@ function uninstall() {
|
||||
# ==============================================================================
|
||||
function update() {
|
||||
if check_for_gh_release "qbittorrent-exporter" "martabal/qbittorrent-exporter"; then
|
||||
if [[ "$(printf '%s\n' "2.0.0" "$CHECK_UPDATE_RELEASE" | sort -V | tail -n1)" == "$CHECK_UPDATE_RELEASE" ]] && \
|
||||
! grep -q "QBITTORRENT_API_KEY" "$CONFIG_PATH" 2>/dev/null; then
|
||||
echo ""
|
||||
msg_warn "Version 2.0.0 introduces a breaking change: username/password login has been replaced by an API key."
|
||||
echo -e "${TAB3}${INFO} You must create an API key in qBittorrent under Tools > Options > Web UI > API key"
|
||||
echo ""
|
||||
echo -n "${TAB3}Enter your qBittorrent API key (or press Enter to abort): "
|
||||
read -r QBITTORRENT_API_KEY
|
||||
if [[ -z "$QBITTORRENT_API_KEY" ]]; then
|
||||
msg_warn "No API key provided. Update aborted."
|
||||
exit 0
|
||||
fi
|
||||
sed -i '/^QBITTORRENT_USERNAME=/d' "$CONFIG_PATH"
|
||||
sed -i '/^QBITTORRENT_PASSWORD=/d' "$CONFIG_PATH"
|
||||
echo "QBITTORRENT_API_KEY=\"${QBITTORRENT_API_KEY}\"" >>"$CONFIG_PATH"
|
||||
msg_ok "API key saved to configuration"
|
||||
fi
|
||||
|
||||
msg_info "Stopping service"
|
||||
if [[ "$OS" == "Alpine" ]]; then
|
||||
rc-service qbittorrent-exporter stop &>/dev/null
|
||||
@@ -100,10 +118,9 @@ function update() {
|
||||
# INSTALL
|
||||
# ==============================================================================
|
||||
function install() {
|
||||
read -erp "Enter URL of qBittorrent, example: (http://127.0.0.1:8080): " QBITTORRENT_BASE_URL
|
||||
read -erp "Enter qBittorrent username: " QBITTORRENT_USERNAME
|
||||
read -rsp "Enter qBittorrent password: " QBITTORRENT_PASSWORD
|
||||
printf "\n"
|
||||
read -erp "${TAB3}Enter URL of qBittorrent, example: (http://127.0.0.1:8080): " QBITTORRENT_BASE_URL
|
||||
echo -e "${TAB3}${INFO} Create an API key in qBittorrent under Tools > Options > Web UI > API key"
|
||||
read -erp "${TAB3}Enter qBittorrent API key: " QBITTORRENT_API_KEY
|
||||
|
||||
fetch_and_deploy_gh_release "qbittorrent-exporter" "martabal/qbittorrent-exporter" "tarball" "latest"
|
||||
setup_go
|
||||
@@ -116,8 +133,7 @@ function install() {
|
||||
cat <<EOF >"$CONFIG_PATH"
|
||||
# https://github.com/martabal/qbittorrent-exporter?tab=readme-ov-file#parameters
|
||||
QBITTORRENT_BASE_URL="${QBITTORRENT_BASE_URL}"
|
||||
QBITTORRENT_USERNAME="${QBITTORRENT_USERNAME}"
|
||||
QBITTORRENT_PASSWORD="${QBITTORRENT_PASSWORD}"
|
||||
QBITTORRENT_API_KEY="${QBITTORRENT_API_KEY}"
|
||||
EOF
|
||||
msg_ok "Created configuration"
|
||||
|
||||
|
||||
436
tools/pve/post-install-hook-examples.sh
Normal file
436
tools/pve/post-install-hook-examples.sh
Normal file
@@ -0,0 +1,436 @@
|
||||
#!/usr/bin/env bash
|
||||
# ============================================================================
|
||||
# Community-Scripts ProxmoxVE — Post-Install Hook: Example Library
|
||||
# ----------------------------------------------------------------------------
|
||||
# This file is NOT meant to be executed as-is.
|
||||
# It is a collection of complete, copy-pasteable example hooks for the
|
||||
# optional `var_post_install` feature in build.func.
|
||||
#
|
||||
# HOW IT WORKS
|
||||
# ------------
|
||||
# In the ct/*.sh CT scripts (or via Advanced Settings → Step 28) you can
|
||||
# point `var_post_install` to an absolute path on the Proxmox HOST, e.g.:
|
||||
#
|
||||
# # in /root/.community-scripts/default.vars
|
||||
# var_post_install=/opt/community-scripts/hooks/notify.sh
|
||||
#
|
||||
# # OR per-app, in app.vars
|
||||
# var_post_install=/opt/community-scripts/hooks/vaultwarden-postprovision.sh
|
||||
#
|
||||
# # OR interactively in the Advanced Settings whiptail (Step 28).
|
||||
#
|
||||
# The hook runs ON THE PROXMOX HOST (NOT inside the LXC) as root,
|
||||
# AFTER the container is fully provisioned, started and the description
|
||||
# is set. stdout/stderr is captured to:
|
||||
#
|
||||
# /var/log/community-scripts/post-install-<CTID>.log
|
||||
#
|
||||
# AVAILABLE ENV VARIABLES
|
||||
# -----------------------
|
||||
# APP - Pretty name (e.g. "Vaultwarden")
|
||||
# NSAPP - Slug / lowercase (e.g. "vaultwarden")
|
||||
# CTID - Numeric container ID (e.g. "103")
|
||||
# IP - IPv4 address of the LXC (e.g. "192.168.1.50")
|
||||
# HN - Hostname (e.g. "vaultwarden")
|
||||
# STORAGE - Storage where the rootfs lives (e.g. "local-lvm")
|
||||
# BRG - Bridge (e.g. "vmbr0")
|
||||
#
|
||||
# GENERAL TIPS
|
||||
# ------------
|
||||
# - Use `set -euo pipefail` so failures actually surface.
|
||||
# - Use `|| true` on best-effort steps you do not want to abort the hook.
|
||||
# - The file just needs to be a valid script. `+x` is optional — it is
|
||||
# invoked via `bash <path>`. Shebang is honored only if you call it
|
||||
# yourself; otherwise the shebang line is purely cosmetic.
|
||||
# - If the hook exits non-zero, the user gets a whiptail popup with the
|
||||
# last 15 log lines. The LXC creation itself is NOT rolled back.
|
||||
# - Keep hooks idempotent — they may be re-run if you recreate a CT.
|
||||
#
|
||||
# HOW TO USE THIS FILE
|
||||
# --------------------
|
||||
# 1. Copy ONE example block (between the BEGIN/END markers) into a new
|
||||
# file on the Proxmox host, e.g. /opt/community-scripts/hooks/notify.sh
|
||||
# 2. chmod +x /opt/community-scripts/hooks/notify.sh (optional)
|
||||
# 3. Set var_post_install in default.vars / app.vars or pick the path
|
||||
# in Advanced Settings.
|
||||
# ============================================================================
|
||||
|
||||
# ============================================================================
|
||||
# ▼▼▼ EXAMPLE 1 — BEGIN ▼▼▼
|
||||
# ----------------------------------------------------------------------------
|
||||
# Name : minimal-logger.sh
|
||||
# Purpose : Append every newly created LXC to a single CSV-ish log.
|
||||
# Difficulty : ⭐ Beginner
|
||||
# Side effects: Writes to /var/log/community-scripts/created-lxcs.log
|
||||
# Use case : You just want a paper trail of "what got created when".
|
||||
# ============================================================================
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Append-only ledger of every LXC this hook has seen.
LOG_DIR="/var/log/community-scripts"
LOG_FILE="${LOG_DIR}/created-lxcs.log"

mkdir -p "$LOG_DIR"

# Seed the file with a header line the first time it is used.
[[ -s "$LOG_FILE" ]] || echo "timestamp;ctid;app;hostname;ip;bridge;storage" >"$LOG_FILE"

# One semicolon-separated record per provisioned container.
record="$(date -Iseconds);${CTID};${APP};${HN};${IP};${BRG};${STORAGE}"
printf '%s\n' "$record" >>"$LOG_FILE"

echo "Logged ${APP} (CTID=${CTID}) to ${LOG_FILE}"
|
||||
# ▲▲▲ EXAMPLE 1 — END ▲▲▲
|
||||
|
||||
# ============================================================================
|
||||
# ▼▼▼ EXAMPLE 2 — BEGIN ▼▼▼
|
||||
# ----------------------------------------------------------------------------
|
||||
# Name : discord-gotify-notify.sh
|
||||
# Purpose : Send a rich Discord embed AND a Gotify push notification
|
||||
# whenever a new LXC is provisioned.
|
||||
# Difficulty : ⭐⭐ Intermediate
|
||||
# Requires : curl on the host (default), reachable webhook URLs.
|
||||
# Side effects: Outbound HTTPS to Discord + your Gotify server.
|
||||
# ============================================================================
|
||||
#!/usr/bin/env bash
set -euo pipefail

# --- CONFIG (edit me) -------------------------------------------------------
DISCORD_WEBHOOK="https://discord.com/api/webhooks/XXXXXXXX/YYYYYYYY"
GOTIFY_URL="https://gotify.example.com"
GOTIFY_TOKEN="AbCdEfGhIjKlMnO"
GOTIFY_PRIORITY=5
# ----------------------------------------------------------------------------

# Resolve the Proxmox node's hostname for context
NODE="$(hostname -s)"
TS="$(date -Iseconds)"

# --- Discord embed ----------------------------------------------------------
# `read -d ''` slurps the entire heredoc into one variable; it returns
# non-zero at EOF, so `|| true` keeps `set -e` from aborting the script.
# NOTE(review): the env values (APP, HN, ...) are interpolated directly into
# JSON — a value containing a double quote or backslash would produce invalid
# JSON. Presumably these hook variables are always plain identifiers; verify.
read -r -d '' DISCORD_PAYLOAD <<JSON || true
{
  "username": "Proxmox - ${NODE}",
  "avatar_url": "https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/images/logo-81x112.png",
  "embeds": [{
    "title": "✅ ${APP} LXC created",
    "description": "A new community-script LXC has been provisioned on **${NODE}**.",
    "color": 3066993,
    "timestamp": "${TS}",
    "fields": [
      {"name": "CTID", "value": "${CTID}", "inline": true},
      {"name": "Hostname", "value": "${HN}", "inline": true},
      {"name": "App", "value": "${APP}", "inline": true},
      {"name": "IP", "value": "${IP}", "inline": true},
      {"name": "Bridge", "value": "${BRG}", "inline": true},
      {"name": "Storage", "value": "${STORAGE}", "inline": true}
    ],
    "footer": {"text": "community-scripts.org"}
  }]
}
JSON

# Best-effort delivery: a webhook failure only warns, it never fails the hook.
curl -fsS --max-time 10 \
  -H "Content-Type: application/json" \
  -X POST "$DISCORD_WEBHOOK" \
  --data "$DISCORD_PAYLOAD" \
  >/dev/null ||
  echo "WARN: Discord webhook failed (non-fatal)"

# --- Gotify push ------------------------------------------------------------
# Multipart form POST to Gotify's /message endpoint; also best-effort.
curl -fsS --max-time 10 \
  -H "X-Gotify-Key: ${GOTIFY_TOKEN}" \
  -F "title=Proxmox: ${APP} LXC created" \
  -F "message=CTID=${CTID} IP=${IP} HN=${HN} on ${NODE}" \
  -F "priority=${GOTIFY_PRIORITY}" \
  "${GOTIFY_URL}/message" \
  >/dev/null ||
  echo "WARN: Gotify push failed (non-fatal)"

echo "Notifications dispatched for CTID=${CTID}"
|
||||
# ▲▲▲ EXAMPLE 2 — END ▲▲▲
|
||||
|
||||
# ============================================================================
|
||||
# ▼▼▼ EXAMPLE 3 — BEGIN ▼▼▼
|
||||
# ----------------------------------------------------------------------------
|
||||
# Name : auto-pool-tags-backup.sh
|
||||
# Purpose : Add the new LXC to a Proxmox pool, append cluster-wide tags,
|
||||
# register a DNS record in pi-hole, and trigger an immediate
|
||||
# snapshot backup to a configured storage.
|
||||
# Difficulty : ⭐⭐⭐ Advanced
|
||||
# Requires : pvesh, pct, vzdump (host-side; available by default on PVE),
|
||||
# a reachable pi-hole admin API.
|
||||
# ============================================================================
|
||||
#!/usr/bin/env bash
set -euo pipefail

# --- CONFIG (edit me) -------------------------------------------------------
TARGET_POOL="auto-lxc"
EXTRA_TAGS=("auto-provisioned" "${NSAPP}") # community-script tag is set by build.func
BACKUP_STORAGE="pbs-main" # set to "" to skip initial backup
PIHOLE_HOST="192.168.1.5"
PIHOLE_PASSWORD="changeme" # web-UI password
DNS_DOMAIN="lan" # FQDN will be ${HN}.${DNS_DOMAIN}
# ----------------------------------------------------------------------------

# 1) Ensure the pool exists, then attach the CT
if ! pvesh get "/pools/${TARGET_POOL}" >/dev/null 2>&1; then
  echo "Creating pool: ${TARGET_POOL}"
  pvesh create /pools --poolid "${TARGET_POOL}" --comment "Auto-created by post-install hook" || true
fi
echo "Adding CTID=${CTID} to pool=${TARGET_POOL}"
pvesh set "/pools/${TARGET_POOL}" --vms "${CTID}" || echo "WARN: pool attach failed (non-fatal)"

# 2) Merge new tags with existing ones (preserve community-script etc.)
# Existing tags are semicolon-separated in `pct config` output; an associative
# array is used purely for de-duplication.
CURRENT_TAGS="$(pct config "${CTID}" | awk -F': ' '/^tags:/{print $2}')"
declare -A TAG_SET
IFS=';' read -r -a CUR_ARR <<<"${CURRENT_TAGS:-}"
for t in "${CUR_ARR[@]}"; do [[ -n "$t" ]] && TAG_SET["$t"]=1; done
for t in "${EXTRA_TAGS[@]}"; do [[ -n "$t" ]] && TAG_SET["$t"]=1; done
# Join the de-duplicated keys with ';' (the subshell keeps IFS local).
# NOTE(review): associative-array key order is unspecified in bash, so the
# resulting tag order may differ between runs — harmless for pct, but the
# tag string is not byte-stable.
NEW_TAGS="$(
  IFS=';'
  echo "${!TAG_SET[*]}"
)"
echo "Setting tags: ${NEW_TAGS}"
pct set "${CTID}" --tags "${NEW_TAGS}" || echo "WARN: tag update failed (non-fatal)"

# 3) Register DNS in pi-hole (custom DNS record)
# Authenticates against the pi-hole API, extracts the session id ("sid") from
# the JSON response with sed, and uses it for the subsequent DNS call.
FQDN="${HN}.${DNS_DOMAIN}"
echo "Registering DNS: ${FQDN} → ${IP} on pi-hole ${PIHOLE_HOST}"
SID="$(curl -fsS --max-time 5 \
  -d "pw=${PIHOLE_PASSWORD}" \
  "http://${PIHOLE_HOST}/api/auth" 2>/dev/null |
  sed -nE 's/.*"sid":"([^"]+)".*/\1/p' || true)"

if [[ -n "${SID}" ]]; then
  curl -fsS --max-time 5 -X PUT \
    -H "Content-Type: application/json" \
    -H "sid: ${SID}" \
    -d "{\"hosts\":[\"${IP} ${FQDN}\"]}" \
    "http://${PIHOLE_HOST}/api/config/dns/hosts" >/dev/null ||
    echo "WARN: pi-hole DNS update failed (non-fatal)"
  # Log out to release the pi-hole session slot; failures are ignored.
  curl -fsS --max-time 5 -X DELETE -H "sid: ${SID}" "http://${PIHOLE_HOST}/api/auth" >/dev/null || true
else
  echo "WARN: could not obtain pi-hole session (skipping DNS)"
fi

# 4) Initial backup (best-effort, can take a few minutes)
if [[ -n "${BACKUP_STORAGE}" ]]; then
  if pvesh get "/storage/${BACKUP_STORAGE}" >/dev/null 2>&1; then
    echo "Triggering initial backup of CTID=${CTID} to ${BACKUP_STORAGE}"
    vzdump "${CTID}" \
      --storage "${BACKUP_STORAGE}" \
      --mode snapshot \
      --compress zstd \
      --notes-template "Initial backup of ${APP} (CTID=${CTID})" \
      --notification-mode auto ||
      echo "WARN: initial backup failed (non-fatal)"
  else
    echo "Backup storage '${BACKUP_STORAGE}' not found — skipping."
  fi
fi

echo "Post-provision routine complete for ${APP} (CTID=${CTID})"
|
||||
# ▲▲▲ EXAMPLE 3 — END ▲▲▲
|
||||
|
||||
# ============================================================================
|
||||
# ▼▼▼ EXAMPLE 4 — BEGIN ▼▼▼
|
||||
# ----------------------------------------------------------------------------
|
||||
# Name : inject-ssh-and-monitoring.sh
|
||||
# Purpose : Push the host's admin SSH key into the new LXC, install the
|
||||
# Beszel monitoring agent inside the container, and register
|
||||
# an Uptime-Kuma HTTP push monitor for the LXC's IP.
|
||||
# Difficulty : ⭐⭐⭐ Advanced
|
||||
# Requires : pct (host), curl (inside LXC), reachable Beszel hub +
|
||||
# Uptime-Kuma push URL.
|
||||
# ============================================================================
|
||||
#!/usr/bin/env bash
set -euo pipefail

# --- CONFIG (edit me) -------------------------------------------------------
ADMIN_KEY="/root/.ssh/admin_ed25519.pub"
BESZEL_HUB_URL="http://192.168.1.10:8090"
BESZEL_AGENT_KEY="ssh-ed25519 AAAA... beszel@hub" # public key of the hub
UPTIME_KUMA_PUSH_BASE="http://uptime.lan/api/push/abc123"
# ----------------------------------------------------------------------------

# 1) Inject the admin SSH key into the container's root account.
# NOTE: pct push replaces any existing /root/.ssh/authorized_keys.
if [[ -f "${ADMIN_KEY}" ]]; then
  echo "Pushing admin SSH key into CTID=${CTID}"
  pct exec "${CTID}" -- mkdir -p /root/.ssh
  pct exec "${CTID}" -- chmod 700 /root/.ssh
  pct push "${CTID}" "${ADMIN_KEY}" /root/.ssh/authorized_keys
  pct exec "${CTID}" -- chmod 600 /root/.ssh/authorized_keys
else
  echo "WARN: ${ADMIN_KEY} not found on host — skipping SSH key injection"
fi
|
||||
|
||||
# 2) Wait for outbound networking inside the CT (max 30 s).
# Uses DNS resolution of deb.debian.org as the readiness probe; originally the
# loop fell through silently on timeout, which made later install failures
# hard to diagnose — now it emits a warning before continuing best-effort.
echo "Waiting for network inside CTID=${CTID}…"
net_ready=0
for _ in {1..30}; do
  if pct exec "${CTID}" -- bash -c 'getent hosts deb.debian.org >/dev/null 2>&1'; then
    net_ready=1
    break
  fi
  sleep 1
done
if [[ "${net_ready}" -ne 1 ]]; then
  echo "WARN: no outbound DNS inside CTID=${CTID} after 30 s — continuing anyway"
fi
|
||||
|
||||
# 3) Install Beszel agent inside the LXC.
# The whole installer runs inside the container via a quoted heredoc (no host
# expansion); any failure is reduced to a warning so the hook never aborts.
echo "Installing Beszel agent inside CTID=${CTID}"
pct exec "${CTID}" -- bash -s <<'AGENT_INSTALL' || echo "WARN: Beszel install failed"
set -euo pipefail

# Map kernel arch to the release-asset suffix.
ARCH="$(uname -m)"
case "$ARCH" in
  x86_64) ARCH_TAG=amd64 ;;
  aarch64) ARCH_TAG=arm64 ;;
  *) echo "Unsupported arch: $ARCH"; exit 1 ;;
esac

# Download & unpack in a temp dir that is removed on any exit path
# (the original leaked the mktemp directory).
TMP=$(mktemp -d)
trap 'rm -rf -- "$TMP"' EXIT
cd "$TMP"
curl -fsSL "https://github.com/henrygd/beszel/releases/latest/download/beszel-agent_linux_${ARCH_TAG}.tar.gz" \
  | tar -xz
install -m 0755 beszel-agent /usr/local/bin/beszel-agent

# Systemd unit; __KEY_PLACEHOLDER__ is substituted afterwards from the host.
cat >/etc/systemd/system/beszel-agent.service <<UNIT
[Unit]
Description=Beszel Agent
After=network-online.target
Wants=network-online.target
[Service]
Environment="PORT=45876"
Environment="KEY=__KEY_PLACEHOLDER__"
ExecStart=/usr/local/bin/beszel-agent
Restart=always
[Install]
WantedBy=multi-user.target
UNIT
AGENT_INSTALL
|
||||
|
||||
# Inject the configured public key into the unit file (avoids quoting hell).
# The key is escaped first: '\', '&' and the '|' delimiter are special in a
# sed replacement and would otherwise corrupt the unit file.
SAFE_KEY=$(printf '%s' "${BESZEL_AGENT_KEY}" | sed -e 's/[\\&|]/\\&/g')
pct exec "${CTID}" -- sed -i "s|__KEY_PLACEHOLDER__|${SAFE_KEY}|" \
  /etc/systemd/system/beszel-agent.service

pct exec "${CTID}" -- systemctl daemon-reload
pct exec "${CTID}" -- systemctl enable --now beszel-agent.service ||
  echo "WARN: could not start beszel-agent"
|
||||
|
||||
# 4) Register an Uptime-Kuma push monitor (host-side, just sends one ping).
# --get + --data-urlencode keeps the hostname label URL-safe.
echo "Pinging Uptime-Kuma push monitor for ${HN}"
curl -fsS --max-time 5 \
  --get \
  --data-urlencode "status=up" \
  --data-urlencode "msg=created by community-scripts" \
  --data-urlencode "ping=1" \
  --data-urlencode "label=${HN}" \
  "${UPTIME_KUMA_PUSH_BASE}" >/dev/null ||
  echo "WARN: Uptime-Kuma push failed (non-fatal)"

echo "Provisioned monitoring for ${APP} (CTID=${CTID}, IP=${IP})"
|
||||
# ▲▲▲ EXAMPLE 4 — END ▲▲▲
|
||||
|
||||
# ============================================================================
|
||||
# ▼▼▼ EXAMPLE 5 — BEGIN ▼▼▼
|
||||
# ----------------------------------------------------------------------------
|
||||
# Name : per-app-router.sh
|
||||
# Purpose : Single dispatcher hook that runs different actions
|
||||
# depending on the app being installed (NSAPP). Useful when
|
||||
# you want ONE hook for the whole cluster but distinct
|
||||
# behavior for, e.g., databases vs media services.
|
||||
# Difficulty : ⭐⭐⭐ Advanced
|
||||
# ============================================================================
|
||||
#!/usr/bin/env bash
set -euo pipefail

# --- CONFIG (edit me) -------------------------------------------------------
DEFAULT_DNS_SUFFIX="lan"
PROM_FILE_SD_DIR="/etc/prometheus/file_sd" # on the host that runs Prometheus
# ----------------------------------------------------------------------------

# Timestamped logger: log <message...>
log() { printf '[%s] %s\n' "$(date +%H:%M:%S)" "$*"; }
|
||||
|
||||
# ---------- shared helpers --------------------------------------------------
|
||||
#######################################
# Register ${IP}:<port> as a Prometheus file_sd target for scrape job <job>.
# Globals:   PROM_FILE_SD_DIR, IP, HN, NSAPP (read)
# Arguments: $1 - scrape job name, $2 - target port
# Outputs:   rewrites ${PROM_FILE_SD_DIR}/<job>.json
# Notes:     re-registering the same target replaces the old entry instead of
#            duplicating it; a corrupted/non-list JSON file is rebuilt instead
#            of crashing the whole hook under `set -e` (original behavior).
#######################################
register_prometheus_target() {
  local job="$1" port="$2"
  local file="${PROM_FILE_SD_DIR}/${job}.json"
  mkdir -p "${PROM_FILE_SD_DIR}"
  if [[ ! -f "$file" ]]; then echo "[]" >"$file"; fi
  python3 - "$file" "${IP}:${port}" "${HN}" "${NSAPP}" <<'PY'
import json, sys

path, target, hn, app = sys.argv[1:5]
try:
    with open(path) as fh:
        data = json.load(fh)
    if not isinstance(data, list):
        data = []
except (OSError, ValueError):
    # Tolerate a corrupted or truncated target file: start fresh.
    data = []
# Avoid duplicates: drop any block that already scrapes this target.
data = [b for b in data if target not in b.get("targets", [])]
data.append({"targets": [target], "labels": {"hostname": hn, "app": app}})
with open(path, "w") as fh:
    json.dump(data, fh, indent=2)
PY
  log "Registered Prometheus target ${IP}:${port} in ${file}"
}
|
||||
|
||||
#######################################
# Apply CPU/memory sizing and a description to the container.
# Globals:   CTID (read)
# Arguments: $1 - core count, $2 - memory in MiB, $3 - description text
# Notes:     best-effort — pct failures are tolerated (|| true) so a rejected
#            value never aborts the dispatcher.
#######################################
set_ct_options() {
  local cores="$1" mem="$2" desc="$3"
  pct set "${CTID}" --cores "${cores}" --memory "${mem}" || true
  pct set "${CTID}" --description "${desc}" || true
}
|
||||
|
||||
# ---------- per-app dispatch ------------------------------------------------
log "Dispatching post-install for NSAPP=${NSAPP} CTID=${CTID}"

case "${NSAPP}" in

  # ------ Databases ---------------------------------------------------------
  postgresql | mariadb | mongodb | redis | valkey)
    log "Database role: bumping resources & adding to backup-critical pool"
    set_ct_options 4 4096 "DB: ${APP}"
    # Pool may not exist on every node — ignore failures.
    pvesh set /pools/db-critical --vms "${CTID}" 2>/dev/null || true
    # Each engine's exporter listens on its own well-known default port;
    # the previous version used 9187 (postgres_exporter) for all of them.
    exporter_port=9187
    case "${NSAPP}" in
      mariadb) exporter_port=9104 ;;        # mysqld_exporter
      mongodb) exporter_port=9216 ;;        # mongodb_exporter
      redis | valkey) exporter_port=9121 ;; # redis_exporter
    esac
    register_prometheus_target "${NSAPP}-exporter" "${exporter_port}"
    ;;

  # ------ *arr media stack --------------------------------------------------
  sonarr | radarr | prowlarr | lidarr | readarr | bazarr)
    log "Media-arr role: tagging + Sonarr/Radarr API webhook"
    pct set "${CTID}" --tags "community-script;media;arr-stack" || true
    curl -fsS --max-time 5 -X POST \
      "http://media-hub.${DEFAULT_DNS_SUFFIX}/hooks/arr-added" \
      -H "Content-Type: application/json" \
      -d "{\"app\":\"${NSAPP}\",\"ctid\":${CTID},\"ip\":\"${IP}\"}" \
      >/dev/null || log "WARN: media-hub webhook failed"
    ;;

  # ------ Web apps that should sit behind NPM/Traefik ----------------------
  vaultwarden | paperless-ngx | nextcloud | immich | bookstack)
    log "Web app role: registering reverse-proxy entry"
    # Heredoc builds the JSON body; host/backend derive from HN and IP.
    curl -fsS --max-time 5 -X POST \
      "http://traefik.${DEFAULT_DNS_SUFFIX}/api/dynamic-add" \
      -H "Content-Type: application/json" \
      -d "$(
        cat <<JSON
{
  "name": "${HN}",
  "host": "${HN}.${DEFAULT_DNS_SUFFIX}",
  "backend": "http://${IP}",
  "app": "${NSAPP}"
}
JSON
      )" >/dev/null || log "WARN: traefik registration failed"
    register_prometheus_target "blackbox-http" 80
    ;;

  # ------ Default fallback --------------------------------------------------
  *)
    log "No special handling for ${NSAPP} — applying generic defaults"
    register_prometheus_target "node-exporter" 9100
    ;;
esac

log "Finished dispatcher for ${APP} (CTID=${CTID})"
|
||||
# ▲▲▲ EXAMPLE 5 — END ▲▲▲
|
||||
|
||||
# ============================================================================
|
||||
# END OF EXAMPLES
|
||||
# ============================================================================
|
||||
@@ -42,6 +42,17 @@ var_skip_confirm="${var_skip_confirm:-no}"
|
||||
# Options: "yes" | "no" | "" (empty = interactive prompt)
|
||||
var_auto_reboot="${var_auto_reboot:-}"
|
||||
|
||||
# var_continue_on_error: Continue updating remaining containers if one update fails
|
||||
# Options: "yes" | "no" (default: no = stop on first error)
|
||||
# Note: containers with backups always attempt restore on failure regardless of this setting
|
||||
var_continue_on_error="${var_continue_on_error:-no}"
|
||||
|
||||
# var_dry_run: Check for available updates without applying them
|
||||
# Options: "yes" | "no" (default: no)
|
||||
# Output: lists each container with current vs. latest version
|
||||
# Note: requires the container to be running; does not modify any container
|
||||
var_dry_run="${var_dry_run:-no}"
|
||||
|
||||
# var_tags: Optionally override the tags used for auto-detection
|
||||
# Options: "community-script|proxmox-helper-scripts" (default)
|
||||
var_tags="${var_tags:-community-script|proxmox-helper-scripts}"
|
||||
@@ -59,6 +70,8 @@ function export_config_json() {
|
||||
"var_unattended": "${var_unattended}",
|
||||
"var_skip_confirm": "${var_skip_confirm}",
|
||||
"var_auto_reboot": "${var_auto_reboot}",
|
||||
"var_continue_on_error": "${var_continue_on_error}",
|
||||
"var_dry_run": "${var_dry_run}",
|
||||
"var_tags": "${var_tags}"
|
||||
}
|
||||
EOF
|
||||
@@ -78,10 +91,12 @@ Environment Variables:
|
||||
var_backup Enable backup before update (yes/no)
|
||||
var_backup_storage Storage location for backups
|
||||
var_container Container selection (all/all_running/all_stopped/101,102,...)
|
||||
var_unattended Run updates unattended (yes/no)
|
||||
var_skip_confirm Skip initial confirmation (yes/no)
|
||||
var_auto_reboot Auto-reboot containers if required (yes/no)
|
||||
var_tags Optionally override auto-detection tags ("prod|smb|community-script")
|
||||
var_unattended Run updates unattended (yes/no)
|
||||
var_skip_confirm Skip initial confirmation (yes/no)
|
||||
var_auto_reboot Auto-reboot containers if required (yes/no)
|
||||
var_continue_on_error Continue to next container on update failure (yes/no)
|
||||
var_dry_run Check for updates without applying them (yes/no)
|
||||
var_tags Optionally override auto-detection tags ("prod|smb|community-script")
|
||||
|
||||
Examples:
|
||||
# Run interactively
|
||||
@@ -93,6 +108,12 @@ Examples:
|
||||
# Update specific containers without backup
|
||||
var_backup=no var_container=101,102,105 var_unattended=yes var_skip_confirm=yes $(basename "$0")
|
||||
|
||||
# Unattended cron-style: skip confirm, continue on error, no backup
|
||||
var_backup=no var_container=all_running var_unattended=yes var_skip_confirm=yes var_continue_on_error=yes $(basename "$0")
|
||||
|
||||
# Dry-run: show available updates for all running containers without applying
|
||||
var_container=all_running var_skip_confirm=yes var_dry_run=yes $(basename "$0")
|
||||
|
||||
# Export current configuration
|
||||
$(basename "$0") --export-config
|
||||
EOF
|
||||
@@ -131,6 +152,62 @@ function detect_service() {
|
||||
popd >/dev/null
|
||||
}
|
||||
|
||||
#######################################
# Report whether an update is available for a container without applying it.
# Globals:   script (ct script text, set by detect_service), colour vars
#            YW/BL/GN/CL; sets DRY_RUN_RESULT for the summary report.
# Arguments: $1 - container id, $2 - detected service name
# Notes:     parses the `check_for_gh_release "appname" "owner/repo"` call
#            out of the ct script; grep assignments are guarded with || true
#            so a non-matching script cannot abort the run under `set -e`.
#######################################
function dry_run_container() {
  local container="$1"
  local service="$2"

  # Extract app name and source repo directly from check_for_gh_release call in the ct script
  # Pattern: check_for_gh_release "appname" "owner/repo"
  local check_line app_name app_lc source_repo
  check_line=$(grep -m1 'check_for_gh_release' <<<"$script") || true

  if [[ -z "$check_line" ]]; then
    echo -e "${YW}[DRY-RUN]${CL} Container $container ($service): no check_for_gh_release found — skipping"
    DRY_RUN_RESULT="no check_for_gh_release found — skipping"
    return
  fi

  app_name=$(cut -d'"' -f2 <<<"$check_line")
  source_repo=$(cut -d'"' -f4 <<<"$check_line")
  app_lc=$(tr -d ' ' <<<"${app_name,,}")

  if [[ -z "$source_repo" || "$source_repo" != *"/"* ]]; then
    echo -e "${YW}[DRY-RUN]${CL} Container $container ($service): cannot parse source repo — skipping"
    DRY_RUN_RESULT="cannot parse source repo — skipping"
    return
  fi

  # Read installed version from container (stored by check_for_gh_release as ~/.<appname>)
  local current_version
  current_version=$(pct exec "$container" -- bash -c "cat \$HOME/.${app_lc} 2>/dev/null" 2>/dev/null || true)
  current_version="${current_version#v}"

  # Query latest release tag from the GitHub API (no jq dependency on PVE hosts).
  local latest_version
  latest_version=$(curl -sSL --max-time 10 \
    -H 'Accept: application/vnd.github+json' \
    -H 'X-GitHub-Api-Version: 2022-11-28' \
    "https://api.github.com/repos/${source_repo}/releases/latest" 2>/dev/null |
    grep '"tag_name"' | head -1 | cut -d'"' -f4 | sed 's/^v//') || true

  if [[ -z "$latest_version" ]]; then
    echo -e "${YW}[DRY-RUN]${CL} Container $container ($service): cannot fetch latest version from $source_repo"
    DRY_RUN_RESULT="cannot fetch latest version from $source_repo"
    return
  fi

  if [[ -z "$current_version" ]]; then
    echo -e "${BL}[DRY-RUN]${CL} Container $container ($service): installed version unknown, latest: ${latest_version} (${source_repo})"
    DRY_RUN_RESULT="version unknown — latest: ${latest_version}"
  elif [[ "$current_version" == "$latest_version" ]]; then
    echo -e "${GN}[DRY-RUN]${CL} Container $container ($service): up to date (${current_version})"
    DRY_RUN_RESULT="up to date (${current_version})"
  else
    echo -e "${YW}[DRY-RUN]${CL} Container $container ($service): update available ${current_version} → ${latest_version}"
    DRY_RUN_RESULT="update available ${current_version} → ${latest_version}"
  fi
}
|
||||
|
||||
function backup_container() {
|
||||
msg_info "Creating backup for container $1"
|
||||
vzdump "$1" --compress zstd --storage "$STORAGE_CHOICE" --notes-template "{{guestname}} - community-scripts backup updater" >/dev/null 2>&1
|
||||
@@ -169,8 +246,32 @@ END {
|
||||
' /etc/pve/storage.cfg)
|
||||
}
|
||||
|
||||
# Structured result tracking for the final summary report.
# Each entry: "CTID|service|STATUS|details"
declare -a UPDATE_RESULTS=()

#######################################
# Append one row to the summary table.
# Arguments: $1 - ctid, $2 - service, $3 - STATUS, $4 - details
#######################################
function log_result() {
  # log_result <ctid> <service> <STATUS> <details>
  UPDATE_RESULTS+=("${1}|${2}|${3}|${4}")
}
|
||||
|
||||
header_info
|
||||
|
||||
# =============================================================================
|
||||
# LOGGING SETUP
|
||||
# Key events are written directly to a timestamped log file under
|
||||
# /usr/local/community-scripts/update_apps/ — this avoids any stdout
|
||||
# redirection that would break interactive spinners or whiptail dialogs.
|
||||
# The full summary table is appended at the end of the run.
|
||||
# =============================================================================
|
||||
LOG_DIR="/usr/local/community-scripts/update_apps"
|
||||
mkdir -p "$LOG_DIR"
|
||||
LOG_FILE="${LOG_DIR}/$(date '+%Y%m%d_%H%M%S').log"
|
||||
echo "Update started: $(date '+%Y-%m-%d %H:%M:%S')" >"$LOG_FILE"
|
||||
|
||||
#######################################
# Append a timestamped line to the run's log file.
# Globals:   LOG_FILE (read)
# Arguments: $* - message text
# Notes:     printf instead of echo so messages starting with '-' or
#            containing backslashes are written verbatim.
#######################################
function log_write() {
  printf '[%s] %s\n' "$(date '+%H:%M:%S')" "$*" >>"$LOG_FILE"
}
||||
|
||||
# Skip confirmation if var_skip_confirm is set to yes
|
||||
if [[ "$var_skip_confirm" != "yes" ]]; then
|
||||
whiptail --backtitle "Proxmox VE Helper Scripts" --title "LXC App Update" --yesno "This will update apps in LXCs installed by Helper-Scripts. Proceed?" 10 58 || exit
|
||||
@@ -199,7 +300,7 @@ while read -r container; do
|
||||
menu_items+=("$container_id" "$formatted_line" "OFF")
|
||||
fi
|
||||
done <<<"$containers"
|
||||
msg_ok "Loaded ${#menu_items[@]} containers"
|
||||
msg_ok "Loaded $((${#menu_items[@]} / 3)) containers"
|
||||
|
||||
# Determine container selection based on var_container
|
||||
if [[ -n "$var_container" ]]; then
|
||||
@@ -260,7 +361,10 @@ fi
|
||||
header_info
|
||||
|
||||
# Determine backup choice based on var_backup
|
||||
if [[ -n "$var_backup" ]]; then
|
||||
# Dry-run never needs a backup — skip the prompt entirely
|
||||
if [[ "$var_dry_run" == "yes" ]]; then
|
||||
BACKUP_CHOICE="no"
|
||||
elif [[ -n "$var_backup" ]]; then
|
||||
BACKUP_CHOICE="$var_backup"
|
||||
else
|
||||
BACKUP_CHOICE="no"
|
||||
@@ -270,7 +374,10 @@ else
|
||||
fi
|
||||
|
||||
# Determine unattended update based on var_unattended
|
||||
if [[ -n "$var_unattended" ]]; then
|
||||
# Dry-run never executes updates — skip the prompt entirely
|
||||
if [[ "$var_dry_run" == "yes" ]]; then
|
||||
UNATTENDED_UPDATE="no"
|
||||
elif [[ -n "$var_unattended" ]]; then
|
||||
UNATTENDED_UPDATE="$var_unattended"
|
||||
else
|
||||
UNATTENDED_UPDATE="no"
|
||||
@@ -321,6 +428,7 @@ fi
|
||||
containers_needing_reboot=()
|
||||
for container in $CHOICE; do
|
||||
echo -e "${BL}[INFO]${CL} Updating container $container"
|
||||
log_write "Container $container: starting"
|
||||
|
||||
if [ "$BACKUP_CHOICE" == "yes" ]; then
|
||||
backup_container $container
|
||||
@@ -342,9 +450,12 @@ for container in $CHOICE; do
|
||||
#1.1) If update script not detected, return
|
||||
if [ -z "${service}" ]; then
|
||||
echo -e "${YW}[WARN]${CL} Update script not found. Skipping to next container"
|
||||
log_result "$container" "(unknown)" "SKIPPED" "No update script found in container"
|
||||
log_write "Container $container: SKIPPED — no update script found"
|
||||
continue
|
||||
else
|
||||
echo -e "${BL}[INFO]${CL} Detected service: ${GN}${service}${CL}"
|
||||
log_write "Container $container: detected service '$service'"
|
||||
fi
|
||||
|
||||
#2) Extract service build/update resource requirements from config/installation file
|
||||
@@ -391,17 +502,29 @@ for container in $CHOICE; do
|
||||
fi
|
||||
|
||||
#3) if build resources are different than run resources, then:
|
||||
if [ "$UPDATE_BUILD_RESOURCES" -eq "1" ]; then
|
||||
if [ "$UPDATE_BUILD_RESOURCES" -eq "1" ] && [[ "$var_dry_run" != "yes" ]]; then
|
||||
pct set "$container" --cores "$build_cpu" --memory "$build_ram"
|
||||
fi
|
||||
|
||||
#3.5) Dry-run: report update availability without applying
|
||||
if [[ "$var_dry_run" == "yes" ]]; then
|
||||
DRY_RUN_RESULT=""
|
||||
dry_run_container "$container" "$service"
|
||||
log_result "$container" "$service" "DRY-RUN" "${DRY_RUN_RESULT:-version check only}"
|
||||
log_write "Container $container ($service): DRY-RUN — ${DRY_RUN_RESULT:-version check only}"
|
||||
continue
|
||||
fi
|
||||
|
||||
#4) Update service, using the update command
|
||||
# Prepend a no-op 'clear' wrapper to PATH so update scripts calling clear
|
||||
# don't fail without a TTY — works for all shells incl. ash (no export -f)
|
||||
SETUP_CMD="mkdir -p /tmp/.nc; printf '#!/bin/sh\n:\n' > /tmp/.nc/clear; chmod +x /tmp/.nc/clear; export PATH=/tmp/.nc:\$PATH; export TERM=dumb; "
|
||||
case "$os" in
|
||||
alpine) pct exec "$container" -- ash -c "$UPDATE_CMD" ;;
|
||||
archlinux) pct exec "$container" -- bash -c "$UPDATE_CMD" ;;
|
||||
fedora | rocky | centos | alma) pct exec "$container" -- bash -c "$UPDATE_CMD" ;;
|
||||
ubuntu | debian | devuan) pct exec "$container" -- bash -c "$UPDATE_CMD" ;;
|
||||
opensuse) pct exec "$container" -- bash -c "$UPDATE_CMD" ;;
|
||||
alpine) pct exec "$container" -- ash -c "${SETUP_CMD}${UPDATE_CMD}" ;;
|
||||
archlinux) pct exec "$container" -- bash -c "${SETUP_CMD}${UPDATE_CMD}" ;;
|
||||
fedora | rocky | centos | alma) pct exec "$container" -- bash -c "${SETUP_CMD}${UPDATE_CMD}" ;;
|
||||
ubuntu | debian | devuan) pct exec "$container" -- bash -c "${SETUP_CMD}${UPDATE_CMD}" ;;
|
||||
opensuse) pct exec "$container" -- bash -c "${SETUP_CMD}${UPDATE_CMD}" ;;
|
||||
esac
|
||||
exit_code=$?
|
||||
|
||||
@@ -423,16 +546,31 @@ for container in $CHOICE; do
|
||||
|
||||
if [ $exit_code -eq 0 ]; then
|
||||
msg_ok "Updated container $container"
|
||||
log_result "$container" "$service" "OK" "Updated successfully"
|
||||
log_write "Container $container ($service): OK"
|
||||
elif [ $exit_code -eq 75 ]; then
|
||||
echo -e "${YW}[WARN]${CL} Container $container skipped (requires interactive mode)"
|
||||
log_result "$container" "$service" "SKIPPED" "Requires interactive mode (exit 75)"
|
||||
log_write "Container $container ($service): SKIPPED — requires interactive mode"
|
||||
elif [ $exit_code -eq 113 ]; then
|
||||
echo -e "${YW}[WARN]${CL} Container $container skipped (under-provisioned: increase CPU/RAM to match template)"
|
||||
log_result "$container" "$service" "SKIPPED" "Under-provisioned — increase CPU/RAM to match template"
|
||||
log_write "Container $container ($service): SKIPPED — under-provisioned"
|
||||
elif [ $exit_code -eq 114 ]; then
|
||||
echo -e "${YW}[WARN]${CL} Container $container skipped (storage critically low on /boot)"
|
||||
log_result "$container" "$service" "SKIPPED" "Storage critically low on /boot (>80%)"
|
||||
log_write "Container $container ($service): SKIPPED — storage critically low on /boot"
|
||||
elif [ "$BACKUP_CHOICE" == "yes" ]; then
|
||||
msg_error "Update failed for container $container (exit code: $exit_code) — attempting restore"
|
||||
log_write "Container $container ($service): FAILED (exit $exit_code) — attempting restore"
|
||||
msg_info "Restoring LXC $container from backup ($STORAGE_CHOICE)"
|
||||
pct stop $container
|
||||
LXC_STORAGE=$(pct config $container | awk -F '[:,]' '/rootfs/ {print $2}')
|
||||
BACKUP_ENTRY=$(pvesm list "$STORAGE_CHOICE" 2>/dev/null | awk -v ctid="$container" '$1 ~ "vzdump-lxc-"ctid"-" || $1 ~ "/ct/"ctid"/" {print $1}' | sort -r | head -n1)
|
||||
if [ -z "$BACKUP_ENTRY" ]; then
|
||||
msg_error "No backup found in storage $STORAGE_CHOICE for container $container"
|
||||
log_result "$container" "$service" "FAILED" "Update failed (exit $exit_code) — no backup found for restore"
|
||||
log_write "Container $container ($service): FAILED — no backup found for restore"
|
||||
exit 235
|
||||
fi
|
||||
msg_info "Restoring from: $BACKUP_ENTRY"
|
||||
@@ -441,19 +579,76 @@ for container in $CHOICE; do
|
||||
if [ $restorestatus -eq 0 ]; then
|
||||
pct start $container
|
||||
msg_ok "Container $container successfully restored from backup"
|
||||
log_result "$container" "$service" "RESTORED" "Update failed (exit $exit_code) — restored from backup"
|
||||
log_write "Container $container ($service): RESTORED from $BACKUP_ENTRY"
|
||||
else
|
||||
msg_error "Restore failed for container $container"
|
||||
log_result "$container" "$service" "FAILED" "Update failed (exit $exit_code) — restore also failed"
|
||||
log_write "Container $container ($service): FAILED — restore also failed"
|
||||
exit 235
|
||||
fi
|
||||
else
|
||||
msg_error "Update failed for container $container. Exiting"
|
||||
exit "$exit_code"
|
||||
msg_error "Update failed for container $container (exit code: $exit_code)"
|
||||
log_result "$container" "$service" "FAILED" "Exit code $exit_code"
|
||||
log_write "Container $container ($service): FAILED (exit $exit_code)"
|
||||
if [[ "$var_continue_on_error" == "yes" ]]; then
|
||||
echo -e "${YW}[WARN]${CL} Continuing to next container (var_continue_on_error=yes)"
|
||||
continue
|
||||
else
|
||||
exit "$exit_code"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
wait
|
||||
header_info
|
||||
echo -e "${GN}The process is complete, and the containers have been successfully updated.${CL}\n"
|
||||
if [[ "$var_dry_run" == "yes" ]]; then
|
||||
echo -e "${GN}Dry-run complete. No containers were modified.${CL}\n"
|
||||
else
|
||||
echo -e "${GN}The process is complete, and the containers have been successfully updated.${CL}\n"
|
||||
fi
|
||||
|
||||
# =============================================================================
|
||||
# SUMMARY REPORT
|
||||
# =============================================================================
|
||||
if [ "${#UPDATE_RESULTS[@]}" -gt 0 ]; then
|
||||
SEPARATOR="━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
HEADER=$(printf " %-8s %-22s %-10s %s" "CTID" "Service" "Status" "Details")
|
||||
|
||||
# terminal output (with colours)
|
||||
echo ""
|
||||
echo "$SEPARATOR"
|
||||
echo "$HEADER"
|
||||
echo "$SEPARATOR"
|
||||
for entry in "${UPDATE_RESULTS[@]}"; do
|
||||
IFS='|' read -r _ctid _svc _status _details <<<"$entry"
|
||||
case "$_status" in
|
||||
OK) _color="${GN}" ;;
|
||||
FAILED) _color="${RD}" ;;
|
||||
RESTORED) _color="${YW}" ;;
|
||||
*) _color="${YW}" ;;
|
||||
esac
|
||||
printf " %-8s %-22s ${_color}%-10s${CL} %s\n" "$_ctid" "$_svc" "$_status" "$_details"
|
||||
done
|
||||
echo "$SEPARATOR"
|
||||
echo ""
|
||||
echo "Full log: $LOG_FILE"
|
||||
echo ""
|
||||
|
||||
# append plain-text summary to log file
|
||||
{
|
||||
echo ""
|
||||
echo "Update finished: $(date '+%Y-%m-%d %H:%M:%S')"
|
||||
echo "$SEPARATOR"
|
||||
echo "$HEADER"
|
||||
echo "$SEPARATOR"
|
||||
for entry in "${UPDATE_RESULTS[@]}"; do
|
||||
IFS='|' read -r _ctid _svc _status _details <<<"$entry"
|
||||
printf " %-8s %-22s %-10s %s\n" "$_ctid" "$_svc" "$_status" "$_details"
|
||||
done
|
||||
echo "$SEPARATOR"
|
||||
} >>"$LOG_FILE"
|
||||
fi
|
||||
if [ "${#containers_needing_reboot[@]}" -gt 0 ]; then
|
||||
echo -e "${RD}The following containers require a reboot:${CL}"
|
||||
for container_name in "${containers_needing_reboot[@]}"; do
|
||||
|
||||
@@ -494,7 +494,7 @@ fi
|
||||
msg_ok "Using ${CL}${BL}$STORAGE${CL} ${GN}for Storage Location."
|
||||
msg_ok "Virtual Machine ID is ${CL}${BL}$VMID${CL}."
|
||||
msg_info "Retrieving the URL for the Ubuntu 25.04 Disk Image"
|
||||
URL=https://cloud-images.ubuntu.com/plucky/current/plucky-server-cloudimg-amd64.img
|
||||
URL=https://cloud-images.ubuntu.com/releases/server/plucky/release/ubuntu-25.04-server-cloudimg-amd64.img
|
||||
sleep 2
|
||||
msg_ok "${CL}${BL}${URL}${CL}"
|
||||
curl -f#SL -o "$(basename "$URL")" "$URL"
|
||||
|
||||
Reference in New Issue
Block a user