Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2026-02-15 01:33:25 +01:00)

Compare commits: 2 commits (copilot/ad... / fix/archli...)

| Author | SHA1 | Date |
|---|---|---|
| | ba93e85eea | |
| | 663bb828d0 | |

CHANGELOG.md (52)
@@ -401,76 +401,26 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

</details>

## 2026-02-14

## 2026-02-13

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

- OpenWebUI: pin numba constraint [@MickLesk](https://github.com/MickLesk) ([#11874](https://github.com/community-scripts/ProxmoxVE/pull/11874))
- Planka: add migrate step to update function [@ZimmermannLeon](https://github.com/ZimmermannLeon) ([#11877](https://github.com/community-scripts/ProxmoxVE/pull/11877))
- Pangolin: switch sqlite-specific back to generic [@MickLesk](https://github.com/MickLesk) ([#11868](https://github.com/community-scripts/ProxmoxVE/pull/11868))
- [Hotfix] Jotty: Copy contents of config backup into /opt/jotty/config [@vhsdream](https://github.com/vhsdream) ([#11864](https://github.com/community-scripts/ProxmoxVE/pull/11864))

- #### 🔧 Refactor

- Refactor: Radicale [@vhsdream](https://github.com/vhsdream) ([#11850](https://github.com/community-scripts/ProxmoxVE/pull/11850))
- chore(donetick): add config entry for v0.1.73 [@tomfrenzel](https://github.com/tomfrenzel) ([#11872](https://github.com/community-scripts/ProxmoxVE/pull/11872))

### 💾 Core

- #### 🔧 Refactor

- core: retry reporting with fallback payloads [@MickLesk](https://github.com/MickLesk) ([#11885](https://github.com/community-scripts/ProxmoxVE/pull/11885))

### 📡 API

- #### ✨ New Features

- error-handler: Implement json_escape and enhance error handling [@MickLesk](https://github.com/MickLesk) ([#11875](https://github.com/community-scripts/ProxmoxVE/pull/11875))

### 🌐 Website

- #### 📝 Script Information

- SQLServer-2025: add PVE9/Kernel 6.x incompatibility warning [@MickLesk](https://github.com/MickLesk) ([#11829](https://github.com/community-scripts/ProxmoxVE/pull/11829))

## 2026-02-12

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

- EMQX: increase disk to 6GB and add optional MQ disable prompt [@MickLesk](https://github.com/MickLesk) ([#11844](https://github.com/community-scripts/ProxmoxVE/pull/11844))
- Increased the Grafana container default disk size. [@shtefko](https://github.com/shtefko) ([#11840](https://github.com/community-scripts/ProxmoxVE/pull/11840))
- Pangolin: Update database generation command in install script [@tremor021](https://github.com/tremor021) ([#11825](https://github.com/community-scripts/ProxmoxVE/pull/11825))
- Deluge: add python3-setuptools as dep [@MickLesk](https://github.com/MickLesk) ([#11833](https://github.com/community-scripts/ProxmoxVE/pull/11833))
- Dispatcharr: migrate to uv sync [@MickLesk](https://github.com/MickLesk) ([#11831](https://github.com/community-scripts/ProxmoxVE/pull/11831))
- Pangolin: Update database generation command in install script [@tremor021](https://github.com/tremor021) ([#11825](https://github.com/community-scripts/ProxmoxVE/pull/11825))

- #### ✨ New Features

- Archlinux-VM: fix LVM/LVM-thin storage and improve error reporting | VM's add correct exit_code for analytics [@MickLesk](https://github.com/MickLesk) ([#11842](https://github.com/community-scripts/ProxmoxVE/pull/11842))
- Debian13-VM: Optimize First Boot & add noCloud/Cloud Selection [@MickLesk](https://github.com/MickLesk) ([#11810](https://github.com/community-scripts/ProxmoxVE/pull/11810))

### 💾 Core

- #### ✨ New Features

- tools.func: auto-detect binary vs armored GPG keys in setup_deb822_repo [@MickLesk](https://github.com/MickLesk) ([#11841](https://github.com/community-scripts/ProxmoxVE/pull/11841))
- core: remove old Go API and extend misc/api.func with new backend [@MickLesk](https://github.com/MickLesk) ([#11822](https://github.com/community-scripts/ProxmoxVE/pull/11822))

- #### 🔧 Refactor

- error_handler: prevent stuck 'installing' status [@MickLesk](https://github.com/MickLesk) ([#11845](https://github.com/community-scripts/ProxmoxVE/pull/11845))

### 🧰 Tools

- #### 🐞 Bug Fixes

- Tailscale: fix DNS check and keyrings directory issues [@MickLesk](https://github.com/MickLesk) ([#11837](https://github.com/community-scripts/ProxmoxVE/pull/11837))

## 2026-02-11

### 🆕 New Scripts
@@ -9,7 +9,7 @@ APP="Alpine-Grafana"
var_tags="${var_tags:-alpine;monitoring}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-2}"
var_disk="${var_disk:-1}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.23}"
var_unprivileged="${var_unprivileged:-1}"
@@ -42,8 +42,7 @@ function update_script() {

msg_info "Restoring Configurations"
mv /opt/selfhosted.yaml /opt/donetick/config
grep -q 'http://localhost"$' /opt/donetick/config/selfhosted.yaml || sed -i '/https:\/\/localhost"$/a\ - "http://localhost"' /opt/donetick/config/selfhosted.yaml
grep -q 'capacitor://localhost' /opt/donetick/config/selfhosted.yaml || sed -i '/http:\/\/localhost"$/a\ - "capacitor://localhost"' /opt/donetick/config/selfhosted.yaml
sed -i '/capacitor:\/\/localhost/d' /opt/donetick/config/selfhosted.yaml
mv /opt/donetick.db /opt/donetick
msg_ok "Restored Configurations"
@@ -9,7 +9,7 @@ APP="EMQX"
var_tags="${var_tags:-mqtt}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-6}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
@@ -9,7 +9,7 @@ APP="Grafana"
var_tags="${var_tags:-monitoring;visualization}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-4}"
var_disk="${var_disk:-2}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
@@ -46,7 +46,7 @@ function update_script() {
msg_info "Restoring configuration & data"
mv /opt/app.env /opt/jotty/.env
[[ -d /opt/data ]] && mv /opt/data /opt/jotty/data
[[ -d /opt/jotty/config ]] && cp -a /opt/config/* /opt/jotty/config && rm -rf /opt/config
[[ -d /opt/jotty/config ]] && mv /opt/config/* /opt/jotty/config
msg_ok "Restored configuration & data"

msg_info "Starting Service"
@@ -44,7 +44,7 @@ function update_script() {

msg_info "Installing uv-based Open-WebUI"
PYTHON_VERSION="3.12" setup_uv
$STD uv tool install --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool install --python 3.12 open-webui[all]
msg_ok "Installed uv-based Open-WebUI"

msg_info "Restoring data"
@@ -126,7 +126,7 @@ EOF

msg_info "Updating Open WebUI via uv"
PYTHON_VERSION="3.12" setup_uv
$STD uv tool install --force --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool upgrade --python 3.12 open-webui[all]
systemctl restart open-webui
msg_ok "Updated Open WebUI"
msg_ok "Updated successfully!"
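The `--constraint <(echo "numba>=0.60")` form above feeds uv a requirements-style constraints file through process substitution, which pins the transitive `numba` dependency without declaring it as a direct requirement. A minimal standalone sketch of the same pattern (the extra `numpy` line is only an illustration, not something the script uses):

```bash
# Pin a transitive dependency while installing a uv-managed tool.
# The process substitution <( ... ) behaves like a temporary constraints file.
uv tool install --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]

# Several constraints can be supplied at once, one per line:
uv tool install --force --python 3.12 \
  --constraint <(printf 'numba>=0.60\nnumpy<3\n') open-webui[all]
```

The `--force` variant mirrors the update path in the hunk above, which reinstalls instead of calling `uv tool upgrade`, presumably so the constraint is re-applied on every update.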
@@ -51,7 +51,7 @@ function update_script() {
$STD npm run db:generate
$STD npm run build
$STD npm run build:cli
$STD npm run db:push
$STD npm run db:sqlite:push
cp -R .next/standalone ./
chmod +x ./dist/cli.mjs
cp server/db/names.json ./dist/names.json
@@ -61,12 +61,6 @@ function update_script() {
rm -rf "$BK"
msg_ok "Restored data"

msg_ok "Migrate Database"
cd /opt/planka
$STD npm run db:upgrade
$STD npm run db:migrate
msg_ok "Migrated Database"

msg_info "Starting Service"
systemctl start planka
msg_ok "Started Service"
@@ -28,55 +28,16 @@ function update_script() {
exit
fi

if check_for_gh_release "Radicale" "Kozea/Radicale"; then
msg_info "Stopping service"
systemctl stop radicale
msg_ok "Stopped service"
msg_info "Updating ${APP}"
$STD python3 -m venv /opt/radicale
source /opt/radicale/bin/activate
$STD python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
msg_ok "Updated ${APP}"

msg_info "Backing up users file"
cp /opt/radicale/users /opt/radicale_users_backup
msg_ok "Backed up users file"

PYTHON_VERSION="3.13" setup_uv
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Radicale" "Kozea/Radicale" "tarball" "latest" "/opt/radicale"

msg_info "Restoring users file"
rm -f /opt/radicale/users
mv /opt/radicale_users_backup /opt/radicale/users
msg_ok "Restored users file"

if grep -q 'start.sh' /etc/systemd/system/radicale.service; then
sed -i -e '/^Description/i[Unit]' \
-e '\|^ExecStart|iWorkingDirectory=/opt/radicale' \
-e 's|^ExecStart=.*|ExecStart=/usr/local/bin/uv run -m radicale --config /etc/radicale/config|' /etc/systemd/system/radicale.service
systemctl daemon-reload
fi
if [[ ! -f /etc/radicale/config ]]; then
msg_info "Migrating to config file (/etc/radicale/config)"
mkdir -p /etc/radicale
cat <<EOF >/etc/radicale/config
[server]
hosts = 0.0.0.0:5232

[auth]
type = htpasswd
htpasswd_filename = /opt/radicale/users
htpasswd_encryption = sha512

[storage]
type = multifilesystem
filesystem_folder = /var/lib/radicale/collections

[web]
type = internal
EOF
msg_ok "Migrated to config (/etc/radicale/config)"
fi
msg_info "Starting service"
systemctl start radicale
msg_ok "Started service"
msg_ok "Updated Successfully!"
fi
msg_info "Starting Service"
systemctl enable -q --now radicale
msg_ok "Started Service"
msg_ok "Updated successfully!"
exit
}
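The three-expression `sed` in the hunk above migrates an existing start.sh-based unit in place: it inserts a `[Unit]` header before `Description`, adds a `WorkingDirectory` line above `ExecStart`, and rewrites `ExecStart` to invoke radicale through uv with the new config file. One way to preview that edit on a scratch copy (illustrative only; it assumes the unit file exists at the usual path):

```bash
cp /etc/systemd/system/radicale.service /tmp/radicale.service.preview
sed -i \
  -e '/^Description/i[Unit]' \
  -e '\|^ExecStart|iWorkingDirectory=/opt/radicale' \
  -e 's|^ExecStart=.*|ExecStart=/usr/local/bin/uv run -m radicale --config /etc/radicale/config|' \
  /tmp/radicale.service.preview
grep -E '^(WorkingDirectory|ExecStart)=' /tmp/radicale.service.preview
# Expected:
#   WorkingDirectory=/opt/radicale
#   ExecStart=/usr/local/bin/uv run -m radicale --config /etc/radicale/config
```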
@@ -21,7 +21,7 @@
"resources": {
"cpu": 2,
"ram": 1024,
"hdd": 6,
"hdd": 4,
"os": "debian",
"version": "13"
}
The next group of hunks covers a JSON version manifest. Each changed field appears twice in the flattened diff, the value from one side of the compare followed by the value from the other, summarized below as "first / second". The top-level `"generated"` timestamp changes from 2026-02-14T06:14:47Z to 2026-02-12T12:15:15Z. `"pinned": false` is identical on both sides for every listed entry, and the neighbouring entries that appear only as unchanged context are: 2fauth, autocaliweb, comfyui, drawio, gitea, grist, homebox, immich-public-proxy, jellystat, monica, paperless-ai, paymenter, pocketid, rclone, seelf, shelfmark, tasmoadmin, the-lounge, traccar, tunarr, vaultwarden.

| Slug | Repo | Version (first / second) | Date (first / second) |
|---|---|---|---|
| autobrr | autobrr/autobrr | v1.73.0 / v1.72.1 | 2026-02-13T16:37:28Z / 2026-01-30T12:57:58Z |
| cleanuparr | Cleanuparr/Cleanuparr | v2.6.1 / v2.5.1 | 2026-02-13T10:00:19Z / 2026-01-11T00:46:17Z |
| cloudreve | cloudreve/cloudreve | 4.14.0 / 4.13.0 | 2026-02-14T06:05:06Z / 2026-02-05T12:53:24Z |
| donetick | donetick/donetick | v0.1.73 / v0.1.71 | 2026-02-12T23:42:30Z / 2026-02-11T06:01:13Z |
| ghostfolio | ghostfolio/ghostfolio | 2.238.0 / 2.237.0 | 2026-02-12T18:28:55Z / 2026-02-08T13:59:53Z |
| gotify | gotify/server | v2.9.0 / v2.8.0 | 2026-02-13T15:22:31Z / 2026-01-02T11:56:16Z |
| homarr | homarr-labs/homarr | v1.53.1 / v1.53.0 | 2026-02-13T19:47:11Z / 2026-02-06T19:42:58Z |
| huntarr | plexguide/Huntarr.io | 9.2.4.1 / 9.2.4 | 2026-02-12T22:17:47Z / 2026-02-12T08:31:23Z |
| invoiceninja | invoiceninja/invoiceninja | v5.12.59 / v5.12.57 | 2026-02-13T02:26:13Z / 2026-02-11T23:08:56Z |
| jackett | Jackett/Jackett | v0.24.1109 / v0.24.1098 | 2026-02-14T05:54:26Z / 2026-02-12T05:56:25Z |
| metube | alexta69/metube | 2026.02.13 / 2026.02.08 | 2026-02-13T15:18:17Z / 2026-02-08T17:01:37Z |
| miniflux | miniflux/v2 | 2.2.17 / 2.2.16 | 2026-02-13T20:30:17Z / 2026-01-07T03:26:27Z |
| pangolin | fosrl/pangolin | 1.15.4 / 1.15.3 | 2026-02-13T23:01:29Z / 2026-02-12T06:10:19Z |
| patchmon | PatchMon/PatchMon | v1.4.0 / v1.3.7 | 2026-02-13T10:39:03Z / 2025-12-25T11:08:14Z |
| pocketbase | pocketbase/pocketbase | v0.36.3 / v0.36.2 | 2026-02-13T18:38:58Z / 2026-02-01T08:12:42Z |
| radicale | Kozea/Radicale | v3.6.0 (one side only) | 2026-01-10T06:56:46Z (one side only) |
| scraparr | thecfu/scraparr | v3.0.3 / v3.0.1 | 2026-02-12T14:20:56Z / 2026-02-11T17:42:23Z |
| semaphore | semaphoreui/semaphore | v2.17.0 / v2.16.51 | 2026-02-13T21:08:30Z / 2026-01-12T16:26:38Z |
| tandoor | TandoorRecipes/recipes | 2.5.1 / 2.5.0 | 2026-02-13T15:57:27Z / 2026-02-08T13:23:02Z |
| termix | Termix-SSH/Termix | release-1.11.1-tag / release-1.11.0-tag | 2026-02-13T04:49:16Z / 2026-01-25T02:09:52Z |
| tianji | msgbyte/tianji | v1.31.13 / v1.31.10 | 2026-02-13T16:30:09Z / 2026-02-04T17:21:04Z |
| tududi | chrisvel/tududi | v0.88.5 / v0.88.4 | 2026-02-13T13:54:14Z / 2026-01-20T15:11:58Z |
| upsnap | seriousm4x/UpSnap | 5.2.8 / 5.2.7 | 2026-02-13T00:02:37Z / 2026-01-07T23:48:00Z |
| uptimekuma | louislam/uptime-kuma | 2.1.1 / 2.1.0 | 2026-02-13T16:07:33Z / 2026-02-07T02:31:49Z |

The radicale entry (Kozea/Radicale, v3.6.0, 2026-01-10T06:56:46Z) appears as a complete seven-line block on only one side of the compare and is absent from the other.
@@ -21,7 +21,7 @@
"resources": {
"cpu": 1,
"ram": 512,
"hdd": 4,
"hdd": 2,
"os": "debian",
"version": "13"
}

@@ -32,7 +32,7 @@
"resources": {
"cpu": 1,
"ram": 256,
"hdd": 2,
"hdd": 1,
"os": "alpine",
"version": "3.23"
}
@@ -12,7 +12,7 @@
"documentation": "https://radicale.org/master.html#documentation-1",
"website": "https://radicale.org/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/radicale.webp",
"config_path": "/etc/radicale/config",
"config_path": "/etc/radicale/config or ~/.config/radicale/config",
"description": "Radicale is a small but powerful CalDAV (calendars, to-do lists) and CardDAV (contacts)",
"install_methods": [
{
@@ -32,10 +32,6 @@
"password": null
},
"notes": [
{
"text": "SQL Server (2025) SQLPAL is incompatible with Proxmox VE 9 (Kernel 6.12+) in LXC containers. Use a VM instead or the SQL-Server 2022 LXC.",
"type": "warning"
},
{
"text": "If you choose not to run the installation setup, execute: `/opt/mssql/bin/mssql-conf setup` in LXC shell.",
"type": "info"
@@ -14,8 +14,6 @@
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/ubiquiti-unifi.webp",
"config_path": "",
"description": "UniFi Network Server is a software that helps manage and monitor UniFi networks (Wi-Fi, Ethernet, etc.) by providing an intuitive user interface and advanced features. It allows network administrators to configure, monitor, and upgrade network devices, as well as view network statistics, client devices, and historical events. The aim of the application is to make the management of UniFi networks easier and more efficient.",
"disable": true,
"disable_description": "This script is disabled because UniFi no longer delivers APT packages for Debian systems. The installation relies on APT repositories that are no longer maintained or available. For more details, see: https://github.com/community-scripts/ProxmoxVE/issues/11876",
"install_methods": [
{
"type": "default",
@@ -38,18 +38,6 @@ rm -f "$DEB_FILE"
echo "$LATEST_VERSION" >~/.emqx
msg_ok "Installed EMQX"

read -r -p "${TAB3}Would you like to disable the EMQX MQ feature? (reduces disk/CPU usage) <y/N> " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Disabling EMQX MQ feature"
mkdir -p /etc/emqx
if ! grep -q "^mq.enable" /etc/emqx/emqx.conf 2>/dev/null; then
echo "mq.enable = false" >>/etc/emqx/emqx.conf
else
sed -i 's/^mq.enable.*/mq.enable = false/' /etc/emqx/emqx.conf
fi
msg_ok "Disabled EMQX MQ feature"
fi

msg_info "Starting EMQX service"
$STD systemctl enable -q --now emqx
msg_ok "Enabled EMQX service"
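The prompt above only disables the MQ feature; turning it back on later is the mirror image of the same edit. A sketch, assuming the `/etc/emqx/emqx.conf` path and the `emqx` unit referenced above:

```bash
# Re-enable the EMQX MQ feature and apply the change.
sed -i 's/^mq.enable.*/mq.enable = true/' /etc/emqx/emqx.conf
systemctl restart emqx
```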
@@ -24,7 +24,7 @@ setup_hwaccel
PYTHON_VERSION="3.12" setup_uv

msg_info "Installing Open WebUI"
$STD uv tool install --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool install --python 3.12 open-webui[all]
msg_ok "Installed Open WebUI"

read -r -p "${TAB3}Would you like to add Ollama? <y/N> " prompt
@@ -36,7 +36,7 @@ $STD npm ci
$STD npm run set:sqlite
$STD npm run set:oss
rm -rf server/private
$STD npm run db:generate
$STD npm run db:sqlite:generate
$STD npm run build
$STD npm run build:cli
cp -R .next/standalone ./

@@ -178,7 +178,7 @@ http:
servers:
- url: "http://$LOCAL_IP:3000"
EOF
$STD npm run db:push
$STD npm run db:sqlite:push

. /etc/os-release
if [ "$VERSION_CODENAME" = "trixie" ]; then
@@ -14,51 +14,42 @@ network_check
update_os

msg_info "Installing Dependencies"
$STD apt install -y apache2-utils
$STD apt install -y \
apache2-utils \
python3-pip \
python3-venv
msg_ok "Installed Dependencies"

PYTHON_VERSION="3.13" setup_uv
fetch_and_deploy_gh_release "Radicale" "Kozea/Radicale" "tarball" "latest" "/opt/radicale"

msg_info "Setting up Radicale"
cd /opt/radicale
python3 -m venv /opt/radicale
source /opt/radicale/bin/activate
$STD python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
RNDPASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
$STD htpasswd -c -b -5 /opt/radicale/users admin "$RNDPASS"
$STD htpasswd -c -b -5 /opt/radicale/users admin $RNDPASS
{
echo "Radicale Credentials"
echo "Admin User: admin"
echo "Admin Password: $RNDPASS"
} >>~/radicale.creds
msg_ok "Done setting up Radicale"

mkdir -p /etc/radicale
cat <<EOF >/etc/radicale/config
[server]
hosts = 0.0.0.0:5232
msg_info "Setup Service"

[auth]
type = htpasswd
htpasswd_filename = /opt/radicale/users
htpasswd_encryption = sha512

[storage]
type = multifilesystem
filesystem_folder = /var/lib/radicale/collections

[web]
type = internal
cat <<EOF >/opt/radicale/start.sh
#!/usr/bin/env bash
source /opt/radicale/bin/activate
python3 -m radicale --storage-filesystem-folder=/var/lib/radicale/collections --hosts 0.0.0.0:5232 --auth-type htpasswd --auth-htpasswd-filename /opt/radicale/users --auth-htpasswd-encryption sha512
EOF
msg_ok "Set up Radicale"

msg_info "Creating Service"
chmod +x /opt/radicale/start.sh

cat <<EOF >/etc/systemd/system/radicale.service
[Unit]
Description=A simple CalDAV (calendar) and CardDAV (contact) server
After=network.target
Requires=network.target

[Service]
WorkingDirectory=/opt/radicale
ExecStart=/usr/local/bin/uv run -m radicale --config /etc/radicale/config
ExecStart=/opt/radicale/start.sh
Restart=on-failure
# User=radicale
# Deny other users access to the calendar data
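A quick sanity check after the config-file based install variant shown above (illustrative; the paths are the ones that variant writes):

```bash
# Confirm the files the installer creates, then start and inspect the service.
test -f /etc/radicale/config && echo "radicale config: present"
test -f /opt/radicale/users && echo "htpasswd users file: present"
systemctl enable -q --now radicale
systemctl --no-pager status radicale
```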
@@ -15,18 +15,16 @@ update_os

msg_info "Installing Dependencies"
$STD apt install -y apt-transport-https
curl -fsSL "https://dl.ui.com/unifi/unifi-repo.gpg" -o "/usr/share/keyrings/unifi-repo.gpg"
cat <<EOF | sudo tee /etc/apt/sources.list.d/100-ubnt-unifi.sources >/dev/null
Types: deb
URIs: https://www.ui.com/downloads/unifi/debian
Suites: stable
Components: ubiquiti
Architectures: amd64
Signed-By: /usr/share/keyrings/unifi-repo.gpg
EOF
$STD apt update
msg_ok "Installed Dependencies"

setup_deb822_repo \
"unifi" \
"https://dl.ui.com/unifi/unifi-repo.gpg" \
"https://www.ui.com/downloads/unifi/debian" \
"stable" \
"ubiquiti" \
"amd64"

JAVA_VERSION="21" setup_java

if lscpu | grep -q 'avx'; then
misc/api.func (179)
@@ -153,7 +153,7 @@ explain_exit_code() {
126) echo "Command invoked cannot execute (permission problem?)" ;;
127) echo "Command not found" ;;
128) echo "Invalid argument to exit" ;;
130) echo "Aborted by user (SIGINT)" ;;
130) echo "Terminated by Ctrl+C (SIGINT)" ;;
134) echo "Process aborted (SIGABRT - possibly Node.js heap overflow)" ;;
137) echo "Killed (SIGKILL / Out of memory?)" ;;
139) echo "Segmentation fault (core dumped)" ;;

@@ -233,43 +233,6 @@ explain_exit_code() {
esac
}

# ------------------------------------------------------------------------------
# json_escape()
#
# - Escapes a string for safe JSON embedding
# - Handles backslashes, quotes, newlines, tabs, and carriage returns
# ------------------------------------------------------------------------------
json_escape() {
local s="$1"
s=${s//\\/\\\\}
s=${s//"/\\"}
s=${s//$'\n'/\\n}
s=${s//$'\r'/}
s=${s//$'\t'/\\t}
echo "$s"
}

# ------------------------------------------------------------------------------
# get_error_text()
#
# - Returns last 20 lines of the active log (INSTALL_LOG or BUILD_LOG)
# - Falls back to empty string if no log is available
# ------------------------------------------------------------------------------
get_error_text() {
local logfile=""
if declare -f get_active_logfile >/dev/null 2>&1; then
logfile=$(get_active_logfile)
elif [[ -n "${INSTALL_LOG:-}" ]]; then
logfile="$INSTALL_LOG"
elif [[ -n "${BUILD_LOG:-}" ]]; then
logfile="$BUILD_LOG"
fi

if [[ -n "$logfile" && -s "$logfile" ]]; then
tail -n 20 "$logfile" 2>/dev/null | sed 's/\r$//'
fi
}

# ==============================================================================
# SECTION 2: TELEMETRY FUNCTIONS
# ==============================================================================
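`json_escape()` exists so that raw log tails from `get_error_text()` can be embedded into the JSON telemetry payloads built further down. A small usage sketch with hypothetical values, assuming the two functions above are sourced:

```bash
# Multi-line, quoted, tab-containing error text gets flattened into one JSON string.
err=$(printf 'step failed\nsee "apt" output\tcode 100')
payload=$(cat <<EOF
{"status": "failed", "error": "$(json_escape "$err")"}
EOF
)
echo "$payload"
# -> {"status": "failed", "error": "step failed\nsee \"apt\" output\tcode 100"}
```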
@@ -422,8 +385,7 @@ post_to_api() {
detect_gpu
fi
local gpu_vendor="${GPU_VENDOR:-unknown}"
local gpu_model
gpu_model=$(json_escape "${GPU_MODEL:-}")
local gpu_model="${GPU_MODEL:-}"
local gpu_passthrough="${GPU_PASSTHROUGH:-unknown}"

# Detect CPU if not already set
@@ -431,8 +393,7 @@ post_to_api() {
detect_cpu
fi
local cpu_vendor="${CPU_VENDOR:-unknown}"
local cpu_model
cpu_model=$(json_escape "${CPU_MODEL:-}")
local cpu_model="${CPU_MODEL:-}"

# Detect RAM if not already set
if [[ -z "${RAM_SPEED:-}" ]]; then

@@ -523,8 +484,7 @@ post_to_api_vm() {
detect_gpu
fi
local gpu_vendor="${GPU_VENDOR:-unknown}"
local gpu_model
gpu_model=$(json_escape "${GPU_MODEL:-}")
local gpu_model="${GPU_MODEL:-}"
local gpu_passthrough="${GPU_PASSTHROUGH:-unknown}"

# Detect CPU if not already set
@@ -532,8 +492,7 @@ post_to_api_vm() {
detect_cpu
fi
local cpu_vendor="${CPU_VENDOR:-unknown}"
local cpu_model
cpu_model=$(json_escape "${CPU_MODEL:-}")
local cpu_model="${CPU_MODEL:-}"

# Detect RAM if not already set
if [[ -z "${RAM_SPEED:-}" ]]; then

@@ -596,12 +555,9 @@ post_update_to_api() {
# Silent fail - telemetry should never break scripts
command -v curl &>/dev/null || return 0

# Support "force" mode (3rd arg) to bypass duplicate check for retries after cleanup
local force="${3:-}"
# Prevent duplicate submissions
POST_UPDATE_DONE=${POST_UPDATE_DONE:-false}
if [[ "$POST_UPDATE_DONE" == "true" && "$force" != "force" ]]; then
return 0
fi
[[ "$POST_UPDATE_DONE" == "true" ]] && return 0

[[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0
[[ -z "${RANDOM_UUID:-}" ]] && return 0
@@ -612,14 +568,12 @@ post_update_to_api() {

# Get GPU info (if detected)
local gpu_vendor="${GPU_VENDOR:-unknown}"
local gpu_model
gpu_model=$(json_escape "${GPU_MODEL:-}")
local gpu_model="${GPU_MODEL:-}"
local gpu_passthrough="${GPU_PASSTHROUGH:-unknown}"

# Get CPU info (if detected)
local cpu_vendor="${CPU_VENDOR:-unknown}"
local cpu_model
cpu_model=$(json_escape "${CPU_MODEL:-}")
local cpu_model="${CPU_MODEL:-}"

# Get RAM info (if detected)
local ram_speed="${RAM_SPEED:-}"
@@ -641,21 +595,13 @@ post_update_to_api() {
esac

# For failed/unknown status, resolve exit code and error description
local short_error=""
if [[ "$pb_status" == "failed" ]] || [[ "$pb_status" == "unknown" ]]; then
if [[ "$raw_exit_code" =~ ^[0-9]+$ ]]; then
exit_code="$raw_exit_code"
else
exit_code=1
fi
local error_text=""
error_text=$(get_error_text)
if [[ -n "$error_text" ]]; then
error=$(json_escape "$error_text")
else
error=$(json_escape "$(explain_exit_code "$exit_code")")
fi
short_error=$(json_escape "$(explain_exit_code "$exit_code")")
error=$(explain_exit_code "$exit_code")
error_category=$(categorize_error "$exit_code")
[[ -z "$error" ]] && error="Unknown error"
fi
@@ -672,9 +618,8 @@ post_update_to_api() {
pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true
fi

local http_code=""

# ── Attempt 1: Full payload with complete error text ──
# Full payload including all fields - allows record creation if initial call failed
# The Go service will find the record by random_id and PATCH, or create if not found
local JSON_PAYLOAD
JSON_PAYLOAD=$(
cat <<EOF
@@ -706,80 +651,11 @@
EOF
)

http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD" -o /dev/null 2>/dev/null) || http_code="000"

if [[ "$http_code" =~ ^2[0-9]{2}$ ]]; then
POST_UPDATE_DONE=true
return 0
fi

# ── Attempt 2: Short error text (no full log) ──
sleep 1
local RETRY_PAYLOAD
RETRY_PAYLOAD=$(
cat <<EOF
{
"random_id": "${RANDOM_UUID}",
"type": "${TELEMETRY_TYPE:-lxc}",
"nsapp": "${NSAPP:-unknown}",
"status": "${pb_status}",
"ct_type": ${CT_TYPE:-1},
"disk_size": ${DISK_SIZE:-0},
"core_count": ${CORE_COUNT:-0},
"ram_size": ${RAM_SIZE:-0},
"os_type": "${var_os:-}",
"os_version": "${var_version:-}",
"pve_version": "${pve_version}",
"method": "${METHOD:-default}",
"exit_code": ${exit_code},
"error": "${short_error}",
"error_category": "${error_category}",
"install_duration": ${duration},
"cpu_vendor": "${cpu_vendor}",
"cpu_model": "${cpu_model}",
"gpu_vendor": "${gpu_vendor}",
"gpu_model": "${gpu_model}",
"gpu_passthrough": "${gpu_passthrough}",
"ram_speed": "${ram_speed}",
"repo_source": "${REPO_SOURCE}"
}
EOF
)

http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$RETRY_PAYLOAD" -o /dev/null 2>/dev/null) || http_code="000"

if [[ "$http_code" =~ ^2[0-9]{2}$ ]]; then
POST_UPDATE_DONE=true
return 0
fi

# ── Attempt 3: Minimal payload (bare minimum to set status) ──
sleep 2
local MINIMAL_PAYLOAD
MINIMAL_PAYLOAD=$(
cat <<EOF
{
"random_id": "${RANDOM_UUID}",
"type": "${TELEMETRY_TYPE:-lxc}",
"nsapp": "${NSAPP:-unknown}",
"status": "${pb_status}",
"exit_code": ${exit_code},
"error": "${short_error}",
"error_category": "${error_category}",
"install_duration": ${duration}
}
EOF
)

# Fire-and-forget: never block, never fail
curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$MINIMAL_PAYLOAD" -o /dev/null 2>/dev/null || true
-d "$JSON_PAYLOAD" -o /dev/null 2>&1 || true

# Tried 3 times - mark as done regardless to prevent infinite loops
POST_UPDATE_DONE=true
}
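The three attempts above implement one idea: POST the richest payload first, then progressively smaller ones until a 2xx comes back, and never let a telemetry failure propagate to the caller. A condensed sketch of that pattern (not the repo's code; names are illustrative):

```bash
post_with_fallback() {
  local url="$1"; shift
  local payload http_code
  for payload in "$@"; do                 # payloads ordered from largest to smallest
    http_code=$(curl -sS -o /dev/null -w "%{http_code}" -m 10 \
      -H "Content-Type: application/json" -X POST -d "$payload" "$url") || http_code="000"
    [[ "$http_code" =~ ^2[0-9]{2}$ ]] && return 0
    sleep 1                               # brief pause before the next, smaller attempt
  done
  return 0                                # fire-and-forget: never fail the caller
}
# e.g. post_with_fallback "$TELEMETRY_URL" "$JSON_PAYLOAD" "$RETRY_PAYLOAD" "$MINIMAL_PAYLOAD"
```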
@@ -815,9 +691,6 @@ categorize_error() {
# Configuration errors
203 | 204 | 205 | 206 | 207 | 208) echo "config" ;;

# Aborted by user
130) echo "aborted" ;;

# Resource errors (OOM, etc)
137 | 134) echo "resource" ;;

@@ -882,13 +755,7 @@ post_tool_to_api() {

if [[ "$status" == "failed" ]]; then
[[ ! "$exit_code" =~ ^[0-9]+$ ]] && exit_code=1
local error_text=""
error_text=$(get_error_text)
if [[ -n "$error_text" ]]; then
error=$(json_escape "$error_text")
else
error=$(json_escape "$(explain_exit_code "$exit_code")")
fi
error=$(explain_exit_code "$exit_code")
error_category=$(categorize_error "$exit_code")
fi

@@ -949,13 +816,7 @@ post_addon_to_api() {

if [[ "$status" == "failed" ]]; then
[[ ! "$exit_code" =~ ^[0-9]+$ ]] && exit_code=1
local error_text=""
error_text=$(get_error_text)
if [[ -n "$error_text" ]]; then
error=$(json_escape "$error_text")
else
error=$(json_escape "$(explain_exit_code "$exit_code")")
fi
error=$(explain_exit_code "$exit_code")
error_category=$(categorize_error "$exit_code")
fi

@@ -1048,13 +909,7 @@ post_update_to_api_extended() {
else
exit_code=1
fi
local error_text=""
error_text=$(get_error_text)
if [[ -n "$error_text" ]]; then
error=$(json_escape "$error_text")
else
error=$(json_escape "$(explain_exit_code "$exit_code")")
fi
error=$(explain_exit_code "$exit_code")
error_category=$(categorize_error "$exit_code")
[[ -z "$error" ]] && error="Unknown error"
fi
@@ -4046,10 +4046,12 @@ EOF'
if [[ $install_exit_code -ne 0 ]]; then
msg_error "Installation failed in container ${CTID} (exit code: ${install_exit_code})"

# Copy install log from container BEFORE API call so get_error_text() can read it
# Report failure to telemetry API
post_update_to_api "failed" "$install_exit_code"

# Copy both logs from container before potential deletion
local build_log_copied=false
local install_log_copied=false
local host_install_log="/tmp/install-lxc-${CTID}-${SESSION_ID}.log"

if [[ -n "$CTID" && -n "${SESSION_ID:-}" ]]; then
# Copy BUILD_LOG (creation log) if it exists
@@ -4057,22 +4059,15 @@ EOF'
cp "${BUILD_LOG}" "/tmp/create-lxc-${CTID}-${SESSION_ID}.log" 2>/dev/null && build_log_copied=true
fi

# Copy INSTALL_LOG from container to host
if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "$host_install_log" 2>/dev/null; then
# Copy INSTALL_LOG from container
if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "/tmp/install-lxc-${CTID}-${SESSION_ID}.log" 2>/dev/null; then
install_log_copied=true
# Point INSTALL_LOG to host copy so get_error_text() finds it
INSTALL_LOG="$host_install_log"
fi
fi

# Report failure to telemetry API (now with log available on host)
post_update_to_api "failed" "$install_exit_code"

# Show available logs
if [[ -n "$CTID" && -n "${SESSION_ID:-}" ]]; then
# Show available logs
echo ""
[[ "$build_log_copied" == true ]] && echo -e "${GN}✔${CL} Container creation log: ${BL}/tmp/create-lxc-${CTID}-${SESSION_ID}.log${CL}"
[[ "$install_log_copied" == true ]] && echo -e "${GN}✔${CL} Installation log: ${BL}${host_install_log}${CL}"
[[ "$install_log_copied" == true ]] && echo -e "${GN}✔${CL} Installation log: ${BL}/tmp/install-lxc-${CTID}-${SESSION_ID}.log${CL}"
fi

# Dev mode: Keep container or open breakpoint shell
@@ -4130,10 +4125,6 @@ EOF'
echo -e "${BFR}${CM}${GN}Container ${CTID} removed${CL}"
fi

# Force one final status update attempt after cleanup
# This ensures status is updated even if the first attempt failed (e.g., HTTP 400)
post_update_to_api "failed" "$install_exit_code" "force"

exit $install_exit_code
fi
}
@@ -222,12 +222,6 @@ error_handler() {
pct destroy "$CTID" &>/dev/null || true
echo -e "${GN}✔${CL} Container ${CTID} removed"
fi

# Force one final status update attempt after cleanup
# This ensures status is updated even if the first attempt failed (e.g., HTTP 400)
if declare -f post_update_to_api &>/dev/null; then
post_update_to_api "failed" "$exit_code" "force"
fi
fi
fi
fi
@@ -249,18 +243,6 @@ error_handler() {
# ------------------------------------------------------------------------------
on_exit() {
local exit_code=$?
# Report orphaned "installing" records to telemetry API
# Catches ALL exit paths: errors (non-zero), signals, AND clean exits where
# post_to_api was called ("installing" sent) but post_update_to_api was never called
if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
if declare -f post_update_to_api >/dev/null 2>&1; then
if [[ $exit_code -ne 0 ]]; then
post_update_to_api "failed" "$exit_code"
else
post_update_to_api "failed" "1"
fi
fi
fi
[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
exit "$exit_code"
}
@@ -273,10 +255,6 @@ on_exit() {
# - Exits with code 130 (128 + SIGINT=2)
# ------------------------------------------------------------------------------
on_interrupt() {
# Report interruption to telemetry API (prevents stuck "installing" records)
if declare -f post_update_to_api >/dev/null 2>&1; then
post_update_to_api "failed" "130"
fi
if declare -f msg_error >/dev/null 2>&1; then
msg_error "Interrupted by user (SIGINT)"
else
@@ -294,10 +272,6 @@ on_interrupt() {
# - Triggered by external process termination
# ------------------------------------------------------------------------------
on_terminate() {
# Report termination to telemetry API (prevents stuck "installing" records)
if declare -f post_update_to_api >/dev/null 2>&1; then
post_update_to_api "failed" "143"
fi
if declare -f msg_error >/dev/null 2>&1; then
msg_error "Terminated by signal (SIGTERM)"
else

@@ -465,7 +465,6 @@ manage_tool_repository() {
msg_error "Failed to download MongoDB GPG key"
return 1
fi
chmod 644 "/etc/apt/keyrings/mongodb-server-${version}.gpg"

# Setup repository
local distro_codename
@@ -1295,33 +1294,12 @@ setup_deb822_repo() {
return 1
}

# Import GPG key (auto-detect binary vs ASCII-armored format)
local tmp_gpg
tmp_gpg=$(mktemp) || return 1
curl -fsSL "$gpg_url" -o "$tmp_gpg" || {
msg_error "Failed to download GPG key for ${name}"
rm -f "$tmp_gpg"
# Import GPG
curl -fsSL "$gpg_url" | gpg --dearmor --yes -o "/etc/apt/keyrings/${name}.gpg" || {
msg_error "Failed to import GPG key for ${name}"
return 1
}

if grep -q "BEGIN PGP" "$tmp_gpg" 2>/dev/null; then
# ASCII-armored — dearmor to binary
gpg --dearmor --yes -o "/etc/apt/keyrings/${name}.gpg" < "$tmp_gpg" || {
msg_error "Failed to dearmor GPG key for ${name}"
rm -f "$tmp_gpg"
return 1
}
else
# Already in binary GPG format — copy directly
cp "$tmp_gpg" "/etc/apt/keyrings/${name}.gpg" || {
msg_error "Failed to install GPG key for ${name}"
rm -f "$tmp_gpg"
return 1
}
fi
rm -f "$tmp_gpg"
chmod 644 "/etc/apt/keyrings/${name}.gpg"

# Write deb822
{
echo "Types: deb"

@@ -75,37 +75,14 @@ pct exec "$CTID" -- bash -c '
set -e
export DEBIAN_FRONTEND=noninteractive

# Source os-release properly (handles quoted values)
source /etc/os-release
ID=$(grep "^ID=" /etc/os-release | cut -d"=" -f2)
VER=$(grep "^VERSION_CODENAME=" /etc/os-release | cut -d"=" -f2)

# Fallback if DNS is poisoned or blocked
# fallback if DNS is poisoned or blocked
ORIG_RESOLV="/etc/resolv.conf"
BACKUP_RESOLV="/tmp/resolv.conf.backup"

# Check DNS resolution using multiple methods (dig may not be installed)
dns_check_failed=true
if command -v dig &>/dev/null; then
if dig +short pkgs.tailscale.com 2>/dev/null | grep -qvE "^127\.|^0\.0\.0\.0$|^$"; then
dns_check_failed=false
fi
elif command -v host &>/dev/null; then
if host pkgs.tailscale.com 2>/dev/null | grep -q "has address"; then
dns_check_failed=false
fi
elif command -v nslookup &>/dev/null; then
if nslookup pkgs.tailscale.com 2>/dev/null | grep -q "Address:"; then
dns_check_failed=false
fi
elif command -v getent &>/dev/null; then
if getent hosts pkgs.tailscale.com &>/dev/null; then
dns_check_failed=false
fi
else
# No DNS tools available, try curl directly and assume DNS works
dns_check_failed=false
fi

if $dns_check_failed; then
if ! dig +short pkgs.tailscale.com | grep -qvE "^127\.|^0\.0\.0\.0$"; then
echo "[INFO] DNS resolution for pkgs.tailscale.com failed (blocked or redirected)."
echo "[INFO] Temporarily overriding /etc/resolv.conf with Cloudflare DNS (1.1.1.1)"
cp "$ORIG_RESOLV" "$BACKUP_RESOLV"
@@ -115,22 +92,17 @@ fi
if ! command -v curl &>/dev/null; then
echo "[INFO] curl not found, installing..."
apt-get update -qq
apt update -qq
apt install -y curl >/dev/null
apt-get install -y curl >/dev/null
fi

# Ensure keyrings directory exists
mkdir -p /usr/share/keyrings

curl -fsSL "https://pkgs.tailscale.com/stable/${ID}/${VERSION_CODENAME}.noarmor.gpg" \
curl -fsSL https://pkgs.tailscale.com/stable/${ID}/${VER}.noarmor.gpg \
| tee /usr/share/keyrings/tailscale-archive-keyring.gpg >/dev/null

echo "deb [signed-by=/usr/share/keyrings/tailscale-archive-keyring.gpg] https://pkgs.tailscale.com/stable/${ID} ${VERSION_CODENAME} main" \
echo "deb [signed-by=/usr/share/keyrings/tailscale-archive-keyring.gpg] https://pkgs.tailscale.com/stable/${ID} ${VER} main" \
>/etc/apt/sources.list.d/tailscale.list

apt-get update -qq
apt update -qq
apt install -y tailscale >/dev/null
apt-get install -y tailscale >/dev/null

if [[ -f /tmp/resolv.conf.backup ]]; then
echo "[INFO] Restoring original /etc/resolv.conf"
Block a user