Compare commits

..

2 Commits

Author   SHA1         Message              Date
Tobias   511dd3abb5   Update overseerr.sh  2026-02-15 23:07:19 +01:00
Tobias   0b3a4ba1a3   fix: url             2026-02-15 23:05:37 +01:00
42 changed files with 125 additions and 598 deletions

.github/workflows/autolabeler.yml generated vendored
View File

@@ -100,8 +100,7 @@ jobs:
// If it's an update script PR with json changes and a content label, skip adding website/json
// The PR should be categorized as update script with the content label
if (!(hasUpdateScript && hasJson && hasContentLabel)) {
- labelsToAdd.add("website");
- if (hasJson) labelsToAdd.add("json");
+ labelsToAdd.add(hasJson ? "json" : "website");
}
}

View File

@@ -404,47 +404,6 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
</details>
- ## 2026-02-16
- ### 🆕 New Scripts
- - LinkDing ([#11976](https://github.com/community-scripts/ProxmoxVE/pull/11976))
- ### 🚀 Updated Scripts
- - #### 🐞 Bug Fixes
- - Tududi: Fix sed command for DB_FILE configuration [@tremor021](https://github.com/tremor021) ([#11988](https://github.com/community-scripts/ProxmoxVE/pull/11988))
- - slskd: fix exit position [@MickLesk](https://github.com/MickLesk) ([#11963](https://github.com/community-scripts/ProxmoxVE/pull/11963))
- - cryptpad: restore config earlier and run onlyoffice upgrade [@MickLesk](https://github.com/MickLesk) ([#11964](https://github.com/community-scripts/ProxmoxVE/pull/11964))
- - jellyseerr/overseerr: Migrate update script to Seerr; prompt rerun [@MickLesk](https://github.com/MickLesk) ([#11965](https://github.com/community-scripts/ProxmoxVE/pull/11965))
- - #### 🔧 Refactor
- - Vaultwarden: export VW_VERSION as version number [@MickLesk](https://github.com/MickLesk) ([#11966](https://github.com/community-scripts/ProxmoxVE/pull/11966))
- - Zabbix: Improve zabbix-agent service detection [@MickLesk](https://github.com/MickLesk) ([#11968](https://github.com/community-scripts/ProxmoxVE/pull/11968))
- ### 💾 Core
- - #### ✨ New Features
- - tools.func: ensure /usr/local/bin PATH persists for pct enter sessions [@MickLesk](https://github.com/MickLesk) ([#11970](https://github.com/community-scripts/ProxmoxVE/pull/11970))
- - #### 🔧 Refactor
- - core: remove duplicate error handler from alpine-install.func [@MickLesk](https://github.com/MickLesk) ([#11971](https://github.com/community-scripts/ProxmoxVE/pull/11971))
- ### 📚 Documentation
- - github: add "website" label if "json" changed [@MickLesk](https://github.com/MickLesk) ([#11975](https://github.com/community-scripts/ProxmoxVE/pull/11975))
- ### 🌐 Website
- - #### 📝 Script Information
- - Update Wishlist LXC webpage to include reverse proxy info [@summoningpixels](https://github.com/summoningpixels) ([#11973](https://github.com/community-scripts/ProxmoxVE/pull/11973))
- - Update OpenCloud LXC webpage to include services ports [@summoningpixels](https://github.com/summoningpixels) ([#11969](https://github.com/community-scripts/ProxmoxVE/pull/11969))
## 2026-02-15
### 🆕 New Scripts

View File

@@ -27,7 +27,7 @@ function update_script() {
exit
fi
- RELEASE=$(curl -fsSL https://teamspeak.com/en/downloads/#server | sed -n 's/.*teamspeak3-server_linux_amd64-\([0-9.]*[0-9]\).*/\1/p' | awk 'NR==1')
+ set +o pipefail && RELEASE=$(curl -fsSL https://teamspeak.com/en/downloads/#server | sed -n 's/.*teamspeak3-server_linux_amd64-\([0-9.]*[0-9]\).*/\1/p' | head -1) && set -o pipefail
if [ "${RELEASE}" != "$(cat ~/.teamspeak-server)" ] || [ ! -f ~/.teamspeak-server ]; then
msg_info "Updating ${APP} LXC"

View File

@@ -39,20 +39,17 @@ function update_script() {
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad" "tarball"
- msg_info "Restoring configuration"
- mv /opt/config.js /opt/cryptpad/config/
- msg_ok "Configuration restored"
msg_info "Updating CryptaPad"
cd /opt/cryptpad
$STD npm ci
$STD npm run install:components
- if [ -f "/opt/cryptpad/install-onlyoffice.sh" ]; then
- $STD bash /opt/cryptpad/install-onlyoffice.sh --accept-license
- fi
$STD npm run build
msg_ok "Updated CryptaPad"
+ msg_info "Restoring configuration"
+ mv /opt/config.js /opt/cryptpad/config/
+ msg_ok "Configuration restored"
msg_info "Starting Service"
systemctl start cryptpad
msg_ok "Started Service"

View File

@@ -1,6 +0,0 @@
___ __ ___
/ (_)___ / /______/ (_)___ ____ _
/ / / __ \/ //_/ __ / / __ \/ __ `/
/ / / / / / ,< / /_/ / / / / / /_/ /
/_/_/_/ /_/_/|_|\__,_/_/_/ /_/\__, /
/____/

View File

@@ -45,18 +45,16 @@ function update_script() {
fi
msg_info "Switching update script to Seerr"
- cat <<'EOF' >/usr/bin/update
- #!/usr/bin/env bash
+ cat <<EOF >/usr/bin/update
bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/seerr.sh)"
EOF
chmod +x /usr/bin/update
- msg_ok "Switched update script to Seerr"
- msg_warn "Please type 'update' again to complete the migration"
- exit
+ msg_ok "Switched update script to Seerr. Running update..."
+ exec /usr/bin/update
fi
msg_info "Updating Jellyseerr" msg_info "Updating Jellyseerr"
cd /opt/jellyseerr cd /opt/jellyseerr
systemctl stop jellyseerr systemctl stop jellyseerr
output=$(git pull --no-rebase) output=$(git pull --no-rebase)
pnpm_desired=$(grep -Po '"pnpm":\s*"\K[^"]+' /opt/jellyseerr/package.json) pnpm_desired=$(grep -Po '"pnpm":\s*"\K[^"]+' /opt/jellyseerr/package.json)
@@ -67,7 +65,7 @@ EOF
fi
rm -rf dist .next node_modules
export CYPRESS_INSTALL_BINARY=0
cd /opt/jellyseerr
$STD pnpm install --frozen-lockfile
export NODE_OPTIONS="--max-old-space-size=3072"
$STD pnpm build

View File

@@ -1,79 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (MickLesk)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://linkding.link/
APP="linkding"
var_tags="${var_tags:-bookmarks;management}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/linkding ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_release "linkding" "sissbruecker/linkding"; then
msg_info "Stopping Services"
systemctl stop nginx linkding linkding-tasks
msg_ok "Stopped Services"
msg_info "Backing up Data"
cp -r /opt/linkding/data /opt/linkding_data_backup
cp /opt/linkding/.env /opt/linkding_env_backup
msg_ok "Backed up Data"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "linkding" "sissbruecker/linkding"
msg_info "Restoring Data"
cp -r /opt/linkding_data_backup/. /opt/linkding/data
cp /opt/linkding_env_backup /opt/linkding/.env
rm -rf /opt/linkding_data_backup /opt/linkding_env_backup
ln -sf /usr/lib/x86_64-linux-gnu/mod_icu.so /opt/linkding/libicu.so
msg_ok "Restored Data"
msg_info "Updating LinkDing"
cd /opt/linkding
rm -f bookmarks/settings/dev.py
touch bookmarks/settings/custom.py
$STD npm ci
$STD npm run build
$STD uv sync --no-dev --frozen
$STD uv pip install gunicorn
set -a && source /opt/linkding/.env && set +a
$STD /opt/linkding/.venv/bin/python manage.py migrate
$STD /opt/linkding/.venv/bin/python manage.py collectstatic --no-input
msg_ok "Updated LinkDing"
msg_info "Starting Services"
systemctl start nginx linkding linkding-tasks
msg_ok "Started Services"
msg_ok "Updated successfully!"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9090${CL}"

View File

@@ -44,14 +44,12 @@ function update_script() {
fi
msg_info "Switching update script to Seerr"
- cat <<'EOF' >/usr/bin/update
- #!/usr/bin/env bash
+ cat <<EOF >/usr/bin/update
bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/seerr.sh)"
EOF
chmod +x /usr/bin/update
- msg_ok "Switched update script to Seerr"
- msg_warn "Please type 'update' again to complete the migration"
- exit 0
+ msg_ok "Switched update script to Seerr. Running update..."
+ exec /usr/bin/update
fi
if check_for_gh_release "overseerr" "sct/overseerr"; then

View File

@@ -83,7 +83,6 @@ function update_script() {
msg_ok "Started Soularr Timer" msg_ok "Started Soularr Timer"
msg_ok "Updated Soularr successfully!" msg_ok "Updated Soularr successfully!"
fi fi
exit
} }
start start

View File

@@ -45,8 +45,6 @@ function update_script() {
msg_info "Updating VaultWarden to $VAULT (Patience)" msg_info "Updating VaultWarden to $VAULT (Patience)"
cd /tmp/vaultwarden-src cd /tmp/vaultwarden-src
VW_VERSION="$VAULT"
export VW_VERSION
$STD cargo build --features "sqlite,mysql,postgresql" --release $STD cargo build --features "sqlite,mysql,postgresql" --release
if [[ -f /usr/bin/vaultwarden ]]; then if [[ -f /usr/bin/vaultwarden ]]; then
cp target/release/vaultwarden /usr/bin/ cp target/release/vaultwarden /usr/bin/

View File

@@ -35,18 +35,15 @@ function update_script() {
exit
fi
- if systemctl cat zabbix-agent2.service &>/dev/null; then
+ if systemctl list-unit-files | grep -q zabbix-agent2.service; then
AGENT_SERVICE="zabbix-agent2"
- elif systemctl cat zabbix-agent.service &>/dev/null; then
- AGENT_SERVICE="zabbix-agent"
else
- AGENT_SERVICE=""
- msg_warn "No Zabbix Agent service found, skipping agent actions"
+ AGENT_SERVICE="zabbix-agent"
fi
msg_info "Stopping Services"
systemctl stop zabbix-server
- [[ -n "$AGENT_SERVICE" ]] && systemctl stop "$AGENT_SERVICE"
+ systemctl stop "$AGENT_SERVICE"
msg_ok "Stopped Services"
read -rp "Choose Zabbix version [1] 7.0 LTS [2] 7.4 (Latest Stable) [3] Latest available (default: 2): " ZABBIX_CHOICE
@@ -86,13 +83,13 @@ function update_script() {
$STD apt install --only-upgrade zabbix-server-pgsql zabbix-frontend-php php8.4-pgsql
- if [[ "$AGENT_SERVICE" == "zabbix-agent2" ]]; then
+ if [ "$AGENT_SERVICE" = "zabbix-agent2" ]; then
$STD apt install --only-upgrade zabbix-agent2 zabbix-agent2-plugin-postgresql
if [ -f /etc/zabbix/zabbix_agent2.d/plugins.d/nvidia.conf ]; then
sed -i 's|^Plugins.NVIDIA.System.Path=.*|# Plugins.NVIDIA.System.Path=/usr/libexec/zabbix/zabbix-agent2-plugin-nvidia-gpu|' \
/etc/zabbix/zabbix_agent2.d/plugins.d/nvidia.conf
fi
- elif [[ "$AGENT_SERVICE" == "zabbix-agent" ]]; then
+ else
$STD apt install --only-upgrade zabbix-agent
fi
@@ -108,7 +105,7 @@ function update_script() {
msg_info "Starting Services" msg_info "Starting Services"
systemctl start zabbix-server systemctl start zabbix-server
[[ -n "$AGENT_SERVICE" ]] && systemctl start "$AGENT_SERVICE" systemctl start "$AGENT_SERVICE"
systemctl restart apache2 systemctl restart apache2
msg_ok "Started Services" msg_ok "Started Services"
msg_ok "Updated successfully!" msg_ok "Updated successfully!"

View File

@@ -1,5 +1,5 @@
{
- "generated": "2026-02-16T06:25:10Z",
+ "generated": "2026-02-15T18:07:51Z",
"versions": [
{
"slug": "2fauth",
@@ -256,9 +256,9 @@
{
"slug": "dawarich",
"repo": "Freika/dawarich",
- "version": "1.2.0",
+ "version": "1.1.0",
"pinned": false,
- "date": "2026-02-15T22:33:56Z"
+ "date": "2026-02-08T14:42:45Z"
},
{
"slug": "discopanel",
@@ -316,13 +316,6 @@
"pinned": false, "pinned": false,
"date": "2026-01-06T12:05:40Z" "date": "2026-01-06T12:05:40Z"
}, },
{
"slug": "ebusd",
"repo": "john30/ebusd",
"version": "26.1",
"pinned": false,
"date": "2026-02-09T06:09:24Z"
},
{ {
"slug": "elementsynapse", "slug": "elementsynapse",
"repo": "etkecc/synapse-admin", "repo": "etkecc/synapse-admin",
@@ -361,9 +354,9 @@
{
"slug": "firefly",
"repo": "firefly-iii/firefly-iii",
- "version": "v6.4.22",
+ "version": "v6.4.21",
"pinned": false,
- "date": "2026-02-15T18:43:08Z"
+ "date": "2026-02-14T19:40:46Z"
},
{
"slug": "fladder",
@@ -641,9 +634,9 @@
{
"slug": "kimai",
"repo": "kimai/kimai",
- "version": "2.49.0",
+ "version": "2.48.0",
"pinned": false,
- "date": "2026-02-15T20:40:19Z"
+ "date": "2026-01-31T18:10:59Z"
},
{
"slug": "kitchenowl",
@@ -1432,9 +1425,9 @@
{
"slug": "tautulli",
"repo": "Tautulli/Tautulli",
- "version": "v2.16.1",
+ "version": "v2.16.0",
"pinned": false,
- "date": "2026-02-15T20:40:37Z"
+ "date": "2025-09-09T01:05:45Z"
},
{
"slug": "teddycloud",
@@ -1488,9 +1481,9 @@
{
"slug": "tracearr",
"repo": "connorgallopo/Tracearr",
- "version": "v1.4.18",
+ "version": "v1.4.17",
"pinned": false,
- "date": "2026-02-15T19:55:40Z"
+ "date": "2026-02-11T01:33:21Z"
},
{
"slug": "tracktor",

View File

@@ -1,40 +0,0 @@
{
"name": "linkding",
"slug": "linkding",
"categories": [
12
],
"date_created": "2026-02-16",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 9090,
"documentation": "https://linkding.link/",
"website": "https://linkding.link/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/linkding.webp",
"config_path": "/opt/linkding/.env",
"description": "linkding is a self-hosted bookmark manager that is designed to be minimal, fast, and easy to set up. It features a clean UI, tag-based organization, bulk editing, Markdown notes, read it later functionality, sharing, REST API, and browser extensions for Firefox and Chrome.",
"install_methods": [
{
"type": "default",
"script": "ct/linkding.sh",
"resources": {
"cpu": 2,
"ram": 1024,
"hdd": 4,
"os": "Debian",
"version": "13"
}
}
],
"default_credentials": {
"username": "admin",
"password": null
},
"notes": [
{
"text": "Admin credentials are stored in /opt/linkding/.env",
"type": "info"
}
]
}

View File

@@ -33,7 +33,7 @@
},
"notes": [
{
- "text": "Valid TLS certificates and fully-qualified domain names behind a reverse proxy (Caddy) for 3 services - OpenCloud (port: 9200), Collabora (port: 9980), and WOPI (port: 9300) are **REQUIRED**",
+ "text": "Valid TLS certificates and fully-qualified domain names behind a reverse proxy (Caddy) for 3 services - OpenCloud, Collabora, and WOPI are **REQUIRED**",
"type": "warning"
},
{

View File

@@ -1,40 +1,35 @@
{
"name": "Wishlist",
"slug": "wishlist",
"categories": [
12
],
"date_created": "2026-02-04",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 3280,
"documentation": "https://github.com/cmintey/wishlist/blob/main/README.md#getting-started",
- "website": "https://github.com/cmintey/wishlist",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/cmintey-wishlist.webp",
- "config_path": "/opt/wishlist/.env",
+ "config_path": "/opt/wishlist/.env",
+ "website": "https://github.com/cmintey/wishlist",
+ "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/cmintey-wishlist.webp",
"description": "Wishlist is a self-hosted wishlist application that you can share with your friends and family. You no longer have to wonder what to get your family for the holidays, simply check their wishlist and claim any available item!",
"install_methods": [
{
"type": "default",
"script": "ct/wishlist.sh",
"resources": {
"cpu": 2,
"ram": 2048,
"hdd": 5,
"os": "Debian",
"version": "13"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
- "notes": [
- {
- "text": "When using a reverse proxy with this script, please edit the`ORIGIN` value in `/opt/wishlist/.env` to point to your new URL, otherwise creating an admin account or logging in will not work.",
- "type": "info"
- }
- ]
+ "notes": []
}

View File

@@ -20,7 +20,7 @@ $STD apk add --no-cache \
libc6-compat
msg_ok "Installed dependencies"
- RELEASE=$(curl -fsSL https://teamspeak.com/en/downloads/#server | sed -n 's/.*teamspeak3-server_linux_amd64-\([0-9.]*[0-9]\).*/\1/p' | awk 'NR==1')
+ RELEASE=$(curl -fsSL https://teamspeak.com/en/downloads/#server | sed -n 's/.*teamspeak3-server_linux_amd64-\([0-9.]*[0-9]\).*/\1/p' | head -1)
msg_info "Installing Teamspeak Server v${RELEASE}"
mkdir -p /opt/teamspeak-server
cd /opt/teamspeak-server

View File

@@ -26,13 +26,13 @@ msg_info "Setup CryptPad"
cd /opt/cryptpad
$STD npm ci
$STD npm run install:components
- if [[ "$onlyoffice" =~ ^[Yy]$ ]]; then
- $STD bash -c "./install-onlyoffice.sh --accept-license"
- fi
+ $STD npm run build
cp config/config.example.js config/config.js
sed -i "51s/localhost/${LOCAL_IP}/g" /opt/cryptpad/config/config.js
sed -i "80s#//httpAddress: 'localhost'#httpAddress: '0.0.0.0'#g" /opt/cryptpad/config/config.js
- $STD npm run build
+ if [[ "$onlyoffice" =~ ^[Yy]$ ]]; then
+ $STD bash -c "./install-onlyoffice.sh --accept-license"
+ fi
msg_ok "Setup CryptPad"
msg_info "Creating Service"

View File

@@ -1,126 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (MickLesk)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://linkding.link/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y \
build-essential \
pkg-config \
python3-dev \
nginx \
libpq-dev \
libicu-dev \
libsqlite3-dev \
libffi-dev
msg_ok "Installed Dependencies"
NODE_VERSION="22" setup_nodejs
setup_uv
fetch_and_deploy_gh_release "linkding" "sissbruecker/linkding"
msg_info "Building Frontend"
cd /opt/linkding
$STD npm ci
$STD npm run build
ln -sf /usr/lib/x86_64-linux-gnu/mod_icu.so /opt/linkding/libicu.so
msg_ok "Built Frontend"
msg_info "Setting up LinkDing"
rm -f bookmarks/settings/dev.py
touch bookmarks/settings/custom.py
$STD uv sync --no-dev --frozen
$STD uv pip install gunicorn
mkdir -p data/{favicons,previews,assets}
ADMIN_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | cut -c1-13)
cat <<EOF >/opt/linkding/.env
LD_SUPERUSER_NAME=admin
LD_SUPERUSER_PASSWORD=${ADMIN_PASS}
LD_CSRF_TRUSTED_ORIGINS=http://${LOCAL_IP}:9090
EOF
set -a && source /opt/linkding/.env && set +a
$STD /opt/linkding/.venv/bin/python manage.py generate_secret_key
$STD /opt/linkding/.venv/bin/python manage.py migrate
$STD /opt/linkding/.venv/bin/python manage.py enable_wal
$STD /opt/linkding/.venv/bin/python manage.py create_initial_superuser
$STD /opt/linkding/.venv/bin/python manage.py collectstatic --no-input
msg_ok "Set up LinkDing"
msg_info "Creating Services"
cat <<EOF >/etc/systemd/system/linkding.service
[Unit]
Description=linkding Bookmark Manager
After=network.target
[Service]
User=root
WorkingDirectory=/opt/linkding
EnvironmentFile=/opt/linkding/.env
ExecStart=/opt/linkding/.venv/bin/gunicorn \
--bind 127.0.0.1:8000 \
--workers 3 \
--threads 2 \
--timeout 120 \
bookmarks.wsgi:application
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
cat <<EOF >/etc/systemd/system/linkding-tasks.service
[Unit]
Description=linkding Background Tasks
After=network.target
[Service]
User=root
WorkingDirectory=/opt/linkding
EnvironmentFile=/opt/linkding/.env
ExecStart=/opt/linkding/.venv/bin/python manage.py run_huey
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
cat <<'EOF' >/etc/nginx/sites-available/linkding
server {
listen 9090;
server_name _;
client_max_body_size 20M;
location /static/ {
alias /opt/linkding/static/;
expires 30d;
}
location / {
proxy_pass http://127.0.0.1:8000;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_redirect off;
}
}
EOF
$STD rm -f /etc/nginx/sites-enabled/default
$STD ln -sf /etc/nginx/sites-available/linkding /etc/nginx/sites-enabled/linkding
systemctl enable -q --now nginx linkding linkding-tasks
systemctl restart nginx
msg_ok "Created Services"
motd_ssh
customize
cleanup_lxc

View File

@@ -38,8 +38,8 @@ SECRET="$(openssl rand -hex 64)"
sed -e '/^NODE_ENV=/s/=.*$/=production/' \
-e 's/^TUDUDI_USER/# TUDUDI_USER/g' \
-e "/_SECRET=/s/=.*$/=${SECRET}/" \
- -e '/^# DB_FILE=/s/^# //' \
- -e "s|^DB_FILE=.*|DB_FILE=${DB_LOCATION}/production.sqlite3|" \
+ -e "/^# DB_FILE/s/^# //; \
+ \|DB_FILE|s|/path.*$|${DB_LOCATION}/production.sqlite3|" \
-e "/^# TUDUDI_ALLOWED/s/^# //; \
\|_ORIGINS=|s|=.*$|=<your tududi IP or FDQN>|" \
-e "/^# TUDUDI_UPLOAD/s/^# //; \

View File

@@ -29,8 +29,6 @@ fetch_and_deploy_gh_release "vaultwarden" "dani-garcia/vaultwarden" "tarball" "l
msg_info "Building Vaultwarden (Patience)" msg_info "Building Vaultwarden (Patience)"
cd /tmp/vaultwarden-src cd /tmp/vaultwarden-src
VW_VERSION=$(get_latest_github_release "dani-garcia/vaultwarden")
export VW_VERSION
$STD cargo build --features "sqlite,mysql,postgresql" --release $STD cargo build --features "sqlite,mysql,postgresql" --release
msg_ok "Built Vaultwarden" msg_ok "Built Vaultwarden"

View File

@@ -34,6 +34,42 @@ EOF
fi
}
+ set -Eeuo pipefail
+ trap 'error_handler $? $LINENO "$BASH_COMMAND"' ERR
+ trap on_exit EXIT
+ trap on_interrupt INT
+ trap on_terminate TERM
+ error_handler() {
+ local exit_code="$1"
+ local line_number="$2"
+ local command="$3"
+ if [[ "$exit_code" -eq 0 ]]; then
+ return 0
+ fi
+ printf "\e[?25h"
+ echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
+ exit "$exit_code"
+ }
+ on_exit() {
+ local exit_code="$?"
+ [[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
+ exit "$exit_code"
+ }
+ on_interrupt() {
+ echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
+ exit 130
+ }
+ on_terminate() {
+ echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
+ exit 143
+ }
# This function sets up the Container OS by generating the locale, setting the timezone, and checking the network connection
setting_up_container() {
msg_info "Setting up Container OS"

View File

@@ -34,19 +34,11 @@ net_resolves() {
}
ensure_usr_local_bin_persist() {
- # Login shells: /etc/profile.d/
local PROFILE_FILE="/etc/profile.d/10-localbin.sh"
if [ ! -f "$PROFILE_FILE" ]; then
echo 'case ":$PATH:" in *:/usr/local/bin:*) ;; *) export PATH="/usr/local/bin:$PATH";; esac' >"$PROFILE_FILE"
chmod +x "$PROFILE_FILE"
fi
- # Non-login shells (pct enter): /root/.profile and /root/.bashrc
- for rc_file in /root/.profile /root/.bashrc; do
- if [ -f "$rc_file" ] && ! grep -q '/usr/local/bin' "$rc_file"; then
- echo 'export PATH="/usr/local/bin:$PATH"' >>"$rc_file"
- fi
- done
}
download_with_progress() {

View File

@@ -135,44 +135,19 @@ explain_exit_code() {
# --- Generic / Shell ---
1) echo "General error / Operation not permitted" ;;
2) echo "Misuse of shell builtins (e.g. syntax error)" ;;
- 10) echo "Docker / privileged mode required (unsupported environment)" ;;
# --- curl / wget errors (commonly seen in downloads) ---
- 4) echo "curl: Feature not supported or protocol error" ;;
- 5) echo "curl: Could not resolve proxy" ;;
6) echo "curl: DNS resolution failed (could not resolve host)" ;;
7) echo "curl: Failed to connect (network unreachable / host down)" ;;
- 8) echo "curl: FTP server reply error" ;;
22) echo "curl: HTTP error returned (404, 429, 500+)" ;;
- 23) echo "curl: Write error (disk full or permissions)" ;;
- 25) echo "curl: Upload failed" ;;
28) echo "curl: Operation timeout (network slow or server not responding)" ;;
- 30) echo "curl: FTP port command failed" ;;
35) echo "curl: SSL/TLS handshake failed (certificate error)" ;;
- 56) echo "curl: Receive error (connection reset by peer)" ;;
- 75) echo "Temporary failure (retry later)" ;;
- 78) echo "curl: Remote file not found (404 on FTP/file)" ;;
# --- Package manager / APT / DPKG ---
100) echo "APT: Package manager error (broken packages / dependency problems)" ;;
101) echo "APT: Configuration error (bad sources.list, malformed config)" ;;
102) echo "APT: Lock held by another process (dpkg/apt still running)" ;;
- # --- BSD sysexits.h (64-78) ---
- 64) echo "Usage error (wrong arguments)" ;;
- 65) echo "Data format error (bad input data)" ;;
- 66) echo "Input file not found (cannot open input)" ;;
- 67) echo "User not found (addressee unknown)" ;;
- 68) echo "Host not found (hostname unknown)" ;;
- 69) echo "Service unavailable" ;;
- 70) echo "Internal software error" ;;
- 71) echo "System error (OS-level failure)" ;;
- 72) echo "Critical OS file missing" ;;
- 73) echo "Cannot create output file" ;;
- 74) echo "I/O error" ;;
- 76) echo "Remote protocol error" ;;
- 77) echo "Permission denied" ;;
# --- Common shell/system errors ---
124) echo "Command timed out (timeout command)" ;;
126) echo "Command invoked cannot execute (permission problem?)" ;;
@@ -649,8 +624,6 @@ EOF
curl -fsS -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD" &>/dev/null || true
- POST_TO_API_DONE=true
}
# ------------------------------------------------------------------------------

View File

@@ -5253,20 +5253,14 @@ ensure_log_on_host() {
# - Exit trap handler for reporting to API telemetry
# - Captures exit code and reports to PocketBase using centralized error descriptions
# - Uses explain_exit_code() from api.func for consistent error messages
- # - For non-zero exit codes: posts "failed" status
- # - For zero exit codes where post_update_to_api was never called:
- #   catches orphaned "installing" records (e.g., script exited cleanly
- #   but description() was never reached)
+ # - Posts failure status with exit code to API (error description resolved automatically)
+ # - Only executes on non-zero exit codes
# ------------------------------------------------------------------------------
api_exit_script() {
- local exit_code=$?
+ exit_code=$?
if [ $exit_code -ne 0 ]; then
ensure_log_on_host
post_update_to_api "failed" "$exit_code"
- elif [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- # Script exited with 0 but never sent a completion status
- # This catches edge cases like early returns after post_to_api()
- post_update_to_api "failed" "1"
fi
}

View File

@@ -37,34 +37,11 @@ if ! declare -f explain_exit_code &>/dev/null; then
case "$code" in case "$code" in
1) echo "General error / Operation not permitted" ;; 1) echo "General error / Operation not permitted" ;;
2) echo "Misuse of shell builtins (e.g. syntax error)" ;; 2) echo "Misuse of shell builtins (e.g. syntax error)" ;;
10) echo "Docker / privileged mode required (unsupported environment)" ;;
4) echo "curl: Feature not supported or protocol error" ;;
5) echo "curl: Could not resolve proxy" ;;
6) echo "curl: DNS resolution failed (could not resolve host)" ;; 6) echo "curl: DNS resolution failed (could not resolve host)" ;;
7) echo "curl: Failed to connect (network unreachable / host down)" ;; 7) echo "curl: Failed to connect (network unreachable / host down)" ;;
8) echo "curl: FTP server reply error" ;;
22) echo "curl: HTTP error returned (404, 429, 500+)" ;; 22) echo "curl: HTTP error returned (404, 429, 500+)" ;;
23) echo "curl: Write error (disk full or permissions)" ;;
25) echo "curl: Upload failed" ;;
28) echo "curl: Operation timeout (network slow or server not responding)" ;; 28) echo "curl: Operation timeout (network slow or server not responding)" ;;
30) echo "curl: FTP port command failed" ;;
35) echo "curl: SSL/TLS handshake failed (certificate error)" ;; 35) echo "curl: SSL/TLS handshake failed (certificate error)" ;;
56) echo "curl: Receive error (connection reset by peer)" ;;
75) echo "Temporary failure (retry later)" ;;
78) echo "curl: Remote file not found (404 on FTP/file)" ;;
64) echo "Usage error (wrong arguments)" ;;
65) echo "Data format error (bad input data)" ;;
66) echo "Input file not found (cannot open input)" ;;
67) echo "User not found (addressee unknown)" ;;
68) echo "Host not found (hostname unknown)" ;;
69) echo "Service unavailable" ;;
70) echo "Internal software error" ;;
71) echo "System error (OS-level failure)" ;;
72) echo "Critical OS file missing" ;;
73) echo "Cannot create output file" ;;
74) echo "I/O error" ;;
76) echo "Remote protocol error" ;;
77) echo "Permission denied" ;;
100) echo "APT: Package manager error (broken packages / dependency problems)" ;; 100) echo "APT: Package manager error (broken packages / dependency problems)" ;;
101) echo "APT: Configuration error (bad sources.list, malformed config)" ;; 101) echo "APT: Configuration error (bad sources.list, malformed config)" ;;
102) echo "APT: Lock held by another process (dpkg/apt still running)" ;; 102) echo "APT: Lock held by another process (dpkg/apt still running)" ;;

View File

@@ -172,7 +172,7 @@ network_check() {
fi
set -e
- trap 'error_handler' ERR
+ trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}
# ==============================================================================

View File

@@ -1851,26 +1851,16 @@ function download_with_progress() {
# Ensures /usr/local/bin is permanently in system PATH.
#
# Description:
- # - Adds to /etc/profile.d for login shells (SSH, noVNC)
- # - Adds to /root/.bashrc for non-login shells (pct enter)
+ # - Adds to /etc/profile.d if not present
# ------------------------------------------------------------------------------
function ensure_usr_local_bin_persist() {
- # Skip on Proxmox host
- command -v pveversion &>/dev/null && return
- # Login shells: /etc/profile.d/
local PROFILE_FILE="/etc/profile.d/custom_path.sh"
- if [[ ! -f "$PROFILE_FILE" ]]; then
+ if [[ ! -f "$PROFILE_FILE" ]] && ! command -v pveversion &>/dev/null; then
echo 'export PATH="/usr/local/bin:$PATH"' >"$PROFILE_FILE"
chmod +x "$PROFILE_FILE"
fi
- # Non-login shells (pct enter): /root/.bashrc
- local BASHRC="/root/.bashrc"
- if [[ -f "$BASHRC" ]] && ! grep -q '/usr/local/bin' "$BASHRC"; then
- echo 'export PATH="/usr/local/bin:$PATH"' >>"$BASHRC"
- fi
}
# ------------------------------------------------------------------------------

View File

@@ -529,21 +529,9 @@ cleanup_vmid() {
}
cleanup() {
- local exit_code=$?
if [[ "$(dirs -p | wc -l)" -gt 1 ]]; then
popd >/dev/null || true
fi
- # Report final telemetry status if post_to_api_vm was called but no update was sent
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if declare -f post_update_to_api >/dev/null 2>&1; then
- if [[ $exit_code -ne 0 ]]; then
- post_update_to_api "failed" "$exit_code"
- else
- # Exited cleanly but description()/success was never called — shouldn't happen
- post_update_to_api "failed" "1"
- fi
- fi
- fi
}
check_root() {

View File

@@ -100,15 +100,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -100,15 +100,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -100,15 +100,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -104,16 +104,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- # Only send telemetry if post_to_api_vm was called (installing status was sent)
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -101,15 +101,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -100,15 +100,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -105,15 +105,7 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
rm -rf $TEMP_DIR
}

View File

@@ -79,15 +79,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -101,15 +101,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -109,15 +109,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}

View File

@@ -97,15 +97,7 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
rm -rf $TEMP_DIR
}

View File

@@ -100,15 +100,7 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
rm -rf $TEMP_DIR
}

View File

@@ -99,15 +99,7 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
rm -rf $TEMP_DIR
}

View File

@@ -99,15 +99,8 @@ function cleanup_vmid() {
}
function cleanup() {
- local exit_code=$?
popd >/dev/null
- if [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
- if [[ $exit_code -eq 0 ]]; then
- post_update_to_api "done" "none"
- else
- post_update_to_api "failed" "$exit_code"
- fi
- fi
+ post_update_to_api "done" "none"
rm -rf $TEMP_DIR
}