Mirror of https://github.com/community-scripts/ProxmoxVE.git, synced 2026-02-14 17:23:25 +01:00

Compare commits (1 commit): copilot/ad... ... MickLesk-p...

Commit: a270879be2

CHANGELOG.md (17 lines changed)
@@ -401,35 +401,18 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

</details>

## 2026-02-14

## 2026-02-13

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

- OpenWebUI: pin numba constraint [@MickLesk](https://github.com/MickLesk) ([#11874](https://github.com/community-scripts/ProxmoxVE/pull/11874))
- Planka: add migrate step to update function [@ZimmermannLeon](https://github.com/ZimmermannLeon) ([#11877](https://github.com/community-scripts/ProxmoxVE/pull/11877))
- Pangolin: switch sqlite-specific back to generic [@MickLesk](https://github.com/MickLesk) ([#11868](https://github.com/community-scripts/ProxmoxVE/pull/11868))
- [Hotfix] Jotty: Copy contents of config backup into /opt/jotty/config [@vhsdream](https://github.com/vhsdream) ([#11864](https://github.com/community-scripts/ProxmoxVE/pull/11864))

- #### 🔧 Refactor

- Refactor: Radicale [@vhsdream](https://github.com/vhsdream) ([#11850](https://github.com/community-scripts/ProxmoxVE/pull/11850))
- chore(donetick): add config entry for v0.1.73 [@tomfrenzel](https://github.com/tomfrenzel) ([#11872](https://github.com/community-scripts/ProxmoxVE/pull/11872))

### 💾 Core

- #### 🔧 Refactor

- core: retry reporting with fallback payloads [@MickLesk](https://github.com/MickLesk) ([#11885](https://github.com/community-scripts/ProxmoxVE/pull/11885))

### 📡 API

- #### ✨ New Features

- error-handler: Implement json_escape and enhance error handling [@MickLesk](https://github.com/MickLesk) ([#11875](https://github.com/community-scripts/ProxmoxVE/pull/11875))

### 🌐 Website

@@ -42,8 +42,7 @@ function update_script() {

msg_info "Restoring Configurations"
mv /opt/selfhosted.yaml /opt/donetick/config
grep -q 'http://localhost"$' /opt/donetick/config/selfhosted.yaml || sed -i '/https:\/\/localhost"$/a\ - "http://localhost"' /opt/donetick/config/selfhosted.yaml
grep -q 'capacitor://localhost' /opt/donetick/config/selfhosted.yaml || sed -i '/http:\/\/localhost"$/a\ - "capacitor://localhost"' /opt/donetick/config/selfhosted.yaml
sed -i '/capacitor:\/\/localhost/d' /opt/donetick/config/selfhosted.yaml
mv /opt/donetick.db /opt/donetick
msg_ok "Restored Configurations"

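The `grep -q ... || sed -i '/pattern/a\ ...'` lines in this hunk make the config edit idempotent: an origin entry is appended only if it is not already present, so re-running the update changes nothing. A minimal, self-contained sketch of the same idiom on a throwaway file (the key name and values below are illustrative, not Donetick's actual config):

```bash
# Append-if-missing idiom, demonstrated on a temporary file.
cfg=$(mktemp)
printf 'allowed_origins:\n  - "https://localhost"\n' >"$cfg"

# Insert the http entry after the https line, but only if it is not there yet;
# a second run of this block is a no-op.
grep -q 'http://localhost"$' "$cfg" ||
  sed -i '/https:\/\/localhost"$/a\  - "http://localhost"' "$cfg"

cat "$cfg"
rm -f "$cfg"
```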
@@ -43,8 +43,8 @@ function update_script() {
msg_ok "Removed legacy installation"

msg_info "Installing uv-based Open-WebUI"
PYTHON_VERSION="3.12" setup_uv
$STD uv tool install --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
PYTHON_VERSION="3.11" setup_uv
$STD uv tool install --python 3.11 open-webui[all]
msg_ok "Installed uv-based Open-WebUI"

msg_info "Restoring data"
@@ -126,7 +126,7 @@ EOF

msg_info "Updating Open WebUI via uv"
PYTHON_VERSION="3.12" setup_uv
$STD uv tool install --force --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool upgrade --python 3.12 open-webui[all]
systemctl restart open-webui
msg_ok "Updated Open WebUI"
msg_ok "Updated successfully!"

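The `--constraint <(echo "numba>=0.60")` argument in the lines above hands uv a constraints file generated on the fly through process substitution; it bounds the numba version the resolver may pick without adding numba as a direct requirement. A rough equivalent using an ordinary file on disk (temporary path chosen only for illustration):

```bash
# Same effect with an on-disk constraints file instead of <(echo ...).
constraints=$(mktemp)
echo "numba>=0.60" >"$constraints"
uv tool install --python 3.12 --constraint "$constraints" "open-webui[all]"
rm -f "$constraints"
```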
@@ -61,12 +61,6 @@ function update_script() {
rm -rf "$BK"
msg_ok "Restored data"

msg_ok "Migrate Database"
cd /opt/planka
$STD npm run db:upgrade
$STD npm run db:migrate
msg_ok "Migrated Database"

msg_info "Starting Service"
systemctl start planka
msg_ok "Started Service"

@@ -1,5 +1,5 @@
{
"generated": "2026-02-14T06:14:47Z",
"generated": "2026-02-13T06:23:00Z",
"versions": [
{
"slug": "2fauth",
@@ -67,9 +67,9 @@
{
"slug": "autobrr",
"repo": "autobrr/autobrr",
"version": "v1.73.0",
"version": "v1.72.1",
"pinned": false,
"date": "2026-02-13T16:37:28Z"
"date": "2026-01-30T12:57:58Z"
},
{
"slug": "autocaliweb",
@@ -193,16 +193,16 @@
{
"slug": "cleanuparr",
"repo": "Cleanuparr/Cleanuparr",
"version": "v2.6.1",
"version": "v2.6.0",
"pinned": false,
"date": "2026-02-13T10:00:19Z"
"date": "2026-02-13T00:14:21Z"
},
{
"slug": "cloudreve",
"repo": "cloudreve/cloudreve",
"version": "4.14.0",
"version": "4.13.0",
"pinned": false,
"date": "2026-02-14T06:05:06Z"
"date": "2026-02-05T12:53:24Z"
},
{
"slug": "comfyui",
@@ -445,9 +445,9 @@
{
"slug": "gotify",
"repo": "gotify/server",
"version": "v2.9.0",
"version": "v2.8.0",
"pinned": false,
"date": "2026-02-13T15:22:31Z"
"date": "2026-01-02T11:56:16Z"
},
{
"slug": "grist",
@@ -508,9 +508,9 @@
{
"slug": "homarr",
"repo": "homarr-labs/homarr",
"version": "v1.53.1",
"version": "v1.53.0",
"pinned": false,
"date": "2026-02-13T19:47:11Z"
"date": "2026-02-06T19:42:58Z"
},
{
"slug": "homebox",
@@ -578,9 +578,9 @@
{
"slug": "jackett",
"repo": "Jackett/Jackett",
"version": "v0.24.1109",
"version": "v0.24.1103",
"pinned": false,
"date": "2026-02-14T05:54:26Z"
"date": "2026-02-13T05:53:23Z"
},
{
"slug": "jellystat",
@@ -823,16 +823,16 @@
{
"slug": "metube",
"repo": "alexta69/metube",
"version": "2026.02.13",
"version": "2026.02.12",
"pinned": false,
"date": "2026-02-13T15:18:17Z"
"date": "2026-02-12T21:05:49Z"
},
{
"slug": "miniflux",
"repo": "miniflux/v2",
"version": "2.2.17",
"version": "2.2.16",
"pinned": false,
"date": "2026-02-13T20:30:17Z"
"date": "2026-01-07T03:26:27Z"
},
{
"slug": "monica",
@@ -1000,7 +1000,7 @@
"repo": "fosrl/pangolin",
"version": "1.15.4",
"pinned": false,
"date": "2026-02-13T23:01:29Z"
"date": "2026-02-13T00:54:02Z"
},
{
"slug": "paperless-ai",
@@ -1026,9 +1026,9 @@
{
"slug": "patchmon",
"repo": "PatchMon/PatchMon",
"version": "v1.4.0",
"version": "v1.3.7",
"pinned": false,
"date": "2026-02-13T10:39:03Z"
"date": "2025-12-25T11:08:14Z"
},
{
"slug": "paymenter",
@@ -1096,9 +1096,9 @@
{
"slug": "pocketbase",
"repo": "pocketbase/pocketbase",
"version": "v0.36.3",
"version": "v0.36.2",
"pinned": false,
"date": "2026-02-13T18:38:58Z"
"date": "2026-02-01T08:12:42Z"
},
{
"slug": "pocketid",
@@ -1219,13 +1219,6 @@
"pinned": false,
"date": "2025-11-16T22:39:01Z"
},
{
"slug": "radicale",
"repo": "Kozea/Radicale",
"version": "v3.6.0",
"pinned": false,
"date": "2026-01-10T06:56:46Z"
},
{
"slug": "rclone",
"repo": "rclone/rclone",
@@ -1313,9 +1306,9 @@
{
"slug": "semaphore",
"repo": "semaphoreui/semaphore",
"version": "v2.17.0",
"version": "v2.16.51",
"pinned": false,
"date": "2026-02-13T21:08:30Z"
"date": "2026-01-12T16:26:38Z"
},
{
"slug": "shelfmark",
@@ -1411,9 +1404,9 @@
{
"slug": "tandoor",
"repo": "TandoorRecipes/recipes",
"version": "2.5.1",
"version": "2.5.0",
"pinned": false,
"date": "2026-02-13T15:57:27Z"
"date": "2026-02-08T13:23:02Z"
},
{
"slug": "tasmoadmin",
@@ -1467,9 +1460,9 @@
{
"slug": "tianji",
"repo": "msgbyte/tianji",
"version": "v1.31.13",
"version": "v1.31.12",
"pinned": false,
"date": "2026-02-13T16:30:09Z"
"date": "2026-02-12T19:06:14Z"
},
{
"slug": "traccar",
@@ -1516,9 +1509,9 @@
{
"slug": "tududi",
"repo": "chrisvel/tududi",
"version": "v0.88.5",
"version": "v0.88.4",
"pinned": false,
"date": "2026-02-13T13:54:14Z"
"date": "2026-01-20T15:11:58Z"
},
{
"slug": "tunarr",
@@ -1565,9 +1558,9 @@
{
"slug": "uptimekuma",
"repo": "louislam/uptime-kuma",
"version": "2.1.1",
"version": "2.1.0",
"pinned": false,
"date": "2026-02-13T16:07:33Z"
"date": "2026-02-07T02:31:49Z"
},
{
"slug": "vaultwarden",

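The hunks above are routine version bumps in the tracked-release manifest (slug, upstream repo, version, pinned flag, release date per entry). For readers who want to query such a manifest locally, a small jq sketch; the file name `versions.json` is assumed here, since the scrape dropped the per-file headers:

```bash
# Print the tracked version and release date for one slug.
jq -r '.versions[] | select(.slug == "autobrr") | "\(.version) (\(.date))"' versions.json

# List every entry whose upstream release is pinned.
jq -r '.versions[] | select(.pinned == true) | .slug' versions.json
```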
@@ -14,8 +14,6 @@
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/ubiquiti-unifi.webp",
"config_path": "",
"description": "UniFi Network Server is a software that helps manage and monitor UniFi networks (Wi-Fi, Ethernet, etc.) by providing an intuitive user interface and advanced features. It allows network administrators to configure, monitor, and upgrade network devices, as well as view network statistics, client devices, and historical events. The aim of the application is to make the management of UniFi networks easier and more efficient.",
"disable": true,
"disable_description": "This script is disabled because UniFi no longer delivers APT packages for Debian systems. The installation relies on APT repositories that are no longer maintained or available. For more details, see: https://github.com/community-scripts/ProxmoxVE/issues/11876",
"install_methods": [
{
"type": "default",

@@ -21,10 +21,10 @@ msg_ok "Installed Dependencies"

setup_hwaccel

PYTHON_VERSION="3.12" setup_uv
PYTHON_VERSION="3.11" setup_uv

msg_info "Installing Open WebUI"
$STD uv tool install --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool install --python 3.11 open-webui[all]
msg_ok "Installed Open WebUI"

read -r -p "${TAB3}Would you like to add Ollama? <y/N> " prompt

misc/api.func (105 lines changed)
@@ -422,8 +422,7 @@ post_to_api() {
detect_gpu
fi
local gpu_vendor="${GPU_VENDOR:-unknown}"
local gpu_model
gpu_model=$(json_escape "${GPU_MODEL:-}")
local gpu_model="${GPU_MODEL:-}"
local gpu_passthrough="${GPU_PASSTHROUGH:-unknown}"

# Detect CPU if not already set
@@ -431,8 +430,7 @@ post_to_api() {
detect_cpu
fi
local cpu_vendor="${CPU_VENDOR:-unknown}"
local cpu_model
cpu_model=$(json_escape "${CPU_MODEL:-}")
local cpu_model="${CPU_MODEL:-}"

# Detect RAM if not already set
if [[ -z "${RAM_SPEED:-}" ]]; then
@@ -523,8 +521,7 @@ post_to_api_vm() {
detect_gpu
fi
local gpu_vendor="${GPU_VENDOR:-unknown}"
local gpu_model
gpu_model=$(json_escape "${GPU_MODEL:-}")
local gpu_model="${GPU_MODEL:-}"
local gpu_passthrough="${GPU_PASSTHROUGH:-unknown}"

# Detect CPU if not already set
@@ -532,8 +529,7 @@ post_to_api_vm() {
detect_cpu
fi
local cpu_vendor="${CPU_VENDOR:-unknown}"
local cpu_model
cpu_model=$(json_escape "${CPU_MODEL:-}")
local cpu_model="${CPU_MODEL:-}"

# Detect RAM if not already set
if [[ -z "${RAM_SPEED:-}" ]]; then

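`json_escape` is referenced throughout these hunks but its body is not part of the diff. Purely for orientation, a hypothetical sketch of what such a helper usually does, namely backslash-escaping characters that would otherwise break the heredoc-built JSON payloads further down; the real implementation in the repository may differ:

```bash
# Hypothetical JSON string escaper; not the repository's actual code.
json_escape() {
  local s="${1:-}"
  s="${s//\\/\\\\}"    # backslashes first, so later escapes are not doubled
  s="${s//\"/\\\"}"    # double quotes
  s="${s//$'\n'/\\n}"  # newlines
  s="${s//$'\r'/\\r}"  # carriage returns
  s="${s//$'\t'/\\t}"  # tabs
  printf '%s' "$s"
}
```

The call sites only rely on the helper writing the escaped string to stdout, e.g. `cpu_model=$(json_escape "${CPU_MODEL:-}")`.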
@@ -596,12 +592,9 @@ post_update_to_api() {
# Silent fail - telemetry should never break scripts
command -v curl &>/dev/null || return 0

# Support "force" mode (3rd arg) to bypass duplicate check for retries after cleanup
local force="${3:-}"
# Prevent duplicate submissions
POST_UPDATE_DONE=${POST_UPDATE_DONE:-false}
if [[ "$POST_UPDATE_DONE" == "true" && "$force" != "force" ]]; then
return 0
fi
[[ "$POST_UPDATE_DONE" == "true" ]] && return 0

[[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0
[[ -z "${RANDOM_UUID:-}" ]] && return 0
@@ -612,14 +605,12 @@ post_update_to_api() {

# Get GPU info (if detected)
local gpu_vendor="${GPU_VENDOR:-unknown}"
local gpu_model
gpu_model=$(json_escape "${GPU_MODEL:-}")
local gpu_model="${GPU_MODEL:-}"
local gpu_passthrough="${GPU_PASSTHROUGH:-unknown}"

# Get CPU info (if detected)
local cpu_vendor="${CPU_VENDOR:-unknown}"
local cpu_model
cpu_model=$(json_escape "${CPU_MODEL:-}")
local cpu_model="${CPU_MODEL:-}"

# Get RAM info (if detected)
local ram_speed="${RAM_SPEED:-}"
@@ -641,7 +632,6 @@ post_update_to_api() {
esac

# For failed/unknown status, resolve exit code and error description
local short_error=""
if [[ "$pb_status" == "failed" ]] || [[ "$pb_status" == "unknown" ]]; then
if [[ "$raw_exit_code" =~ ^[0-9]+$ ]]; then
exit_code="$raw_exit_code"
@@ -655,7 +645,6 @@ post_update_to_api() {
else
error=$(json_escape "$(explain_exit_code "$exit_code")")
fi
short_error=$(json_escape "$(explain_exit_code "$exit_code")")
error_category=$(categorize_error "$exit_code")
[[ -z "$error" ]] && error="Unknown error"
fi
@@ -672,9 +661,8 @@ post_update_to_api() {
pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true
fi

local http_code=""

# ── Attempt 1: Full payload with complete error text ──
# Full payload including all fields - allows record creation if initial call failed
# The Go service will find the record by random_id and PATCH, or create if not found
local JSON_PAYLOAD
JSON_PAYLOAD=$(
cat <<EOF
@@ -706,80 +694,11 @@ post_update_to_api() {
EOF
)

http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD" -o /dev/null 2>/dev/null) || http_code="000"

if [[ "$http_code" =~ ^2[0-9]{2}$ ]]; then
POST_UPDATE_DONE=true
return 0
fi

# ── Attempt 2: Short error text (no full log) ──
sleep 1
local RETRY_PAYLOAD
RETRY_PAYLOAD=$(
cat <<EOF
{
"random_id": "${RANDOM_UUID}",
"type": "${TELEMETRY_TYPE:-lxc}",
"nsapp": "${NSAPP:-unknown}",
"status": "${pb_status}",
"ct_type": ${CT_TYPE:-1},
"disk_size": ${DISK_SIZE:-0},
"core_count": ${CORE_COUNT:-0},
"ram_size": ${RAM_SIZE:-0},
"os_type": "${var_os:-}",
"os_version": "${var_version:-}",
"pve_version": "${pve_version}",
"method": "${METHOD:-default}",
"exit_code": ${exit_code},
"error": "${short_error}",
"error_category": "${error_category}",
"install_duration": ${duration},
"cpu_vendor": "${cpu_vendor}",
"cpu_model": "${cpu_model}",
"gpu_vendor": "${gpu_vendor}",
"gpu_model": "${gpu_model}",
"gpu_passthrough": "${gpu_passthrough}",
"ram_speed": "${ram_speed}",
"repo_source": "${REPO_SOURCE}"
}
EOF
)

http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$RETRY_PAYLOAD" -o /dev/null 2>/dev/null) || http_code="000"

if [[ "$http_code" =~ ^2[0-9]{2}$ ]]; then
POST_UPDATE_DONE=true
return 0
fi

# ── Attempt 3: Minimal payload (bare minimum to set status) ──
sleep 2
local MINIMAL_PAYLOAD
MINIMAL_PAYLOAD=$(
cat <<EOF
{
"random_id": "${RANDOM_UUID}",
"type": "${TELEMETRY_TYPE:-lxc}",
"nsapp": "${NSAPP:-unknown}",
"status": "${pb_status}",
"exit_code": ${exit_code},
"error": "${short_error}",
"error_category": "${error_category}",
"install_duration": ${duration}
}
EOF
)

# Fire-and-forget: never block, never fail
curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \
-H "Content-Type: application/json" \
-d "$MINIMAL_PAYLOAD" -o /dev/null 2>/dev/null || true
-d "$JSON_PAYLOAD" -o /dev/null 2>&1 || true

# Tried 3 times - mark as done regardless to prevent infinite loops
POST_UPDATE_DONE=true
}

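The three attempts in this hunk degrade the payload step by step: full error text first, then a short error, then a minimal status-only record, with the final POST fired without checking the response. Condensed into a reusable shape (a sketch only; payload construction is elided and `TELEMETRY_URL`/`TELEMETRY_TIMEOUT` are the variables used above):

```bash
# Try payloads from richest to most minimal, stop at the first 2xx response,
# and never let a telemetry failure reach the caller.
post_with_fallback() {
  local payload http_code
  for payload in "$@"; do
    http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT:-10}" \
      -X POST "${TELEMETRY_URL}" \
      -H "Content-Type: application/json" \
      -d "$payload" -o /dev/null 2>/dev/null) || http_code="000"
    [[ "$http_code" =~ ^2[0-9]{2}$ ]] && return 0
    sleep 1 # brief back-off before falling back to a smaller payload
  done
  return 0 # fire-and-forget
}

# Usage: post_with_fallback "$JSON_PAYLOAD" "$RETRY_PAYLOAD" "$MINIMAL_PAYLOAD"
```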
@@ -4046,10 +4046,12 @@ EOF'
if [[ $install_exit_code -ne 0 ]]; then
msg_error "Installation failed in container ${CTID} (exit code: ${install_exit_code})"

# Copy install log from container BEFORE API call so get_error_text() can read it
# Report failure to telemetry API
post_update_to_api "failed" "$install_exit_code"

# Copy both logs from container before potential deletion
local build_log_copied=false
local install_log_copied=false
local host_install_log="/tmp/install-lxc-${CTID}-${SESSION_ID}.log"

if [[ -n "$CTID" && -n "${SESSION_ID:-}" ]]; then
# Copy BUILD_LOG (creation log) if it exists
@@ -4057,22 +4059,15 @@ EOF'
cp "${BUILD_LOG}" "/tmp/create-lxc-${CTID}-${SESSION_ID}.log" 2>/dev/null && build_log_copied=true
fi

# Copy INSTALL_LOG from container to host
if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "$host_install_log" 2>/dev/null; then
# Copy INSTALL_LOG from container
if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "/tmp/install-lxc-${CTID}-${SESSION_ID}.log" 2>/dev/null; then
install_log_copied=true
# Point INSTALL_LOG to host copy so get_error_text() finds it
INSTALL_LOG="$host_install_log"
fi
fi

# Report failure to telemetry API (now with log available on host)
post_update_to_api "failed" "$install_exit_code"

# Show available logs
if [[ -n "$CTID" && -n "${SESSION_ID:-}" ]]; then
# Show available logs
echo ""
[[ "$build_log_copied" == true ]] && echo -e "${GN}✔${CL} Container creation log: ${BL}/tmp/create-lxc-${CTID}-${SESSION_ID}.log${CL}"
[[ "$install_log_copied" == true ]] && echo -e "${GN}✔${CL} Installation log: ${BL}${host_install_log}${CL}"
[[ "$install_log_copied" == true ]] && echo -e "${GN}✔${CL} Installation log: ${BL}/tmp/install-lxc-${CTID}-${SESSION_ID}.log${CL}"
fi

# Dev mode: Keep container or open breakpoint shell
@@ -4130,10 +4125,6 @@ EOF'
echo -e "${BFR}${CM}${GN}Container ${CTID} removed${CL}"
fi

# Force one final status update attempt after cleanup
# This ensures status is updated even if the first attempt failed (e.g., HTTP 400)
post_update_to_api "failed" "$install_exit_code" "force"

exit $install_exit_code
fi
}

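One side of these hunks pulls the install log onto the host and points `INSTALL_LOG` at the host copy before calling `post_update_to_api`, so the report can include text from the log and the container can be destroyed afterwards without losing it. Reduced to its core ordering (a sketch using the variables from the hunk above):

```bash
# 1. Pull the install log out of the container while it still exists.
host_install_log="/tmp/install-lxc-${CTID}-${SESSION_ID}.log"
if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "$host_install_log" 2>/dev/null; then
  INSTALL_LOG="$host_install_log" # error reporting reads the host copy
fi

# 2. Report the failure; the payload can now include text from the host-side log.
post_update_to_api "failed" "$install_exit_code"

# 3. Cleanup may remove the container; the log remains under /tmp on the host.
```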
@@ -222,12 +222,6 @@ error_handler() {
pct destroy "$CTID" &>/dev/null || true
echo -e "${GN}✔${CL} Container ${CTID} removed"
fi

# Force one final status update attempt after cleanup
# This ensures status is updated even if the first attempt failed (e.g., HTTP 400)
if declare -f post_update_to_api &>/dev/null; then
post_update_to_api "failed" "$exit_code" "force"
fi
fi
fi
fi