Compare commits


1 Commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| github-actions[bot] | f82be54f63 | Archive old changelog entries | 2026-02-08 00:08:56 +00:00 |
166 changed files with 2180 additions and 3280 deletions

View File

@@ -89,15 +89,9 @@ jobs:
slug=$(jq -r '.slug // empty' "$json_file" 2>/dev/null)
[[ -z "$slug" ]] && continue
# Find corresponding script (install script or addon script)
install_script=""
if [[ -f "install/${slug}-install.sh" ]]; then
install_script="install/${slug}-install.sh"
elif [[ -f "tools/addon/${slug}.sh" ]]; then
install_script="tools/addon/${slug}.sh"
else
continue
fi
# Find corresponding install script
install_script="install/${slug}-install.sh"
[[ ! -f "$install_script" ]] && continue
# Look for fetch_and_deploy_gh_release calls
# Pattern: fetch_and_deploy_gh_release "app" "owner/repo" ["mode"] ["version"]
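The comment above documents the call signature the workflow scans for. As a rough illustration of that extraction step (a sketch only, not the workflow's actual implementation — it assumes GNU grep with PCRE support and reuses the `$slug` and `$install_script` variables from the snippet):

```bash
# Hypothetical extraction step matching the pattern documented above:
#   fetch_and_deploy_gh_release "app" "owner/repo" ["mode"] ["version"]
# Pulls the second quoted argument (owner/repo) out of the install script.
repo=$(grep -oP 'fetch_and_deploy_gh_release\s+"[^"]+"\s+"\K[^"]+' "$install_script" | head -n1)
[[ -n "$repo" ]] && echo "${slug}: ${repo}"
```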

View File

@@ -401,185 +401,6 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
</details>
## 2026-02-13
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- OpenWebUI: pin numba constraint [@MickLesk](https://github.com/MickLesk) ([#11874](https://github.com/community-scripts/ProxmoxVE/pull/11874))
- Planka: add migrate step to update function [@ZimmermannLeon](https://github.com/ZimmermannLeon) ([#11877](https://github.com/community-scripts/ProxmoxVE/pull/11877))
- Pangolin: switch sqlite-specific back to generic [@MickLesk](https://github.com/MickLesk) ([#11868](https://github.com/community-scripts/ProxmoxVE/pull/11868))
- [Hotfix] Jotty: Copy contents of config backup into /opt/jotty/config [@vhsdream](https://github.com/vhsdream) ([#11864](https://github.com/community-scripts/ProxmoxVE/pull/11864))
- #### 🔧 Refactor
- chore(donetick): add config entry for v0.1.73 [@tomfrenzel](https://github.com/tomfrenzel) ([#11872](https://github.com/community-scripts/ProxmoxVE/pull/11872))
- Refactor: Radicale [@vhsdream](https://github.com/vhsdream) ([#11850](https://github.com/community-scripts/ProxmoxVE/pull/11850))
### 📡 API
- #### ✨ New Features
- error-handler: Implement json_escape and enhance error handling [@MickLesk](https://github.com/MickLesk) ([#11875](https://github.com/community-scripts/ProxmoxVE/pull/11875))
### 🌐 Website
- #### 📝 Script Information
- SQLServer-2025: add PVE9/Kernel 6.x incompatibility warning [@MickLesk](https://github.com/MickLesk) ([#11829](https://github.com/community-scripts/ProxmoxVE/pull/11829))
## 2026-02-12
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- EMQX: increase disk to 6GB and add optional MQ disable prompt [@MickLesk](https://github.com/MickLesk) ([#11844](https://github.com/community-scripts/ProxmoxVE/pull/11844))
- Increased the Grafana container default disk size. [@shtefko](https://github.com/shtefko) ([#11840](https://github.com/community-scripts/ProxmoxVE/pull/11840))
- Pangolin: Update database generation command in install script [@tremor021](https://github.com/tremor021) ([#11825](https://github.com/community-scripts/ProxmoxVE/pull/11825))
- Deluge: add python3-setuptools as dep [@MickLesk](https://github.com/MickLesk) ([#11833](https://github.com/community-scripts/ProxmoxVE/pull/11833))
- Dispatcharr: migrate to uv sync [@MickLesk](https://github.com/MickLesk) ([#11831](https://github.com/community-scripts/ProxmoxVE/pull/11831))
- #### ✨ New Features
- Archlinux-VM: fix LVM/LVM-thin storage and improve error reporting | VM's add correct exit_code for analytics [@MickLesk](https://github.com/MickLesk) ([#11842](https://github.com/community-scripts/ProxmoxVE/pull/11842))
- Debian13-VM: Optimize First Boot & add noCloud/Cloud Selection [@MickLesk](https://github.com/MickLesk) ([#11810](https://github.com/community-scripts/ProxmoxVE/pull/11810))
### 💾 Core
- #### ✨ New Features
- tools.func: auto-detect binary vs armored GPG keys in setup_deb822_repo [@MickLesk](https://github.com/MickLesk) ([#11841](https://github.com/community-scripts/ProxmoxVE/pull/11841))
- core: remove old Go API and extend misc/api.func with new backend [@MickLesk](https://github.com/MickLesk) ([#11822](https://github.com/community-scripts/ProxmoxVE/pull/11822))
- #### 🔧 Refactor
- error_handler: prevent stuck 'installing' status [@MickLesk](https://github.com/MickLesk) ([#11845](https://github.com/community-scripts/ProxmoxVE/pull/11845))
### 🧰 Tools
- #### 🐞 Bug Fixes
- Tailscale: fix DNS check and keyrings directory issues [@MickLesk](https://github.com/MickLesk) ([#11837](https://github.com/community-scripts/ProxmoxVE/pull/11837))
## 2026-02-11
### 🆕 New Scripts
- Draw.io ([#11788](https://github.com/community-scripts/ProxmoxVE/pull/11788))
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- dispatcharr: include port 9191 in success-message [@MickLesk](https://github.com/MickLesk) ([#11808](https://github.com/community-scripts/ProxmoxVE/pull/11808))
- fix: make donetick 0.1.71 compatible [@tomfrenzel](https://github.com/tomfrenzel) ([#11804](https://github.com/community-scripts/ProxmoxVE/pull/11804))
- Kasm: Support new version URL format without hash suffix [@MickLesk](https://github.com/MickLesk) ([#11787](https://github.com/community-scripts/ProxmoxVE/pull/11787))
- LibreTranslate: Remove Torch [@tremor021](https://github.com/tremor021) ([#11783](https://github.com/community-scripts/ProxmoxVE/pull/11783))
- Snowshare: fix update script [@TuroYT](https://github.com/TuroYT) ([#11726](https://github.com/community-scripts/ProxmoxVE/pull/11726))
- #### ✨ New Features
- [Feature] OpenCloud: support PosixFS Collaborative Mode [@vhsdream](https://github.com/vhsdream) ([#11806](https://github.com/community-scripts/ProxmoxVE/pull/11806))
### 💾 Core
- #### 🔧 Refactor
- core: respect EDITOR variable for config editing [@ls-root](https://github.com/ls-root) ([#11693](https://github.com/community-scripts/ProxmoxVE/pull/11693))
### 📚 Documentation
- Fix formatting in kutt.json notes section [@tiagodenoronha](https://github.com/tiagodenoronha) ([#11774](https://github.com/community-scripts/ProxmoxVE/pull/11774))
## 2026-02-10
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- Immich: Pin version to 2.5.6 [@vhsdream](https://github.com/vhsdream) ([#11775](https://github.com/community-scripts/ProxmoxVE/pull/11775))
- Libretranslate: Fix setuptools [@tremor021](https://github.com/tremor021) ([#11772](https://github.com/community-scripts/ProxmoxVE/pull/11772))
- Element Synapse: prevent systemd invoke failure during apt install [@MickLesk](https://github.com/MickLesk) ([#11758](https://github.com/community-scripts/ProxmoxVE/pull/11758))
- #### ✨ New Features
- Refactor: Slskd & Soularr [@vhsdream](https://github.com/vhsdream) ([#11674](https://github.com/community-scripts/ProxmoxVE/pull/11674))
### 🗑️ Deleted Scripts
- move paperless-exporter from LXC to addon ([#11737](https://github.com/community-scripts/ProxmoxVE/pull/11737))
### 🧰 Tools
- #### 🐞 Bug Fixes
- feat: improve storage parsing & add guestname [@carlosmaroot](https://github.com/carlosmaroot) ([#11752](https://github.com/community-scripts/ProxmoxVE/pull/11752))
### 📂 Github
- Github-Version Workflow: include addon scripts in extraction [@MickLesk](https://github.com/MickLesk) ([#11757](https://github.com/community-scripts/ProxmoxVE/pull/11757))
### 🌐 Website
- #### 📝 Script Information
- Snowshare: fix typo in config file path on website [@BirdMakingStuff](https://github.com/BirdMakingStuff) ([#11754](https://github.com/community-scripts/ProxmoxVE/pull/11754))
## 2026-02-09
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- several scripts: add --clear to uv venv calls for uv 0.10 compatibility [@MickLesk](https://github.com/MickLesk) ([#11723](https://github.com/community-scripts/ProxmoxVE/pull/11723))
- Koillection: ensure setup_composer is in update script [@MickLesk](https://github.com/MickLesk) ([#11734](https://github.com/community-scripts/ProxmoxVE/pull/11734))
- PeaNUT: symlink server.js after update [@vhsdream](https://github.com/vhsdream) ([#11696](https://github.com/community-scripts/ProxmoxVE/pull/11696))
- Umlautadaptarr: use release appsettings.json instead of hardcoded copy [@MickLesk](https://github.com/MickLesk) ([#11725](https://github.com/community-scripts/ProxmoxVE/pull/11725))
- tracearr: prepare for next stable release [@durzo](https://github.com/durzo) ([#11673](https://github.com/community-scripts/ProxmoxVE/pull/11673))
- #### ✨ New Features
- remove whiptail from update scripts for unattended update support [@MickLesk](https://github.com/MickLesk) ([#11712](https://github.com/community-scripts/ProxmoxVE/pull/11712))
- #### 🔧 Refactor
- Refactor: FileFlows [@tremor021](https://github.com/tremor021) ([#11108](https://github.com/community-scripts/ProxmoxVE/pull/11108))
- Refactor: wger [@MickLesk](https://github.com/MickLesk) ([#11722](https://github.com/community-scripts/ProxmoxVE/pull/11722))
- Nginx-UI: better User Handling | ACME [@MickLesk](https://github.com/MickLesk) ([#11715](https://github.com/community-scripts/ProxmoxVE/pull/11715))
- NginxProxymanager: use better-sqlite3 [@MickLesk](https://github.com/MickLesk) ([#11708](https://github.com/community-scripts/ProxmoxVE/pull/11708))
### 💾 Core
- #### 🔧 Refactor
- hwaccel: add libmfx-gen1.2 to Intel Arc setup for QSV support [@MickLesk](https://github.com/MickLesk) ([#11707](https://github.com/community-scripts/ProxmoxVE/pull/11707))
### 🧰 Tools
- #### 🐞 Bug Fixes
- addons: ensure curl is installed before use [@MickLesk](https://github.com/MickLesk) ([#11718](https://github.com/community-scripts/ProxmoxVE/pull/11718))
- Netbird (addon): add systemd ordering to start after Docker [@MickLesk](https://github.com/MickLesk) ([#11716](https://github.com/community-scripts/ProxmoxVE/pull/11716))
### ❔ Uncategorized
- Bichon: Update website [@tremor021](https://github.com/tremor021) ([#11711](https://github.com/community-scripts/ProxmoxVE/pull/11711))
## 2026-02-08
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- feat(healthchecks): add sendalerts service [@Mika56](https://github.com/Mika56) ([#11694](https://github.com/community-scripts/ProxmoxVE/pull/11694))
- ComfyUI: Dynamic Fetch PyTorch Versions [@MickLesk](https://github.com/MickLesk) ([#11657](https://github.com/community-scripts/ProxmoxVE/pull/11657))
- #### 💥 Breaking Changes
- Semaphore: switch from Debian to Ubuntu 24.04 [@MickLesk](https://github.com/MickLesk) ([#11670](https://github.com/community-scripts/ProxmoxVE/pull/11670))
## 2026-02-07
### 🆕 New Scripts

api/.env.example (new file, +5 lines)
View File

@@ -0,0 +1,5 @@
MONGO_USER=
MONGO_PASSWORD=
MONGO_IP=
MONGO_PORT=
MONGO_DATABASE=
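
For orientation, these variables are consumed by `ConnectDatabase()` in api/main.go further down, which assembles a `mongodb://` URI from them. The values below are purely illustrative placeholders, not defaults from the repository:

```bash
# Illustrative placeholder values only; ConnectDatabase() builds
# mongodb://$MONGO_USER:$MONGO_PASSWORD@$MONGO_IP:$MONGO_PORT and selects $MONGO_DATABASE.
MONGO_USER=apiuser
MONGO_PASSWORD=change-me
MONGO_IP=127.0.0.1
MONGO_PORT=27017
MONGO_DATABASE=proxmox_api
```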

api/go.mod (new file, +23 lines)
View File

@@ -0,0 +1,23 @@
module proxmox-api
go 1.24.0
require (
github.com/gorilla/mux v1.8.1
github.com/joho/godotenv v1.5.1
github.com/rs/cors v1.11.1
go.mongodb.org/mongo-driver v1.17.2
)
require (
github.com/golang/snappy v0.0.4 // indirect
github.com/klauspost/compress v1.16.7 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/text v0.31.0 // indirect
)

api/go.sum (new file, +56 lines)
View File

@@ -0,0 +1,56 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA=
github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU=
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM=
go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

api/main.go (new file, +450 lines)
View File

@@ -0,0 +1,450 @@
// Copyright (c) 2021-2026 community-scripts ORG
// Author: Michel Roegl-Brunner (michelroegl-brunner)
// License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
package main
import (
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"strconv"
"time"
"github.com/gorilla/mux"
"github.com/joho/godotenv"
"github.com/rs/cors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
)
var client *mongo.Client
var collection *mongo.Collection
func loadEnv() {
if err := godotenv.Load(); err != nil {
log.Fatal("Error loading .env file")
}
}
// DataModel represents a single document in MongoDB
type DataModel struct {
ID primitive.ObjectID `json:"id" bson:"_id,omitempty"`
CT_TYPE uint `json:"ct_type" bson:"ct_type"`
DISK_SIZE float32 `json:"disk_size" bson:"disk_size"`
CORE_COUNT uint `json:"core_count" bson:"core_count"`
RAM_SIZE uint `json:"ram_size" bson:"ram_size"`
OS_TYPE string `json:"os_type" bson:"os_type"`
OS_VERSION string `json:"os_version" bson:"os_version"`
DISABLEIP6 string `json:"disableip6" bson:"disableip6"`
NSAPP string `json:"nsapp" bson:"nsapp"`
METHOD string `json:"method" bson:"method"`
CreatedAt time.Time `json:"created_at" bson:"created_at"`
PVEVERSION string `json:"pve_version" bson:"pve_version"`
STATUS string `json:"status" bson:"status"`
RANDOM_ID string `json:"random_id" bson:"random_id"`
TYPE string `json:"type" bson:"type"`
ERROR string `json:"error" bson:"error"`
}
type StatusModel struct {
RANDOM_ID string `json:"random_id" bson:"random_id"`
ERROR string `json:"error" bson:"error"`
STATUS string `json:"status" bson:"status"`
}
type CountResponse struct {
TotalEntries int64 `json:"total_entries"`
StatusCount map[string]int64 `json:"status_count"`
NSAPPCount map[string]int64 `json:"nsapp_count"`
}
// ConnectDatabase initializes the MongoDB connection
func ConnectDatabase() {
loadEnv()
mongoURI := fmt.Sprintf("mongodb://%s:%s@%s:%s",
os.Getenv("MONGO_USER"),
os.Getenv("MONGO_PASSWORD"),
os.Getenv("MONGO_IP"),
os.Getenv("MONGO_PORT"))
database := os.Getenv("MONGO_DATABASE")
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
var err error
client, err = mongo.Connect(ctx, options.Client().ApplyURI(mongoURI))
if err != nil {
log.Fatal("Failed to connect to MongoDB!", err)
}
collection = client.Database(database).Collection("data_models")
fmt.Println("Connected to MongoDB on 10.10.10.18")
}
// UploadJSON handles API requests and stores data as a document in MongoDB
func UploadJSON(w http.ResponseWriter, r *http.Request) {
var input DataModel
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
input.CreatedAt = time.Now()
_, err := collection.InsertOne(context.Background(), input)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
log.Println("Received data:", input)
w.WriteHeader(http.StatusCreated)
json.NewEncoder(w).Encode(map[string]string{"message": "Data saved successfully"})
}
// UpdateStatus updates the status of a record based on RANDOM_ID
func UpdateStatus(w http.ResponseWriter, r *http.Request) {
var input StatusModel
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
filter := bson.M{"random_id": input.RANDOM_ID}
update := bson.M{"$set": bson.M{"status": input.STATUS, "error": input.ERROR}}
_, err := collection.UpdateOne(context.Background(), filter, update)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
log.Println("Updated data:", input)
w.WriteHeader(http.StatusOK)
json.NewEncoder(w).Encode(map[string]string{"message": "Record updated successfully"})
}
// GetDataJSON fetches all data from MongoDB
func GetDataJSON(w http.ResponseWriter, r *http.Request) {
var records []DataModel
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
cursor, err := collection.Find(ctx, bson.M{})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
records = append(records, record)
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(records)
}
func GetPaginatedData(w http.ResponseWriter, r *http.Request) {
page, _ := strconv.Atoi(r.URL.Query().Get("page"))
limit, _ := strconv.Atoi(r.URL.Query().Get("limit"))
if page < 1 {
page = 1
}
if limit < 1 {
limit = 10
}
skip := (page - 1) * limit
var records []DataModel
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
options := options.Find().SetSkip(int64(skip)).SetLimit(int64(limit))
cursor, err := collection.Find(ctx, bson.M{}, options)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
records = append(records, record)
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(records)
}
func GetSummary(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
totalCount, err := collection.CountDocuments(ctx, bson.M{})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
statusCount := make(map[string]int64)
nsappCount := make(map[string]int64)
pipeline := []bson.M{
{"$group": bson.M{"_id": "$status", "count": bson.M{"$sum": 1}}},
}
cursor, err := collection.Aggregate(ctx, pipeline)
if err == nil {
for cursor.Next(ctx) {
var result struct {
ID string `bson:"_id"`
Count int64 `bson:"count"`
}
if err := cursor.Decode(&result); err == nil {
statusCount[result.ID] = result.Count
}
}
}
pipeline = []bson.M{
{"$group": bson.M{"_id": "$nsapp", "count": bson.M{"$sum": 1}}},
}
cursor, err = collection.Aggregate(ctx, pipeline)
if err == nil {
for cursor.Next(ctx) {
var result struct {
ID string `bson:"_id"`
Count int64 `bson:"count"`
}
if err := cursor.Decode(&result); err == nil {
nsappCount[result.ID] = result.Count
}
}
}
response := CountResponse{
TotalEntries: totalCount,
StatusCount: statusCount,
NSAPPCount: nsappCount,
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(response)
}
func GetByNsapp(w http.ResponseWriter, r *http.Request) {
nsapp := r.URL.Query().Get("nsapp")
var records []DataModel
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
cursor, err := collection.Find(ctx, bson.M{"nsapp": nsapp})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
records = append(records, record)
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(records)
}
func GetByDateRange(w http.ResponseWriter, r *http.Request) {
startDate := r.URL.Query().Get("start_date")
endDate := r.URL.Query().Get("end_date")
if startDate == "" || endDate == "" {
http.Error(w, "Both start_date and end_date are required", http.StatusBadRequest)
return
}
start, err := time.Parse("2006-01-02T15:04:05.999999+00:00", startDate+"T00:00:00+00:00")
if err != nil {
http.Error(w, "Invalid start_date format", http.StatusBadRequest)
return
}
end, err := time.Parse("2006-01-02T15:04:05.999999+00:00", endDate+"T23:59:59+00:00")
if err != nil {
http.Error(w, "Invalid end_date format", http.StatusBadRequest)
return
}
var records []DataModel
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
cursor, err := collection.Find(ctx, bson.M{
"created_at": bson.M{
"$gte": start,
"$lte": end,
},
})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
records = append(records, record)
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(records)
}
func GetByStatus(w http.ResponseWriter, r *http.Request) {
status := r.URL.Query().Get("status")
var records []DataModel
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
cursor, err := collection.Find(ctx, bson.M{"status": status})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
records = append(records, record)
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(records)
}
func GetByOS(w http.ResponseWriter, r *http.Request) {
osType := r.URL.Query().Get("os_type")
osVersion := r.URL.Query().Get("os_version")
var records []DataModel
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
cursor, err := collection.Find(ctx, bson.M{"os_type": osType, "os_version": osVersion})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
records = append(records, record)
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(records)
}
func GetErrors(w http.ResponseWriter, r *http.Request) {
errorCount := make(map[string]int)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
cursor, err := collection.Find(ctx, bson.M{"error": bson.M{"$ne": ""}})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer cursor.Close(ctx)
for cursor.Next(ctx) {
var record DataModel
if err := cursor.Decode(&record); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if record.ERROR != "" {
errorCount[record.ERROR]++
}
}
type ErrorCountResponse struct {
Error string `json:"error"`
Count int `json:"count"`
}
var errorCounts []ErrorCountResponse
for err, count := range errorCount {
errorCounts = append(errorCounts, ErrorCountResponse{
Error: err,
Count: count,
})
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(struct {
ErrorCounts []ErrorCountResponse `json:"error_counts"`
}{
ErrorCounts: errorCounts,
})
}
func main() {
ConnectDatabase()
router := mux.NewRouter()
router.HandleFunc("/upload", UploadJSON).Methods("POST")
router.HandleFunc("/upload/updatestatus", UpdateStatus).Methods("POST")
router.HandleFunc("/data/json", GetDataJSON).Methods("GET")
router.HandleFunc("/data/paginated", GetPaginatedData).Methods("GET")
router.HandleFunc("/data/summary", GetSummary).Methods("GET")
router.HandleFunc("/data/nsapp", GetByNsapp).Methods("GET")
router.HandleFunc("/data/date", GetByDateRange).Methods("GET")
router.HandleFunc("/data/status", GetByStatus).Methods("GET")
router.HandleFunc("/data/os", GetByOS).Methods("GET")
router.HandleFunc("/data/errors", GetErrors).Methods("GET")
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "POST"},
AllowedHeaders: []string{"Content-Type", "Authorization"},
AllowCredentials: true,
})
handler := c.Handler(router)
fmt.Println("Server running on port 8080")
log.Fatal(http.ListenAndServe(":8080", handler))
}
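
To make the route table in `main()` above concrete, here is a hedged usage sketch: the endpoints, port, and query parameters come from the code, while the hostname and payload values are invented examples.

```bash
# Endpoints, port, and field names per api/main.go above; host and values are made-up examples.
curl -X POST http://localhost:8080/upload \
  -H 'Content-Type: application/json' \
  -d '{"ct_type":1,"disk_size":8,"core_count":2,"ram_size":2048,"os_type":"debian","os_version":"13","nsapp":"grafana","method":"default","status":"installing","random_id":"abc123","type":"lxc"}'

# Mark that record as finished (matched on random_id):
curl -X POST http://localhost:8080/upload/updatestatus \
  -H 'Content-Type: application/json' \
  -d '{"random_id":"abc123","status":"done","error":""}'

# Read endpoints:
curl 'http://localhost:8080/data/paginated?page=1&limit=10'
curl 'http://localhost:8080/data/summary'
curl 'http://localhost:8080/data/date?start_date=2026-02-01&end_date=2026-02-08'
```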

View File

@@ -51,7 +51,7 @@ function update_script() {
cp -r /opt/adventurelog-backup/backend/server/media /opt/adventurelog/backend/server/media
cd /opt/adventurelog/backend/server
if [[ ! -x .venv/bin/python ]]; then
$STD uv venv --clear .venv
$STD uv venv .venv
$STD .venv/bin/python -m ensurepip --upgrade
fi
$STD .venv/bin/python -m pip install --upgrade pip

View File

@@ -20,9 +20,28 @@ color
catch_errors
function update_script() {
header_info
$STD apk -U upgrade
msg_ok "Updated successfully!"
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --menu "Select option" 11 58 1 \
"1" "Check for Docker Updates" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit
;;
esac
done
exit 0
}

View File

@@ -9,7 +9,7 @@ APP="Alpine-Grafana"
var_tags="${var_tags:-alpine;monitoring}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-2}"
var_disk="${var_disk:-1}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.23}"
var_unprivileged="${var_unprivileged:-1}"
@@ -20,32 +20,43 @@ color
catch_errors
function update_script() {
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
LXCIP=$(ip a s dev eth0 | awk '/inet / {print $2}' | cut -d/ -f1)
CHOICE=$(msg_menu "Grafana Update Options" \
"1" "Check for Grafana Updates" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening")
case $CHOICE in
1)
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit
;;
2)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=0.0.0.0/g" /etc/conf.d/grafana
service grafana restart
msg_ok "Allowed listening on all interfaces!"
exit
;;
3)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=$LXCIP/g" /etc/conf.d/grafana
service grafana restart
msg_ok "Allowed listening only on ${LXCIP}!"
exit
;;
esac
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --menu "Select option" 11 58 3 \
"1" "Check for Grafana Updates" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit
;;
2)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=0.0.0.0/g" /etc/conf.d/grafana
service grafana restart
msg_ok "Allowed listening on all interfaces!"
exit
;;
3)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=$LXCIP/g" /etc/conf.d/grafana
service grafana restart
msg_ok "Allowed listening only on ${LXCIP}!"
exit
;;
esac
done
exit 0
}

View File

@@ -20,32 +20,43 @@ color
catch_errors
function update_script() {
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
LXCIP=$(ip a s dev eth0 | awk '/inet / {print $2}' | cut -d/ -f1)
CHOICE=$(msg_menu "Loki Update Options" \
"1" "Check for Loki Updates" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening")
case $CHOICE in
1)
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit
;;
2)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=0.0.0.0/g" /etc/conf.d/loki
service loki restart
msg_ok "Allowed listening on all interfaces!"
exit
;;
3)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=$LXCIP/g" /etc/conf.d/loki
service loki restart
msg_ok "Allowed listening only on ${LXCIP}!"
exit
;;
esac
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --menu "Select option" 11 58 3 \
"1" "Check for Loki Updates" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit
;;
2)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=0.0.0.0/g" /etc/conf.d/loki
service loki restart
msg_ok "Allowed listening on all interfaces!"
exit
;;
3)
sed -i -e "s/cfg:server.http_addr=.*/cfg:server.http_addr=$LXCIP/g" /etc/conf.d/loki
service loki restart
msg_ok "Allowed listening only on ${LXCIP}!"
exit
;;
esac
done
exit 0
}

View File

@@ -24,31 +24,33 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
CHOICE=$(msg_menu "Nextcloud Options" \
"1" "Update Alpine Packages" \
"2" "Nextcloud Login Credentials" \
"3" "Renew Self-signed Certificate")
case $CHOICE in
1)
msg_info "Updating Alpine Packages"
$STD apk -U upgrade
msg_ok "Updated Alpine Packages"
msg_ok "Updated successfully!"
exit
;;
2)
cat nextcloud.creds
exit
;;
3)
openssl req -x509 -nodes -days 365 -newkey rsa:4096 -keyout /etc/ssl/private/nextcloud-selfsigned.key -out /etc/ssl/certs/nextcloud-selfsigned.crt -subj "/C=US/O=Nextcloud/OU=Domain Control Validated/CN=nextcloud.local" >/dev/null 2>&1
rc-service nginx restart
msg_ok "Renewed self-signed certificate"
exit
;;
esac
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
while true; do
CHOICE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 3 \
"1" "Nextcloud Login Credentials" ON \
"2" "Renew Self-signed Certificate" OFF \
3>&1 1>&2 2>&3)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
cat nextcloud.creds
exit
;;
2)
openssl req -x509 -nodes -days 365 -newkey rsa:4096 -keyout /etc/ssl/private/nextcloud-selfsigned.key -out /etc/ssl/certs/nextcloud-selfsigned.crt -subj "/C=US/O=Nextcloud/OU=Domain Control Validated/CN=nextcloud.local" >/dev/null 2>&1
rc-service nginx restart
exit
;;
esac
done
exit 0
}
start

View File

@@ -20,36 +20,47 @@ color
catch_errors
function update_script() {
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
LXCIP=$(ip a s dev eth0 | awk '/inet / {print $2}' | cut -d/ -f1)
CHOICE=$(msg_menu "Redis Management" \
"1" "Update Redis" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening")
case $CHOICE in
1)
msg_info "Updating Redis"
apk update && apk upgrade redis
rc-service redis restart
msg_ok "Updated successfully!"
exit
;;
2)
msg_info "Setting Redis to listen on all interfaces"
sed -i 's/^bind .*/bind 0.0.0.0/' /etc/redis.conf
rc-service redis restart
msg_ok "Redis now listens on all interfaces!"
exit
;;
3)
msg_info "Setting Redis to listen only on ${LXCIP}"
sed -i "s/^bind .*/bind ${LXCIP}/" /etc/redis.conf
rc-service redis restart
msg_ok "Redis now listens only on ${LXCIP}!"
exit
;;
esac
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "Redis Management" --menu "Select option" 11 58 3 \
"1" "Update Redis" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
msg_info "Updating Redis"
apk update && apk upgrade redis
rc-service redis restart
msg_ok "Updated successfully!"
exit
;;
2)
msg_info "Setting Redis to listen on all interfaces"
sed -i 's/^bind .*/bind 0.0.0.0/' /etc/redis.conf
rc-service redis restart
msg_ok "Redis now listens on all interfaces!"
exit
;;
3)
msg_info "Setting Redis to listen only on ${LXCIP}"
sed -i "s/^bind .*/bind ${LXCIP}/" /etc/redis.conf
rc-service redis restart
msg_ok "Redis now listens only on ${LXCIP}!"
exit
;;
esac
done
}
start

View File

@@ -20,37 +20,48 @@ color
catch_errors
function update_script() {
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
LXCIP=$(ip a s dev eth0 | awk '/inet / {print $2}' | cut -d/ -f1)
CHOICE=$(msg_menu "Valkey Management" \
"1" "Update Valkey" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening")
case $CHOICE in
1)
msg_info "Updating Valkey"
apk update && apk upgrade valkey
rc-service valkey restart
msg_ok "Updated Valkey"
msg_ok "Updated successfully!"
exit
;;
2)
msg_info "Setting Valkey to listen on all interfaces"
sed -i 's/^bind .*/bind 0.0.0.0/' /etc/valkey/valkey.conf
rc-service valkey restart
msg_ok "Valkey now listens on all interfaces!"
exit
;;
3)
msg_info "Setting Valkey to listen only on ${LXCIP}"
sed -i "s/^bind .*/bind ${LXCIP}/" /etc/valkey/valkey.conf
rc-service valkey restart
msg_ok "Valkey now listens only on ${LXCIP}!"
exit
;;
esac
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "Valkey Management" --menu "Select option" 11 58 3 \
"1" "Update Valkey" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LXCIP} for listening" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
msg_info "Updating Valkey"
apk update && apk upgrade valkey
rc-service valkey restart
msg_ok "Updated Valkey"
msg_ok "Updated successfully!"
exit
;;
2)
msg_info "Setting Valkey to listen on all interfaces"
sed -i 's/^bind .*/bind 0.0.0.0/' /etc/valkey/valkey.conf
rc-service valkey restart
msg_ok "Valkey now listens on all interfaces!"
exit
;;
3)
msg_info "Setting Valkey to listen only on ${LXCIP}"
sed -i "s/^bind .*/bind ${LXCIP}/" /etc/valkey/valkey.conf
rc-service valkey restart
msg_ok "Valkey now listens only on ${LXCIP}!"
exit
;;
esac
done
}
start

View File

@@ -20,38 +20,45 @@ color
catch_errors
function update_script() {
CHOICE=$(msg_menu "Vaultwarden Update Options" \
"1" "Update Vaultwarden" \
"2" "Reset ADMIN_TOKEN")
case $CHOICE in
1)
$STD apk -U upgrade
rc-service vaultwarden restart -q
msg_ok "Updated successfully!"
exit
;;
2)
if [[ "${PHS_SILENT:-0}" == "1" ]]; then
msg_warn "Reset ADMIN_TOKEN requires interactive mode, skipping."
exit
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --menu "Select option" 11 58 2 \
"1" "Update Vaultwarden" \
"2" "Reset ADMIN_TOKEN" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
read -r -s -p "Setup your ADMIN_TOKEN (make it strong): " NEWTOKEN
echo ""
if [[ -n "$NEWTOKEN" ]]; then
if ! command -v argon2 >/dev/null 2>&1; then apk add argon2 &>/dev/null; fi
TOKEN=$(echo -n "${NEWTOKEN}" | argon2 "$(openssl rand -base64 32)" -e -id -k 19456 -t 2 -p 1)
if [[ ! -f /var/lib/vaultwarden/config.json ]]; then
sed -i "s|export ADMIN_TOKEN=.*|export ADMIN_TOKEN='${TOKEN}'|" /etc/conf.d/vaultwarden
else
sed -i "s|\"admin_token\": .*|\"admin_token\": \"${TOKEN}\",|" /var/lib/vaultwarden/config.json
fi
header_info
case $CHOICE in
1)
$STD apk -U upgrade
rc-service vaultwarden restart -q
msg_ok "Admin token updated"
fi
exit
;;
esac
msg_ok "Updated successfully!"
exit
;;
2)
if NEWTOKEN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "Setup your ADMIN_TOKEN (make it strong)" 10 58 3>&1 1>&2 2>&3); then
if [[ -z "$NEWTOKEN" ]]; then exit-script; fi
if ! command -v argon2 >/dev/null 2>&1; then apk add argon2 &>/dev/null; fi
TOKEN=$(echo -n "${NEWTOKEN}" | argon2 "$(openssl rand -base64 32)" -e -id -k 19456 -t 2 -p 1)
if [[ ! -f /var/lib/vaultwarden/config.json ]]; then
sed -i "s|export ADMIN_TOKEN=.*|export ADMIN_TOKEN='${TOKEN}'|" /etc/conf.d/vaultwarden
else
sed -i "s|\"admin_token\": .*|\"admin_token\": \"${TOKEN}\",|" /var/lib/vaultwarden/config.json
fi
rc-service vaultwarden restart -q
fi
clear
exit
;;
esac
done
}
start

View File

@@ -20,10 +20,28 @@ color
catch_errors
function update_script() {
header_info
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit 0
if ! apk -e info newt >/dev/null 2>&1; then
apk add -q newt
fi
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --menu "Select option" 11 58 1 \
"1" "Check for Zigbee2MQTT Updates" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit
;;
esac
done
}
start

View File

@@ -20,10 +20,18 @@ color
catch_errors
function update_script() {
UPD=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 1 \
"1" "Check for Alpine Updates" ON \
3>&1 1>&2 2>&3
)
header_info
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit 0
if [ "$UPD" == "1" ]; then
$STD apk -U upgrade
msg_ok "Updated successfully!"
exit 0
fi
}
start

View File

@@ -44,7 +44,7 @@ function update_script() {
msg_info "Updating Autocaliweb"
cd "$INSTALL_DIR"
if [[ ! -d "$VIRTUAL_ENV" ]]; then
$STD uv venv --clear "$VIRTUAL_ENV"
$STD uv venv "$VIRTUAL_ENV"
fi
$STD uv sync --all-extras --active
cd "$INSTALL_DIR"/koreader/plugins

View File

@@ -40,7 +40,7 @@ function update_script() {
chmod 775 /opt/bazarr /var/lib/bazarr/
# Always ensure venv exists
if [[ ! -d /opt/bazarr/venv/ ]]; then
$STD uv venv --clear /opt/bazarr/venv --python 3.12
$STD uv venv /opt/bazarr/venv --python 3.12
fi
# Always check and fix service file if needed

View File

@@ -23,9 +23,10 @@ function update_script() {
header_info
check_container_storage
check_container_resources
UPD=$(msg_menu "Cronicle Update Options" \
"1" "Update ${APP}" \
"2" "Install ${APP} Worker")
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 2 \
"1" "Update ${APP}" ON \
"2" "Install ${APP} Worker" OFF \
3>&1 1>&2 2>&3)
if [ "$UPD" == "1" ]; then
if [[ ! -d /opt/cronicle ]]; then

View File

@@ -28,7 +28,6 @@ function update_script() {
exit
fi
msg_info "Updating Deluge"
ensure_dependencies python3-setuptools
$STD apt update
$STD pip3 install deluge[all] --upgrade
msg_ok "Updated Deluge"

View File

@@ -103,8 +103,8 @@ function update_script() {
cd /opt/dispatcharr
rm -rf .venv
$STD uv venv --clear
$STD uv sync
$STD uv venv
$STD uv pip install -r requirements.txt --index-strategy unsafe-best-match
$STD uv pip install gunicorn gevent celery redis daphne
msg_ok "Updated Dispatcharr Backend"
@@ -144,4 +144,4 @@ description
msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9191${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"

View File

@@ -35,15 +35,13 @@ function update_script() {
msg_ok "Stopped Service"
msg_info "Backing Up Configurations"
mv /opt/donetick/config/selfhosted.yaml /opt/donetick/donetick.db /opt
mv /opt/donetick/config/selfhosted.yml /opt/donetick/donetick.db /opt
msg_ok "Backed Up Configurations"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "donetick" "donetick/donetick" "prebuild" "latest" "/opt/donetick" "donetick_Linux_x86_64.tar.gz"
msg_info "Restoring Configurations"
mv /opt/selfhosted.yaml /opt/donetick/config
grep -q 'http://localhost"$' /opt/donetick/config/selfhosted.yaml || sed -i '/https:\/\/localhost"$/a\ - "http://localhost"' /opt/donetick/config/selfhosted.yaml
grep -q 'capacitor://localhost' /opt/donetick/config/selfhosted.yaml || sed -i '/http:\/\/localhost"$/a\ - "capacitor://localhost"' /opt/donetick/config/selfhosted.yaml
mv /opt/selfhosted.yml /opt/donetick/config
mv /opt/donetick.db /opt/donetick
msg_ok "Restored Configurations"

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://www.drawio.com/
APP="DrawIO"
var_tags="${var_tags:-diagrams}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /var/lib/tomcat11/webapps/draw.war ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_release "drawio" "jgraph/drawio"; then
msg_info "Stopping service"
systemctl stop tomcat11
msg_ok "Service stopped"
msg_info "Updating Debian LXC"
$STD apt update
$STD apt upgrade -y
msg_ok "Updated Debian LXC"
USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "drawio" "jgraph/drawio" "singlefile" "latest" "/var/lib/tomcat11/webapps" "draw.war"
msg_info "Starting service"
systemctl start tomcat11
msg_ok "Service started"
msg_ok "Updated successfully!"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080/draw${CL}"

View File

@@ -9,7 +9,7 @@ APP="EMQX"
var_tags="${var_tags:-mqtt}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-6}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"

View File

@@ -61,7 +61,7 @@ function update_script() {
msg_info "Updating Backend"
cd /opt/endurain/backend
$STD poetry export -f requirements.txt --output requirements.txt --without-hashes
$STD uv venv --clear
$STD uv venv
$STD uv pip install -r requirements.txt
msg_ok "Backend Updated"

View File

@@ -42,7 +42,7 @@ function update_script() {
rm -rf "$VENV_PATH"
mkdir -p /opt/esphome
cd /opt/esphome
$STD uv venv --clear "$VENV_PATH"
$STD uv venv "$VENV_PATH"
$STD "$VENV_PATH/bin/python" -m ensurepip --upgrade
$STD "$VENV_PATH/bin/python" -m pip install --upgrade pip
$STD "$VENV_PATH/bin/python" -m pip install esphome tornado esptool

View File

@@ -37,12 +37,17 @@ function update_script() {
msg_info "Stopped Service"
msg_info "Creating Backup"
ls /opt/*.tar.gz &>/dev/null && rm -f /opt/*.tar.gz
backup_filename="/opt/${APP}_backup_$(date +%F).tar.gz"
tar -czf "$backup_filename" -C /opt/fileflows Data
msg_ok "Backup Created"
fetch_and_deploy_from_url "https://fileflows.com/downloads/zip" "/opt/fileflows"
msg_info "Updating $APP to latest version"
temp_file=$(mktemp)
curl -fsSL https://fileflows.com/downloads/zip -o "$temp_file"
$STD unzip -o -d /opt/fileflows "$temp_file"
rm -rf "$temp_file"
rm -rf "$backup_filename"
msg_ok "Updated $APP to latest version"
msg_info "Starting Service"
systemctl start fileflows

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVE/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Authors: MickLesk (CanbiZ) | Co-Author: remz1337
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 tteck
# Authors: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://frigate.video/
@@ -11,7 +11,7 @@ var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-20}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_version="${var_version:-11}"
var_unprivileged="${var_unprivileged:-0}"
var_gpu="${var_gpu:-yes}"
@@ -21,15 +21,15 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /etc/systemd/system/frigate.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_error "To update Frigate, create a new container and transfer your configuration."
header_info
check_container_storage
check_container_resources
if [[ ! -f /etc/systemd/system/frigate.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_error "To update Frigate, create a new container and transfer your configuration."
exit
}
start

View File

@@ -31,27 +31,18 @@ function update_script() {
APP_VERSION=$(grep -o '"version": *"[^"]*"' /opt/gitea-mirror/package.json | cut -d'"' -f4)
if [[ $APP_VERSION =~ ^2\. ]]; then
if [[ "${PHS_SILENT:-0}" == "1" ]]; then
msg_warn "Version $APP_VERSION detected. Major version upgrade requires interactive confirmation, skipping."
exit 75
fi
msg_warn "WARNING: Version $APP_VERSION detected!"
msg_warn "Updating from version 2.x will CLEAR ALL CONFIGURATION."
msg_warn "This includes: API tokens, User settings, Repository configurations, All custom settings"
echo ""
read -r -p "Do you want to continue? (y/N): " CONFIRM
if [[ ! "$CONFIRM" =~ ^[Yy]$ ]]; then
if ! whiptail --backtitle "Gitea Mirror Update" --title "⚠️ VERSION 2.x DETECTED" --yesno \
"WARNING: Version $APP_VERSION detected!\n\nUpdating from version 2.x will CLEAR ALL CONFIGURATION.\n\nThis includes:\n• API tokens\n• User settings\n• Repository configurations\n• All custom settings\n\nDo you want to continue with the update process?" 15 70 --defaultno; then
exit 0
fi
msg_warn "FINAL WARNING: This update WILL clear all configuration!"
msg_warn "Please ensure you have backed up API tokens, custom configurations, and repository settings."
echo ""
read -r -p "Final confirmation - proceed? (y/N): " CONFIRM2
if [[ ! "$CONFIRM2" =~ ^[Yy]$ ]]; then
msg_info "Update cancelled. Please backup your configuration before proceeding."
if ! whiptail --backtitle "Gitea Mirror Update" --title "⚠️ FINAL CONFIRMATION" --yesno \
"FINAL WARNING: This update WILL clear all configuration!\n\nBEFORE PROCEEDING, please:\n\n• Copy API tokens to a safe location\n• Backup any custom configurations\n• Note down repository settings\n\nThis action CANNOT be undone!" 18 70 --defaultno; then
whiptail --backtitle "Gitea Mirror Update" --title "Update Cancelled" --msgbox "Update process cancelled. Please backup your configuration before proceeding." 8 60
exit 0
fi
msg_info "Proceeding with version $APP_VERSION update. All configuration will be cleared as warned."
whiptail --backtitle "Gitea Mirror Update" --title "Proceeding with Update" --msgbox \
"Proceeding with version $APP_VERSION update.\n\nAll configuration will be cleared as warned." 8 50
rm -rf /opt/gitea-mirror
fi

View File

@@ -9,7 +9,7 @@ APP="Grafana"
var_tags="${var_tags:-monitoring;visualization}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-4}"
var_disk="${var_disk:-2}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"

View File

@@ -1,6 +0,0 @@
____ ________
/ __ \_________ __ __/ _/ __ \
/ / / / ___/ __ `/ | /| / // // / / /
/ /_/ / / / /_/ /| |/ |/ // // /_/ /
/_____/_/ \__,_/ |__/|__/___/\____/

View File

@@ -0,0 +1,6 @@
____ __ __ ____ __ _ _________ __ ______ __
/ __ \_________ ____ ___ ___ / /_/ /_ ___ __ _______ / __ \____ _____ ___ _____/ /__ __________ / | / / ____/ |/ / / ____/ ______ ____ _____/ /____ _____
/ /_/ / ___/ __ \/ __ `__ \/ _ \/ __/ __ \/ _ \/ / / / ___/_____/ /_/ / __ `/ __ \/ _ \/ ___/ / _ \/ ___/ ___/_____/ |/ / / __ | /_____/ __/ | |/_/ __ \/ __ \/ ___/ __/ _ \/ ___/
/ ____/ / / /_/ / / / / / / __/ /_/ / / / __/ /_/ (__ )_____/ ____/ /_/ / /_/ / __/ / / / __(__ |__ )_____/ /| / /_/ // /_____/ /____> </ /_/ / /_/ / / / /_/ __/ /
/_/ /_/ \____/_/ /_/ /_/\___/\__/_/ /_/\___/\__,_/____/ /_/ \__,_/ .___/\___/_/ /_/\___/____/____/ /_/ |_/\____//_/|_| /_____/_/|_/ .___/\____/_/ \__/\___/_/
/_/ /_/

View File

@@ -27,11 +27,12 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
UPD=$(msg_menu "Home Assistant Update Options" \
"1" "Update ALL Containers" \
"2" "Remove ALL Unused Images" \
"3" "Install HACS" \
"4" "Install FileBrowser")
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "UPDATE" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 4 \
"1" "Update ALL Containers" ON \
"2" "Remove ALL Unused Images" OFF \
"3" "Install HACS" OFF \
"4" "Install FileBrowser" OFF \
3>&1 1>&2 2>&3)
if [ "$UPD" == "1" ]; then
msg_info "Updating All Containers"

View File

@@ -105,7 +105,7 @@ EOF
msg_ok "Image-processing libraries up to date"
fi
RELEASE="2.5.6"
RELEASE="2.5.5"
if check_for_gh_release "Immich" "immich-app/immich" "${RELEASE}"; then
if [[ $(cat ~/.immich) > "2.5.1" ]]; then
msg_info "Enabling Maintenance Mode"

View File

@@ -46,7 +46,7 @@ function update_script() {
msg_info "Restoring configuration & data"
mv /opt/app.env /opt/jotty/.env
[[ -d /opt/data ]] && mv /opt/data /opt/jotty/data
[[ -d /opt/jotty/config ]] && cp -a /opt/config/* /opt/jotty/config && rm -rf /opt/config
[[ -d /opt/jotty/config ]] && mv /opt/config/* /opt/jotty/config
msg_ok "Restored configuration & data"
msg_info "Starting Service"

View File

@@ -34,7 +34,7 @@ function update_script() {
PYTHON_VERSION="3.12" setup_uv
mkdir -p "$INSTALL_DIR"
cd "$INSTALL_DIR"
$STD uv venv --clear .venv
$STD uv venv .venv
$STD "$VENV_PYTHON" -m ensurepip --upgrade
$STD "$VENV_PYTHON" -m pip install --upgrade pip
$STD "$VENV_PYTHON" -m pip install jupyter

View File

@@ -34,19 +34,10 @@ function update_script() {
CURRENT_VERSION=$(readlink -f /opt/kasm/current | awk -F'/' '{print $4}')
KASM_URL=$(curl -fsSL "https://www.kasm.com/downloads" | tr '\n' ' ' | grep -oE 'https://kasm-static-content[^"]*kasm_release_[0-9]+\.[0-9]+\.[0-9]+\.[a-z0-9]+\.tar\.gz' | head -n 1)
if [[ -z "$KASM_URL" ]]; then
SERVICE_IMAGE_URL=$(curl -fsSL "https://www.kasm.com/downloads" | tr '\n' ' ' | grep -oE 'https://kasm-static-content[^"]*kasm_release_service_images_amd64_[0-9]+\.[0-9]+\.[0-9]+\.tar\.gz' | head -n 1)
if [[ -n "$SERVICE_IMAGE_URL" ]]; then
KASM_VERSION=$(echo "$SERVICE_IMAGE_URL" | sed -E 's/.*kasm_release_service_images_amd64_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
KASM_URL="https://kasm-static-content.s3.amazonaws.com/kasm_release_${KASM_VERSION}.tar.gz"
fi
else
KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
fi
if [[ -z "$KASM_URL" ]] || [[ -z "$KASM_VERSION" ]]; then
msg_error "Unable to detect latest Kasm release URL."
exit 1
fi
KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
msg_info "Checked for new version"
msg_info "Removing outdated docker-compose plugin"

View File

@@ -33,8 +33,7 @@ function update_script() {
msg_ok "Stopped Service"
PHP_VERSION="8.5" PHP_APACHE="YES" setup_php
setup_composer
msg_info "Creating a backup"
mv /opt/koillection/ /opt/koillection-backup
msg_ok "Backup created"

View File

@@ -29,13 +29,21 @@ function update_script() {
exit 1
fi
CHOICE=$(msg_menu "Loki Update Options" \
"1" "Update Loki & Promtail" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LOCAL_IP} for listening")
case $CHOICE in
1)
while true; do
CHOICE=$(
whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --menu "Select option" 11 58 3 \
"1" "Update Loki & Promtail" \
"2" "Allow 0.0.0.0 for listening" \
"3" "Allow only ${LOCAL_IP} for listening" 3>&2 2>&1 1>&3
)
exit_status=$?
if [ $exit_status == 1 ]; then
clear
exit-script
fi
header_info
case $CHOICE in
1)
msg_info "Stopping Loki"
systemctl stop loki
if systemctl is-active --quiet promtail 2>/dev/null || dpkg -s promtail >/dev/null 2>&1; then
@@ -77,6 +85,7 @@ function update_script() {
exit
;;
esac
done
exit 0
}

View File

@@ -24,9 +24,21 @@ function update_script() {
check_container_storage
check_container_resources
setup_meilisearch
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Meilisearch Update" --radiolist --cancel-button Exit-Script "Spacebar = Select" 10 58 2 \
"1" "Update Meilisearch" ON \
"2" "Update Meilisearch-UI" OFF \
3>&1 1>&2 2>&3)
if [[ -d /opt/meilisearch-ui ]]; then
if [ "$UPD" == "1" ]; then
setup_meilisearch
exit
fi
if [ "$UPD" == "2" ]; then
if [[ ! -d /opt/meilisearch-ui ]]; then
msg_error "No Meilisearch-UI Installation Found!"
exit
fi
if check_for_gh_release "meilisearch-ui" "riccox/meilisearch-ui"; then
msg_info "Stopping Meilisearch-UI"
systemctl stop meilisearch-ui
@@ -46,11 +58,10 @@ function update_script() {
msg_info "Starting Meilisearch-UI"
systemctl start meilisearch-ui
msg_ok "Started Meilisearch-UI"
msg_ok "Updated successfully!"
fi
exit
fi
msg_ok "Updated successfully!"
exit
}
start

View File

@@ -28,6 +28,12 @@ function update_script() {
exit
fi
msg_error "This script is currently disabled due to an external issue with the OpenResty APT repository."
msg_error "The repository's GPG key uses SHA-1 signatures, which are no longer accepted by Debian as of February 1, 2026."
msg_error "The issue is tracked in openresty/openresty#1097"
msg_error "For more details, see: https://github.com/community-scripts/ProxmoxVE/issues/11406"
exit 1
if [[ $(grep -E '^VERSION_ID=' /etc/os-release) == *"12"* ]]; then
msg_error "Wrong Debian version detected!"
msg_error "Please create a snapshot first. You must upgrade your LXC to Debian Trixie before updating. Visit: https://github.com/community-scripts/ProxmoxVE/discussions/7489"
@@ -139,17 +145,15 @@ function update_script() {
"database": {
"engine": "knex-native",
"knex": {
"client": "better-sqlite3",
"client": "sqlite3",
"connection": {
"filename": "/data/database.sqlite"
},
"useNullAsDefault": true
}
}
}
}
EOF
fi
sed -i 's/"client": "sqlite3"/"client": "better-sqlite3"/' /app/config/production.json
cd /app
$STD yarn install --network-timeout 600000
msg_ok "Initialized Backend"

View File

@@ -27,9 +27,10 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
UPD=$(msg_menu "Node-Red Update Options" \
"1" "Update ${APP}" \
"2" "Install Themes")
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 2 \
"1" "Update ${APP}" ON \
"2" "Install Themes" OFF \
3>&1 1>&2 2>&3)
if [ "$UPD" == "1" ]; then
NODE_VERSION="22" setup_nodejs
@@ -48,31 +49,32 @@ function update_script() {
exit
fi
if [ "$UPD" == "2" ]; then
THEME=$(msg_menu "Node-Red Themes" \
"midnight-red" "Midnight Red (default)" \
"aurora" "Aurora" \
"cobalt2" "Cobalt2" \
"dark" "Dark" \
"dracula" "Dracula" \
"espresso-libre" "Espresso Libre" \
"github-dark" "GitHub Dark" \
"github-dark-default" "GitHub Dark Default" \
"github-dark-dimmed" "GitHub Dark Dimmed" \
"monoindustrial" "Monoindustrial" \
"monokai" "Monokai" \
"monokai-dimmed" "Monokai Dimmed" \
"noctis" "Noctis" \
"oceanic-next" "Oceanic Next" \
"oled" "OLED" \
"one-dark-pro" "One Dark Pro" \
"one-dark-pro-darker" "One Dark Pro Darker" \
"solarized-dark" "Solarized Dark" \
"solarized-light" "Solarized Light" \
"tokyo-night" "Tokyo Night" \
"tokyo-night-light" "Tokyo Night Light" \
"tokyo-night-storm" "Tokyo Night Storm" \
"totallyinformation" "TotallyInformation" \
"zenburn" "Zenburn")
THEME=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "NODE-RED THEMES" --radiolist --cancel-button Exit-Script "Choose Theme" 15 58 6 \
"aurora" "" OFF \
"cobalt2" "" OFF \
"dark" "" OFF \
"dracula" "" OFF \
"espresso-libre" "" OFF \
"github-dark" "" OFF \
"github-dark-default" "" OFF \
"github-dark-dimmed" "" OFF \
"midnight-red" "" ON \
"monoindustrial" "" OFF \
"monokai" "" OFF \
"monokai-dimmed" "" OFF \
"noctis" "" OFF \
"oceanic-next" "" OFF \
"oled" "" OFF \
"one-dark-pro" "" OFF \
"one-dark-pro-darker" "" OFF \
"solarized-dark" "" OFF \
"solarized-light" "" OFF \
"tokyo-night" "" OFF \
"tokyo-night-light" "" OFF \
"tokyo-night-storm" "" OFF \
"totallyinformation" "" OFF \
"zenburn" "" OFF \
3>&1 1>&2 2>&3)
header_info
msg_info "Installing ${THEME} Theme"
cd /root/.node-red

View File

@@ -20,21 +20,32 @@ color
catch_errors
function update_script() {
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "UPDATE MODE" --radiolist --cancel-button Exit-Script "Spacebar = Select" 14 60 2 \
"1" "Check for Alpine Updates" OFF \
"2" "Update NPMplus Docker Container" ON \
3>&1 1>&2 2>&3)
header_info "$APP"
msg_info "Updating Alpine OS"
$STD apk -U upgrade
msg_ok "System updated"
msg_info "Pulling latest NPMplus container image"
cd /opt
$STD docker compose pull
msg_info "Recreating container"
$STD docker compose up -d
msg_ok "Updated NPMplus container"
msg_ok "Updated successfully!"
exit
case "$UPD" in
"1")
msg_info "Updating Alpine OS"
$STD apk -U upgrade
msg_ok "System updated"
exit
;;
"2")
msg_info "Updating NPMplus Container"
cd /opt
msg_info "Pulling latest container image"
$STD docker compose pull
msg_info "Recreating container"
$STD docker compose up -d
msg_ok "Updated successfully!"
exit
;;
esac
exit 0
}
start

View File

@@ -30,7 +30,7 @@ function update_script() {
fi
RELEASE="v5.0.2"
if check_for_gh_release "OpenCloud" "opencloud-eu/opencloud" "${RELEASE}"; then
if check_for_gh_release "opencloud" "opencloud-eu/opencloud" "${RELEASE}"; then
msg_info "Stopping services"
systemctl stop opencloud opencloud-wopi
msg_ok "Stopped services"
@@ -38,21 +38,9 @@ function update_script() {
msg_info "Updating packages"
$STD apt-get update
$STD apt-get dist-upgrade -y
ensure_dependencies "inotify-tools"
msg_ok "Updated packages"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "OpenCloud" "opencloud-eu/opencloud" "singlefile" "${RELEASE}" "/usr/bin" "opencloud-*-linux-amd64"
if ! grep -q 'POSIX_WATCH' /etc/opencloud/opencloud.env; then
sed -i '/^## External/i ## Uncomment below to enable PosixFS Collaborative Mode\
## Increase inotify watch/instance limits on your PVE host:\
### sysctl -w fs.inotify.max_user_watches=1048576\
### sysctl -w fs.inotify.max_user_instances=1024\
# STORAGE_USERS_POSIX_ENABLE_COLLABORATION=true\
# STORAGE_USERS_POSIX_WATCH_TYPE=inotifywait\
# STORAGE_USERS_POSIX_WATCH_FS=true\
# STORAGE_USERS_POSIX_WATCH_PATH=<path-to-storage-or-bind-mount>' /etc/opencloud/opencloud.env
fi
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "opencloud" "opencloud-eu/opencloud" "singlefile" "${RELEASE}" "/usr/bin" "opencloud-*-linux-amd64"
msg_info "Starting services"
systemctl start opencloud opencloud-wopi

View File

@@ -44,7 +44,7 @@ function update_script() {
msg_info "Installing uv-based Open-WebUI"
PYTHON_VERSION="3.12" setup_uv
$STD uv tool install --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool install --python 3.12 open-webui[all]
msg_ok "Installed uv-based Open-WebUI"
msg_info "Restoring data"
@@ -126,7 +126,7 @@ EOF
msg_info "Updating Open WebUI via uv"
PYTHON_VERSION="3.12" setup_uv
$STD uv tool install --force --python 3.12 --constraint <(echo "numba>=0.60") open-webui[all]
$STD uv tool upgrade --python 3.12 open-webui[all]
systemctl restart open-webui
msg_ok "Updated Open WebUI"
msg_ok "Updated successfully!"

View File

@@ -51,7 +51,7 @@ function update_script() {
$STD npm run db:generate
$STD npm run build
$STD npm run build:cli
$STD npm run db:push
$STD npm run db:sqlite:push
cp -R .next/standalone ./
chmod +x ./dist/cli.mjs
cp server/db/names.json ./dist/names.json

View File

@@ -30,30 +30,30 @@ function update_script() {
NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
if check_for_gh_release "PeaNUT" "Brandawg93/PeaNUT"; then
if check_for_gh_release "peanut" "Brandawg93/PeaNUT"; then
msg_info "Stopping Service"
systemctl stop peanut
msg_info "Stopped Service"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "PeaNUT" "Brandawg93/PeaNUT" "tarball" "latest" "/opt/peanut"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "peanut" "Brandawg93/PeaNUT" "tarball" "latest" "/opt/peanut"
if ! grep -q '/opt/peanut/entrypoint.mjs' /etc/systemd/system/peanut.service; then
msg_info "Fixing entrypoint"
cd /opt/peanut
ln -sf .next/standalone/server.js server.js
sed -i 's|/opt/peanut/.next/standalone/server.js|/opt/peanut/entrypoint.mjs|' /etc/systemd/system/peanut.service
systemctl daemon-reload
msg_ok "Fixed entrypoint"
fi
msg_info "Updating PeaNUT"
msg_info "Updating Peanut"
cd /opt/peanut
$STD pnpm i
$STD pnpm run build:local
cp -r .next/static .next/standalone/.next/
mkdir -p /opt/peanut/.next/standalone/config
ln -sf /etc/peanut/settings.yml /opt/peanut/.next/standalone/config/settings.yml
ln -sf .next/standalone/server.js server.js
msg_ok "Updated PeaNUT"
msg_ok "Updated Peanut"
msg_info "Starting Service"
systemctl start peanut
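To verify the entrypoint fix above is what systemd actually loaded, a hedged check:

systemctl cat peanut | grep ExecStart    # should point at /opt/peanut/entrypoint.mjs
systemctl is-active peanut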

View File

@@ -61,12 +61,6 @@ function update_script() {
rm -rf "$BK"
msg_ok "Restored data"
msg_ok "Migrate Database"
cd /opt/planka
$STD npm run db:upgrade
$STD npm run db:migrate
msg_ok "Migrated Database"
msg_info "Starting Service"
systemctl start planka
msg_ok "Started Service"

View File

@@ -29,9 +29,10 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
UPD=$(msg_menu "Plex Update Options" \
"1" "Update LXC" \
"2" "Install plexupdate")
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select \nplexupdate info >> https://github.com/mrworf/plexupdate" 10 59 2 \
"1" "Update LXC" ON \
"2" "Install plexupdate" OFF \
3>&1 1>&2 2>&3)
if [ "$UPD" == "1" ]; then
msg_info "Updating ${APP} LXC"
$STD apt update

View File

@@ -27,11 +27,12 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
UPD=$(msg_menu "Home Assistant Update Options" \
"1" "Update system and containers" \
"2" "Install HACS" \
"3" "Install FileBrowser" \
"4" "Remove ALL Unused Images")
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "UPDATE" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 4 \
"1" "Update system and containers" ON \
"2" "Install HACS" OFF \
"3" "Install FileBrowser" OFF \
"4" "Remove ALL Unused Images" OFF \
3>&1 1>&2 2>&3)
if [ "$UPD" == "1" ]; then
msg_info "Updating ${APP} LXC"

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Andy Grunwald (andygrunwald)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/hansmi/prometheus-paperless-exporter
APP="Prometheus-Paperless-NGX-Exporter"
var_tags="${var_tags:-monitoring;alerting}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-2}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /etc/systemd/system/prometheus-paperless-ngx-exporter.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter"; then
msg_info "Stopping Service"
systemctl stop prometheus-paperless-ngx-exporter
msg_ok "Stopped Service"
fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary"
msg_info "Starting Service"
systemctl start prometheus-paperless-ngx-exporter
msg_ok "Started Service"
msg_ok "Updated successfully!"
fi
exit
}
start
build_container
description
msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8081/metrics${CL}"

View File

@@ -41,7 +41,7 @@ function update_script() {
rm -rf "$PVE_VENV_PATH"
mkdir -p /opt/prometheus-pve-exporter
cd /opt/prometheus-pve-exporter
$STD uv venv --clear "$PVE_VENV_PATH"
$STD uv venv "$PVE_VENV_PATH"
$STD "$PVE_VENV_PATH/bin/python" -m ensurepip --upgrade
$STD "$PVE_VENV_PATH/bin/python" -m pip install --upgrade pip
$STD "$PVE_VENV_PATH/bin/python" -m pip install prometheus-pve-exporter

View File

@@ -28,55 +28,16 @@ function update_script() {
exit
fi
if check_for_gh_release "Radicale" "Kozea/Radicale"; then
msg_info "Stopping service"
systemctl stop radicale
msg_ok "Stopped service"
msg_info "Updating ${APP}"
$STD python3 -m venv /opt/radicale
source /opt/radicale/bin/activate
$STD python3 -m pip install --upgrade https://github.com/Kozea/Radicale/archive/master.tar.gz
msg_ok "Updated ${APP}"
msg_info "Backing up users file"
cp /opt/radicale/users /opt/radicale_users_backup
msg_ok "Backed up users file"
PYTHON_VERSION="3.13" setup_uv
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Radicale" "Kozea/Radicale" "tarball" "latest" "/opt/radicale"
msg_info "Restoring users file"
rm -f /opt/radicale/users
mv /opt/radicale_users_backup /opt/radicale/users
msg_ok "Restored users file"
if grep -q 'start.sh' /etc/systemd/system/radicale.service; then
sed -i -e '/^Description/i[Unit]' \
-e '\|^ExecStart|iWorkingDirectory=/opt/radicale' \
-e 's|^ExecStart=.*|ExecStart=/usr/local/bin/uv run -m radicale --config /etc/radicale/config|' /etc/systemd/system/radicale.service
systemctl daemon-reload
fi
if [[ ! -f /etc/radicale/config ]]; then
msg_info "Migrating to config file (/etc/radicale/config)"
mkdir -p /etc/radicale
cat <<EOF >/etc/radicale/config
[server]
hosts = 0.0.0.0:5232
[auth]
type = htpasswd
htpasswd_filename = /opt/radicale/users
htpasswd_encryption = sha512
[storage]
type = multifilesystem
filesystem_folder = /var/lib/radicale/collections
[web]
type = internal
EOF
msg_ok "Migrated to config (/etc/radicale/config)"
fi
msg_info "Starting service"
systemctl start radicale
msg_ok "Started service"
msg_ok "Updated Successfully!"
fi
msg_info "Starting Service"
systemctl enable -q --now radicale
msg_ok "Started Service"
msg_ok "Updated successfully!"
exit
}
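After the migration above, a hedged way to confirm the rewritten unit and the config the service now reads (paths as shown above):

systemctl cat radicale | grep -E 'ExecStart|WorkingDirectory'
grep -E '^(hosts|htpasswd_filename|filesystem_folder)' /etc/radicale/config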

View File

@@ -41,7 +41,7 @@ function update_script() {
# Always ensure venv exists
if [[ ! -d /opt/sabnzbd/venv ]]; then
msg_info "Migrating SABnzbd to uv virtual environment"
$STD uv venv --clear /opt/sabnzbd/venv
$STD uv venv /opt/sabnzbd/venv
msg_ok "Created uv venv at /opt/sabnzbd/venv"
fi

View File

@@ -38,7 +38,7 @@ function update_script() {
msg_info "Updating Scraparr"
cd /opt/scraparr
$STD uv venv --clear /opt/scraparr/.venv
$STD uv venv /opt/scraparr/.venv
$STD /opt/scraparr/.venv/bin/python -m ensurepip --upgrade
$STD /opt/scraparr/.venv/bin/python -m pip install --upgrade pip
$STD /opt/scraparr/.venv/bin/python -m pip install -r /opt/scraparr/src/scraparr/requirements.txt

View File

@@ -10,8 +10,8 @@ var_tags="${var_tags:-dev_ops}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-ubuntu}"
var_version="${var_version:-24.04}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG
# Author: vhsdream
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/slskd/slskd, https://github.com/mrusse/soularr
# Source: https://github.com/slskd/slskd, https://soularr.net
APP="slskd"
var_tags="${var_tags:-arr;p2p}"
@@ -24,65 +24,50 @@ function update_script() {
check_container_storage
check_container_resources
if [[ ! -d /opt/slskd ]]; then
msg_error "No Slskd Installation Found!"
if [[ ! -d /opt/slskd ]] || [[ ! -d /opt/soularr ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_release "Slskd" "slskd/slskd"; then
msg_info "Stopping Service(s)"
systemctl stop slskd
[[ -f /etc/systemd/system/soularr.service ]] && systemctl stop soularr.timer soularr.service
msg_ok "Stopped Service(s)"
RELEASE=$(curl -s https://api.github.com/repos/slskd/slskd/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping Service"
systemctl stop slskd soularr.timer soularr.service
msg_info "Stopped Service"
msg_info "Backing up config"
cp /opt/slskd/config/slskd.yml /opt/slskd.yml.bak
msg_ok "Backed up config"
msg_info "Updating $APP to v${RELEASE}"
tmp_file=$(mktemp)
curl -fsSL "https://github.com/slskd/slskd/releases/download/${RELEASE}/slskd-${RELEASE}-linux-x64.zip" -o $tmp_file
$STD unzip -oj $tmp_file slskd -d /opt/${APP}
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Slskd" "slskd/slskd" "prebuild" "latest" "/opt/slskd" "slskd-*-linux-x64.zip"
msg_info "Restoring config"
mv /opt/slskd.yml.bak /opt/slskd/config/slskd.yml
msg_ok "Restored config"
msg_info "Starting Service(s)"
msg_info "Starting Service"
systemctl start slskd
[[ -f /etc/systemd/system/soularr.service ]] && systemctl start soularr.timer
msg_ok "Started Service(s)"
msg_ok "Updated Slskd successfully!"
msg_ok "Started Service"
rm -rf $tmp_file
else
msg_ok "No ${APP} update required. ${APP} is already at v${RELEASE}"
fi
[[ -d /opt/soularr ]] && if check_for_gh_release "Soularr" "mrusse/soularr"; then
if systemctl is-active soularr.timer >/dev/null; then
msg_info "Stopping Timer and Service"
systemctl stop soularr.timer soularr.service
msg_ok "Stopped Timer and Service"
fi
msg_info "Updating Soularr"
cp /opt/soularr/config.ini /opt/config.ini.bak
cp /opt/soularr/run.sh /opt/run.sh.bak
cd /tmp
rm -rf /opt/soularr
curl -fsSL -o main.zip https://github.com/mrusse/soularr/archive/refs/heads/main.zip
$STD unzip main.zip
mv soularr-main /opt/soularr
cd /opt/soularr
$STD pip install -r requirements.txt
mv /opt/config.ini.bak /opt/soularr/config.ini
mv /opt/run.sh.bak /opt/soularr/run.sh
rm -rf /tmp/main.zip
msg_ok "Updated soularr"
msg_info "Backing up Soularr config"
cp /opt/soularr/config.ini /opt/soularr_config.ini.bak
cp /opt/soularr/run.sh /opt/soularr_run.sh.bak
msg_ok "Backed up Soularr config"
PYTHON_VERSION="3.11" setup_uv
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Soularr" "mrusse/soularr" "tarball" "latest" "/opt/soularr"
msg_info "Updating Soularr"
cd /opt/soularr
$STD uv venv -c venv
$STD source venv/bin/activate
$STD uv pip install -r requirements.txt
deactivate
msg_ok "Updated Soularr"
msg_info "Restoring Soularr config"
mv /opt/soularr_config.ini.bak /opt/soularr/config.ini
mv /opt/soularr_run.sh.bak /opt/soularr/run.sh
msg_ok "Restored Soularr config"
msg_info "Starting Soularr Timer"
systemctl restart soularr.timer
msg_ok "Started Soularr Timer"
msg_ok "Updated Soularr successfully!"
fi
msg_info "Starting soularr timer"
systemctl start soularr.timer
msg_ok "Started soularr timer"
exit
}
start
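Because Soularr above is driven by a systemd timer rather than a long-running service, a hedged way to confirm scheduling and recent runs:

systemctl list-timers soularr.timer --no-pager
journalctl -u soularr.service -n 20 --no-pager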

View File

@@ -33,15 +33,7 @@ function update_script() {
systemctl stop snowshare
msg_ok "Stopped Service"
msg_info "Backing up uploads"
[ -d /opt/snowshare/uploads ] && cp -a /opt/snowshare/uploads /opt/.snowshare_uploads_backup
msg_ok "Uploads backed up"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "snowshare" "TuroYT/snowshare" "tarball"
msg_info "Restoring uploads"
[ -d /opt/.snowshare_uploads_backup ] && rm -rf /opt/snowshare/uploads && cp -a /opt/.snowshare_uploads_backup /opt/snowshare/uploads
msg_ok "Uploads restored"
fetch_and_deploy_gh_release "snowshare" "TuroYT/snowshare" "tarball"
msg_info "Updating Snowshare"
cd /opt/snowshare

View File

@@ -39,7 +39,7 @@ function update_script() {
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "streamlink-webui" "CrazyWolf13/streamlink-webui" "tarball"
msg_info "Updating streamlink-webui"
$STD uv venv --clear /opt/streamlink-webui/backend/src/.venv
$STD uv venv /opt/streamlink-webui/backend/src/.venv
source /opt/streamlink-webui/backend/src/.venv/bin/activate
$STD uv pip install -r /opt/streamlink-webui/backend/src/requirements.txt --python=/opt/streamlink-webui/backend/src/.venv
cd /opt/streamlink-webui/frontend/src

View File

@@ -50,7 +50,7 @@ function update_script() {
cp -r /opt/tandoor.bak/{config,api,mediafiles,staticfiles} /opt/tandoor/
mv /opt/tandoor.bak/.env /opt/tandoor/.env
cd /opt/tandoor
$STD uv venv --clear .venv --python=python3
$STD uv venv .venv --python=python3
$STD uv pip install -r requirements.txt --python .venv/bin/python
cd /opt/tandoor/vue3
$STD yarn install

View File

@@ -105,7 +105,6 @@ EOF
cp -rf pnpm-lock.yaml /opt/tracearr/
cp -rf apps/server/package.json /opt/tracearr/apps/server/
cp -rf apps/server/dist /opt/tracearr/apps/server/dist
cp -rf apps/server/scripts /opt/tracearr/apps/server/scripts
cp -rf apps/web/dist /opt/tracearr/apps/web/dist
cp -rf packages/shared/package.json /opt/tracearr/packages/shared/
cp -rf packages/shared/dist /opt/tracearr/packages/shared/dist

View File

@@ -33,9 +33,7 @@ function update_script() {
systemctl stop umlautadaptarr
msg_ok "Stopped Service"
cp /opt/UmlautAdaptarr/appsettings.json /opt/UmlautAdaptarr/appsettings.json.bak
fetch_and_deploy_gh_release "UmlautAdaptarr" "PCJones/Umlautadaptarr" "prebuild" "latest" "/opt/UmlautAdaptarr" "linux-x64.zip"
cp /opt/UmlautAdaptarr/appsettings.json.bak /opt/UmlautAdaptarr/appsettings.json
msg_info "Starting Service"
systemctl start umlautadaptarr

View File

@@ -31,9 +31,11 @@ function update_script() {
VAULT=$(get_latest_github_release "dani-garcia/vaultwarden")
WVRELEASE=$(get_latest_github_release "dani-garcia/bw_web_builds")
UPD=$(msg_menu "Vaultwarden Update Options" \
"1" "Update VaultWarden + Web-Vault" \
"2" "Set Admin Token")
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 3 \
"1" "VaultWarden $VAULT" ON \
"2" "Web-Vault $WVRELEASE" OFF \
"3" "Set Admin Token" OFF \
3>&1 1>&2 2>&3)
if [ "$UPD" == "1" ]; then
if check_for_gh_release "vaultwarden" "dani-garcia/vaultwarden"; then
@@ -57,10 +59,14 @@ function update_script() {
msg_info "Starting Service"
systemctl start vaultwarden
msg_ok "Started Service"
msg_ok "Updated successfully!"
else
msg_ok "VaultWarden is already up-to-date"
fi
exit
fi
if [ "$UPD" == "2" ]; then
if check_for_gh_release "vaultwarden_webvault" "dani-garcia/bw_web_builds"; then
msg_info "Stopping Service"
systemctl stop vaultwarden
@@ -78,22 +84,16 @@ function update_script() {
msg_info "Starting Service"
systemctl start vaultwarden
msg_ok "Started Service"
msg_ok "Updated successfully!"
else
msg_ok "Web-Vault is already up-to-date"
fi
msg_ok "Updated successfully!"
exit
fi
if [ "$UPD" == "2" ]; then
if [[ "${PHS_SILENT:-0}" == "1" ]]; then
msg_warn "Set Admin Token requires interactive mode, skipping."
exit
fi
read -r -s -p "Set the ADMIN_TOKEN: " NEWTOKEN
echo ""
if [[ -n "$NEWTOKEN" ]]; then
if [ "$UPD" == "3" ]; then
if NEWTOKEN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "Set the ADMIN_TOKEN" 10 58 3>&1 1>&2 2>&3); then
if [[ -z "$NEWTOKEN" ]]; then exit; fi
ensure_dependencies argon2
TOKEN=$(echo -n "${NEWTOKEN}" | argon2 "$(openssl rand -base64 32)" -t 2 -m 16 -p 4 -l 64 -e)
sed -i "s|ADMIN_TOKEN=.*|ADMIN_TOKEN='${TOKEN}'|" /opt/vaultwarden/.env

View File

@@ -39,7 +39,7 @@ function update_script() {
msg_info "Updating Warracker"
cd /opt/warracker/backend
$STD uv venv --clear .venv
$STD uv venv .venv
$STD source .venv/bin/activate
$STD uv pip install -r requirements.txt
msg_ok "Updated Warracker"

View File

@@ -7,9 +7,9 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
APP="wger"
var_tags="${var_tags:-management;fitness}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-8}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-6}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
@@ -23,44 +23,38 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/wger ]]; then
if [[ ! -d /home/wger ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_release "wger" "wger-project/wger"; then
RELEASE=$(curl -fsSL https://api.github.com/repos/wger-project/wger/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping Service"
systemctl stop redis-server nginx celery celery-beat wger
systemctl stop wger
msg_ok "Stopped Service"
msg_info "Backing up Data"
cp -r /opt/wger/media /opt/wger_media_backup
cp /opt/wger/.env /opt/wger_env_backup
msg_ok "Backed up Data"
msg_info "Updating $APP to v${RELEASE}"
temp_file=$(mktemp)
curl -fsSL "https://github.com/wger-project/wger/archive/refs/tags/$RELEASE.tar.gz" -o "$temp_file"
tar xzf "$temp_file"
cp -rf wger-"$RELEASE"/* /home/wger/src
cd /home/wger/src
$STD pip install -r requirements_prod.txt --ignore-installed
$STD pip install -e .
$STD python3 manage.py migrate
$STD python3 manage.py collectstatic --no-input
$STD yarn install
$STD yarn build:css:sass
rm -rf "$temp_file"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "wger" "wger-project/wger" "tarball"
msg_info "Restoring Data"
cp -r /opt/wger_media_backup/. /opt/wger/media
cp /opt/wger_env_backup /opt/wger/.env
rm -rf /opt/wger_media_backup /opt/wger_env_backup
msg_ok "Restored Data"
msg_info "Updating wger"
cd /opt/wger
set -a && source /opt/wger/.env && set +a
export DJANGO_SETTINGS_MODULE=settings.main
$STD uv pip install .
$STD uv run python manage.py migrate
$STD uv run python manage.py collectstatic --no-input
msg_ok "Updated wger"
msg_info "Starting Services"
systemctl start redis-server nginx celery celery-beat wger
msg_ok "Started Services"
msg_ok "Updated Successfully"
msg_info "Starting Service"
systemctl start wger
msg_ok "Started Service"
msg_ok "Updated successfully!"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
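If the migrate step above is ever in doubt, a hedged check for unapplied Django migrations using the same environment and settings module shown above:

cd /opt/wger
set -a && source /opt/wger/.env && set +a
export DJANGO_SETTINGS_MODULE=settings.main
uv run python manage.py showmigrations | grep '\[ \]' || echo "all migrations applied"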
@@ -69,7 +63,7 @@ start
build_container
description
msg_ok "Completed Successfully!\n"
msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"

View File

@@ -35,10 +35,6 @@
{
"text": "The Disk space initially allocated by the script is only a placeholder, as we can't know how much space you will ever need. You should increase it to match your workload.",
"type": "info"
},
{
"text": "Please copy your `BICHON_ENCRYPT_PASSWORD` from `/opt/bichon/bichon.env` to a safe place.",
"type": "warning"
}
]
}

View File

@@ -1,35 +0,0 @@
{
"name": "Draw.IO",
"slug": "drawio",
"categories": [
12
],
"date_created": "2026-02-11",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 8080,
"documentation": "https://www.drawio.com/doc/",
"website": "https://www.drawio.com/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/draw-io.webp",
"config_path": "",
"description": "draw.io is a configurable diagramming and whiteboarding application, jointly owned and developed by draw.io Ltd (previously named JGraph) and draw.io AG.",
"install_methods": [
{
"type": "default",
"script": "ct/drawio.sh",
"resources": {
"cpu": 1,
"ram": 2048,
"hdd": 4,
"os": "Debian",
"version": "13"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": []
}

View File

@@ -21,7 +21,7 @@
"resources": {
"cpu": 2,
"ram": 1024,
"hdd": 6,
"hdd": 4,
"os": "debian",
"version": "13"
}

View File

@@ -1,44 +0,0 @@
{
"name": "Frigate",
"slug": "frigate",
"categories": [
15
],
"date_created": "2026-02-13",
"type": "ct",
"updateable": false,
"privileged": false,
"config_path": "/config/config.yml",
"interface_port": 5000,
"documentation": "https://frigate.io/",
"website": "https://frigate.io/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/frigate-light.webp",
"description": "Frigate is a complete and local NVR (Network Video Recorder) with realtime AI object detection for CCTV cameras.",
"install_methods": [
{
"type": "default",
"script": "ct/frigate.sh",
"resources": {
"cpu": 4,
"ram": 4096,
"hdd": 20,
"os": "Debian",
"version": "12"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": [
{
"text": "SemanticSearch is not pre-installed due to high resource requirements (8+ cores, 16-24GB RAM, GPU recommended). Manual configuration required if needed.",
"type": "info"
},
{
"text": "OpenVino detector may fail on older CPUs (pre-Haswell/AVX2). If you encounter 'Illegal instruction' errors, consider using alternative detectors.",
"type": "warning"
}
]
}

View File

@@ -0,0 +1,44 @@
{
"name": "Frigate",
"slug": "frigate",
"categories": [
15
],
"date_created": "2024-05-02",
"type": "ct",
"updateable": false,
"privileged": true,
"interface_port": 5000,
"documentation": "https://docs.frigate.video/",
"website": "https://frigate.video/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/frigate.webp",
"config_path": "",
"description": "Frigate is an open source NVR built around real-time AI object detection. All processing is performed locally on your own hardware, and your camera feeds never leave your home.",
"install_methods": [
{
"type": "default",
"script": "ct/frigate.sh",
"resources": {
"cpu": 4,
"ram": 4096,
"hdd": 20,
"os": "debian",
"version": "11"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": [
{
"text": "Discussions (explore more advanced methods): `https://github.com/tteck/Proxmox/discussions/2711`",
"type": "info"
},
{
"text": "go2rtc Interface port:`1984`",
"type": "info"
}
]
}

View File

@@ -1,5 +1,5 @@
{
"generated": "2026-02-13T12:11:36Z",
"generated": "2026-02-07T18:08:02Z",
"versions": [
{
"slug": "2fauth",
@@ -15,13 +15,6 @@
"pinned": false,
"date": "2025-12-08T14:34:55Z"
},
{
"slug": "adguardhome-sync",
"repo": "bakito/adguardhome-sync",
"version": "v0.8.2",
"pinned": false,
"date": "2025-10-24T17:13:47Z"
},
{
"slug": "adventurelog",
"repo": "seanmorley15/adventurelog",
@@ -116,9 +109,9 @@
{
"slug": "bentopdf",
"repo": "alam00000/bentopdf",
"version": "v2.2.0",
"version": "v2.1.0",
"pinned": false,
"date": "2026-02-09T07:07:40Z"
"date": "2026-02-02T14:30:55Z"
},
{
"slug": "beszel",
@@ -127,13 +120,6 @@
"pinned": false,
"date": "2026-02-01T19:02:42Z"
},
{
"slug": "bichon",
"repo": "rustmailer/bichon",
"version": "0.3.7",
"pinned": false,
"date": "2026-01-28T12:47:09Z"
},
{
"slug": "bitmagnet",
"repo": "bitmagnet-io/bitmagnet",
@@ -165,9 +151,9 @@
{
"slug": "byparr",
"repo": "ThePhaseless/Byparr",
"version": "v2.1.0",
"version": "v2.0.1",
"pinned": false,
"date": "2026-02-08T12:59:20Z"
"date": "2025-09-11T20:29:38Z"
},
{
"slug": "bytestash",
@@ -183,19 +169,12 @@
"pinned": false,
"date": "2025-07-29T16:39:18Z"
},
{
"slug": "checkmate",
"repo": "bluewave-labs/Checkmate",
"version": "v3.3",
"pinned": false,
"date": "2026-01-28T14:25:25Z"
},
{
"slug": "cleanuparr",
"repo": "Cleanuparr/Cleanuparr",
"version": "v2.6.1",
"version": "v2.5.1",
"pinned": false,
"date": "2026-02-13T10:00:19Z"
"date": "2026-01-11T00:46:17Z"
},
{
"slug": "cloudreve",
@@ -207,16 +186,16 @@
{
"slug": "comfyui",
"repo": "comfyanonymous/ComfyUI",
"version": "v0.13.0",
"version": "v0.12.3",
"pinned": false,
"date": "2026-02-10T20:27:38Z"
"date": "2026-02-05T07:04:07Z"
},
{
"slug": "commafeed",
"repo": "Athou/commafeed",
"version": "6.2.0",
"version": "6.1.1",
"pinned": false,
"date": "2026-02-09T19:44:58Z"
"date": "2026-01-26T15:14:16Z"
},
{
"slug": "configarr",
@@ -242,72 +221,65 @@
{
"slug": "cronicle",
"repo": "jhuckaby/Cronicle",
"version": "v0.9.106",
"version": "v0.9.105",
"pinned": false,
"date": "2026-02-11T17:11:46Z"
"date": "2026-02-05T18:16:11Z"
},
{
"slug": "cryptpad",
"repo": "cryptpad/cryptpad",
"version": "2026.2.0",
"version": "2025.9.0",
"pinned": false,
"date": "2026-02-11T15:39:05Z"
"date": "2025-10-22T10:06:29Z"
},
{
"slug": "dawarich",
"repo": "Freika/dawarich",
"version": "1.1.0",
"version": "1.0.4",
"pinned": false,
"date": "2026-02-08T14:42:45Z"
"date": "2026-02-01T11:37:27Z"
},
{
"slug": "discopanel",
"repo": "nickheyer/discopanel",
"version": "v1.0.36",
"version": "v1.0.35",
"pinned": false,
"date": "2026-02-09T21:15:44Z"
"date": "2026-02-02T05:20:12Z"
},
{
"slug": "dispatcharr",
"repo": "Dispatcharr/Dispatcharr",
"version": "v0.19.0",
"version": "v0.18.1",
"pinned": false,
"date": "2026-02-10T21:18:10Z"
"date": "2026-01-27T17:09:11Z"
},
{
"slug": "docmost",
"repo": "docmost/docmost",
"version": "v0.25.3",
"version": "v0.25.2",
"pinned": false,
"date": "2026-02-10T02:58:23Z"
"date": "2026-02-06T19:50:55Z"
},
{
"slug": "domain-locker",
"repo": "Lissy93/domain-locker",
"version": "v0.1.3",
"version": "v0.1.2",
"pinned": false,
"date": "2026-02-11T10:03:32Z"
"date": "2025-11-14T22:08:23Z"
},
{
"slug": "domain-monitor",
"repo": "Hosteroid/domain-monitor",
"version": "v1.1.3",
"version": "v1.1.1",
"pinned": false,
"date": "2026-02-11T15:48:18Z"
"date": "2025-11-18T11:32:30Z"
},
{
"slug": "donetick",
"repo": "donetick/donetick",
"version": "v0.1.73",
"version": "v0.1.64",
"pinned": false,
"date": "2026-02-12T23:42:30Z"
},
{
"slug": "drawio",
"repo": "jgraph/drawio",
"version": "v29.3.6",
"pinned": false,
"date": "2026-01-28T18:25:02Z"
"date": "2025-10-03T05:18:24Z"
},
{
"slug": "duplicati",
@@ -333,9 +305,9 @@
{
"slug": "endurain",
"repo": "endurain-project/endurain",
"version": "v0.17.4",
"version": "v0.17.3",
"pinned": false,
"date": "2026-02-11T04:54:22Z"
"date": "2026-01-23T22:02:05Z"
},
{
"slug": "ersatztv",
@@ -354,9 +326,9 @@
{
"slug": "firefly",
"repo": "firefly-iii/firefly-iii",
"version": "v6.4.18",
"version": "v6.4.17",
"pinned": false,
"date": "2026-02-08T07:28:00Z"
"date": "2026-02-07T06:56:00Z"
},
{
"slug": "fladder",
@@ -403,9 +375,9 @@
{
"slug": "ghostfolio",
"repo": "ghostfolio/ghostfolio",
"version": "2.238.0",
"version": "2.236.0",
"pinned": false,
"date": "2026-02-12T18:28:55Z"
"date": "2026-02-05T19:30:11Z"
},
{
"slug": "gitea",
@@ -543,16 +515,9 @@
{
"slug": "huntarr",
"repo": "plexguide/Huntarr.io",
"version": "9.2.4.1",
"version": "9.2.3",
"pinned": false,
"date": "2026-02-12T22:17:47Z"
},
{
"slug": "immich-public-proxy",
"repo": "alangrainger/immich-public-proxy",
"version": "v1.15.1",
"pinned": false,
"date": "2026-01-26T08:04:27Z"
"date": "2026-02-07T04:44:20Z"
},
{
"slug": "inspircd",
@@ -571,23 +536,16 @@
{
"slug": "invoiceninja",
"repo": "invoiceninja/invoiceninja",
"version": "v5.12.59",
"version": "v5.12.55",
"pinned": false,
"date": "2026-02-13T02:26:13Z"
"date": "2026-02-05T01:06:15Z"
},
{
"slug": "jackett",
"repo": "Jackett/Jackett",
"version": "v0.24.1103",
"version": "v0.24.1060",
"pinned": false,
"date": "2026-02-13T05:53:23Z"
},
{
"slug": "jellystat",
"repo": "CyferShepard/Jellystat",
"version": "V1.1.8",
"pinned": false,
"date": "2026-02-08T08:15:00Z"
"date": "2026-02-07T05:55:27Z"
},
{
"slug": "joplin-server",
@@ -599,9 +557,9 @@
{
"slug": "jotty",
"repo": "fccview/jotty",
"version": "1.20.0",
"version": "1.19.1",
"pinned": false,
"date": "2026-02-12T09:23:30Z"
"date": "2026-01-26T21:30:39Z"
},
{
"slug": "kapowarr",
@@ -627,9 +585,9 @@
{
"slug": "keycloak",
"repo": "keycloak/keycloak",
"version": "26.5.3",
"version": "26.5.2",
"pinned": false,
"date": "2026-02-10T07:30:08Z"
"date": "2026-01-23T14:26:58Z"
},
{
"slug": "kimai",
@@ -662,9 +620,9 @@
{
"slug": "kometa",
"repo": "Kometa-Team/Kometa",
"version": "v2.3.0",
"version": "v2.2.2",
"pinned": false,
"date": "2026-02-09T21:26:56Z"
"date": "2025-10-06T21:31:07Z"
},
{
"slug": "komga",
@@ -711,9 +669,9 @@
{
"slug": "libretranslate",
"repo": "LibreTranslate/LibreTranslate",
"version": "v1.9.0",
"version": "v1.8.4",
"pinned": false,
"date": "2026-02-10T19:05:48Z"
"date": "2026-02-02T17:45:16Z"
},
{
"slug": "lidarr",
@@ -746,9 +704,9 @@
{
"slug": "lubelogger",
"repo": "hargata/lubelog",
"version": "v1.6.0",
"version": "v1.5.8",
"pinned": false,
"date": "2026-02-10T20:16:32Z"
"date": "2026-01-26T18:18:03Z"
},
{
"slug": "mafl",
@@ -767,9 +725,9 @@
{
"slug": "mail-archiver",
"repo": "s1t5/mail-archiver",
"version": "2602.1",
"version": "2601.3",
"pinned": false,
"date": "2026-02-11T06:23:11Z"
"date": "2026-01-25T12:52:24Z"
},
{
"slug": "managemydamnlife",
@@ -781,9 +739,9 @@
{
"slug": "manyfold",
"repo": "manyfold3d/manyfold",
"version": "v0.132.1",
"version": "v0.132.0",
"pinned": false,
"date": "2026-02-09T22:02:28Z"
"date": "2026-01-29T13:53:21Z"
},
{
"slug": "mealie",
@@ -795,16 +753,16 @@
{
"slug": "mediamanager",
"repo": "maxdorninger/MediaManager",
"version": "v1.12.3",
"version": "v1.12.1",
"pinned": false,
"date": "2026-02-11T16:45:40Z"
"date": "2026-01-05T09:06:22Z"
},
{
"slug": "mediamtx",
"repo": "bluenviron/mediamtx",
"version": "v1.16.1",
"version": "v1.16.0",
"pinned": false,
"date": "2026-02-07T18:58:24Z"
"date": "2026-01-31T15:38:51Z"
},
{
"slug": "meilisearch",
@@ -818,14 +776,14 @@
"repo": "usememos/memos",
"version": "v0.25.3",
"pinned": true,
"date": "2025-11-25T15:40:41Z"
"date": "2025-11-25T00:00:00Z"
},
{
"slug": "metube",
"repo": "alexta69/metube",
"version": "2026.02.12",
"version": "2026.02.07",
"pinned": false,
"date": "2026-02-12T21:05:49Z"
"date": "2026-02-07T16:24:37Z"
},
{
"slug": "miniflux",
@@ -844,9 +802,9 @@
{
"slug": "myip",
"repo": "jason5ng32/MyIP",
"version": "v5.2.1",
"version": "v5.2.0",
"pinned": false,
"date": "2026-02-10T07:38:47Z"
"date": "2026-01-05T05:56:57Z"
},
{
"slug": "mylar3",
@@ -865,9 +823,9 @@
{
"slug": "navidrome",
"repo": "navidrome/navidrome",
"version": "v0.60.3",
"version": "v0.60.0",
"pinned": false,
"date": "2026-02-10T23:55:04Z"
"date": "2026-02-03T18:57:04Z"
},
{
"slug": "netbox",
@@ -876,13 +834,6 @@
"pinned": false,
"date": "2026-02-03T13:54:26Z"
},
{
"slug": "nextcloud-exporter",
"repo": "xperimental/nextcloud-exporter",
"version": "v0.9.0",
"pinned": false,
"date": "2025-10-12T20:03:10Z"
},
{
"slug": "nginx-ui",
"repo": "0xJacky/nginx-ui",
@@ -902,7 +853,7 @@
"repo": "nocodb/nocodb",
"version": "0.301.1",
"pinned": true,
"date": "2026-01-14T13:13:33Z"
"date": "2026-01-21T16:23:04Z"
},
{
"slug": "nodebb",
@@ -998,9 +949,9 @@
{
"slug": "pangolin",
"repo": "fosrl/pangolin",
"version": "1.15.4",
"version": "1.15.2",
"pinned": false,
"date": "2026-02-13T00:54:02Z"
"date": "2026-02-05T19:23:58Z"
},
{
"slug": "paperless-ai",
@@ -1026,9 +977,9 @@
{
"slug": "patchmon",
"repo": "PatchMon/PatchMon",
"version": "v1.4.0",
"version": "v1.3.7",
"pinned": false,
"date": "2026-02-13T10:39:03Z"
"date": "2025-12-25T11:08:14Z"
},
{
"slug": "paymenter",
@@ -1040,23 +991,23 @@
{
"slug": "peanut",
"repo": "Brandawg93/PeaNUT",
"version": "v5.22.0",
"version": "v5.21.2",
"pinned": false,
"date": "2026-02-08T00:32:25Z"
"date": "2026-01-18T17:32:08Z"
},
{
"slug": "pelican-panel",
"repo": "pelican-dev/panel",
"version": "v1.0.0-beta32",
"version": "v1.0.0-beta31",
"pinned": false,
"date": "2026-02-09T22:15:44Z"
"date": "2026-01-18T22:43:24Z"
},
{
"slug": "pelican-wings",
"repo": "pelican-dev/wings",
"version": "v1.0.0-beta23",
"version": "v1.0.0-beta22",
"pinned": false,
"date": "2026-02-09T22:10:26Z"
"date": "2026-01-18T22:38:36Z"
},
{
"slug": "pf2etools",
@@ -1072,19 +1023,12 @@
"pinned": false,
"date": "2025-12-01T05:07:31Z"
},
{
"slug": "pihole-exporter",
"repo": "eko/pihole-exporter",
"version": "v1.2.0",
"pinned": false,
"date": "2025-07-29T19:15:37Z"
},
{
"slug": "planka",
"repo": "plankanban/planka",
"version": "v2.0.0",
"version": "v2.0.0-rc.4",
"pinned": false,
"date": "2026-02-11T13:50:10Z"
"date": "2025-09-04T12:41:17Z"
},
{
"slug": "plant-it",
@@ -1131,9 +1075,9 @@
{
"slug": "prometheus-alertmanager",
"repo": "prometheus/alertmanager",
"version": "v0.31.1",
"version": "v0.31.0",
"pinned": false,
"date": "2026-02-11T21:28:26Z"
"date": "2026-02-02T13:34:15Z"
},
{
"slug": "prometheus-blackbox-exporter",
@@ -1173,9 +1117,9 @@
{
"slug": "pulse",
"repo": "rcourtman/Pulse",
"version": "v5.1.9",
"version": "v5.1.3",
"pinned": false,
"date": "2026-02-11T15:34:40Z"
"date": "2026-02-07T14:59:29Z"
},
{
"slug": "pve-scripts-local",
@@ -1191,13 +1135,6 @@
"pinned": false,
"date": "2025-11-19T23:54:34Z"
},
{
"slug": "qbittorrent-exporter",
"repo": "martabal/qbittorrent-exporter",
"version": "v1.13.2",
"pinned": false,
"date": "2025-12-13T22:59:03Z"
},
{
"slug": "qdrant",
"repo": "qdrant/qdrant",
@@ -1219,13 +1156,6 @@
"pinned": false,
"date": "2025-11-16T22:39:01Z"
},
{
"slug": "radicale",
"repo": "Kozea/Radicale",
"version": "v3.6.0",
"pinned": false,
"date": "2026-01-10T06:56:46Z"
},
{
"slug": "rclone",
"repo": "rclone/rclone",
@@ -1236,9 +1166,9 @@
{
"slug": "rdtclient",
"repo": "rogerfar/rdt-client",
"version": "v2.0.120",
"version": "v2.0.119",
"pinned": false,
"date": "2026-02-12T02:53:51Z"
"date": "2025-10-13T23:15:11Z"
},
{
"slug": "reactive-resume",
@@ -1292,16 +1222,16 @@
{
"slug": "scanopy",
"repo": "scanopy/scanopy",
"version": "v0.14.4",
"version": "v0.14.3",
"pinned": false,
"date": "2026-02-10T03:57:28Z"
"date": "2026-02-04T01:41:01Z"
},
{
"slug": "scraparr",
"repo": "thecfu/scraparr",
"version": "v3.0.3",
"version": "v2.2.5",
"pinned": false,
"date": "2026-02-12T14:20:56Z"
"date": "2025-10-07T12:34:31Z"
},
{
"slug": "seelf",
@@ -1338,13 +1268,6 @@
"pinned": false,
"date": "2026-01-16T12:08:28Z"
},
{
"slug": "slskd",
"repo": "slskd/slskd",
"version": "0.24.3",
"pinned": false,
"date": "2026-01-15T14:40:15Z"
},
{
"slug": "snipeit",
"repo": "grokability/snipe-it",
@@ -1355,9 +1278,9 @@
{
"slug": "snowshare",
"repo": "TuroYT/snowshare",
"version": "v1.3.5",
"version": "v1.2.12",
"pinned": false,
"date": "2026-02-11T10:24:51Z"
"date": "2026-01-30T13:35:56Z"
},
{
"slug": "sonarr",
@@ -1369,9 +1292,9 @@
{
"slug": "speedtest-tracker",
"repo": "alexjustesen/speedtest-tracker",
"version": "v1.13.9",
"version": "v1.13.8",
"pinned": false,
"date": "2026-02-08T21:48:49Z"
"date": "2026-02-04T19:24:23Z"
},
{
"slug": "spoolman",
@@ -1390,9 +1313,9 @@
{
"slug": "stirling-pdf",
"repo": "Stirling-Tools/Stirling-PDF",
"version": "v2.4.6",
"version": "v2.4.5",
"pinned": false,
"date": "2026-02-12T00:01:19Z"
"date": "2026-02-06T23:12:20Z"
},
{
"slug": "streamlink-webui",
@@ -1411,9 +1334,9 @@
{
"slug": "tandoor",
"repo": "TandoorRecipes/recipes",
"version": "2.5.0",
"version": "2.4.2",
"pinned": false,
"date": "2026-02-08T13:23:02Z"
"date": "2026-02-01T12:52:37Z"
},
{
"slug": "tasmoadmin",
@@ -1439,9 +1362,9 @@
{
"slug": "termix",
"repo": "Termix-SSH/Termix",
"version": "release-1.11.1-tag",
"version": "release-1.11.0-tag",
"pinned": false,
"date": "2026-02-13T04:49:16Z"
"date": "2026-01-25T02:09:52Z"
},
{
"slug": "the-lounge",
@@ -1467,9 +1390,9 @@
{
"slug": "tianji",
"repo": "msgbyte/tianji",
"version": "v1.31.12",
"version": "v1.31.10",
"pinned": false,
"date": "2026-02-12T19:06:14Z"
"date": "2026-02-04T17:21:04Z"
},
{
"slug": "traccar",
@@ -1481,9 +1404,9 @@
{
"slug": "tracearr",
"repo": "connorgallopo/Tracearr",
"version": "v1.4.17",
"version": "v1.4.12",
"pinned": false,
"date": "2026-02-11T01:33:21Z"
"date": "2026-01-28T23:29:37Z"
},
{
"slug": "tracktor",
@@ -1495,9 +1418,9 @@
{
"slug": "traefik",
"repo": "traefik/traefik",
"version": "v3.6.8",
"version": "v3.6.7",
"pinned": false,
"date": "2026-02-11T16:44:37Z"
"date": "2026-01-14T14:11:45Z"
},
{
"slug": "trilium",
@@ -1509,9 +1432,9 @@
{
"slug": "trip",
"repo": "itskovacs/TRIP",
"version": "1.40.0",
"version": "1.39.0",
"pinned": false,
"date": "2026-02-10T20:12:53Z"
"date": "2026-02-07T16:59:51Z"
},
{
"slug": "tududi",
@@ -1558,9 +1481,9 @@
{
"slug": "upsnap",
"repo": "seriousm4x/UpSnap",
"version": "5.2.8",
"version": "5.2.7",
"pinned": false,
"date": "2026-02-13T00:02:37Z"
"date": "2026-01-07T23:48:00Z"
},
{
"slug": "uptimekuma",
@@ -1572,9 +1495,9 @@
{
"slug": "vaultwarden",
"repo": "dani-garcia/vaultwarden",
"version": "1.35.3",
"version": "1.35.2",
"pinned": false,
"date": "2026-02-10T20:37:03Z"
"date": "2026-01-09T18:37:04Z"
},
{
"slug": "victoriametrics",
@@ -1586,9 +1509,9 @@
{
"slug": "vikunja",
"repo": "go-vikunja/vikunja",
"version": "v1.1.0",
"version": "v1.0.0",
"pinned": false,
"date": "2026-02-09T10:34:29Z"
"date": "2026-01-28T11:12:59Z"
},
{
"slug": "wallabag",
@@ -1600,9 +1523,9 @@
{
"slug": "wallos",
"repo": "ellite/Wallos",
"version": "v4.6.1",
"version": "v4.6.0",
"pinned": false,
"date": "2026-02-10T21:06:46Z"
"date": "2025-12-20T15:57:51Z"
},
{
"slug": "wanderer",
@@ -1635,9 +1558,9 @@
{
"slug": "wavelog",
"repo": "wavelog/wavelog",
"version": "2.3",
"version": "2.2.2",
"pinned": false,
"date": "2026-02-11T15:46:40Z"
"date": "2025-12-31T16:53:34Z"
},
{
"slug": "wealthfolio",
@@ -1653,26 +1576,19 @@
"pinned": false,
"date": "2025-11-11T14:30:28Z"
},
{
"slug": "wger",
"repo": "wger-project/wger",
"version": "2.4",
"pinned": false,
"date": "2026-01-18T12:12:02Z"
},
{
"slug": "wikijs",
"repo": "requarks/wiki",
"version": "v2.5.312",
"version": "v2.5.311",
"pinned": false,
"date": "2026-02-12T02:45:22Z"
"date": "2026-01-08T09:50:00Z"
},
{
"slug": "wishlist",
"repo": "cmintey/wishlist",
"version": "v0.60.0",
"version": "v0.59.0",
"pinned": false,
"date": "2026-02-10T04:05:26Z"
"date": "2026-01-19T16:42:14Z"
},
{
"slug": "wizarr",
@@ -1698,9 +1614,9 @@
{
"slug": "yubal",
"repo": "guillevc/yubal",
"version": "v0.5.0",
"version": "v0.4.0",
"pinned": false,
"date": "2026-02-09T22:11:32Z"
"date": "2026-02-06T21:25:24Z"
},
{
"slug": "zigbee2mqtt",
@@ -1712,9 +1628,9 @@
{
"slug": "zipline",
"repo": "diced/zipline",
"version": "v4.4.2",
"version": "v4.4.1",
"pinned": false,
"date": "2026-02-11T04:58:54Z"
"date": "2026-01-20T01:29:01Z"
},
{
"slug": "zitadel",

View File

@@ -21,7 +21,7 @@
"resources": {
"cpu": 1,
"ram": 512,
"hdd": 4,
"hdd": 2,
"os": "debian",
"version": "13"
}
@@ -32,7 +32,7 @@
"resources": {
"cpu": 1,
"ram": 256,
"hdd": 2,
"hdd": 1,
"os": "alpine",
"version": "3.23"
}

View File

@@ -33,7 +33,7 @@
},
"notes": [
{
"text": "Kutt needs so be served with an SSL certificate for its login to work. During install, you will be prompted to choose if you want to have Caddy installed for SSL termination or if you want to use your own reverse proxy (in that case point your reverse proxy to port 3000).",
"text": "Kutt needs so be served with an SSL certificate for its login to work. During install, you will be prompted to choose if you want to have Caddy installed for SSL termination or if you want to use your own reverse proxy (in that case point your reverse porxy to port 3000).",
"type": "info"
}
]

View File

@@ -28,14 +28,10 @@
}
],
"default_credentials": {
"username": null,
"username": "admin",
"password": null
},
"notes": [
{
"text": "On first visit, the setup wizard will guide you to create an admin account and configure ACME email.",
"type": "warning"
},
{
"text": "Nginx runs on ports 80/443, Nginx UI management interface on port 9000.",
"type": "info"
@@ -43,6 +39,10 @@
{
"text": "SSL certificates can be managed automatically with Let's Encrypt integration.",
"type": "info"
},
{
"text": "Initial Login data: `cat ~/nginx-ui.creds`",
"type": "info"
}
]
}

View File

@@ -30,14 +30,10 @@
}
],
"default_credentials": {
"username": null,
"password": null
"username": "admin@example.com",
"password": "changeme"
},
"notes": [
{
"text": "On first launch, a setup wizard will guide you through creating an admin account. There are no default credentials.",
"type": "info"
},
{
"text": "You can install the specific one certbot you prefer, or you can Running /app/scripts/install-certbot-plugins within the Nginx Proxy Manager (NPM) LXC shell will install many common plugins. Important: This script does not install all Certbot plugins, as some require additional, external system dependencies (like specific packages for certain DNS providers). These external dependencies must be manually installed within the LXC container before you can successfully install and use the corresponding Certbot plugin. Consult the plugin's documentation for required packages.",
"type": "info"

View File

@@ -1,35 +1,44 @@
{
"name": "Prometheus Paperless NGX Exporter",
"slug": "prometheus-paperless-ngx-exporter",
"categories": [
9
],
"date_created": "2025-02-07",
"type": "addon",
"updateable": true,
"privileged": false,
"interface_port": 8081,
"documentation": "https://github.com/hansmi/prometheus-paperless-exporter",
"website": "https://github.com/hansmi/prometheus-paperless-exporter",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/paperless-ngx.webp",
"config_path": "/etc/prometheus-paperless-ngx-exporter/config.env",
"description": "Prometheus metrics exporter for Paperless-NGX, a document management system transforming physical documents into a searchable online archive. The exporter relies on Paperless' REST API.",
"install_methods": [
{
"type": "default",
"script": "tools/addon/prometheus-paperless-ngx-exporter.sh",
"resources": {
"cpu": null,
"ram": null,
"hdd": null,
"os": null,
"version": null
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": []
"name": "Prometheus Paperless NGX Exporter",
"slug": "prometheus-paperless-ngx-exporter",
"categories": [
9
],
"date_created": "2025-02-07",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 8081,
"documentation": null,
"website": "https://github.com/hansmi/prometheus-paperless-exporter",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/paperless-ngx.webp",
"config_path": "",
"description": "Prometheus metrics exporter for Paperless-NGX, a document management system transforming physical documents into a searchable online archive. The exporter relies on Paperless' REST API.",
"install_methods": [
{
"type": "default",
"script": "ct/prometheus-paperless-ngx-exporter.sh",
"resources": {
"cpu": 1,
"ram": 256,
"hdd": 2,
"os": "debian",
"version": "13"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": [
{
"text": "Please adjust the Paperless URL in the systemd unit file: /etc/systemd/system/prometheus-paperless-ngx-exporter.service",
"type": "info"
},
{
"text": "Please adjust the Paperless authentication token in the configuration file: /etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file",
"type": "info"
}
]
}

View File

@@ -12,7 +12,7 @@
"documentation": "https://radicale.org/master.html#documentation-1",
"website": "https://radicale.org/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/radicale.webp",
"config_path": "/etc/radicale/config",
"config_path": "/etc/radicale/config or ~/.config/radicale/config",
"description": "Radicale is a small but powerful CalDAV (calendars, to-do lists) and CardDAV (contacts)",
"install_methods": [
{

View File

@@ -22,8 +22,8 @@
"cpu": 2,
"ram": 2048,
"hdd": 4,
"os": "ubuntu",
"version": "24.04"
"os": "debian",
"version": "12"
}
}
],

View File

@@ -1,5 +1,5 @@
{
"name": "Slskd",
"name": "slskd",
"slug": "slskd",
"categories": [
11
@@ -35,6 +35,10 @@
{
"text": "See /opt/slskd/config/slskd.yml to add your Soulseek credentials",
"type": "info"
},
{
"text": "This LXC includes Soularr; it needs to be configured (/opt/soularr/config.ini) before it will work",
"type": "info"
}
]
}

View File

@@ -10,7 +10,7 @@
"privileged": false,
"interface_port": 3000,
"documentation": "https://github.com/TuroYT/snowshare",
"config_path": "/opt/snowshare.env",
"config_path": "/opt/snowshare/.env",
"website": "https://github.com/TuroYT/snowshare",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/png/snowshare.png",
"description": "A modern, secure file and link sharing platform built with Next.js, Prisma, and NextAuth. Share URLs, code snippets, and files with customizable expiration, privacy, and QR codes.",

View File

@@ -32,10 +32,6 @@
"password": null
},
"notes": [
{
"text": "SQL Server (2025) SQLPAL is incompatible with Proxmox VE 9 (Kernel 6.12+) in LXC containers. Use a VM instead or the SQL-Server 2022 LXC.",
"type": "warning"
},
{
"text": "If you choose not to run the installation setup, execute: `/opt/mssql/bin/mssql-conf setup` in LXC shell.",
"type": "info"

View File

@@ -19,9 +19,9 @@
"type": "default",
"script": "ct/wger.sh",
"resources": {
"cpu": 2,
"ram": 2048,
"hdd": 8,
"cpu": 1,
"ram": 1024,
"hdd": 6,
"os": "debian",
"version": "13"
}
@@ -33,7 +33,7 @@
},
"notes": [
{
"text": "This LXC also runs Celery and Redis to synchronize workouts and ingredients",
"text": "Enable proxy support by uncommenting this line in `/home/wger/src/settings.py` and pointing it to your URL: `# CSRF_TRUSTED_ORIGINS = ['http://127.0.0.1', 'https://my.domain.example.com']`, then restart the service `systemctl restart wger`.",
"type": "info"
}
]

View File

@@ -58,7 +58,7 @@ DISABLE_REGISTRATION=False
EOF
cd /opt/adventurelog/backend/server
mkdir -p /opt/adventurelog/backend/server/media
$STD uv venv --clear /opt/adventurelog/backend/server/.venv
$STD uv venv /opt/adventurelog/backend/server/.venv
$STD /opt/adventurelog/backend/server/.venv/bin/python -m ensurepip --upgrade
$STD /opt/adventurelog/backend/server/.venv/bin/python -m pip install --upgrade pip
$STD /opt/adventurelog/backend/server/.venv/bin/python -m pip install -r requirements.txt

View File

@@ -77,7 +77,7 @@ echo "${KEPUB_VERSION#v}" >"$INSTALL_DIR"/KEPUBIFY_RELEASE
sed 's/^/v/' ~/.autocaliweb >"$INSTALL_DIR"/ACW_RELEASE
cd "$INSTALL_DIR"
$STD uv venv --clear "$VIRTUAL_ENV"
$STD uv venv "$VIRTUAL_ENV"
$STD uv sync --all-extras --active
cat <<EOF >./dirs.json
{

View File

@@ -29,7 +29,7 @@ fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy" "tarball"
msg_info "Installing Babybuddy"
mkdir -p /opt/data
cd /opt/babybuddy
$STD uv venv --clear .venv
$STD uv venv .venv
$STD source .venv/bin/activate
$STD uv pip install -r requirements.txt
cp babybuddy/settings/production.example.py babybuddy/settings/production.py

View File

@@ -20,7 +20,7 @@ msg_info "Installing Bazarr"
mkdir -p /var/lib/bazarr/
chmod 775 /opt/bazarr /var/lib/bazarr/
sed -i.bak 's/--only-binary=Pillow//g' /opt/bazarr/requirements.txt
$STD uv venv --clear /opt/bazarr/venv --python 3.12
$STD uv venv /opt/bazarr/venv --python 3.12
$STD uv pip install -r /opt/bazarr/requirements.txt --python /opt/bazarr/venv/bin/python3
msg_ok "Installed Bazarr"

View File

@@ -36,43 +36,27 @@ PYTHON_VERSION="3.12" setup_uv
fetch_and_deploy_gh_release "ComfyUI" "comfyanonymous/ComfyUI" "tarball" "latest" "/opt/ComfyUI"
msg_info "Python dependencies"
$STD uv venv --clear "/opt/ComfyUI/venv"
$STD uv venv "/opt/ComfyUI/venv"
if [[ "${comfyui_gpu_type,,}" == "nvidia" ]]; then
pytorch_url="https://download.pytorch.org/whl/cu130"
if [[ -f "/opt/ComfyUI/README.md" ]]; then
extracted=$(grep -oP 'pip install.*?--extra-index-url\s+\Khttps://download\.pytorch\.org/whl/cu\d+' /opt/ComfyUI/README.md | head -1 || true)
[[ -n "$extracted" ]] && pytorch_url="$extracted"
fi
$STD uv pip install \
torch \
torchvision \
torchaudio \
--extra-index-url "$pytorch_url" \
--extra-index-url "https://download.pytorch.org/whl/cu128" \
--python="/opt/ComfyUI/venv/bin/python"
elif [[ "${comfyui_gpu_type,,}" == "amd" ]]; then
pytorch_url="https://download.pytorch.org/whl/rocm6.4"
if [[ -f "/opt/ComfyUI/README.md" ]]; then
extracted=$(grep -oP 'pip install.*?--index-url\s+\Khttps://download\.pytorch\.org/whl/rocm[\d.]+' /opt/ComfyUI/README.md | grep -v 'nightly' | head -1 || true)
[[ -n "$extracted" ]] && pytorch_url="$extracted"
fi
$STD uv pip install \
torch \
torchvision \
torchaudio \
--index-url "$pytorch_url" \
--index-url "https://download.pytorch.org/whl/rocm6.3" \
--python="/opt/ComfyUI/venv/bin/python"
elif [[ "${comfyui_gpu_type,,}" == "intel" ]]; then
pytorch_url="https://download.pytorch.org/whl/xpu"
if [[ -f "/opt/ComfyUI/README.md" ]]; then
extracted=$(grep -oP 'pip install.*?--index-url\s+\Khttps://download\.pytorch\.org/whl/xpu' /opt/ComfyUI/README.md | head -1 || true)
[[ -n "$extracted" ]] && pytorch_url="$extracted"
fi
$STD uv pip install \
torch \
torchvision \
torchaudio \
--index-url "$pytorch_url" \
--index-url "https://download.pytorch.org/whl/xpu" \
--python="/opt/ComfyUI/venv/bin/python"
fi
$STD uv pip install -r "/opt/ComfyUI/requirements.txt" --python="/opt/ComfyUI/venv/bin/python"

View File

@@ -16,8 +16,7 @@ update_os
msg_info "Installing Dependencies"
$STD apt install -y \
python3-pip \
python3-libtorrent \
python3-setuptools
python3-libtorrent
msg_ok "Installed Dependencies"
msg_info "Installing Deluge"

View File

@@ -36,8 +36,8 @@ fetch_and_deploy_gh_release "dispatcharr" "Dispatcharr/Dispatcharr" "tarball"
msg_info "Installing Python Dependencies with uv"
cd /opt/dispatcharr
$STD uv venv --clear
$STD uv sync
$STD uv venv
$STD uv pip install -r requirements.txt --index-strategy unsafe-best-match
$STD uv pip install gunicorn gevent celery redis daphne
msg_ok "Installed Python Dependencies"

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://www.drawio.com/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
setup_hwaccel
msg_info "Installing Dependencies"
$STD apt install -y tomcat11
msg_ok "Installed Dependencies"
USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "drawio" "jgraph/drawio" "singlefile" "latest" "/var/lib/tomcat11/webapps" "draw.war"
motd_ssh
customize
cleanup_lxc

View File

@@ -31,10 +31,8 @@ setup_deb822_repo "matrix-org" \
"main"
echo "matrix-synapse-py3 matrix-synapse/server-name string $servername" | debconf-set-selections
echo "matrix-synapse-py3 matrix-synapse/report-stats boolean false" | debconf-set-selections
echo "exit 101" >/usr/sbin/policy-rc.d
chmod +x /usr/sbin/policy-rc.d
$STD apt install matrix-synapse-py3 -y
rm -f /usr/sbin/policy-rc.d
systemctl stop matrix-synapse
sed -i 's/127.0.0.1/0.0.0.0/g' /etc/matrix-synapse/homeserver.yaml
sed -i 's/'\''::1'\'', //g' /etc/matrix-synapse/homeserver.yaml
SECRET=$(openssl rand -hex 32)

View File

@@ -38,18 +38,6 @@ rm -f "$DEB_FILE"
echo "$LATEST_VERSION" >~/.emqx
msg_ok "Installed EMQX"
read -r -p "${TAB3}Would you like to disable the EMQX MQ feature? (reduces disk/CPU usage) <y/N> " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Disabling EMQX MQ feature"
mkdir -p /etc/emqx
if ! grep -q "^mq.enable" /etc/emqx/emqx.conf 2>/dev/null; then
echo "mq.enable = false" >>/etc/emqx/emqx.conf
else
sed -i 's/^mq.enable.*/mq.enable = false/' /etc/emqx/emqx.conf
fi
msg_ok "Disabled EMQX MQ feature"
fi
msg_info "Starting EMQX service"
$STD systemctl enable -q --now emqx
msg_ok "Enabled EMQX service"

View File

@@ -86,7 +86,7 @@ $STD uv tool update-shell
export PATH="/root/.local/bin:$PATH"
$STD poetry self add poetry-plugin-export
$STD poetry export -f requirements.txt --output requirements.txt --without-hashes
$STD uv venv --clear
$STD uv venv
$STD uv pip install -r requirements.txt
msg_ok "Setup Backend"

View File

@@ -23,7 +23,7 @@ msg_info "Setting up Virtual Environment"
mkdir -p /opt/esphome
mkdir -p /root/config
cd /opt/esphome
$STD uv venv --clear /opt/esphome/.venv
$STD uv venv /opt/esphome/.venv
$STD /opt/esphome/.venv/bin/python -m ensurepip --upgrade
$STD /opt/esphome/.venv/bin/python -m pip install --upgrade pip
$STD /opt/esphome/.venv/bin/python -m pip install esphome tornado esptool

View File

@@ -15,30 +15,31 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt install -y \
$STD apt-get install -y \
ffmpeg \
jq \
imagemagick
msg_ok "Installed Dependencies"
setup_hwaccel
msg_info "Installing ASP.NET Core Runtime"
setup_deb822_repo \
"microsoft" \
"https://packages.microsoft.com/keys/microsoft-2025.asc" \
"https://packages.microsoft.com/debian/13/prod/" \
"trixie"
$STD apt install -y aspnetcore-runtime-8.0
curl -fsSL https://packages.microsoft.com/config/debian/13/packages-microsoft-prod.deb -o packages-microsoft-prod.deb
$STD dpkg -i packages-microsoft-prod.deb
rm -rf packages-microsoft-prod.deb
$STD apt-get update
$STD apt-get install -y aspnetcore-runtime-8.0
msg_ok "Installed ASP.NET Core Runtime"
fetch_and_deploy_from_url "https://fileflows.com/downloads/zip" "/opt/fileflows"
msg_info "Setup FileFlows"
$STD ln -svf /usr/bin/ffmpeg /usr/local/bin/ffmpeg
$STD ln -svf /usr/bin/ffprobe /usr/local/bin/ffprobe
cd /opt/fileflows/Server
dotnet FileFlows.Server.dll --systemd install --root true
temp_file=$(mktemp)
curl -fsSL https://fileflows.com/downloads/zip -o "$temp_file"
$STD unzip -d /opt/fileflows "$temp_file"
$STD bash -c "cd /opt/fileflows/Server && dotnet FileFlows.Server.dll --systemd install --root true"
systemctl enable -q --now fileflows
rm -f "$temp_file"
msg_ok "Setup FileFlows"
motd_ssh
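
The setup_deb822_repo call replaces the packages-microsoft-prod.deb bootstrap. Its exact output is not shown in this diff, but a hand-written deb822 equivalent for the same repository would look roughly like the following (the keyring path, file name, and the "main" component are assumptions):

curl -fsSL https://packages.microsoft.com/keys/microsoft-2025.asc | gpg --dearmor -o /usr/share/keyrings/microsoft.gpg
cat <<'EOF' >/etc/apt/sources.list.d/microsoft.sources
Types: deb
URIs: https://packages.microsoft.com/debian/13/prod/
Suites: trixie
Components: main
Signed-By: /usr/share/keyrings/microsoft.gpg
EOF
apt update && apt install -y aspnetcore-runtime-8.0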

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Authors: MickLesk (CanbiZ)
# Co-Authors: remz1337
# Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster)
# Co-Author: remz1337
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://frigate.video/
@@ -14,221 +14,60 @@ setting_up_container
network_check
update_os
source /etc/os-release
if [[ "$VERSION_ID" != "12" ]]; then
msg_error "Frigate requires Debian 12 (Bookworm) due to Python 3.11 dependencies"
exit 1
fi
msg_info "Converting APT sources to DEB822 format"
if [ -f /etc/apt/sources.list ]; then
cat > /etc/apt/sources.list.d/debian.sources <<'EOF'
Types: deb
URIs: http://deb.debian.org/debian
Suites: bookworm
Components: main contrib
Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg
Types: deb
URIs: http://deb.debian.org/debian
Suites: bookworm-updates
Components: main contrib
Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg
Types: deb
URIs: http://security.debian.org
Suites: bookworm-security
Components: main contrib
Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg
EOF
mv /etc/apt/sources.list /etc/apt/sources.list.bak
$STD apt update
fi
msg_ok "Converted APT sources"
msg_info "Installing Dependencies"
$STD apt install -y \
xz-utils \
python3 \
python3-dev \
python3-pip \
gcc \
pkg-config \
libhdf5-dev \
build-essential \
automake \
libtool \
ccache \
libusb-1.0-0-dev \
apt-transport-https \
cmake \
git \
libgtk-3-dev \
libavcodec-dev \
libavformat-dev \
libswscale-dev \
libv4l-dev \
libxvidcore-dev \
libx264-dev \
libjpeg-dev \
libpng-dev \
libtiff-dev \
gfortran \
openexr \
libssl-dev \
libtbbmalloc2 \
libtbb-dev \
libdc1394-dev \
libopenexr-dev \
libgstreamer-plugins-base1.0-dev \
libgstreamer1.0-dev \
tclsh \
libopenblas-dev \
liblapack-dev \
make \
moreutils
msg_info "Installing Dependencies (Patience)"
$STD apt-get install -y {git,ca-certificates,automake,build-essential,xz-utils,libtool,ccache,pkg-config,libgtk-3-dev,libavcodec-dev,libavformat-dev,libswscale-dev,libv4l-dev,libxvidcore-dev,libx264-dev,libjpeg-dev,libpng-dev,libtiff-dev,gfortran,openexr,libatlas-base-dev,libssl-dev,libtbb2,libtbb-dev,libdc1394-22-dev,libopenexr-dev,libgstreamer-plugins-base1.0-dev,libgstreamer1.0-dev,gcc,gfortran,libopenblas-dev,liblapack-dev,libusb-1.0-0-dev,jq,moreutils}
msg_ok "Installed Dependencies"
msg_info "Setup Python3"
$STD apt-get install -y {python3,python3-dev,python3-setuptools,python3-distutils,python3-pip}
$STD pip install --upgrade pip
msg_ok "Setup Python3"
NODE_VERSION="22" setup_nodejs
msg_info "Installing go2rtc"
mkdir -p /usr/local/go2rtc/bin
cd /usr/local/go2rtc/bin
curl -fsSL "https://github.com/AlexxIT/go2rtc/releases/latest/download/go2rtc_linux_amd64" -o "go2rtc"
chmod +x go2rtc
$STD ln -svf /usr/local/go2rtc/bin/go2rtc /usr/local/bin/go2rtc
msg_ok "Installed go2rtc"
setup_hwaccel
export TARGETARCH="amd64"
export CCACHE_DIR=/root/.ccache
export CCACHE_MAXSIZE=2G
export APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
export PIP_BREAK_SYSTEM_PACKAGES=1
export NVIDIA_VISIBLE_DEVICES=all
export NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
export TOKENIZERS_PARALLELISM=true
export TRANSFORMERS_NO_ADVISORY_WARNINGS=1
export OPENCV_FFMPEG_LOGLEVEL=8
export HAILORT_LOGGER_PATH=NONE
fetch_and_deploy_gh_release "frigate" "blakeblackshear/frigate" "tarball" "v0.16.4" "/opt/frigate"
msg_info "Building Nginx"
$STD bash /opt/frigate/docker/main/build_nginx.sh
sed -e '/s6-notifyoncheck/ s/^#*/#/' -i /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/nginx/run
ln -sf /usr/local/nginx/sbin/nginx /usr/local/bin/nginx
msg_ok "Built Nginx"
msg_info "Building SQLite Extensions"
$STD bash /opt/frigate/docker/main/build_sqlite_vec.sh
msg_ok "Built SQLite Extensions"
fetch_and_deploy_gh_release "go2rtc" "AlexxIT/go2rtc" "singlefile" "latest" "/usr/local/go2rtc/bin" "go2rtc_linux_amd64"
msg_info "Installing Tempio"
sed -i 's|/rootfs/usr/local|/usr/local|g' /opt/frigate/docker/main/install_tempio.sh
$STD bash /opt/frigate/docker/main/install_tempio.sh
ln -sf /usr/local/tempio/bin/tempio /usr/local/bin/tempio
msg_ok "Installed Tempio"
msg_info "Building libUSB"
fetch_and_deploy_gh_release "libusb" "libusb/libusb" "tarball" "v1.0.26" "/opt/libusb"
cd /opt/libusb
$STD ./bootstrap.sh
$STD ./configure CC='ccache gcc' CCX='ccache g++' --disable-udev --enable-shared
$STD make -j "$(nproc)"
cd /opt/libusb/libusb
mkdir -p /usr/local/lib /usr/local/include/libusb-1.0 /usr/local/lib/pkgconfig
$STD bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la /usr/local/lib
install -c -m 644 libusb.h /usr/local/include/libusb-1.0
cd /opt/libusb/
install -c -m 644 libusb-1.0.pc /usr/local/lib/pkgconfig
ldconfig
msg_ok "Built libUSB"
msg_info "Installing Python Dependencies"
$STD pip3 install -r /opt/frigate/docker/main/requirements.txt
msg_ok "Installed Python Dependencies"
msg_info "Building Python Wheels (Patience)"
mkdir -p /wheels
sed -i 's|^SQLITE3_VERSION=.*|SQLITE3_VERSION="version-3.46.0"|g' /opt/frigate/docker/main/build_pysqlite3.sh
$STD bash /opt/frigate/docker/main/build_pysqlite3.sh
for i in {1..3}; do
$STD pip3 wheel --wheel-dir=/wheels -r /opt/frigate/docker/main/requirements-wheels.txt --default-timeout=300 --retries=3 && break
[[ $i -lt 3 ]] && sleep 10
done
msg_ok "Built Python Wheels"
NODE_VERSION="22" NODE_MODULE="yarn" setup_nodejs
msg_info "Downloading Inference Models"
mkdir -p /models /openvino-model
wget -q -O /edgetpu_model.tflite https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess_edgetpu.tflite
wget -q -O /models/cpu_model.tflite https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess.tflite
cp /opt/frigate/labelmap.txt /labelmap.txt
msg_ok "Downloaded Inference Models"
msg_info "Downloading Audio Model"
wget -q -O /tmp/yamnet.tar.gz https://www.kaggle.com/api/v1/models/google/yamnet/tfLite/classification-tflite/1/download
$STD tar xzf /tmp/yamnet.tar.gz -C /
mv /1.tflite /cpu_audio_model.tflite
cp /opt/frigate/audio-labelmap.txt /audio-labelmap.txt
rm -f /tmp/yamnet.tar.gz
msg_ok "Downloaded Audio Model"
msg_info "Installing HailoRT Runtime"
$STD bash /opt/frigate/docker/main/install_hailort.sh
msg_info "Installing Frigate v0.14.1 (Perseverance)"
cd ~
mkdir -p /opt/frigate/models
curl -fsSL "https://github.com/blakeblackshear/frigate/archive/refs/tags/v0.14.1.tar.gz" -o "frigate.tar.gz"
tar -xzf frigate.tar.gz -C /opt/frigate --strip-components 1
rm -rf frigate.tar.gz
cd /opt/frigate
$STD pip3 wheel --wheel-dir=/wheels -r /opt/frigate/docker/main/requirements-wheels.txt
cp -a /opt/frigate/docker/main/rootfs/. /
sed -i '/^.*unset DEBIAN_FRONTEND.*$/d' /opt/frigate/docker/main/install_deps.sh
echo "libedgetpu1-max libedgetpu/accepted-eula boolean true" | debconf-set-selections
echo "libedgetpu1-max libedgetpu/install-confirm-max boolean true" | debconf-set-selections
$STD bash /opt/frigate/docker/main/install_deps.sh
export TARGETARCH="amd64"
echo 'libc6 libraries/restart-without-asking boolean true' | debconf-set-selections
$STD /opt/frigate/docker/main/install_deps.sh
$STD apt update
$STD ln -svf /usr/lib/btbn-ffmpeg/bin/ffmpeg /usr/local/bin/ffmpeg
$STD ln -svf /usr/lib/btbn-ffmpeg/bin/ffprobe /usr/local/bin/ffprobe
$STD pip3 install -U /wheels/*.whl
ldconfig
msg_ok "Installed HailoRT Runtime"
msg_info "Installing OpenVino"
$STD pip3 install -r /opt/frigate/docker/main/requirements-ov.txt
msg_ok "Installed OpenVino"
msg_info "Building OpenVino Model"
cd /models
wget -q http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz
$STD tar -zxf ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz --no-same-owner
if python3 /opt/frigate/docker/main/build_ov_model.py 2>&1; then
cp /models/ssdlite_mobilenet_v2.xml /openvino-model/
cp /models/ssdlite_mobilenet_v2.bin /openvino-model/
wget -q https://github.com/openvinotoolkit/open_model_zoo/raw/master/data/dataset_classes/coco_91cl_bkgr.txt -O /openvino-model/coco_91cl_bkgr.txt
sed -i 's/truck/car/g' /openvino-model/coco_91cl_bkgr.txt
msg_ok "Built OpenVino Model"
else
msg_warn "OpenVino build failed (CPU may not support required instructions). Frigate will use CPU model."
fi
msg_info "Building Frigate Application (Patience)"
cd /opt/frigate
$STD pip3 install -r /opt/frigate/docker/main/requirements-dev.txt
$STD bash /opt/frigate/.devcontainer/initialize.sh
$STD /opt/frigate/.devcontainer/initialize.sh
$STD make version
cd /opt/frigate/web
$STD npm install
$STD npm run build
cp -r /opt/frigate/web/dist/* /opt/frigate/web/
sed -i '/^s6-svc -O \.$/s/^/#/' /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/run
msg_ok "Built Frigate Application"
msg_info "Configuring Frigate"
mkdir -p /config /media/frigate
cp -r /opt/frigate/config/. /config
curl -fsSL "https://github.com/intel-iot-devkit/sample-videos/raw/master/person-bicycle-car-detection.mp4" -o "/media/frigate/person-bicycle-car-detection.mp4"
echo "tmpfs /tmp/cache tmpfs defaults 0 0" >>/etc/fstab
cat <<EOF >/etc/frigate.env
DEFAULT_FFMPEG_VERSION="7.0"
INCLUDED_FFMPEG_VERSIONS="7.0:5.0"
EOF
sed -i '/^s6-svc -O \.$/s/^/#/' /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/run
cat <<EOF >/config/config.yml
mqtt:
enabled: false
cameras:
test:
ffmpeg:
#hwaccel_args: preset-vaapi
inputs:
- path: /media/frigate/person-bicycle-car-detection.mp4
input_args: -re -stream_loop -1 -fflags +genpts
@@ -239,42 +78,96 @@ cameras:
height: 1080
width: 1920
fps: 5
auth:
enabled: false
detect:
enabled: false
EOF
ln -sf /config/config.yml /opt/frigate/config/config.yml
if [[ "$CTTYPE" == "0" ]]; then
sed -i -e 's/^kvm:x:104:$/render:x:104:root,frigate/' -e 's/^render:x:105:root$/kvm:x:105:/' /etc/group
else
sed -i -e 's/^kvm:x:104:$/render:x:104:frigate/' -e 's/^render:x:105:$/kvm:x:105:/' /etc/group
fi
echo "tmpfs /tmp/cache tmpfs defaults 0 0" >>/etc/fstab
msg_ok "Installed Frigate"
if grep -q -o -m1 -E 'avx[^ ]*|sse4_2' /proc/cpuinfo; then
if grep -q -o -m1 -E 'avx[^ ]*' /proc/cpuinfo; then
msg_ok "AVX Support Detected"
msg_info "Installing Openvino Object Detection Model (Resilience)"
$STD pip install -r /opt/frigate/docker/main/requirements-ov.txt
cd /opt/frigate/models
export ENABLE_ANALYTICS=NO
$STD /usr/local/bin/omz_downloader --name ssdlite_mobilenet_v2 --num_attempts 2
$STD /usr/local/bin/omz_converter --name ssdlite_mobilenet_v2 --precision FP16 --mo /usr/local/bin/mo
cd /
cp -r /opt/frigate/models/public/ssdlite_mobilenet_v2 openvino-model
curl -fsSL "https://github.com/openvinotoolkit/open_model_zoo/raw/master/data/dataset_classes/coco_91cl_bkgr.txt" -o "openvino-model/coco_91cl_bkgr.txt"
sed -i 's/truck/car/g' openvino-model/coco_91cl_bkgr.txt
cat <<EOF >>/config/config.yml
ffmpeg:
hwaccel_args: auto
detectors:
detector01:
ov:
type: openvino
device: CPU
model:
path: /openvino-model/FP16/ssdlite_mobilenet_v2.xml
model:
width: 300
height: 300
input_tensor: nhwc
input_pixel_format: bgr
path: /openvino-model/ssdlite_mobilenet_v2.xml
labelmap_path: /openvino-model/coco_91cl_bkgr.txt
EOF
msg_ok "Installed Openvino Object Detection Model"
else
cat <<EOF >>/config/config.yml
ffmpeg:
hwaccel_args: auto
model:
path: /cpu_model.tflite
EOF
fi
msg_ok "Configured Frigate"
msg_info "Installing Coral Object Detection Model (Patience)"
cd /opt/frigate
export CCACHE_DIR=/root/.ccache
export CCACHE_MAXSIZE=2G
curl -fsSL "https://github.com/libusb/libusb/archive/v1.0.26.zip" -o "v1.0.26.zip"
$STD unzip v1.0.26.zip
rm v1.0.26.zip
cd libusb-1.0.26
$STD ./bootstrap.sh
$STD ./configure --disable-udev --enable-shared
$STD make -j $(nproc --all)
cd /opt/frigate/libusb-1.0.26/libusb
mkdir -p /usr/local/lib
$STD /bin/bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la '/usr/local/lib'
mkdir -p /usr/local/include/libusb-1.0
$STD /usr/bin/install -c -m 644 libusb.h '/usr/local/include/libusb-1.0'
ldconfig
cd /
curl -fsSL "https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess_edgetpu.tflite" -o "edgetpu_model.tflite"
curl -fsSL "https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess.tflite" -o "cpu_model.tflite"
cp /opt/frigate/labelmap.txt /labelmap.txt
curl -fsSL "https://www.kaggle.com/api/v1/models/google/yamnet/tfLite/classification-tflite/1/download" -o "yamnet-tflite-classification-tflite-v1.tar.gz"
tar xzf yamnet-tflite-classification-tflite-v1.tar.gz
rm -rf yamnet-tflite-classification-tflite-v1.tar.gz
mv 1.tflite cpu_audio_model.tflite
cp /opt/frigate/audio-labelmap.txt /audio-labelmap.txt
mkdir -p /media/frigate
curl -fsSL "https://github.com/intel-iot-devkit/sample-videos/raw/master/person-bicycle-car-detection.mp4" -o "/media/frigate/person-bicycle-car-detection.mp4"
msg_ok "Installed Coral Object Detection Model"
msg_info "Building Nginx with Custom Modules"
$STD /opt/frigate/docker/main/build_nginx.sh
sed -e '/s6-notifyoncheck/ s/^#*/#/' -i /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/nginx/run
ln -sf /usr/local/nginx/sbin/nginx /usr/local/bin/nginx
msg_ok "Built Nginx"
msg_info "Installing Tempio"
sed -i 's|/rootfs/usr/local|/usr/local|g' /opt/frigate/docker/main/install_tempio.sh
$STD /opt/frigate/docker/main/install_tempio.sh
ln -sf /usr/local/tempio/bin/tempio /usr/local/bin/tempio
msg_ok "Installed Tempio"
msg_info "Creating Services"
cat <<EOF >/etc/systemd/system/create_directories.service
[Unit]
Description=Create necessary directories for Frigate logs
Before=frigate.service go2rtc.service nginx.service
Description=Create necessary directories for logs
[Service]
Type=oneshot
@@ -283,11 +176,13 @@ ExecStart=/bin/bash -c '/bin/mkdir -p /dev/shm/logs/{frigate,go2rtc,nginx} && /b
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now create_directories
sleep 3
cat <<EOF >/etc/systemd/system/go2rtc.service
[Unit]
Description=go2rtc streaming service
After=network.target create_directories.service
Description=go2rtc service
After=network.target
After=create_directories.service
StartLimitIntervalSec=0
[Service]
@@ -295,8 +190,7 @@ Type=simple
Restart=always
RestartSec=1
User=root
EnvironmentFile=/etc/frigate.env
ExecStartPre=+rm -f /dev/shm/logs/go2rtc/current
ExecStartPre=+rm /dev/shm/logs/go2rtc/current
ExecStart=/bin/bash -c "bash /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/go2rtc/run 2> >(/usr/bin/ts '%%Y-%%m-%%d %%H:%%M:%%.S ' >&2) | /usr/bin/ts '%%Y-%%m-%%d %%H:%%M:%%.S '"
StandardOutput=file:/dev/shm/logs/go2rtc/current
StandardError=file:/dev/shm/logs/go2rtc/current
@@ -304,11 +198,13 @@ StandardError=file:/dev/shm/logs/go2rtc/current
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now go2rtc
sleep 3
cat <<EOF >/etc/systemd/system/frigate.service
[Unit]
Description=Frigate NVR service
After=go2rtc.service create_directories.service
Description=Frigate service
After=go2rtc.service
After=create_directories.service
StartLimitIntervalSec=0
[Service]
@@ -316,8 +212,8 @@ Type=simple
Restart=always
RestartSec=1
User=root
EnvironmentFile=/etc/frigate.env
ExecStartPre=+rm -f /dev/shm/logs/frigate/current
# Environment=PLUS_API_KEY=
ExecStartPre=+rm /dev/shm/logs/frigate/current
ExecStart=/bin/bash -c "bash /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/run 2> >(/usr/bin/ts '%%Y-%%m-%%d %%H:%%M:%%.S ' >&2) | /usr/bin/ts '%%Y-%%m-%%d %%H:%%M:%%.S '"
StandardOutput=file:/dev/shm/logs/frigate/current
StandardError=file:/dev/shm/logs/frigate/current
@@ -325,11 +221,13 @@ StandardError=file:/dev/shm/logs/frigate/current
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now frigate
sleep 3
cat <<EOF >/etc/systemd/system/nginx.service
[Unit]
Description=Nginx reverse proxy for Frigate
After=frigate.service create_directories.service
Description=Nginx service
After=frigate.service
After=create_directories.service
StartLimitIntervalSec=0
[Service]
@@ -337,7 +235,7 @@ Type=simple
Restart=always
RestartSec=1
User=root
ExecStartPre=+rm -f /dev/shm/logs/nginx/current
ExecStartPre=+rm /dev/shm/logs/nginx/current
ExecStart=/bin/bash -c "bash /opt/frigate/docker/main/rootfs/etc/s6-overlay/s6-rc.d/nginx/run 2> >(/usr/bin/ts '%%Y-%%m-%%d %%H:%%M:%%.S ' >&2) | /usr/bin/ts '%%Y-%%m-%%d %%H:%%M:%%.S '"
StandardOutput=file:/dev/shm/logs/nginx/current
StandardError=file:/dev/shm/logs/nginx/current
@@ -345,20 +243,8 @@ StandardError=file:/dev/shm/logs/nginx/current
[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl enable -q --now create_directories
sleep 2
systemctl enable -q --now go2rtc
sleep 2
systemctl enable -q --now frigate
sleep 2
systemctl enable -q --now nginx
msg_ok "Created Services"
msg_info "Cleaning Up"
rm -rf /opt/libusb /wheels /models/*.tar.gz
msg_ok "Cleaned Up"
msg_ok "Configured Services"
motd_ssh
customize

View File

@@ -108,7 +108,7 @@ ${LOCAL_IP} {
EOF
msg_ok "Configured Caddy"
msg_info "Creating systemd services"
msg_info "Creating systemd service"
cat <<EOF >/etc/systemd/system/healthchecks.service
[Unit]
Description=Healthchecks Service
@@ -123,23 +123,9 @@ Restart=always
WantedBy=multi-user.target
EOF
cat <<EOF >/etc/systemd/system/healthchecks-sendalerts.service
[Unit]
Description=Healthchecks Sendalerts Service
After=network.target postgresql.service healthchecks.service
[Service]
WorkingDirectory=/opt/healthchecks/
ExecStart=/opt/healthchecks/venv/bin/python manage.py sendalerts
Restart=always
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now healthchecks healthchecks-sendalerts caddy
systemctl enable -q --now healthchecks caddy
systemctl reload caddy
msg_ok "Created Services"
msg_ok "Created Service"
motd_ssh
customize
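
One side of this hunk carries a dedicated unit for the Healthchecks sendalerts management command. Where that unit is absent, alert delivery can still be exercised manually from the application venv (illustrative invocation, using the paths configured above):

cd /opt/healthchecks
venv/bin/python manage.py sendalerts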

View File

@@ -17,7 +17,7 @@ PYTHON_VERSION="3.12" setup_uv
fetch_and_deploy_gh_release "huntarr" "plexguide/Huntarr.io" "tarball"
msg_info "Configure Huntarr"
$STD uv venv --clear /opt/huntarr/.venv
$STD uv venv /opt/huntarr/.venv
$STD uv pip install --python /opt/huntarr/.venv/bin/python -r /opt/huntarr/requirements.txt
msg_ok "Configured Huntrarr"

View File

@@ -289,7 +289,7 @@ ML_DIR="${APP_DIR}/machine-learning"
GEO_DIR="${INSTALL_DIR}/geodata"
mkdir -p {"${APP_DIR}","${UPLOAD_DIR}","${GEO_DIR}","${INSTALL_DIR}"/cache}
fetch_and_deploy_gh_release "Immich" "immich-app/immich" "tarball" "v2.5.6" "$SRC_DIR"
fetch_and_deploy_gh_release "Immich" "immich-app/immich" "tarball" "v2.5.5" "$SRC_DIR"
PNPM_VERSION="$(jq -r '.packageManager | split("@")[1]' ${SRC_DIR}/package.json)"
NODE_VERSION="24" NODE_MODULE="pnpm@${PNPM_VERSION}" setup_nodejs

View File

@@ -18,7 +18,7 @@ PYTHON_VERSION="3.12" setup_uv
msg_info "Installing Jupyter"
mkdir -p /opt/jupyter
cd /opt/jupyter
$STD uv venv --clear /opt/jupyter/.venv
$STD uv venv /opt/jupyter/.venv
$STD /opt/jupyter/.venv/bin/python -m ensurepip --upgrade
$STD /opt/jupyter/.venv/bin/python -m pip install --upgrade pip
$STD /opt/jupyter/.venv/bin/python -m pip install jupyter
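
With the venv in place, a quick foreground launch for testing could look like this (illustrative; any service definition the script creates is not shown in this hunk):

/opt/jupyter/.venv/bin/jupyter notebook --ip=0.0.0.0 --no-browser --allow-root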

Some files were not shown because too many files have changed in this diff.