Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2026-02-13 08:43:25 +01:00)

Compare commits: fix/debian ... feat/remov (3 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 2f676df1cb |  |
|  | d6f9cc7b59 |  |
|  | 6fa49fa3d1 |  |
.github/workflows/update-versions-github.yml (12 changed lines, generated, vendored)
@@ -89,15 +89,9 @@ jobs:
  slug=$(jq -r '.slug // empty' "$json_file" 2>/dev/null)
  [[ -z "$slug" ]] && continue

- # Find corresponding script (install script or addon script)
- install_script=""
- if [[ -f "install/${slug}-install.sh" ]]; then
- install_script="install/${slug}-install.sh"
- elif [[ -f "tools/addon/${slug}.sh" ]]; then
- install_script="tools/addon/${slug}.sh"
- else
- continue
- fi
+ # Find corresponding install script
+ install_script="install/${slug}-install.sh"
+ [[ ! -f "$install_script" ]] && continue

  # Look for fetch_and_deploy_gh_release calls
  # Pattern: fetch_and_deploy_gh_release "app" "owner/repo" ["mode"] ["version"]
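As context for the hunk above: the pattern comment describes calls of the form `fetch_and_deploy_gh_release "app" "owner/repo" ["mode"] ["version"]`. A minimal, hypothetical sketch of pulling the `owner/repo` argument out of an install script with grep and sed (illustrative only, not the workflow's actual extraction code):

```bash
#!/usr/bin/env bash
# Illustrative sketch only; not the workflow's real extraction logic.
slug="donetick"                                  # example slug taken from this compare
install_script="install/${slug}-install.sh"
# Take the first fetch_and_deploy_gh_release call and keep its second quoted argument (owner/repo).
repo=$(grep -m1 'fetch_and_deploy_gh_release' "$install_script" |
  sed -E 's/.*fetch_and_deploy_gh_release[[:space:]]+"[^"]+"[[:space:]]+"([^"]+)".*/\1/')
echo "$slug -> $repo"   # would print: donetick -> donetick/donetick
```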
CHANGELOG.md (92 changed lines)
@@ -401,104 +401,14 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
  </details>

- ## 2026-02-11
-
- ### 🆕 New Scripts
-
- - Draw.io ([#11788](https://github.com/community-scripts/ProxmoxVE/pull/11788))
-
- ### 🚀 Updated Scripts
-
- - #### 🐞 Bug Fixes
-
- - fix: make donetick 0.1.71 compatible [@tomfrenzel](https://github.com/tomfrenzel) ([#11804](https://github.com/community-scripts/ProxmoxVE/pull/11804))
- - Kasm: Support new version URL format without hash suffix [@MickLesk](https://github.com/MickLesk) ([#11787](https://github.com/community-scripts/ProxmoxVE/pull/11787))
- - LibreTranslate: Remove Torch [@tremor021](https://github.com/tremor021) ([#11783](https://github.com/community-scripts/ProxmoxVE/pull/11783))
- - Snowshare: fix update script [@TuroYT](https://github.com/TuroYT) ([#11726](https://github.com/community-scripts/ProxmoxVE/pull/11726))
-
- ### 💾 Core
-
- - #### 🔧 Refactor
-
- - core: respect EDITOR variable for config editing [@ls-root](https://github.com/ls-root) ([#11693](https://github.com/community-scripts/ProxmoxVE/pull/11693))
-
- ### 📚 Documentation
-
- - Fix formatting in kutt.json notes section [@tiagodenoronha](https://github.com/tiagodenoronha) ([#11774](https://github.com/community-scripts/ProxmoxVE/pull/11774))
-
- ## 2026-02-10
-
- ### 🚀 Updated Scripts
-
- - #### 🐞 Bug Fixes
-
- - Immich: Pin version to 2.5.6 [@vhsdream](https://github.com/vhsdream) ([#11775](https://github.com/community-scripts/ProxmoxVE/pull/11775))
- - Libretranslate: Fix setuptools [@tremor021](https://github.com/tremor021) ([#11772](https://github.com/community-scripts/ProxmoxVE/pull/11772))
- - Element Synapse: prevent systemd invoke failure during apt install [@MickLesk](https://github.com/MickLesk) ([#11758](https://github.com/community-scripts/ProxmoxVE/pull/11758))
-
- - #### ✨ New Features
-
- - Refactor: Slskd & Soularr [@vhsdream](https://github.com/vhsdream) ([#11674](https://github.com/community-scripts/ProxmoxVE/pull/11674))
-
- ### 🗑️ Deleted Scripts
-
- - move paperless-exporter from LXC to addon ([#11737](https://github.com/community-scripts/ProxmoxVE/pull/11737))
-
- ### 🧰 Tools
-
- - #### 🐞 Bug Fixes
-
- - feat: improve storage parsing & add guestname [@carlosmaroot](https://github.com/carlosmaroot) ([#11752](https://github.com/community-scripts/ProxmoxVE/pull/11752))
-
- ### 📂 Github
-
- - Github-Version Workflow: include addon scripts in extraction [@MickLesk](https://github.com/MickLesk) ([#11757](https://github.com/community-scripts/ProxmoxVE/pull/11757))
-
- ### 🌐 Website
-
- - #### 📝 Script Information
-
- - Snowshare: fix typo in config file path on website [@BirdMakingStuff](https://github.com/BirdMakingStuff) ([#11754](https://github.com/community-scripts/ProxmoxVE/pull/11754))
-
  ## 2026-02-09

  ### 🚀 Updated Scripts

  - #### 🐞 Bug Fixes

- - several scripts: add --clear to uv venv calls for uv 0.10 compatibility [@MickLesk](https://github.com/MickLesk) ([#11723](https://github.com/community-scripts/ProxmoxVE/pull/11723))
- - Koillection: ensure setup_composer is in update script [@MickLesk](https://github.com/MickLesk) ([#11734](https://github.com/community-scripts/ProxmoxVE/pull/11734))
- - PeaNUT: symlink server.js after update [@vhsdream](https://github.com/vhsdream) ([#11696](https://github.com/community-scripts/ProxmoxVE/pull/11696))
- - Umlautadaptarr: use release appsettings.json instead of hardcoded copy [@MickLesk](https://github.com/MickLesk) ([#11725](https://github.com/community-scripts/ProxmoxVE/pull/11725))
  - tracearr: prepare for next stable release [@durzo](https://github.com/durzo) ([#11673](https://github.com/community-scripts/ProxmoxVE/pull/11673))
+ - PeaNUT: symlink server.js after update [@vhsdream](https://github.com/vhsdream) ([#11696](https://github.com/community-scripts/ProxmoxVE/pull/11696))

- - #### ✨ New Features
-
- - remove whiptail from update scripts for unattended update support [@MickLesk](https://github.com/MickLesk) ([#11712](https://github.com/community-scripts/ProxmoxVE/pull/11712))
-
- - #### 🔧 Refactor
-
- - Refactor: FileFlows [@tremor021](https://github.com/tremor021) ([#11108](https://github.com/community-scripts/ProxmoxVE/pull/11108))
- - Refactor: wger [@MickLesk](https://github.com/MickLesk) ([#11722](https://github.com/community-scripts/ProxmoxVE/pull/11722))
- - Nginx-UI: better User Handling | ACME [@MickLesk](https://github.com/MickLesk) ([#11715](https://github.com/community-scripts/ProxmoxVE/pull/11715))
- - NginxProxymanager: use better-sqlite3 [@MickLesk](https://github.com/MickLesk) ([#11708](https://github.com/community-scripts/ProxmoxVE/pull/11708))
-
- ### 💾 Core
-
- - #### 🔧 Refactor
-
- - hwaccel: add libmfx-gen1.2 to Intel Arc setup for QSV support [@MickLesk](https://github.com/MickLesk) ([#11707](https://github.com/community-scripts/ProxmoxVE/pull/11707))
-
- ### 🧰 Tools
-
- - #### 🐞 Bug Fixes
-
- - addons: ensure curl is installed before use [@MickLesk](https://github.com/MickLesk) ([#11718](https://github.com/community-scripts/ProxmoxVE/pull/11718))
- - Netbird (addon): add systemd ordering to start after Docker [@MickLesk](https://github.com/MickLesk) ([#11716](https://github.com/community-scripts/ProxmoxVE/pull/11716))
-
- ### ❔ Uncategorized
-
- - Bichon: Update website [@tremor021](https://github.com/tremor021) ([#11711](https://github.com/community-scripts/ProxmoxVE/pull/11711))
-
  ## 2026-02-08
@@ -51,7 +51,7 @@ function update_script() {
  cp -r /opt/adventurelog-backup/backend/server/media /opt/adventurelog/backend/server/media
  cd /opt/adventurelog/backend/server
  if [[ ! -x .venv/bin/python ]]; then
- $STD uv venv --clear .venv
+ $STD uv venv .venv
  $STD .venv/bin/python -m ensurepip --upgrade
  fi
  $STD .venv/bin/python -m pip install --upgrade pip
@@ -44,7 +44,7 @@ function update_script() {
  msg_info "Updating Autocaliweb"
  cd "$INSTALL_DIR"
  if [[ ! -d "$VIRTUAL_ENV" ]]; then
- $STD uv venv --clear "$VIRTUAL_ENV"
+ $STD uv venv "$VIRTUAL_ENV"
  fi
  $STD uv sync --all-extras --active
  cd "$INSTALL_DIR"/koreader/plugins
@@ -40,7 +40,7 @@ function update_script() {
  chmod 775 /opt/bazarr /var/lib/bazarr/
  # Always ensure venv exists
  if [[ ! -d /opt/bazarr/venv/ ]]; then
- $STD uv venv --clear /opt/bazarr/venv --python 3.12
+ $STD uv venv /opt/bazarr/venv --python 3.12
  fi

  # Always check and fix service file if needed
@@ -103,7 +103,7 @@ function update_script() {

  cd /opt/dispatcharr
  rm -rf .venv
- $STD uv venv --clear
+ $STD uv venv
  $STD uv pip install -r requirements.txt --index-strategy unsafe-best-match
  $STD uv pip install gunicorn gevent celery redis daphne
  msg_ok "Updated Dispatcharr Backend"
@@ -35,14 +35,13 @@ function update_script() {
  msg_ok "Stopped Service"

  msg_info "Backing Up Configurations"
- mv /opt/donetick/config/selfhosted.yaml /opt/donetick/donetick.db /opt
+ mv /opt/donetick/config/selfhosted.yml /opt/donetick/donetick.db /opt
  msg_ok "Backed Up Configurations"

  CLEAN_INSTALL=1 fetch_and_deploy_gh_release "donetick" "donetick/donetick" "prebuild" "latest" "/opt/donetick" "donetick_Linux_x86_64.tar.gz"

  msg_info "Restoring Configurations"
- mv /opt/selfhosted.yaml /opt/donetick/config
- sed -i '/capacitor:\/\/localhost/d' /opt/donetick/config/selfhosted.yaml
+ mv /opt/selfhosted.yml /opt/donetick/config
  mv /opt/donetick.db /opt/donetick
  msg_ok "Restored Configurations"
ct/drawio.sh (58 changed lines)
@@ -1,58 +0,0 @@
- #!/usr/bin/env bash
- source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
- # Copyright (c) 2021-2026 community-scripts ORG
- # Author: Slaviša Arežina (tremor021)
- # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
- # Source: https://www.drawio.com/
-
- APP="DrawIO"
- var_tags="${var_tags:-diagrams}"
- var_cpu="${var_cpu:-1}"
- var_ram="${var_ram:-2048}"
- var_disk="${var_disk:-4}"
- var_os="${var_os:-debian}"
- var_version="${var_version:-13}"
- var_unprivileged="${var_unprivileged:-1}"
-
- header_info "$APP"
- variables
- color
- catch_errors
-
- function update_script() {
- header_info
- check_container_storage
- check_container_resources
- if [[ ! -f /var/lib/tomcat11/webapps/draw.war ]]; then
- msg_error "No ${APP} Installation Found!"
- exit
- fi
-
- if check_for_gh_release "drawio" "jgraph/drawio"; then
- msg_info "Stopping service"
- systemctl stop tomcat11
- msg_ok "Service stopped"
-
- msg_info "Updating Debian LXC"
- $STD apt update
- $STD apt upgrade -y
- msg_ok "Updated Debian LXC"
-
- USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "drawio" "jgraph/drawio" "singlefile" "latest" "/var/lib/tomcat11/webapps" "draw.war"
-
- msg_info "Starting service"
- systemctl start tomcat11
- msg_ok "Service started"
- msg_ok "Updated successfully!"
- fi
- exit
- }
-
- start
- build_container
- description
-
- msg_ok "Completed Successfully!\n"
- echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
- echo -e "${INFO}${YW} Access it using the following URL:${CL}"
- echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080/draw${CL}"
@@ -61,7 +61,7 @@ function update_script() {
  msg_info "Updating Backend"
  cd /opt/endurain/backend
  $STD poetry export -f requirements.txt --output requirements.txt --without-hashes
- $STD uv venv --clear
+ $STD uv venv
  $STD uv pip install -r requirements.txt
  msg_ok "Backend Updated"
@@ -42,7 +42,7 @@ function update_script() {
  rm -rf "$VENV_PATH"
  mkdir -p /opt/esphome
  cd /opt/esphome
- $STD uv venv --clear "$VENV_PATH"
+ $STD uv venv "$VENV_PATH"
  $STD "$VENV_PATH/bin/python" -m ensurepip --upgrade
  $STD "$VENV_PATH/bin/python" -m pip install --upgrade pip
  $STD "$VENV_PATH/bin/python" -m pip install esphome tornado esptool
@@ -37,12 +37,17 @@ function update_script() {
  msg_info "Stopped Service"

  msg_info "Creating Backup"
- ls /opt/*.tar.gz &>/dev/null && rm -f /opt/*.tar.gz
  backup_filename="/opt/${APP}_backup_$(date +%F).tar.gz"
  tar -czf "$backup_filename" -C /opt/fileflows Data
  msg_ok "Backup Created"

- fetch_and_deploy_from_url "https://fileflows.com/downloads/zip" "/opt/fileflows"
+ msg_info "Updating $APP to latest version"
+ temp_file=$(mktemp)
+ curl -fsSL https://fileflows.com/downloads/zip -o "$temp_file"
+ $STD unzip -o -d /opt/fileflows "$temp_file"
+ rm -rf "$temp_file"
+ rm -rf "$backup_filename"
+ msg_ok "Updated $APP to latest version"

  msg_info "Starting Service"
  systemctl start fileflows
@@ -1,6 +0,0 @@
- (6-line ASCII-art banner spelling "DrawIO", removed; original spacing not preserved in this capture)
ct/headers/prometheus-paperless-ngx-exporter (new file, 6 lines)
@@ -0,0 +1,6 @@
+ (6-line ASCII-art banner spelling "Prometheus-Paperless-NGX-Exporter", added; original spacing not preserved in this capture)
@@ -105,7 +105,7 @@ EOF
  msg_ok "Image-processing libraries up to date"
  fi

- RELEASE="2.5.6"
+ RELEASE="2.5.5"
  if check_for_gh_release "Immich" "immich-app/immich" "${RELEASE}"; then
  if [[ $(cat ~/.immich) > "2.5.1" ]]; then
  msg_info "Enabling Maintenance Mode"
@@ -34,7 +34,7 @@ function update_script() {
  PYTHON_VERSION="3.12" setup_uv
  mkdir -p "$INSTALL_DIR"
  cd "$INSTALL_DIR"
- $STD uv venv --clear .venv
+ $STD uv venv .venv
  $STD "$VENV_PYTHON" -m ensurepip --upgrade
  $STD "$VENV_PYTHON" -m pip install --upgrade pip
  $STD "$VENV_PYTHON" -m pip install jupyter
ct/kasm.sh (11 changed lines)
@@ -34,19 +34,10 @@ function update_script() {
  CURRENT_VERSION=$(readlink -f /opt/kasm/current | awk -F'/' '{print $4}')
  KASM_URL=$(curl -fsSL "https://www.kasm.com/downloads" | tr '\n' ' ' | grep -oE 'https://kasm-static-content[^"]*kasm_release_[0-9]+\.[0-9]+\.[0-9]+\.[a-z0-9]+\.tar\.gz' | head -n 1)
  if [[ -z "$KASM_URL" ]]; then
- SERVICE_IMAGE_URL=$(curl -fsSL "https://www.kasm.com/downloads" | tr '\n' ' ' | grep -oE 'https://kasm-static-content[^"]*kasm_release_service_images_amd64_[0-9]+\.[0-9]+\.[0-9]+\.tar\.gz' | head -n 1)
- if [[ -n "$SERVICE_IMAGE_URL" ]]; then
- KASM_VERSION=$(echo "$SERVICE_IMAGE_URL" | sed -E 's/.*kasm_release_service_images_amd64_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
- KASM_URL="https://kasm-static-content.s3.amazonaws.com/kasm_release_${KASM_VERSION}.tar.gz"
- fi
- else
- KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
- fi
-
- if [[ -z "$KASM_URL" ]] || [[ -z "$KASM_VERSION" ]]; then
  msg_error "Unable to detect latest Kasm release URL."
  exit 1
  fi
+ KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
  msg_info "Checked for new version"

  msg_info "Removing outdated docker-compose plugin"
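For reference, the `sed -E` expression kept on both sides of this hunk strips everything except the dotted release number from the download URL. A small hedged example with a made-up URL (the version and suffix below are illustrative, not a real Kasm release):

```bash
# Illustrative only; this URL is invented for the example.
KASM_URL="https://kasm-static-content.s3.amazonaws.com/kasm_release_1.18.0.abcdef.tar.gz"
KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
echo "$KASM_VERSION"   # prints: 1.18.0
```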
@@ -33,8 +33,7 @@ function update_script() {
  msg_ok "Stopped Service"

  PHP_VERSION="8.5" PHP_APACHE="YES" setup_php
- setup_composer

  msg_info "Creating a backup"
  mv /opt/koillection/ /opt/koillection-backup
  msg_ok "Backup created"
@@ -28,6 +28,12 @@ function update_script() {
  exit
  fi

+ msg_error "This script is currently disabled due to an external issue with the OpenResty APT repository."
+ msg_error "The repository's GPG key uses SHA-1 signatures, which are no longer accepted by Debian as of February 1, 2026."
+ msg_error "The issue is tracked in openresty/openresty#1097"
+ msg_error "For more details, see: https://github.com/community-scripts/ProxmoxVE/issues/11406"
+ exit 1
+
  if [[ $(grep -E '^VERSION_ID=' /etc/os-release) == *"12"* ]]; then
  msg_error "Wrong Debian version detected!"
  msg_error "Please create a snapshot first. You must upgrade your LXC to Debian Trixie before updating. Visit: https://github.com/community-scripts/ProxmoxVE/discussions/7489"

@@ -139,17 +145,15 @@ function update_script() {
  "database": {
  "engine": "knex-native",
  "knex": {
- "client": "better-sqlite3",
+ "client": "sqlite3",
  "connection": {
  "filename": "/data/database.sqlite"
- },
- "useNullAsDefault": true
+ }
  }
  }
  }
  EOF
  fi
- sed -i 's/"client": "sqlite3"/"client": "better-sqlite3"/' /app/config/production.json
  cd /app
  $STD yarn install --network-timeout 600000
  msg_ok "Initialized Backend"
ct/prometheus-paperless-ngx-exporter.sh (new executable file, 52 lines)
@@ -0,0 +1,52 @@
+ #!/usr/bin/env bash
+ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
+ # Copyright (c) 2021-2026 community-scripts ORG
+ # Author: Andy Grunwald (andygrunwald)
+ # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+ # Source: https://github.com/hansmi/prometheus-paperless-exporter
+
+ APP="Prometheus-Paperless-NGX-Exporter"
+ var_tags="${var_tags:-monitoring;alerting}"
+ var_cpu="${var_cpu:-1}"
+ var_ram="${var_ram:-256}"
+ var_disk="${var_disk:-2}"
+ var_os="${var_os:-debian}"
+ var_version="${var_version:-13}"
+ var_unprivileged="${var_unprivileged:-1}"
+
+ header_info "$APP"
+ variables
+ color
+ catch_errors
+
+ function update_script() {
+ header_info
+ check_container_storage
+ check_container_resources
+ if [[ ! -f /etc/systemd/system/prometheus-paperless-ngx-exporter.service ]]; then
+ msg_error "No ${APP} Installation Found!"
+ exit
+ fi
+ if check_for_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter"; then
+ msg_info "Stopping Service"
+ systemctl stop prometheus-paperless-ngx-exporter
+ msg_ok "Stopped Service"
+
+ fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary"
+
+ msg_info "Starting Service"
+ systemctl start prometheus-paperless-ngx-exporter
+ msg_ok "Started Service"
+ msg_ok "Updated successfully!"
+ fi
+ exit
+ }
+
+ start
+ build_container
+ description
+
+ msg_ok "Completed successfully!\n"
+ echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
+ echo -e "${INFO}${YW} Access it using the following URL:${CL}"
+ echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8081/metrics${CL}"
@@ -41,7 +41,7 @@ function update_script() {
  rm -rf "$PVE_VENV_PATH"
  mkdir -p /opt/prometheus-pve-exporter
  cd /opt/prometheus-pve-exporter
- $STD uv venv --clear "$PVE_VENV_PATH"
+ $STD uv venv "$PVE_VENV_PATH"
  $STD "$PVE_VENV_PATH/bin/python" -m ensurepip --upgrade
  $STD "$PVE_VENV_PATH/bin/python" -m pip install --upgrade pip
  $STD "$PVE_VENV_PATH/bin/python" -m pip install prometheus-pve-exporter
@@ -41,7 +41,7 @@ function update_script() {
  # Always ensure venv exists
  if [[ ! -d /opt/sabnzbd/venv ]]; then
  msg_info "Migrating SABnzbd to uv virtual environment"
- $STD uv venv --clear /opt/sabnzbd/venv
+ $STD uv venv /opt/sabnzbd/venv
  msg_ok "Created uv venv at /opt/sabnzbd/venv"
  fi
@@ -38,7 +38,7 @@ function update_script() {

  msg_info "Updating Scraparr"
  cd /opt/scraparr
- $STD uv venv --clear /opt/scraparr/.venv
+ $STD uv venv /opt/scraparr/.venv
  $STD /opt/scraparr/.venv/bin/python -m ensurepip --upgrade
  $STD /opt/scraparr/.venv/bin/python -m pip install --upgrade pip
  $STD /opt/scraparr/.venv/bin/python -m pip install -r /opt/scraparr/src/scraparr/requirements.txt
ct/slskd.sh (89 changed lines)
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
  # Copyright (c) 2021-2026 community-scripts ORG
  # Author: vhsdream
  # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
- # Source: https://github.com/slskd/slskd, https://github.com/mrusse/soularr
+ # Source: https://github.com/slskd/slskd, https://soularr.net

  APP="slskd"
  var_tags="${var_tags:-arr;p2p}"

@@ -24,65 +24,50 @@ function update_script() {
  check_container_storage
  check_container_resources

- if [[ ! -d /opt/slskd ]]; then
- msg_error "No Slskd Installation Found!"
+ if [[ ! -d /opt/slskd ]] || [[ ! -d /opt/soularr ]]; then
+ msg_error "No ${APP} Installation Found!"
  exit
  fi

- if check_for_gh_release "Slskd" "slskd/slskd"; then
- msg_info "Stopping Service(s)"
- systemctl stop slskd
- [[ -f /etc/systemd/system/soularr.service ]] && systemctl stop soularr.timer soularr.service
- msg_ok "Stopped Service(s)"
+ RELEASE=$(curl -s https://api.github.com/repos/slskd/slskd/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+ if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
+ msg_info "Stopping Service"
+ systemctl stop slskd soularr.timer soularr.service
+ msg_info "Stopped Service"

- msg_info "Backing up config"
- cp /opt/slskd/config/slskd.yml /opt/slskd.yml.bak
- msg_ok "Backed up config"
-
- CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Slskd" "slskd/slskd" "prebuild" "latest" "/opt/slskd" "slskd-*-linux-x64.zip"
-
- msg_info "Restoring config"
- mv /opt/slskd.yml.bak /opt/slskd/config/slskd.yml
- msg_ok "Restored config"
-
- msg_info "Starting Service(s)"
+ msg_info "Updating $APP to v${RELEASE}"
+ tmp_file=$(mktemp)
+ curl -fsSL "https://github.com/slskd/slskd/releases/download/${RELEASE}/slskd-${RELEASE}-linux-x64.zip" -o $tmp_file
+ $STD unzip -oj $tmp_file slskd -d /opt/${APP}
+ echo "${RELEASE}" >/opt/${APP}_version.txt
+ msg_ok "Updated $APP to v${RELEASE}"
+
+ msg_info "Starting Service"
  systemctl start slskd
- [[ -f /etc/systemd/system/soularr.service ]] && systemctl start soularr.timer
- msg_ok "Started Service(s)"
- msg_ok "Updated Slskd successfully!"
+ msg_ok "Started Service"
+ rm -rf $tmp_file
+ else
+ msg_ok "No ${APP} update required. ${APP} is already at v${RELEASE}"
  fi
- [[ -d /opt/soularr ]] && if check_for_gh_release "Soularr" "mrusse/soularr"; then
- if systemctl is-active soularr.timer >/dev/null; then
- msg_info "Stopping Timer and Service"
- systemctl stop soularr.timer soularr.service
- msg_ok "Stopped Timer and Service"
- fi
-
- msg_info "Backing up Soularr config"
- cp /opt/soularr/config.ini /opt/soularr_config.ini.bak
- cp /opt/soularr/run.sh /opt/soularr_run.sh.bak
- msg_ok "Backed up Soularr config"
-
- PYTHON_VERSION="3.11" setup_uv
- CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Soularr" "mrusse/soularr" "tarball" "latest" "/opt/soularr"
- msg_info "Updating Soularr"
- cd /opt/soularr
- $STD uv venv -c venv
- $STD source venv/bin/activate
- $STD uv pip install -r requirements.txt
- deactivate
- msg_ok "Updated Soularr"
-
- msg_info "Restoring Soularr config"
- mv /opt/soularr_config.ini.bak /opt/soularr/config.ini
- mv /opt/soularr_run.sh.bak /opt/soularr/run.sh
- msg_ok "Restored Soularr config"
-
- msg_info "Starting Soularr Timer"
- systemctl restart soularr.timer
- msg_ok "Started Soularr Timer"
- msg_ok "Updated Soularr successfully!"
- fi
+ msg_info "Updating Soularr"
+ cp /opt/soularr/config.ini /opt/config.ini.bak
+ cp /opt/soularr/run.sh /opt/run.sh.bak
+ cd /tmp
+ rm -rf /opt/soularr
+ curl -fsSL -o main.zip https://github.com/mrusse/soularr/archive/refs/heads/main.zip
+ $STD unzip main.zip
+ mv soularr-main /opt/soularr
+ cd /opt/soularr
+ $STD pip install -r requirements.txt
+ mv /opt/config.ini.bak /opt/soularr/config.ini
+ mv /opt/run.sh.bak /opt/soularr/run.sh
+ rm -rf /tmp/main.zip
+ msg_ok "Updated soularr"
+
+ msg_info "Starting soularr timer"
+ systemctl start soularr.timer
+ msg_ok "Started soularr timer"
+ exit
  }

  start
@@ -33,15 +33,7 @@ function update_script() {
  systemctl stop snowshare
  msg_ok "Stopped Service"

- msg_info "Backing up uploads"
- [ -d /opt/snowshare/uploads ] && cp -a /opt/snowshare/uploads /opt/.snowshare_uploads_backup
- msg_ok "Uploads backed up"
-
- CLEAN_INSTALL=1 fetch_and_deploy_gh_release "snowshare" "TuroYT/snowshare" "tarball"
-
- msg_info "Restoring uploads"
- [ -d /opt/.snowshare_uploads_backup ] && rm -rf /opt/snowshare/uploads && cp -a /opt/.snowshare_uploads_backup /opt/snowshare/uploads
- msg_ok "Uploads restored"
+ fetch_and_deploy_gh_release "snowshare" "TuroYT/snowshare" "tarball"

  msg_info "Updating Snowshare"
  cd /opt/snowshare
@@ -39,7 +39,7 @@ function update_script() {
  CLEAN_INSTALL=1 fetch_and_deploy_gh_release "streamlink-webui" "CrazyWolf13/streamlink-webui" "tarball"

  msg_info "Updating streamlink-webui"
- $STD uv venv --clear /opt/streamlink-webui/backend/src/.venv
+ $STD uv venv /opt/streamlink-webui/backend/src/.venv
  source /opt/streamlink-webui/backend/src/.venv/bin/activate
  $STD uv pip install -r /opt/streamlink-webui/backend/src/requirements.txt --python=/opt/streamlink-webui/backend/src/.venv
  cd /opt/streamlink-webui/frontend/src
@@ -50,7 +50,7 @@ function update_script() {
  cp -r /opt/tandoor.bak/{config,api,mediafiles,staticfiles} /opt/tandoor/
  mv /opt/tandoor.bak/.env /opt/tandoor/.env
  cd /opt/tandoor
- $STD uv venv --clear .venv --python=python3
+ $STD uv venv .venv --python=python3
  $STD uv pip install -r requirements.txt --python .venv/bin/python
  cd /opt/tandoor/vue3
  $STD yarn install
@@ -33,9 +33,7 @@ function update_script() {
  systemctl stop umlautadaptarr
  msg_ok "Stopped Service"

- cp /opt/UmlautAdaptarr/appsettings.json /opt/UmlautAdaptarr/appsettings.json.bak
  fetch_and_deploy_gh_release "UmlautAdaptarr" "PCJones/Umlautadaptarr" "prebuild" "latest" "/opt/UmlautAdaptarr" "linux-x64.zip"
- cp /opt/UmlautAdaptarr/appsettings.json.bak /opt/UmlautAdaptarr/appsettings.json

  msg_info "Starting Service"
  systemctl start umlautadaptarr
@@ -39,7 +39,7 @@ function update_script() {

  msg_info "Updating Warracker"
  cd /opt/warracker/backend
- $STD uv venv --clear .venv
+ $STD uv venv .venv
  $STD source .venv/bin/activate
  $STD uv pip install -r requirements.txt
  msg_ok "Updated Warracker"
ct/wger.sh (64 changed lines)
@@ -7,9 +7,9 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV

  APP="wger"
  var_tags="${var_tags:-management;fitness}"
- var_cpu="${var_cpu:-2}"
- var_ram="${var_ram:-2048}"
- var_disk="${var_disk:-8}"
+ var_cpu="${var_cpu:-1}"
+ var_ram="${var_ram:-1024}"
+ var_disk="${var_disk:-6}"
  var_os="${var_os:-debian}"
  var_version="${var_version:-13}"
  var_unprivileged="${var_unprivileged:-1}"

@@ -23,44 +23,38 @@ function update_script() {
  header_info
  check_container_storage
  check_container_resources
- if [[ ! -d /opt/wger ]]; then
+ if [[ ! -d /home/wger ]]; then
  msg_error "No ${APP} Installation Found!"
  exit
  fi
- if check_for_gh_release "wger" "wger-project/wger"; then
+ RELEASE=$(curl -fsSL https://api.github.com/repos/wger-project/wger/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}')
+ if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
  msg_info "Stopping Service"
- systemctl stop redis-server nginx celery celery-beat wger
+ systemctl stop wger
  msg_ok "Stopped Service"

- msg_info "Backing up Data"
- cp -r /opt/wger/media /opt/wger_media_backup
- cp /opt/wger/.env /opt/wger_env_backup
- msg_ok "Backed up Data"
+ msg_info "Updating $APP to v${RELEASE}"
+ temp_file=$(mktemp)
+ curl -fsSL "https://github.com/wger-project/wger/archive/refs/tags/$RELEASE.tar.gz" -o "$temp_file"
+ tar xzf "$temp_file"
+ cp -rf wger-"$RELEASE"/* /home/wger/src
+ cd /home/wger/src
+ $STD pip install -r requirements_prod.txt --ignore-installed
+ $STD pip install -e .
+ $STD python3 manage.py migrate
+ $STD python3 manage.py collectstatic --no-input
+ $STD yarn install
+ $STD yarn build:css:sass
+ rm -rf "$temp_file"
+ echo "${RELEASE}" >/opt/${APP}_version.txt
+ msg_ok "Updated $APP to v${RELEASE}"

- CLEAN_INSTALL=1 fetch_and_deploy_gh_release "wger" "wger-project/wger" "tarball"
-
- msg_info "Restoring Data"
- cp -r /opt/wger_media_backup/. /opt/wger/media
- cp /opt/wger_env_backup /opt/wger/.env
- rm -rf /opt/wger_media_backup /opt/wger_env_backup
- msg_ok "Restored Data"
-
- msg_info "Updating wger"
- cd /opt/wger
- set -a && source /opt/wger/.env && set +a
- export DJANGO_SETTINGS_MODULE=settings.main
- $STD uv pip install .
- $STD uv run python manage.py migrate
- $STD uv run python manage.py collectstatic --no-input
- msg_ok "Updated wger"
-
- msg_info "Starting Services"
- systemctl start redis-server nginx celery celery-beat wger
- msg_ok "Started Services"
- msg_ok "Updated Successfully"
+ msg_info "Starting Service"
+ systemctl start wger
+ msg_ok "Started Service"
+ msg_ok "Updated successfully!"
+ else
+ msg_ok "No update required. ${APP} is already at v${RELEASE}"
  fi
  exit
  }

@@ -69,7 +63,7 @@ start
  build_container
  description

- msg_ok "Completed Successfully!\n"
+ msg_ok "Completed successfully!\n"
  echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
  echo -e "${INFO}${YW} Access it using the following URL:${CL}"
  echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
@@ -35,10 +35,6 @@
  {
  "text": "The Disk space initially allocated by the script is only a placeholder, as we can't know how much space you will ever need. You should increase it to match your workload.",
  "type": "info"
- },
- {
- "text": "Please copy your `BICHON_ENCRYPT_PASSWORD` from `/opt/bichon/bichon.env` to a safe place.",
- "type": "warning"
  }
  ]
  }
@@ -1,35 +0,0 @@
- {
- "name": "Draw.IO",
- "slug": "drawio",
- "categories": [
- 12
- ],
- "date_created": "2026-02-11",
- "type": "ct",
- "updateable": true,
- "privileged": false,
- "interface_port": 8080,
- "documentation": "https://www.drawio.com/doc/",
- "website": "https://www.drawio.com/",
- "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/draw-io.webp",
- "config_path": "",
- "description": "draw.io is a configurable diagramming and whiteboarding application, jointly owned and developed by draw.io Ltd (previously named JGraph) and draw.io AG.",
- "install_methods": [
- {
- "type": "default",
- "script": "ct/drawio.sh",
- "resources": {
- "cpu": 1,
- "ram": 2048,
- "hdd": 4,
- "os": "Debian",
- "version": "13"
- }
- }
- ],
- "default_credentials": {
- "username": null,
- "password": null
- },
- "notes": []
- }
@@ -1,5 +1,5 @@
  {
- "generated": "2026-02-11T18:22:25Z",
+ "generated": "2026-02-09T06:27:10Z",
  "versions": [
  {
  "slug": "2fauth",

The remaining hunks in this version manifest only change the "version" and "date" fields of individual entries, or remove entries entirely ("repo" and "pinned" are unchanged). Changed entries, listed as "-" side → "+" side:

| Slug | Repo | Version | Date |
|---|---|---|---|
| bentopdf | alam00000/bentopdf | v2.2.0 → v2.1.0 | 2026-02-09T07:07:40Z → 2026-02-02T14:30:55Z |
| comfyui | comfyanonymous/ComfyUI | v0.13.0 → v0.12.3 | 2026-02-10T20:27:38Z → 2026-02-05T07:04:07Z |
| commafeed | Athou/commafeed | 6.2.0 → 6.1.1 | 2026-02-09T19:44:58Z → 2026-01-26T15:14:16Z |
| cronicle | jhuckaby/Cronicle | v0.9.106 → v0.9.105 | 2026-02-11T17:11:46Z → 2026-02-05T18:16:11Z |
| cryptpad | cryptpad/cryptpad | 2026.2.0 → 2025.9.0 | 2026-02-11T15:39:05Z → 2025-10-22T10:06:29Z |
| discopanel | nickheyer/discopanel | v1.0.36 → v1.0.35 | 2026-02-09T21:15:44Z → 2026-02-02T05:20:12Z |
| dispatcharr | Dispatcharr/Dispatcharr | v0.19.0 → v0.18.1 | 2026-02-10T21:18:10Z → 2026-01-27T17:09:11Z |
| docmost | docmost/docmost | v0.25.3 → v0.25.2 | 2026-02-10T02:58:23Z → 2026-02-06T19:50:55Z |
| domain-locker | Lissy93/domain-locker | v0.1.3 → v0.1.2 | 2026-02-11T10:03:32Z → 2025-11-14T22:08:23Z |
| domain-monitor | Hosteroid/domain-monitor | v1.1.3 → v1.1.1 | 2026-02-11T15:48:18Z → 2025-11-18T11:32:30Z |
| donetick | donetick/donetick | v0.1.71 → v0.1.64 | 2026-02-11T06:01:13Z → 2025-10-03T05:18:24Z |
| endurain | endurain-project/endurain | v0.17.4 → v0.17.3 | 2026-02-11T04:54:22Z → 2026-01-23T22:02:05Z |
| jackett | Jackett/Jackett | v0.24.1094 → v0.24.1074 | 2026-02-11T06:01:16Z → 2026-02-09T06:01:19Z |
| keycloak | keycloak/keycloak | 26.5.3 → 26.5.2 | 2026-02-10T07:30:08Z → 2026-01-23T14:26:58Z |
| kometa | Kometa-Team/Kometa | v2.3.0 → v2.2.2 | 2026-02-09T21:26:56Z → 2025-10-06T21:31:07Z |
| libretranslate | LibreTranslate/LibreTranslate | v1.9.0 → v1.8.4 | 2026-02-10T19:05:48Z → 2026-02-02T17:45:16Z |
| lubelogger | hargata/lubelog | v1.6.0 → v1.5.8 | 2026-02-10T20:16:32Z → 2026-01-26T18:18:03Z |
| mail-archiver | s1t5/mail-archiver | 2602.1 → 2601.3 | 2026-02-11T06:23:11Z → 2026-01-25T12:52:24Z |
| manyfold | manyfold3d/manyfold | v0.132.1 → v0.132.0 | 2026-02-09T22:02:28Z → 2026-01-29T13:53:21Z |
| mediamanager | maxdorninger/MediaManager | v1.12.3 → v1.12.2 | 2026-02-11T16:45:40Z → 2026-02-08T19:18:29Z |
| myip | jason5ng32/MyIP | v5.2.1 → v5.2.0 | 2026-02-10T07:38:47Z → 2026-01-05T05:56:57Z |
| navidrome | navidrome/navidrome | v0.60.3 → v0.60.2 | 2026-02-10T23:55:04Z → 2026-02-07T19:42:33Z |
| pelican-panel | pelican-dev/panel | v1.0.0-beta32 → v1.0.0-beta31 | 2026-02-09T22:15:44Z → 2026-01-18T22:43:24Z |
| pelican-wings | pelican-dev/wings | v1.0.0-beta23 → v1.0.0-beta22 | 2026-02-09T22:10:26Z → 2026-01-18T22:38:36Z |
| planka | plankanban/planka | v2.0.0 → v2.0.0-rc.4 | 2026-02-11T13:50:10Z → 2025-09-04T12:41:17Z |
| pulse | rcourtman/Pulse | v5.1.9 → v5.1.5 | 2026-02-11T15:34:40Z → 2026-02-08T12:19:53Z |
| scanopy | scanopy/scanopy | v0.14.4 → v0.14.3 | 2026-02-10T03:57:28Z → 2026-02-04T01:41:01Z |
| scraparr | thecfu/scraparr | v3.0.1 → v2.2.5 | 2026-02-11T17:42:23Z → 2025-10-07T12:34:31Z |
| snowshare | TuroYT/snowshare | v1.3.5 → v1.2.12 | 2026-02-11T10:24:51Z → 2026-01-30T13:35:56Z |
| tracearr | connorgallopo/Tracearr | v1.4.17 → v1.4.12 | 2026-02-11T01:33:21Z → 2026-01-28T23:29:37Z |
| traefik | traefik/traefik | v3.6.8 → v3.6.7 | 2026-02-11T16:44:37Z → 2026-01-14T14:11:45Z |
| trip | itskovacs/TRIP | 1.40.0 → 1.39.0 | 2026-02-10T20:12:53Z → 2026-02-07T16:59:51Z |
| vaultwarden | dani-garcia/vaultwarden | 1.35.3 → 1.35.2 | 2026-02-10T20:37:03Z → 2026-01-09T18:37:04Z |
| vikunja | go-vikunja/vikunja | v1.1.0 → v1.0.0 | 2026-02-09T10:34:29Z → 2026-01-28T11:12:59Z |
| wallos | ellite/Wallos | v4.6.1 → v4.6.0 | 2026-02-10T21:06:46Z → 2025-12-20T15:57:51Z |
| wavelog | wavelog/wavelog | 2.3 → 2.2.2 | (dates cut off in this capture) |

Entries removed on the "+" side:

| Slug | Repo | Version | Date |
|---|---|---|---|
| adguardhome-sync | bakito/adguardhome-sync | v0.8.2 | 2025-10-24T17:13:47Z |
| drawio | jgraph/drawio | v29.3.6 | 2026-01-28T18:25:02Z |
| immich-public-proxy | alangrainger/immich-public-proxy | v1.15.1 | 2026-01-26T08:04:27Z |
| jellystat | CyferShepard/Jellystat | V1.1.8 | 2026-02-08T08:15:00Z |
| nextcloud-exporter | xperimental/nextcloud-exporter | v0.9.0 | 2025-10-12T20:03:10Z |
| pihole-exporter | eko/pihole-exporter | v1.2.0 | 2025-07-29T19:15:37Z |
| qbittorrent-exporter | martabal/qbittorrent-exporter | v1.13.2 | 2025-12-13T22:59:03Z |
| slskd | slskd/slskd | 0.24.3 | 2026-01-15T14:40:15Z |
"date": "2026-02-11T15:46:40Z"
|
"date": "2025-12-31T16:53:34Z"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"slug": "wealthfolio",
|
"slug": "wealthfolio",
|
||||||
@@ -1646,13 +1590,6 @@
|
|||||||
"pinned": false,
|
"pinned": false,
|
||||||
"date": "2025-11-11T14:30:28Z"
|
"date": "2025-11-11T14:30:28Z"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"slug": "wger",
|
|
||||||
"repo": "wger-project/wger",
|
|
||||||
"version": "2.4",
|
|
||||||
"pinned": false,
|
|
||||||
"date": "2026-01-18T12:12:02Z"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"slug": "wikijs",
|
"slug": "wikijs",
|
||||||
"repo": "requarks/wiki",
|
"repo": "requarks/wiki",
|
||||||
@@ -1663,9 +1600,9 @@
|
|||||||
{
|
{
|
||||||
"slug": "wishlist",
|
"slug": "wishlist",
|
||||||
"repo": "cmintey/wishlist",
|
"repo": "cmintey/wishlist",
|
||||||
"version": "v0.60.0",
|
"version": "v0.59.0",
|
||||||
"pinned": false,
|
"pinned": false,
|
||||||
"date": "2026-02-10T04:05:26Z"
|
"date": "2026-01-19T16:42:14Z"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"slug": "wizarr",
|
"slug": "wizarr",
|
||||||
@@ -1691,9 +1628,9 @@
|
|||||||
{
|
{
|
||||||
"slug": "yubal",
|
"slug": "yubal",
|
||||||
"repo": "guillevc/yubal",
|
"repo": "guillevc/yubal",
|
||||||
"version": "v0.5.0",
|
"version": "v0.4.2",
|
||||||
"pinned": false,
|
"pinned": false,
|
||||||
"date": "2026-02-09T22:11:32Z"
|
"date": "2026-02-08T21:35:13Z"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"slug": "zigbee2mqtt",
|
"slug": "zigbee2mqtt",
|
||||||
@@ -1705,9 +1642,9 @@
|
|||||||
{
|
{
|
||||||
"slug": "zipline",
|
"slug": "zipline",
|
||||||
"repo": "diced/zipline",
|
"repo": "diced/zipline",
|
||||||
"version": "v4.4.2",
|
"version": "v4.4.1",
|
||||||
"pinned": false,
|
"pinned": false,
|
||||||
"date": "2026-02-11T04:58:54Z"
|
"date": "2026-01-20T01:29:01Z"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"slug": "zitadel",
|
"slug": "zitadel",
|
||||||
|
|||||||
@@ -33,7 +33,7 @@
 },
 "notes": [
 {
-"text": "Kutt needs so be served with an SSL certificate for its login to work. During install, you will be prompted to choose if you want to have Caddy installed for SSL termination or if you want to use your own reverse proxy (in that case point your reverse proxy to port 3000).",
+"text": "Kutt needs so be served with an SSL certificate for its login to work. During install, you will be prompted to choose if you want to have Caddy installed for SSL termination or if you want to use your own reverse proxy (in that case point your reverse porxy to port 3000).",
 "type": "info"
 }
 ]

@@ -28,14 +28,10 @@
 }
 ],
 "default_credentials": {
-"username": null,
+"username": "admin",
 "password": null
 },
 "notes": [
-{
-"text": "On first visit, the setup wizard will guide you to create an admin account and configure ACME email.",
-"type": "warning"
-},
 {
 "text": "Nginx runs on ports 80/443, Nginx UI management interface on port 9000.",
 "type": "info"
@@ -43,6 +39,10 @@
 {
 "text": "SSL certificates can be managed automatically with Let's Encrypt integration.",
 "type": "info"
+},
+{
+"text": "Initial Login data: `cat ~/nginx-ui.creds`",
+"type": "info"
 }
 ]
 }

@@ -30,14 +30,10 @@
 }
 ],
 "default_credentials": {
-"username": null,
+"username": "admin@example.com",
-"password": null
+"password": "changeme"
 },
 "notes": [
-{
-"text": "On first launch, a setup wizard will guide you through creating an admin account. There are no default credentials.",
-"type": "info"
-},
 {
 "text": "You can install the specific one certbot you prefer, or you can Running /app/scripts/install-certbot-plugins within the Nginx Proxy Manager (NPM) LXC shell will install many common plugins. Important: This script does not install all Certbot plugins, as some require additional, external system dependencies (like specific packages for certain DNS providers). These external dependencies must be manually installed within the LXC container before you can successfully install and use the corresponding Certbot plugin. Consult the plugin's documentation for required packages.",
 "type": "info"

@@ -1,35 +1,44 @@
 {
 "name": "Prometheus Paperless NGX Exporter",
 "slug": "prometheus-paperless-ngx-exporter",
 "categories": [
 9
 ],
 "date_created": "2025-02-07",
-"type": "addon",
+"type": "ct",
 "updateable": true,
 "privileged": false,
 "interface_port": 8081,
-"documentation": "https://github.com/hansmi/prometheus-paperless-exporter",
+"documentation": null,
 "website": "https://github.com/hansmi/prometheus-paperless-exporter",
 "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/paperless-ngx.webp",
-"config_path": "/etc/prometheus-paperless-ngx-exporter/config.env",
+"config_path": "",
 "description": "Prometheus metrics exporter for Paperless-NGX, a document management system transforming physical documents into a searchable online archive. The exporter relies on Paperless' REST API.",
 "install_methods": [
 {
 "type": "default",
-"script": "tools/addon/prometheus-paperless-ngx-exporter.sh",
+"script": "ct/prometheus-paperless-ngx-exporter.sh",
 "resources": {
-"cpu": null,
+"cpu": 1,
-"ram": null,
+"ram": 256,
-"hdd": null,
+"hdd": 2,
-"os": null,
+"os": "debian",
-"version": null
+"version": "13"
 }
 }
 ],
 "default_credentials": {
 "username": null,
 "password": null
 },
-"notes": []
+"notes": [
+{
+"text": "Please adjust the Paperless URL in the systemd unit file: /etc/systemd/system/prometheus-paperless-ngx-exporter.service",
+"type": "info"
+},
+{
+"text": "Please adjust the Paperless authentication token in the configuration file: /etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file",
+"type": "info"
+}
+]
 }

@@ -1,5 +1,5 @@
 {
-"name": "Slskd",
+"name": "slskd",
 "slug": "slskd",
 "categories": [
 11
@@ -35,6 +35,10 @@
 {
 "text": "See /opt/slskd/config/slskd.yml to add your Soulseek credentials",
 "type": "info"
+},
+{
+"text": "This LXC includes Soularr; it needs to be configured (/opt/soularr/config.ini) before it will work",
+"type": "info"
 }
 ]
 }

@@ -10,7 +10,7 @@
 "privileged": false,
 "interface_port": 3000,
 "documentation": "https://github.com/TuroYT/snowshare",
-"config_path": "/opt/snowshare.env",
+"config_path": "/opt/snowshare/.env",
 "website": "https://github.com/TuroYT/snowshare",
 "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/png/snowshare.png",
 "description": "A modern, secure file and link sharing platform built with Next.js, Prisma, and NextAuth. Share URLs, code snippets, and files with customizable expiration, privacy, and QR codes.",

@@ -19,9 +19,9 @@
 "type": "default",
 "script": "ct/wger.sh",
 "resources": {
-"cpu": 2,
+"cpu": 1,
-"ram": 2048,
+"ram": 1024,
-"hdd": 8,
+"hdd": 6,
 "os": "debian",
 "version": "13"
 }
@@ -33,7 +33,7 @@
 },
 "notes": [
 {
-"text": "This LXC also runs Celery and Redis to synchronize workouts and ingredients",
+"text": "Enable proxy support by uncommenting this line in `/home/wger/src/settings.py` and pointing it to your URL: `# CSRF_TRUSTED_ORIGINS = ['http://127.0.0.1', 'https://my.domain.example.com']`, then restart the service `systemctl restart wger`.",
 "type": "info"
 }
 ]

@@ -58,7 +58,7 @@ DISABLE_REGISTRATION=False
 EOF
 cd /opt/adventurelog/backend/server
 mkdir -p /opt/adventurelog/backend/server/media
-$STD uv venv --clear /opt/adventurelog/backend/server/.venv
+$STD uv venv /opt/adventurelog/backend/server/.venv
 $STD /opt/adventurelog/backend/server/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/adventurelog/backend/server/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/adventurelog/backend/server/.venv/bin/python -m pip install -r requirements.txt

@@ -77,7 +77,7 @@ echo "${KEPUB_VERSION#v}" >"$INSTALL_DIR"/KEPUBIFY_RELEASE
 sed 's/^/v/' ~/.autocaliweb >"$INSTALL_DIR"/ACW_RELEASE

 cd "$INSTALL_DIR"
-$STD uv venv --clear "$VIRTUAL_ENV"
+$STD uv venv "$VIRTUAL_ENV"
 $STD uv sync --all-extras --active
 cat <<EOF >./dirs.json
 {

@@ -29,7 +29,7 @@ fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy" "tarball"
 msg_info "Installing Babybuddy"
 mkdir -p /opt/data
 cd /opt/babybuddy
-$STD uv venv --clear .venv
+$STD uv venv .venv
 $STD source .venv/bin/activate
 $STD uv pip install -r requirements.txt
 cp babybuddy/settings/production.example.py babybuddy/settings/production.py

@@ -20,7 +20,7 @@ msg_info "Installing Bazarr"
 mkdir -p /var/lib/bazarr/
 chmod 775 /opt/bazarr /var/lib/bazarr/
 sed -i.bak 's/--only-binary=Pillow//g' /opt/bazarr/requirements.txt
-$STD uv venv --clear /opt/bazarr/venv --python 3.12
+$STD uv venv /opt/bazarr/venv --python 3.12
 $STD uv pip install -r /opt/bazarr/requirements.txt --python /opt/bazarr/venv/bin/python3
 msg_ok "Installed Bazarr"

@@ -36,7 +36,7 @@ PYTHON_VERSION="3.12" setup_uv
 fetch_and_deploy_gh_release "ComfyUI" "comfyanonymous/ComfyUI" "tarball" "latest" "/opt/ComfyUI"

 msg_info "Python dependencies"
-$STD uv venv --clear "/opt/ComfyUI/venv"
+$STD uv venv "/opt/ComfyUI/venv"

 if [[ "${comfyui_gpu_type,,}" == "nvidia" ]]; then
 pytorch_url="https://download.pytorch.org/whl/cu130"

@@ -36,7 +36,7 @@ fetch_and_deploy_gh_release "dispatcharr" "Dispatcharr/Dispatcharr" "tarball"

 msg_info "Installing Python Dependencies with uv"
 cd /opt/dispatcharr
-$STD uv venv --clear
+$STD uv venv
 $STD uv pip install -r requirements.txt --index-strategy unsafe-best-match
 $STD uv pip install gunicorn gevent celery redis daphne
 msg_ok "Installed Python Dependencies"

@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright (c) 2021-2026 community-scripts ORG
-# Author: Slaviša Arežina (tremor021)
-# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
-# Source: https://www.drawio.com/
-
-source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
-color
-verb_ip6
-catch_errors
-setting_up_container
-network_check
-update_os
-setup_hwaccel
-
-msg_info "Installing Dependencies"
-$STD apt install -y tomcat11
-msg_ok "Installed Dependencies"
-
-USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "drawio" "jgraph/drawio" "singlefile" "latest" "/var/lib/tomcat11/webapps" "draw.war"
-
-motd_ssh
-customize
-cleanup_lxc

@@ -31,10 +31,8 @@ setup_deb822_repo "matrix-org" \
 "main"
 echo "matrix-synapse-py3 matrix-synapse/server-name string $servername" | debconf-set-selections
 echo "matrix-synapse-py3 matrix-synapse/report-stats boolean false" | debconf-set-selections
-echo "exit 101" >/usr/sbin/policy-rc.d
-chmod +x /usr/sbin/policy-rc.d
 $STD apt install matrix-synapse-py3 -y
-rm -f /usr/sbin/policy-rc.d
+systemctl stop matrix-synapse
 sed -i 's/127.0.0.1/0.0.0.0/g' /etc/matrix-synapse/homeserver.yaml
 sed -i 's/'\''::1'\'', //g' /etc/matrix-synapse/homeserver.yaml
 SECRET=$(openssl rand -hex 32)

@@ -86,7 +86,7 @@ $STD uv tool update-shell
 export PATH="/root/.local/bin:$PATH"
 $STD poetry self add poetry-plugin-export
 $STD poetry export -f requirements.txt --output requirements.txt --without-hashes
-$STD uv venv --clear
+$STD uv venv
 $STD uv pip install -r requirements.txt
 msg_ok "Setup Backend"

@@ -23,7 +23,7 @@ msg_info "Setting up Virtual Environment"
 mkdir -p /opt/esphome
 mkdir -p /root/config
 cd /opt/esphome
-$STD uv venv --clear /opt/esphome/.venv
+$STD uv venv /opt/esphome/.venv
 $STD /opt/esphome/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/esphome/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/esphome/.venv/bin/python -m pip install esphome tornado esptool

@@ -15,30 +15,31 @@ network_check
 update_os

 msg_info "Installing Dependencies"
-$STD apt install -y \
+$STD apt-get install -y \
   ffmpeg \
+  jq \
   imagemagick
 msg_ok "Installed Dependencies"

 setup_hwaccel

 msg_info "Installing ASP.NET Core Runtime"
-setup_deb822_repo \
-  "microsoft" \
-  "https://packages.microsoft.com/keys/microsoft-2025.asc" \
-  "https://packages.microsoft.com/debian/13/prod/" \
-  "trixie"
-$STD apt install -y aspnetcore-runtime-8.0
+curl -fsSL https://packages.microsoft.com/config/debian/13/packages-microsoft-prod.deb -o packages-microsoft-prod.deb
+$STD dpkg -i packages-microsoft-prod.deb
+rm -rf packages-microsoft-prod.deb
+$STD apt-get update
+$STD apt-get install -y aspnetcore-runtime-8.0
 msg_ok "Installed ASP.NET Core Runtime"

-fetch_and_deploy_from_url "https://fileflows.com/downloads/zip" "/opt/fileflows"

 msg_info "Setup FileFlows"
 $STD ln -svf /usr/bin/ffmpeg /usr/local/bin/ffmpeg
 $STD ln -svf /usr/bin/ffprobe /usr/local/bin/ffprobe
-cd /opt/fileflows/Server
-dotnet FileFlows.Server.dll --systemd install --root true
+temp_file=$(mktemp)
+curl -fsSL https://fileflows.com/downloads/zip -o "$temp_file"
+$STD unzip -d /opt/fileflows "$temp_file"
+$STD bash -c "cd /opt/fileflows/Server && dotnet FileFlows.Server.dll --systemd install --root true"
 systemctl enable -q --now fileflows
+rm -f "$temp_file"
 msg_ok "Setup FileFlows"

 motd_ssh

@@ -17,7 +17,7 @@ PYTHON_VERSION="3.12" setup_uv
 fetch_and_deploy_gh_release "huntarr" "plexguide/Huntarr.io" "tarball"

 msg_info "Configure Huntarr"
-$STD uv venv --clear /opt/huntarr/.venv
+$STD uv venv /opt/huntarr/.venv
 $STD uv pip install --python /opt/huntarr/.venv/bin/python -r /opt/huntarr/requirements.txt
 msg_ok "Configured Huntrarr"

@@ -289,7 +289,7 @@ ML_DIR="${APP_DIR}/machine-learning"
 GEO_DIR="${INSTALL_DIR}/geodata"
 mkdir -p {"${APP_DIR}","${UPLOAD_DIR}","${GEO_DIR}","${INSTALL_DIR}"/cache}

-fetch_and_deploy_gh_release "Immich" "immich-app/immich" "tarball" "v2.5.6" "$SRC_DIR"
+fetch_and_deploy_gh_release "Immich" "immich-app/immich" "tarball" "v2.5.5" "$SRC_DIR"
 PNPM_VERSION="$(jq -r '.packageManager | split("@")[1]' ${SRC_DIR}/package.json)"
 NODE_VERSION="24" NODE_MODULE="pnpm@${PNPM_VERSION}" setup_nodejs

@@ -18,7 +18,7 @@ PYTHON_VERSION="3.12" setup_uv
 msg_info "Installing Jupyter"
 mkdir -p /opt/jupyter
 cd /opt/jupyter
-$STD uv venv --clear /opt/jupyter/.venv
+$STD uv venv /opt/jupyter/.venv
 $STD /opt/jupyter/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/jupyter/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/jupyter/.venv/bin/python -m pip install jupyter

@@ -22,7 +22,7 @@ fetch_and_deploy_gh_release "kapowarr" "Casvt/Kapowarr" "tarball"

 msg_info "Setup Kapowarr"
 cd /opt/kapowarr
-$STD uv venv --clear .venv
+$STD uv venv .venv
 $STD source .venv/bin/activate
 $STD uv pip install --upgrade pip
 $STD uv pip install --no-cache-dir -r requirements.txt

@@ -20,19 +20,10 @@ msg_ok "Installed Docker"
 msg_info "Detecting latest Kasm Workspaces release"
 KASM_URL=$(curl -fsSL "https://www.kasm.com/downloads" | tr '\n' ' ' | grep -oE 'https://kasm-static-content[^"]*kasm_release_[0-9]+\.[0-9]+\.[0-9]+\.[a-z0-9]+\.tar\.gz' | head -n 1)
 if [[ -z "$KASM_URL" ]]; then
-  SERVICE_IMAGE_URL=$(curl -fsSL "https://www.kasm.com/downloads" | tr '\n' ' ' | grep -oE 'https://kasm-static-content[^"]*kasm_release_service_images_amd64_[0-9]+\.[0-9]+\.[0-9]+\.tar\.gz' | head -n 1)
-  if [[ -n "$SERVICE_IMAGE_URL" ]]; then
-    KASM_VERSION=$(echo "$SERVICE_IMAGE_URL" | sed -E 's/.*kasm_release_service_images_amd64_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
-    KASM_URL="https://kasm-static-content.s3.amazonaws.com/kasm_release_${KASM_VERSION}.tar.gz"
-  fi
-else
-  KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
-fi
-
-if [[ -z "$KASM_URL" ]] || [[ -z "$KASM_VERSION" ]]; then
   msg_error "Unable to detect latest Kasm release URL."
   exit 1
 fi
+KASM_VERSION=$(echo "$KASM_URL" | sed -E 's/.*kasm_release_([0-9]+\.[0-9]+\.[0-9]+).*/\1/')
 msg_ok "Detected Kasm Workspaces version $KASM_VERSION"

 msg_warn "WARNING: This script will run an external installer from a third-party source (https://www.kasmweb.com/)."

@@ -50,7 +50,7 @@ $STD useradd librenms -d /opt/librenms -M -r -s "$(which bash)"
 mkdir -p /opt/librenms/{rrd,logs,bootstrap/cache,storage,html}
 cd /opt/librenms
 APP_KEY=$(openssl rand -base64 40 | tr -dc 'a-zA-Z0-9')
-$STD uv venv --clear .venv
+$STD uv venv .venv
 $STD source .venv/bin/activate
 $STD uv pip install -r requirements.txt
 cat <<EOF >/opt/librenms/.env

@@ -37,13 +37,18 @@ PYTHON_VERSION="3.12" setup_uv
 fetch_and_deploy_gh_release "libretranslate" "LibreTranslate/LibreTranslate" "tarball"

 msg_info "Setup LibreTranslate (Patience)"
+TORCH_VERSION=$(grep -Eo '"torch ==[0-9]+\.[0-9]+\.[0-9]+' /opt/libretranslate/pyproject.toml |
+  tail -n1 | sed 's/.*==//')
+if [[ -z "$TORCH_VERSION" ]]; then
+  TORCH_VERSION="2.5.0"
+fi
 cd /opt/libretranslate
-$STD uv venv --clear .venv --python 3.12
+$STD uv venv .venv --python 3.12
 $STD source .venv/bin/activate
-$STD uv pip install --upgrade pip
-$STD uv pip install "setuptools<81"
+$STD uv pip install --upgrade pip setuptools
 $STD uv pip install Babel==2.12.1
 $STD .venv/bin/python scripts/compile_locales.py
+$STD uv pip install "torch==${TORCH_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu
 $STD uv pip install "numpy<2"
 $STD uv pip install .
 $STD uv pip install libretranslate

@@ -42,7 +42,7 @@ msg_ok "Set up PostgreSQL"
 msg_info "Setting up Virtual Environment"
 mkdir -p /opt/litellm
 cd /opt/litellm
-$STD uv venv --clear /opt/litellm/.venv
+$STD uv venv /opt/litellm/.venv
 $STD /opt/litellm/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/litellm/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/litellm/.venv/bin/python -m pip install litellm[proxy] prisma

@@ -29,7 +29,7 @@ fetch_and_deploy_gh_release "mylar3" "mylar3/mylar3" "tarball"

 msg_info "Installing ${APPLICATION}"
 mkdir -p /opt/mylar3-data
-$STD uv venv --clear /opt/mylar3/.venv
+$STD uv venv /opt/mylar3/.venv
 $STD /opt/mylar3/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/mylar3/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/mylar3/.venv/bin/python -m pip install --no-cache-dir -r /opt/mylar3/requirements.txt

@@ -30,19 +30,29 @@ msg_ok "Installed Nginx UI"
 msg_info "Configuring Nginx UI"
 mkdir -p /usr/local/etc/nginx-ui
 cat <<EOF >/usr/local/etc/nginx-ui/app.ini
+[server]
+HttpHost = 0.0.0.0
+HttpPort = 9000
+RunMode = release
+JwtSecret = $(openssl rand -hex 32)
+
+[nginx]
+AccessLogPath = /var/log/nginx/access.log
+ErrorLogPath = /var/log/nginx/error.log
+ConfigDir = /etc/nginx
+PIDPath = /run/nginx.pid
+TestConfigCmd = nginx -t
+ReloadCmd = nginx -s reload
+RestartCmd = systemctl restart nginx
+
 [app]
 PageSize = 10

-[server]
-Host = 0.0.0.0
-Port = 9000
-RunMode = release

 [cert]
-HTTPChallengePort = 9180
+Email =
+CADir =
-[terminal]
+RenewalInterval = 7
-StartCmd = login
+RecursiveNameservers =
 EOF
 msg_ok "Configured Nginx UI"

@@ -68,6 +78,17 @@ EOF
 systemctl daemon-reload
 msg_ok "Created Service"

+msg_info "Creating Initial Admin User"
+systemctl start nginx-ui
+sleep 3
+systemctl stop nginx-ui
+sleep 1
+/usr/local/bin/nginx-ui reset-password --config /usr/local/etc/nginx-ui/app.ini &>/tmp/nginx-ui-reset.log || true
+ADMIN_PASS=$(grep -oP 'Password: \K\S+' /tmp/nginx-ui-reset.log || echo "admin")
+echo -e "Nginx-UI Credentials\nUsername: admin\nPassword: $ADMIN_PASS" >~/nginx-ui.creds
+rm -f /tmp/nginx-ui-reset.log
+msg_ok "Created Initial Admin User"

 msg_info "Starting Service"
 systemctl enable -q --now nginx-ui
 rm -rf /etc/nginx/sites-enabled/default

@@ -130,11 +130,10 @@ if [ ! -f /app/config/production.json ]; then
 "database": {
 "engine": "knex-native",
 "knex": {
-"client": "better-sqlite3",
+"client": "sqlite3",
 "connection": {
 "filename": "/data/database.sqlite"
-},
-"useNullAsDefault": true
+}
 }
 }
 }

47 install/prometheus-paperless-ngx-exporter-install.sh Executable file
@@ -0,0 +1,47 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021-2026 community-scripts ORG
+# Author: Andy Grunwald (andygrunwald)
+# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+# Source: https://github.com/hansmi/prometheus-paperless-exporter
+
+source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
+color
+verb_ip6
+catch_errors
+setting_up_container
+network_check
+update_os
+
+fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary"
+
+msg_info "Configuring Prometheus Paperless NGX Exporter"
+mkdir -p /etc/prometheus-paperless-ngx-exporter
+echo "SECRET_AUTH_TOKEN" >/etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file
+msg_ok "Configured Prometheus Paperless NGX Exporter"
+
+msg_info "Creating Service"
+cat <<EOF >/etc/systemd/system/prometheus-paperless-ngx-exporter.service
+[Unit]
+Description=Prometheus Paperless NGX Exporter
+Wants=network-online.target
+After=network-online.target
+
+[Service]
+User=root
+Restart=always
+Type=simple
+ExecStart=/usr/bin/prometheus-paperless-exporter \
+  --paperless_url=http://paperless.example.org \
+  --paperless_auth_token_file=/etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file
+ExecReload=/bin/kill -HUP \$MAINPID
+
+[Install]
+WantedBy=multi-user.target
+EOF
+systemctl enable -q --now prometheus-paperless-ngx-exporter
+msg_ok "Created Service"
+
+motd_ssh
+customize
+cleanup_lxc

@@ -19,7 +19,7 @@ msg_info "Installing Prometheus Proxmox VE Exporter"
 mkdir -p /opt/prometheus-pve-exporter
 cd /opt/prometheus-pve-exporter

-$STD uv venv --clear /opt/prometheus-pve-exporter/.venv
+$STD uv venv /opt/prometheus-pve-exporter/.venv
 $STD /opt/prometheus-pve-exporter/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/prometheus-pve-exporter/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/prometheus-pve-exporter/.venv/bin/python -m pip install prometheus-pve-exporter

@@ -36,7 +36,7 @@ msg_ok "Setup Unrar"
 fetch_and_deploy_gh_release "sabnzbd-org" "sabnzbd/sabnzbd" "prebuild" "latest" "/opt/sabnzbd" "SABnzbd-*-src.tar.gz"

 msg_info "Installing SABnzbd"
-$STD uv venv --clear /opt/sabnzbd/venv
+$STD uv venv /opt/sabnzbd/venv
 $STD uv pip install -r /opt/sabnzbd/requirements.txt --python=/opt/sabnzbd/venv/bin/python
 msg_ok "Installed SABnzbd"

@@ -18,7 +18,7 @@ fetch_and_deploy_gh_release "scrappar" "thecfu/scraparr" "tarball" "latest" "/op

 msg_info "Installing Scraparr"
 cd /opt/scraparr
-$STD uv venv --clear /opt/scraparr/.venv
+$STD uv venv /opt/scraparr/.venv
 $STD /opt/scraparr/.venv/bin/python -m ensurepip --upgrade
 $STD /opt/scraparr/.venv/bin/python -m pip install --upgrade pip
 $STD /opt/scraparr/.venv/bin/python -m pip install -r /opt/scraparr/src/scraparr/requirements.txt

@@ -131,7 +131,7 @@ msg_ok "Built Shelfmark frontend"

 msg_info "Configuring Shelfmark"
 cd /opt/shelfmark
-$STD uv venv --clear ./venv
+$STD uv venv ./venv
 $STD source ./venv/bin/activate
 $STD uv pip install -r ./requirements-base.txt
 [[ "$DEPLOYMENT_TYPE" == "1" ]] && $STD uv pip install -r ./requirements-shelfmark.txt

@@ -3,7 +3,7 @@
 # Copyright (c) 2021-2026 community-scripts ORG
 # Author: vhsdream
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
-# Source: https://github.com/slskd/slskd/, https://github.com/mrusse/soularr
+# Source: https://github.com/slskd/slskd/, https://soularr.net

 source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
 color
@@ -13,71 +13,71 @@ setting_up_container
 network_check
 update_os

-fetch_and_deploy_gh_release "Slskd" "slskd/slskd" "prebuild" "latest" "/opt/slskd" "slskd-*-linux-x64.zip"
+msg_info "Installing Dependencies"
+$STD apt install -y \
+  python3-pip
+msg_ok "Installed Dependencies"
+
-msg_info "Configuring Slskd"
+msg_info "Setup ${APPLICATION}"
+tmp_file=$(mktemp)
+RELEASE=$(curl -s https://api.github.com/repos/slskd/slskd/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+curl -fsSL "https://github.com/slskd/slskd/releases/download/${RELEASE}/slskd-${RELEASE}-linux-x64.zip" -o $tmp_file
+$STD unzip $tmp_file -d /opt/${APPLICATION}
+echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
 JWT_KEY=$(openssl rand -base64 44)
 SLSKD_API_KEY=$(openssl rand -base64 44)
-cp /opt/slskd/config/slskd.example.yml /opt/slskd/config/slskd.yml
+cp /opt/${APPLICATION}/config/slskd.example.yml /opt/${APPLICATION}/config/slskd.yml
 sed -i \
-  -e '/web:/,/cidr/s/^# //' \
+  -e "\|web:|,\|cidr|s|^#||" \
-  -e '/https:/,/port: 5031/s/false/true/' \
+  -e "\|https:|,\|5031|s|false|true|" \
-  -e '/port: 5030/,/socket/s/,.*$//' \
-  -e '/content_path:/,/authentication/s/false/true/' \
   -e "\|api_keys|,\|cidr|s|<some.*$|$SLSKD_API_KEY|; \
   s|role: readonly|role: readwrite|; \
   s|0.0.0.0/0,::/0|& # Replace this with your subnet|" \
+  -e "\|soulseek|,\|write_queue|s|^#||" \
   -e "\|jwt:|,\|ttl|s|key: ~|key: $JWT_KEY|" \
-  -e '/soulseek/,/write_queue/s/^# //' \
+  -e "s|^ picture|# picture|" \
-  -e 's/^.*picture/#&/' /opt/slskd/config/slskd.yml
+  /opt/${APPLICATION}/config/slskd.yml
-msg_ok "Configured Slskd"
+msg_ok "Setup ${APPLICATION}"

-read -rp "${TAB3}Do you want to install Soularr? y/N " soularr
+msg_info "Installing Soularr"
-if [[ ${soularr,,} =~ ^(y|yes)$ ]]; then
+rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
-  PYTHON_VERSION="3.11" setup_uv
+cd /tmp
-  fetch_and_deploy_gh_release "Soularr" "mrusse/soularr" "tarball" "latest" "/opt/soularr"
+curl -fsSL -o main.zip https://github.com/mrusse/soularr/archive/refs/heads/main.zip
-  cd /opt/soularr
+$STD unzip main.zip
-  $STD uv venv venv
+mv soularr-main /opt/soularr
-  $STD source venv/bin/activate
+cd /opt/soularr
-  $STD uv pip install -r requirements.txt
+$STD pip install -r requirements.txt
 sed -i \
   -e "\|[Slskd]|,\|host_url|s|yourslskdapikeygoeshere|$SLSKD_API_KEY|" \
   -e "/host_url/s/slskd/localhost/" \
   /opt/soularr/config.ini
-cat <<EOF >/opt/soularr/run.sh
+sed -i \
-#!/usr/bin/env bash
+  -e "/#This\|#Default\|INTERVAL/{N;d;}" \
+  -e "/while\|#Pass/d" \
+  -e "\|python|s|app|opt/soularr|; s|python|python3|" \
+  -e "/dt/,+2d" \
+  /opt/soularr/run.sh
+sed -i -E "/(soularr.py)/s/.{5}$//; /if/,/fi/s/.{4}//" /opt/soularr/run.sh
+chmod +x /opt/soularr/run.sh
+msg_ok "Installed Soularr"

-if ps aux | grep "[s]oularr.py" >/dev/null; then
+msg_info "Creating Services"
-  echo "Soularr is already running. Exiting..."
+cat <<EOF >/etc/systemd/system/${APPLICATION}.service
-  exit 1
-else
-  source /opt/soularr/venv/bin/activate
-  uv run python3 -u /opt/soularr/soularr.py --config-dir /opt/soularr
-fi
-EOF
-chmod +x /opt/soularr/run.sh
-deactivate
-msg_ok "Installed Soularr"
-fi

-msg_info "Creating Service"
-cat <<EOF >/etc/systemd/system/slskd.service
 [Unit]
-Description=Slskd Service
+Description=${APPLICATION} Service
 After=network.target
 Wants=network.target

 [Service]
-WorkingDirectory=/opt/slskd
+WorkingDirectory=/opt/${APPLICATION}
-ExecStart=/opt/slskd/slskd --config /opt/slskd/config/slskd.yml
+ExecStart=/opt/${APPLICATION}/slskd --config /opt/${APPLICATION}/config/slskd.yml
 Restart=always

 [Install]
 WantedBy=multi-user.target
 EOF

-if [[ -d /opt/soularr ]]; then
+cat <<EOF >/etc/systemd/system/soularr.timer
-  cat <<EOF >/etc/systemd/system/soularr.timer
 [Unit]
 Description=Soularr service timer
 RefuseManualStart=no
@@ -85,15 +85,15 @@ RefuseManualStop=no

 [Timer]
 Persistent=true
-# run every 10 minutes
+# run every 5 minutes
-OnCalendar=*-*-* *:0/10:00
+OnCalendar=*-*-* *:0/5:00
 Unit=soularr.service

 [Install]
 WantedBy=timers.target
 EOF

 cat <<EOF >/etc/systemd/system/soularr.service
 [Unit]
 Description=Soularr service
 After=network.target slskd.service
@@ -106,9 +106,10 @@ ExecStart=/bin/bash -c /opt/soularr/run.sh
 [Install]
 WantedBy=multi-user.target
 EOF
-msg_warn "Add your Lidarr API key to Soularr in '/opt/soularr/config.ini', then run 'systemctl enable --now soularr.timer'"
+systemctl enable -q --now ${APPLICATION}
-fi
+systemctl enable -q soularr.timer
-systemctl enable -q --now slskd
+rm -rf $tmp_file
+rm -rf /tmp/main.zip
 msg_ok "Created Services"

 motd_ssh

@@ -61,7 +61,7 @@ msg_ok "Installed LibreOffice Components"

 msg_info "Installing Python Dependencies"
 mkdir -p /tmp/stirling-pdf
-$STD uv venv --clear /opt/.venv
+$STD uv venv /opt/.venv
 export PATH="/opt/.venv/bin:$PATH"
 source /opt/.venv/bin/activate
 $STD uv pip install --upgrade pip

@@ -22,7 +22,7 @@ fetch_and_deploy_gh_release "streamlink-webui" "CrazyWolf13/streamlink-webui" "t

 msg_info "Setup ${APPLICATION}"
 mkdir -p "/opt/${APPLICATION}-download"
-$STD uv venv --clear /opt/"${APPLICATION}"/backend/src/.venv
+$STD uv venv /opt/"${APPLICATION}"/backend/src/.venv
 source /opt/"${APPLICATION}"/backend/src/.venv/bin/activate
 $STD uv pip install -r /opt/streamlink-webui/backend/src/requirements.txt --python=/opt/"${APPLICATION}"/backend/src/.venv
 cd /opt/"${APPLICATION}"/frontend/src

@@ -40,7 +40,7 @@ SECRET_KEY=$(openssl rand -base64 45 | sed 's/\//\\\//g')
 msg_info "Setup Tandoor"
 mkdir -p /opt/tandoor/{config,api,mediafiles,staticfiles}
 cd /opt/tandoor
-$STD uv venv --clear .venv --python=python3
+$STD uv venv .venv --python=python3
 $STD uv pip install -r requirements.txt --python .venv/bin/python
 cd /opt/tandoor/vue3
 $STD yarn install

@@ -25,7 +25,7 @@ cd /opt/Tautulli
 TAUTULLI_VERSION=$(get_latest_github_release "Tautulli/Tautulli" "false")
 echo "${TAUTULLI_VERSION}" >/opt/Tautulli/version.txt
 echo "master" >/opt/Tautulli/branch.txt
-$STD uv venv --clear
+$STD uv venv
 $STD source /opt/Tautulli/.venv/bin/activate
 $STD uv pip install -r requirements.txt
 $STD uv pip install pyopenssl

@@ -30,7 +30,7 @@ msg_ok "Built Frontend"

 msg_info "Setting up Backend"
 cd /opt/trip/backend
-$STD uv venv --clear /opt/trip/.venv
+$STD uv venv /opt/trip/.venv
 $STD uv pip install --python /opt/trip/.venv/bin/python -r trip/requirements.txt
 msg_ok "Set up Backend"

@@ -27,6 +27,68 @@ msg_ok "Installed Dependencies"

 fetch_and_deploy_gh_release "UmlautAdaptarr" "PCJones/Umlautadaptarr" "prebuild" "latest" "/opt/UmlautAdaptarr" "linux-x64.zip"

+msg_info "Setting up UmlautAdaptarr"
+cat <<EOF >/opt/UmlautAdaptarr/appsettings.json
+{
+"Logging": {
+"LogLevel": {
+"Default": "Information",
+"Microsoft.AspNetCore": "Warning"
+},
+"Console": {
+"TimestampFormat": "yyyy-MM-dd HH:mm:ss::"
+}
+},
+"AllowedHosts": "*",
+"Kestrel": {
+"Endpoints": {
+"Http": {
+"Url": "http://[::]:5005"
+}
+}
+},
+"Settings": {
+"UserAgent": "UmlautAdaptarr/1.0",
+"UmlautAdaptarrApiHost": "https://umlautadaptarr.pcjones.de/api/v1",
+"IndexerRequestsCacheDurationInMinutes": 12
+},
+"Sonarr": [
+{
+"Enabled": false,
+"Name": "Sonarr",
+"Host": "http://192.168.1.100:8989",
+"ApiKey": "dein_sonarr_api_key"
+}
+],
+"Radarr": [
+{
+"Enabled": false,
+"Name": "Radarr",
+"Host": "http://192.168.1.101:7878",
+"ApiKey": "dein_radarr_api_key"
+}
+],
+"Lidarr": [
+{
+"Enabled": false,
+"Host": "http://192.168.1.102:8686",
+"ApiKey": "dein_lidarr_api_key"
+},
+],
+"Readarr": [
+{
+"Enabled": false,
+"Host": "http://192.168.1.103:8787",
+"ApiKey": "dein_readarr_api_key"
+},
+],
+"IpLeakTest": {
+"Enabled": false
+}
+}
+EOF
+msg_ok "Setup UmlautAdaptarr"

 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/umlautadaptarr.service
 [Unit]

@@ -49,7 +49,7 @@ fetch_and_deploy_gh_release "warracker" "sassanix/Warracker" "tarball" "latest"

 msg_info "Installing Warracker"
 cd /opt/warracker/backend
-$STD uv venv --clear .venv
+$STD uv venv .venv
 $STD source .venv/bin/activate
 $STD uv pip install -r requirements.txt
 mv /opt/warracker/env.example /opt/.env

@@ -15,167 +15,92 @@ update_os
 msg_info "Installing Dependencies"
 $STD apt install -y \
-build-essential \
-nginx \
-redis-server \
-libpq-dev
+git \
+apache2 \
+libapache2-mod-wsgi-py3
 msg_ok "Installed Dependencies"

-NODE_VERSION="22" NODE_MODULE="sass" setup_nodejs
-setup_uv
-PG_VERSION="16" setup_postgresql
-PG_DB_NAME="wger" PG_DB_USER="wger" setup_postgresql_db
-fetch_and_deploy_gh_release "wger" "wger-project/wger" "tarball"
+msg_info "Installing Python"
+$STD apt install -y python3-pip
+rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
+msg_ok "Installed Python"
+NODE_VERSION="22" NODE_MODULE="yarn,sass" setup_nodejs

 msg_info "Setting up wger"
-mkdir -p /opt/wger/{static,media}
-chmod o+w /opt/wger/media
-cd /opt/wger
-$STD corepack enable
-$STD npm install
-$STD npm run build:css:sass
-$STD uv venv
-$STD uv pip install . --group docker
-SECRET_KEY=$(openssl rand -base64 40)
-cat <<EOF >/opt/wger/.env
-DJANGO_SETTINGS_MODULE=settings.main
-PYTHONPATH=/opt/wger
+$STD adduser wger --disabled-password --gecos ""
+mkdir /home/wger/db
+touch /home/wger/db/database.sqlite
+chown :www-data -R /home/wger/db
+chmod g+w /home/wger/db /home/wger/db/database.sqlite
+mkdir /home/wger/{static,media}
+chmod o+w /home/wger/media
+temp_dir=$(mktemp -d)
+cd "$temp_dir"
+RELEASE=$(curl -fsSL https://api.github.com/repos/wger-project/wger/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}')
+curl -fsSL "https://github.com/wger-project/wger/archive/refs/tags/$RELEASE.tar.gz" -o "$RELEASE.tar.gz"
+tar xzf "$RELEASE".tar.gz
+mv wger-"$RELEASE" /home/wger/src
+cd /home/wger/src
+$STD pip install -r requirements_prod.txt --ignore-installed
+$STD pip install -e .
+$STD wger create-settings --database-path /home/wger/db/database.sqlite
+sed -i "s#home/wger/src/media#home/wger/media#g" /home/wger/src/settings.py
+sed -i "/MEDIA_ROOT = '\/home\/wger\/media'/a STATIC_ROOT = '/home/wger/static'" /home/wger/src/settings.py
+$STD wger bootstrap
+$STD python3 manage.py collectstatic
+rm -rf "$temp_dir"
+echo "${RELEASE}" >/opt/wger_version.txt
+msg_ok "Finished setting up wger"

-DJANGO_DB_ENGINE=django.db.backends.postgresql
-DJANGO_DB_DATABASE=${PG_DB_NAME}
-DJANGO_DB_USER=${PG_DB_USER}
-DJANGO_DB_PASSWORD=${PG_DB_PASS}
-DJANGO_DB_HOST=localhost
-DJANGO_DB_PORT=5432
-DATABASE_URL=postgresql://${PG_DB_USER}:${PG_DB_PASS}@localhost:5432/${PG_DB_NAME}
+msg_info "Creating Service"
+cat <<EOF >/etc/apache2/sites-available/wger.conf
+<Directory /home/wger/src>
+<Files wsgi.py>
+Require all granted
+</Files>
+</Directory>

-DJANGO_MEDIA_ROOT=/opt/wger/media
-DJANGO_STATIC_ROOT=/opt/wger/static
-DJANGO_STATIC_URL=/static/
+<VirtualHost *:80>
+WSGIApplicationGroup %{GLOBAL}
+WSGIDaemonProcess wger python-path=/home/wger/src python-home=/home/wger
+WSGIProcessGroup wger
+WSGIScriptAlias / /home/wger/src/wger/wsgi.py
+WSGIPassAuthorization On

-ALLOWED_HOSTS=${LOCAL_IP},localhost,127.0.0.1
-CSRF_TRUSTED_ORIGINS=http://${LOCAL_IP}:3000
+Alias /static/ /home/wger/static/
+<Directory /home/wger/static>
+Require all granted
+</Directory>

-USE_X_FORWARDED_HOST=True
-SECURE_PROXY_SSL_HEADER=HTTP_X_FORWARDED_PROTO,http
+Alias /media/ /home/wger/media/
+<Directory /home/wger/media>
+Require all granted
+</Directory>

-DJANGO_CACHE_BACKEND=django_redis.cache.RedisCache
-DJANGO_CACHE_LOCATION=redis://127.0.0.1:6379/1
-DJANGO_CACHE_TIMEOUT=300
-DJANGO_CACHE_CLIENT_CLASS=django_redis.client.DefaultClient
-AXES_CACHE_ALIAS=default
+ErrorLog /var/log/apache2/wger-error.log
+CustomLog /var/log/apache2/wger-access.log combined
+</VirtualHost>

-USE_CELERY=True
-CELERY_BROKER=redis://127.0.0.1:6379/2
-CELERY_BACKEND=redis://127.0.0.1:6379/2

-SITE_URL=http://${LOCAL_IP}:3000
-SECRET_KEY=${SECRET_KEY}
 EOF
-set -a && source /opt/wger/.env && set +a
-$STD uv run wger bootstrap
-$STD uv run python manage.py collectstatic --no-input
-cat <<EOF | uv run python manage.py shell
-from django.contrib.auth import get_user_model
-User = get_user_model()
+$STD a2dissite 000-default.conf
+$STD a2ensite wger
+systemctl restart apache2

-user, created = User.objects.get_or_create(
-username="admin",
-defaults={"email": "admin@localhost"},
-)

-if created:
-user.set_password("${PG_DB_PASS}")
-user.is_superuser = True
-user.is_staff = True
-user.save()
-EOF
-msg_ok "Set up wger"
-msg_info "Creating Config and Services"
 cat <<EOF >/etc/systemd/system/wger.service
 [Unit]
-Description=wger Gunicorn
+Description=wger Service
 After=network.target

 [Service]
+Type=simple
 User=root
-WorkingDirectory=/opt/wger
-EnvironmentFile=/opt/wger/.env
-ExecStart=/opt/wger/.venv/bin/gunicorn \
---bind 127.0.0.1:8000 \
---workers 3 \
---threads 2 \
---timeout 120 \
-wger.wsgi:application
+ExecStart=/usr/local/bin/wger start -a 0.0.0.0 -p 3000
 Restart=always

 [Install]
 WantedBy=multi-user.target
 EOF
-cat <<EOF >/etc/systemd/system/celery.service
-[Unit]
-Description=wger Celery Worker
-After=network.target redis-server.service
-Requires=redis-server.service
+systemctl enable -q --now wger
+msg_ok "Created Service"

-[Service]
-WorkingDirectory=/opt/wger
-EnvironmentFile=/opt/wger/.env
-ExecStart=/opt/wger/.venv/bin/celery -A wger worker -l info
-Restart=always

-[Install]
-WantedBy=multi-user.target
-EOF

-mkdir -p /var/lib/wger/celery
-chmod 700 /var/lib/wger/celery
-cat <<EOF >/etc/systemd/system/celery-beat.service
-[Unit]
-Description=wger Celery Beat
-After=network.target redis-server.service
-Requires=redis-server.service

-[Service]
-WorkingDirectory=/opt/wger
-EnvironmentFile=/opt/wger/.env
-ExecStart=/opt/wger/.venv/bin/celery -A wger beat -l info \
---schedule /var/lib/wger/celery/celerybeat-schedule
-Restart=always

-[Install]
-WantedBy=multi-user.target
-EOF
-cat <<'EOF' >/etc/nginx/sites-available/wger
-server {
-listen 3000;
-server_name _;

-client_max_body_size 20M;

-location /static/ {
-alias /opt/wger/static/;
-expires 30d;
-}

-location /media/ {
-alias /opt/wger/media/;
-}

-location / {
-proxy_pass http://127.0.0.1:8000;
-proxy_set_header Host $host;
-proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-proxy_set_header X-Forwarded-Proto $scheme;
-proxy_redirect off;
-}
-}
-EOF
-$STD rm -f /etc/nginx/sites-enabled/default
-$STD ln -sf /etc/nginx/sites-available/wger /etc/nginx/sites-enabled/wger
-systemctl enable -q --now redis-server nginx wger celery celery-beat
-systemctl restart nginx
-msg_ok "Created Config and Services"

 motd_ssh
 customize
@@ -3078,10 +3078,10 @@ settings_menu() {

 case "$choice" in
 1) diagnostics_menu ;;
-2) ${EDITOR:-nano} /usr/local/community-scripts/default.vars ;;
+2) nano /usr/local/community-scripts/default.vars ;;
 3)
 if [ -f "$(get_app_defaults_path)" ]; then
-${EDITOR:-nano} "$(get_app_defaults_path)"
+nano "$(get_app_defaults_path)"
 else
 # Back was selected (no app.vars available)
 return
@@ -3351,21 +3351,19 @@ msg_menu() {
 return 0
 fi

-# Display menu to /dev/tty so it doesn't get captured by command substitution
-{
-echo ""
-msg_custom "📋" "${BL}" "${title}"
-echo ""
-for i in "${!tags[@]}"; do
-local marker=" "
-[[ $i -eq 0 ]] && marker="* "
-printf "${TAB3}${marker}%s) %s\n" "${tags[$i]}" "${descs[$i]}"
-done
-echo ""
-} >/dev/tty
+# Display menu
+echo ""
+msg_custom "📋" "${BL}" "${title}"
+echo ""
+for i in "${!tags[@]}"; do
+local marker=" "
+[[ $i -eq 0 ]] && marker="* "
+printf "${TAB3}${marker}%s) %s\n" "${tags[$i]}" "${descs[$i]}"
+done
+echo ""

 local selection=""
-read -r -t 10 -p "${TAB3}Select [default=${default_tag}, timeout 10s]: " selection </dev/tty >/dev/tty || true
+read -r -t 10 -p "${TAB3}Select [default=${default_tag}, timeout 10s]: " selection || true

 # Validate selection
 if [[ -n "$selection" ]]; then
@@ -3595,7 +3595,6 @@ _setup_intel_arc() {
 $STD apt -y install \
 intel-media-va-driver-non-free \
 intel-opencl-icd \
-libmfx-gen1.2 \
 vainfo \
 intel-gpu-tools 2>/dev/null || msg_warn "Some Intel Arc packages failed"

@@ -3622,7 +3621,6 @@ _setup_intel_arc() {
 intel-media-va-driver-non-free \
 ocl-icd-libopencl1 \
 libvpl2 \
-libmfx-gen1.2 \
 vainfo \
 intel-gpu-tools 2>/dev/null || msg_warn "Some Intel Arc packages failed"
 fi
@@ -79,24 +79,11 @@ EOF
 header_info
 msg "Installing NetBird..."
 pct exec "$CTID" -- bash -c '
-if ! command -v curl &>/dev/null; then
-apt-get update -qq
-apt-get install -y curl >/dev/null
-fi
 apt install -y ca-certificates gpg &>/dev/null
 curl -fsSL "https://pkgs.netbird.io/debian/public.key" | gpg --dearmor >/usr/share/keyrings/netbird-archive-keyring.gpg
 echo "deb [signed-by=/usr/share/keyrings/netbird-archive-keyring.gpg] https://pkgs.netbird.io/debian stable main" >/etc/apt/sources.list.d/netbird.list
 apt-get update &>/dev/null
 apt-get install -y netbird-ui &>/dev/null
-if systemctl list-unit-files docker.service &>/dev/null; then
-mkdir -p /etc/systemd/system/netbird.service.d
-cat <<OVERRIDE >/etc/systemd/system/netbird.service.d/after-docker.conf
-[Unit]
-After=docker.service
-Wants=docker.service
-OVERRIDE
-systemctl daemon-reload
-fi
 '
 msg "\e[1;32m ✔ Installed NetBird.\e[0m"
 sleep 2
@@ -89,12 +89,6 @@ if ! dig +short pkgs.tailscale.com | grep -qvE "^127\.|^0\.0\.0\.0$"; then
 echo "nameserver 1.1.1.1" >"$ORIG_RESOLV"
 fi

-if ! command -v curl &>/dev/null; then
-echo "[INFO] curl not found, installing..."
-apt-get update -qq
-apt-get install -y curl >/dev/null
-fi

 curl -fsSL https://pkgs.tailscale.com/stable/${ID}/${VER}.noarmor.gpg \
 | tee /usr/share/keyrings/tailscale-archive-keyring.gpg >/dev/null
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/bakito/adguardhome-sync

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/9001/copyparty

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -110,7 +110,6 @@ if [[ -f "$INSTALL_PATH" ]]; then
 read -r update_prompt
 if [[ "${update_prompt,,}" =~ ^(y|yes)$ ]]; then
 msg_info "Updating ${APP}"
-if ! command -v curl &>/dev/null; then $PKG_MANAGER curl &>/dev/null; fi
 curl -fsSL https://github.com/gtsteffaniak/filebrowser/releases/latest/download/linux-amd64-filebrowser -o "$TMP_BIN"
 chmod +x "$TMP_BIN"
 mv -f "$TMP_BIN" /usr/local/bin/filebrowser
@@ -88,7 +88,6 @@ if [ -f "$INSTALL_PATH" ]; then
 read -r -p "Would you like to update ${APP}? (y/N): " update_prompt
 if [[ "${update_prompt,,}" =~ ^(y|yes)$ ]]; then
 msg_info "Updating ${APP}"
-if ! command -v curl &>/dev/null; then $PKG_MANAGER curl &>/dev/null; fi
 curl -fsSL "https://github.com/filebrowser/filebrowser/releases/latest/download/linux-amd64-filebrowser.tar.gz" | tar -xzv -C /usr/local/bin &>/dev/null
 chmod +x "$INSTALL_PATH"
 msg_ok "Updated ${APP}"
@@ -44,7 +44,7 @@ IP=$(get_lxc_ip)
 install_glances_debian() {
 msg_info "Installing dependencies"
 apt-get update >/dev/null 2>&1
-apt-get install -y gcc lm-sensors wireless-tools curl >/dev/null 2>&1
+apt-get install -y gcc lm-sensors wireless-tools >/dev/null 2>&1
 msg_ok "Installed dependencies"

 msg_info "Setting up Python + uv"
@@ -56,7 +56,7 @@ install_glances_debian() {
 cd /opt
 mkdir -p glances
 cd glances
-uv venv --clear
+uv venv
 source .venv/bin/activate >/dev/null 2>&1
 uv pip install --upgrade pip wheel setuptools >/dev/null 2>&1
 uv pip install "glances[web]" >/dev/null 2>&1
@@ -114,7 +114,7 @@ install_glances_alpine() {
 apk update >/dev/null 2>&1
 $STD apk add --no-cache \
 gcc musl-dev linux-headers python3-dev \
-python3 py3-pip py3-virtualenv lm-sensors wireless-tools curl >/dev/null 2>&1
+python3 py3-pip py3-virtualenv lm-sensors wireless-tools >/dev/null 2>&1
 msg_ok "Installed dependencies"

 msg_info "Setting up Python + uv"
@@ -126,7 +126,7 @@ install_glances_alpine() {
 cd /opt
 mkdir -p glances
 cd glances
-uv venv --clear
+uv venv
 source .venv/bin/activate
 uv pip install --upgrade pip wheel setuptools >/dev/null 2>&1
 uv pip install "glances[web]" >/dev/null 2>&1
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/alangrainger/immich-public-proxy

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/CyferShepard/Jellystat

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/xperimental/nextcloud-exporter

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -51,10 +51,6 @@ function msg_ok() {
 }

 msg_info "Installing ${APP}"
-if ! command -v curl &>/dev/null; then
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 curl -fsSL "https://github.com/OliveTin/OliveTin/releases/latest/download/OliveTin_linux_amd64.deb" -o $(basename "https://github.com/OliveTin/OliveTin/releases/latest/download/OliveTin_linux_amd64.deb")
 dpkg -i OliveTin_linux_amd64.deb &>/dev/null
 systemctl enable --now OliveTin &>/dev/null
@@ -57,10 +57,6 @@ function msg_ok() { echo -e "${CM} ${GN}${1}${CL}"; }
 function msg_error() { echo -e "${CROSS} ${RD}${1}${CL}"; }

 function check_internet() {
-if ! command -v curl &>/dev/null; then
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 msg_info "Checking Internet connectivity to GitHub"
 HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" https://github.com)
 if [[ "$HTTP_CODE" -ge 200 && "$HTTP_CODE" -lt 400 ]]; then
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/eko/pihole-exporter/

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -1,188 +0,0 @@
-#!/usr/bin/env bash

-# Copyright (c) 2021-2026 community-scripts ORG
-# Author: Andy Grunwald (andygrunwald)
-# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
-# Source: https://github.com/hansmi/prometheus-paperless-exporter

-source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
-source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
-source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)

-# Enable error handling
-set -Eeuo pipefail
-trap 'error_handler' ERR
-load_functions

-# ==============================================================================
-# CONFIGURATION
-# ==============================================================================
-VERBOSE=${var_verbose:-no}
-APP="prometheus-paperless-ngx-exporter"
-APP_TYPE="tools"
-BINARY_PATH="/usr/bin/prometheus-paperless-exporter"
-CONFIG_PATH="/etc/prometheus-paperless-ngx-exporter/config.env"
-SERVICE_PATH="/etc/systemd/system/prometheus-paperless-ngx-exporter.service"
-AUTH_TOKEN_FILE="/etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file"

-# ==============================================================================
-# OS DETECTION
-# ==============================================================================
-if ! grep -qE 'ID=debian|ID=ubuntu' /etc/os-release 2>/dev/null; then
-echo -e "${CROSS} Unsupported OS detected. This script only supports Debian and Ubuntu."
-exit 1
-fi

-# ==============================================================================
-# UNINSTALL
-# ==============================================================================
-function uninstall() {
-msg_info "Uninstalling Prometheus-Paperless-NGX-Exporter"
-systemctl disable -q --now prometheus-paperless-ngx-exporter

-if dpkg -l | grep -q prometheus-paperless-exporter; then
-$STD apt-get remove -y prometheus-paperless-exporter || $STD dpkg -r prometheus-paperless-exporter
-fi

-rm -f "$SERVICE_PATH"
-rm -rf /etc/prometheus-paperless-ngx-exporter
-rm -f "/usr/local/bin/update_prometheus-paperless-ngx-exporter"
-rm -f "$HOME/.prometheus-paperless-ngx-exporter"
-msg_ok "Prometheus-Paperless-NGX-Exporter has been uninstalled"
-}

-# ==============================================================================
-# UPDATE
-# ==============================================================================
-function update() {
-if check_for_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter"; then
-msg_info "Stopping service"
-systemctl stop prometheus-paperless-ngx-exporter
-msg_ok "Stopped service"

-fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary" "latest"

-msg_info "Starting service"
-systemctl start prometheus-paperless-ngx-exporter
-msg_ok "Started service"
-msg_ok "Updated successfully!"
-exit
-fi
-}

-# ==============================================================================
-# INSTALL
-# ==============================================================================
-function install() {
-read -erp "Enter URL of Paperless-NGX, example: (http://127.0.0.1:8000): " PAPERLESS_URL
-read -rsp "Enter Paperless-NGX authentication token: " PAPERLESS_AUTH_TOKEN
-printf "\n"

-fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary" "latest"

-msg_info "Creating configuration"
-mkdir -p /etc/prometheus-paperless-ngx-exporter
-cat <<EOF >"$CONFIG_PATH"
-# https://github.com/hansmi/prometheus-paperless-exporter
-PAPERLESS_URL="${PAPERLESS_URL}"
-EOF
-echo "${PAPERLESS_AUTH_TOKEN}" >"$AUTH_TOKEN_FILE"
-chmod 600 "$AUTH_TOKEN_FILE"
-msg_ok "Created configuration"

-msg_info "Creating service"
-cat <<EOF >"$SERVICE_PATH"
-[Unit]
-Description=Prometheus Paperless NGX Exporter
-Wants=network-online.target
-After=network-online.target

-[Service]
-User=root
-EnvironmentFile=$CONFIG_PATH
-ExecStart=$BINARY_PATH \\
---paperless_url=\${PAPERLESS_URL} \\
---paperless_auth_token_file=$AUTH_TOKEN_FILE
-Restart=always

-[Install]
-WantedBy=multi-user.target
-EOF
-systemctl daemon-reload
-systemctl enable -q --now prometheus-paperless-ngx-exporter
-msg_ok "Created and started service"

-# Create update script
-msg_info "Creating update script"
-ensure_usr_local_bin_persist
-cat <<'UPDATEEOF' >/usr/local/bin/update_prometheus-paperless-ngx-exporter
-#!/usr/bin/env bash
-# prometheus-paperless-ngx-exporter Update Script
-type=update bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/tools/addon/prometheus-paperless-ngx-exporter.sh)"
-UPDATEEOF
-chmod +x /usr/local/bin/update_prometheus-paperless-ngx-exporter
-msg_ok "Created update script (/usr/local/bin/update_prometheus-paperless-ngx-exporter)"

-echo ""
-msg_ok "Prometheus-Paperless-NGX-Exporter installed successfully"
-msg_ok "Metrics: ${BL}http://${LOCAL_IP}:8081/metrics${CL}"
-msg_ok "Config: ${BL}${CONFIG_PATH}${CL}"
-}

-# ==============================================================================
-# MAIN
-# ==============================================================================
-header_info
-ensure_usr_local_bin_persist
-get_lxc_ip

-# Handle type=update (called from update script)
-if [[ "${type:-}" == "update" ]]; then
-if [[ -f "$BINARY_PATH" ]]; then
-update
-else
-msg_error "Prometheus-Paperless-NGX-Exporter is not installed. Nothing to update."
-exit 1
-fi
-exit 0
-fi

-# Check if already installed
-if [[ -f "$BINARY_PATH" ]]; then
-msg_warn "Prometheus-Paperless-NGX-Exporter is already installed."
-echo ""

-echo -n "${TAB}Uninstall Prometheus-Paperless-NGX-Exporter? (y/N): "
-read -r uninstall_prompt
-if [[ "${uninstall_prompt,,}" =~ ^(y|yes)$ ]]; then
-uninstall
-exit 0
-fi

-echo -n "${TAB}Update Prometheus-Paperless-NGX-Exporter? (y/N): "
-read -r update_prompt
-if [[ "${update_prompt,,}" =~ ^(y|yes)$ ]]; then
-update
-exit 0
-fi

-msg_warn "No action selected. Exiting."
-exit 0
-fi

-# Fresh installation
-msg_warn "Prometheus-Paperless-NGX-Exporter is not installed."
-echo ""
-echo -e "${TAB}${INFO} This will install:"
-echo -e "${TAB} - Prometheus Paperless NGX Exporter (binary)"
-echo -e "${TAB} - Systemd service"
-echo ""

-echo -n "${TAB}Install Prometheus-Paperless-NGX-Exporter? (y/N): "
-read -r install_prompt
-if [[ "${install_prompt,,}" =~ ^(y|yes)$ ]]; then
-install
-else
-msg_warn "Installation cancelled. Exiting."
-exit 0
-fi
@@ -5,11 +5,6 @@
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/martabal/qbittorrent-exporter

-if ! command -v curl &>/dev/null; then
-printf "\r\e[2K%b" '\033[93m Setup Source \033[m' >&2
-apt-get update >/dev/null 2>&1
-apt-get install -y curl >/dev/null 2>&1
-fi
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func)
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
@@ -42,7 +42,7 @@ whiptail --backtitle "Proxmox VE Helper Scripts" --title "Webmin Installer" --ye

 msg_info "Installing Prerequisites"
 apt update &>/dev/null
-apt-get -y install libnet-ssleay-perl libauthen-pam-perl libio-pty-perl unzip shared-mime-info curl &>/dev/null
+apt-get -y install libnet-ssleay-perl libauthen-pam-perl libio-pty-perl unzip shared-mime-info &>/dev/null
 msg_ok "Installed Prerequisites"

 LATEST=$(curl -fsSL https://api.github.com/repos/webmin/webmin/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
@@ -1,6 +0,0 @@
(removed: 6-line ASCII-art header banner)
@@ -131,7 +131,7 @@ function detect_service() {

 function backup_container() {
 msg_info "Creating backup for container $1"
-vzdump $1 --compress zstd --storage $STORAGE_CHOICE -notes-template "{{guestname}} - community-scripts backup updater" >/dev/null 2>&1
+vzdump $1 --compress zstd --storage $STORAGE_CHOICE -notes-template "community-scripts backup updater" >/dev/null 2>&1
 status=$?

 if [ $status -eq 0 ]; then
@@ -151,11 +151,11 @@ function get_backup_storages() {
 split($0, a, ":")
 type = a[1]
 name = a[2]
-gsub(/^[ \t]+|[ \t]+$/, "", name)
+sub(/^ +/, "", name)
 has_content = 0
 has_backup = 0
 }
-/^[ \t]*content/ {
+/^ +content/ {
 has_content = 1
 if ($0 ~ /backup/) has_backup = 1
 }
@@ -201,17 +201,6 @@ function exit-script() {
 exit
 }

-function select_cloud_init() {
-if (whiptail --backtitle "Proxmox VE Helper Scripts" --title "CLOUD-INIT" \
---yesno "Enable Cloud-Init for VM configuration?\n\nCloud-Init allows automatic configuration of:\n- User accounts and passwords\n- SSH keys\n- Network settings (DHCP/Static)\n- DNS configuration\n\nYou can also configure these settings later in Proxmox UI.\n\nNote: Without Cloud-Init, the nocloud image will be used with console auto-login." --defaultno 18 68); then
-CLOUD_INIT="yes"
-echo -e "${CLOUD}${BOLD}${DGN}Cloud-Init: ${BGN}yes${CL}"
-else
-CLOUD_INIT="no"
-echo -e "${CLOUD}${BOLD}${DGN}Cloud-Init: ${BGN}no${CL}"
-fi
-}

 function default_settings() {
 VMID=$(get_valid_nextid)
 FORMAT=",efitype=4m"
@@ -227,6 +216,7 @@ function default_settings() {
 VLAN=""
 MTU=""
 START_VM="yes"
+CLOUD_INIT="no"
 METHOD="default"
 echo -e "${CONTAINERID}${BOLD}${DGN}Virtual Machine ID: ${BGN}${VMID}${CL}"
 echo -e "${CONTAINERTYPE}${BOLD}${DGN}Machine Type: ${BGN}i440fx${CL}"
@@ -240,7 +230,7 @@ function default_settings() {
 echo -e "${MACADDRESS}${BOLD}${DGN}MAC Address: ${BGN}${MAC}${CL}"
 echo -e "${VLANTAG}${BOLD}${DGN}VLAN: ${BGN}Default${CL}"
 echo -e "${DEFAULT}${BOLD}${DGN}Interface MTU Size: ${BGN}Default${CL}"
-select_cloud_init
+echo -e "${CLOUD}${BOLD}${DGN}Configure Cloud-init: ${BGN}no${CL}"
 echo -e "${GATEWAY}${BOLD}${DGN}Start VM when completed: ${BGN}yes${CL}"
 echo -e "${CREATING}${BOLD}${DGN}Creating a Debian 13 VM using the above default settings${CL}"
 }
@@ -410,7 +400,13 @@ function advanced_settings() {
 exit-script
 fi

-select_cloud_init
+if (whiptail --backtitle "Proxmox VE Helper Scripts" --title "CLOUD-INIT" --yesno "Configure the VM with Cloud-init?" --defaultno 10 58); then
+echo -e "${CLOUD}${BOLD}${DGN}Configure Cloud-init: ${BGN}yes${CL}"
+CLOUD_INIT="yes"
+else
+echo -e "${CLOUD}${BOLD}${DGN}Configure Cloud-init: ${BGN}no${CL}"
+CLOUD_INIT="no"
+fi

 if (whiptail --backtitle "Proxmox VE Helper Scripts" --title "START VIRTUAL MACHINE" --yesno "Start VM when completed?" 10 58); then
 echo -e "${GATEWAY}${BOLD}${DGN}Start VM when completed: ${BGN}yes${CL}"
@@ -477,17 +473,6 @@ else
 fi
 msg_ok "Using ${CL}${BL}$STORAGE${CL} ${GN}for Storage Location."
 msg_ok "Virtual Machine ID is ${CL}${BL}$VMID${CL}."

-# ==============================================================================
-# PREREQUISITES
-# ==============================================================================
-if ! command -v virt-customize &>/dev/null; then
-msg_info "Installing libguestfs-tools"
-apt-get update >/dev/null 2>&1
-apt-get install -y libguestfs-tools >/dev/null 2>&1
-msg_ok "Installed libguestfs-tools"
-fi

 msg_info "Retrieving the URL for the Debian 13 Qcow2 Disk Image"
 if [ "$CLOUD_INIT" == "yes" ]; then
 URL=https://cloud.debian.org/images/cloud/trixie/latest/debian-13-genericcloud-amd64.qcow2
@@ -501,50 +486,6 @@ echo -en "\e[1A\e[0K"
 FILE=$(basename $URL)
 msg_ok "Downloaded ${CL}${BL}${FILE}${CL}"

-# ==============================================================================
-# IMAGE CUSTOMIZATION
-# ==============================================================================
-msg_info "Customizing ${FILE} image"

-WORK_FILE=$(mktemp --suffix=.qcow2)
-cp "$FILE" "$WORK_FILE"

-# Set hostname
-virt-customize -q -a "$WORK_FILE" --hostname "${HN}" >/dev/null 2>&1

-# Prepare for unique machine-id on first boot
-virt-customize -q -a "$WORK_FILE" --run-command "truncate -s 0 /etc/machine-id" >/dev/null 2>&1
-virt-customize -q -a "$WORK_FILE" --run-command "rm -f /var/lib/dbus/machine-id" >/dev/null 2>&1

-# Disable systemd-firstboot to prevent interactive prompts blocking the console
-virt-customize -q -a "$WORK_FILE" --run-command "systemctl disable systemd-firstboot.service 2>/dev/null; rm -f /etc/systemd/system/sysinit.target.wants/systemd-firstboot.service; ln -sf /dev/null /etc/systemd/system/systemd-firstboot.service" >/dev/null 2>&1 || true

-# Pre-seed firstboot settings so it won't prompt even if triggered
-virt-customize -q -a "$WORK_FILE" --run-command "echo 'Etc/UTC' > /etc/timezone && ln -sf /usr/share/zoneinfo/Etc/UTC /etc/localtime" >/dev/null 2>&1 || true
-virt-customize -q -a "$WORK_FILE" --run-command "touch /etc/locale.conf" >/dev/null 2>&1 || true

-if [ "$CLOUD_INIT" == "yes" ]; then
-# Cloud-Init handles SSH and login
-virt-customize -q -a "$WORK_FILE" --run-command "sed -i 's/^#*PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config" >/dev/null 2>&1 || true
-virt-customize -q -a "$WORK_FILE" --run-command "sed -i 's/^#*PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config" >/dev/null 2>&1 || true
-else
-# Configure auto-login on serial console (ttyS0) and virtual console (tty1)
-virt-customize -q -a "$WORK_FILE" --run-command "mkdir -p /etc/systemd/system/serial-getty@ttyS0.service.d" >/dev/null 2>&1 || true
-virt-customize -q -a "$WORK_FILE" --run-command 'cat > /etc/systemd/system/serial-getty@ttyS0.service.d/autologin.conf << EOF
-[Service]
-ExecStart=
-ExecStart=-/sbin/agetty --autologin root --noclear %I \$TERM
-EOF' >/dev/null 2>&1 || true
-virt-customize -q -a "$WORK_FILE" --run-command "mkdir -p /etc/systemd/system/getty@tty1.service.d" >/dev/null 2>&1 || true
-virt-customize -q -a "$WORK_FILE" --run-command 'cat > /etc/systemd/system/getty@tty1.service.d/autologin.conf << EOF
-[Service]
-ExecStart=
-ExecStart=-/sbin/agetty --autologin root --noclear %I \$TERM
-EOF' >/dev/null 2>&1 || true
-fi

-msg_ok "Customized image"

 STORAGE_TYPE=$(pvesm status -storage "$STORAGE" | awk 'NR>1 {print $2}')
 case $STORAGE_TYPE in
 nfs | dir)
@@ -571,7 +512,7 @@ msg_info "Creating a Debian 13 VM"
 qm create $VMID -agent 1${MACHINE} -tablet 0 -localtime 1 -bios ovmf${CPU_TYPE} -cores $CORE_COUNT -memory $RAM_SIZE \
 -name $HN -tags community-script -net0 virtio,bridge=$BRG,macaddr=$MAC$VLAN$MTU -onboot 1 -ostype l26 -scsihw virtio-scsi-pci
 pvesm alloc $STORAGE $VMID $DISK0 4M 1>&/dev/null
-qm importdisk $VMID ${WORK_FILE} $STORAGE ${DISK_IMPORT:-} 1>&/dev/null
+qm importdisk $VMID ${FILE} $STORAGE ${DISK_IMPORT:-} 1>&/dev/null
 if [ "$CLOUD_INIT" == "yes" ]; then
 qm set $VMID \
 -efidisk0 ${DISK0_REF}${FORMAT} \
@@ -586,10 +527,6 @@ else
 -boot order=scsi0 \
 -serial0 socket >/dev/null
 fi

-# Clean up work file
-rm -f "$WORK_FILE"

 DESCRIPTION=$(
 cat <<EOF
 <div align='center'>