mirror of
https://github.com/community-scripts/ProxmoxVE.git
synced 2026-05-06 00:28:50 +02:00
Compare commits
28 Commits
2026-05-02
...
fix/rustde
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
044d4dc238 | ||
|
|
a3e147cf20 | ||
|
|
4e9352572f | ||
|
|
686657e8ec | ||
|
|
9b8302cba0 | ||
|
|
5a6392d95f | ||
|
|
160c198731 | ||
|
|
b91ec6f7bc | ||
|
|
a7ddc3502b | ||
|
|
9bf64f60b9 | ||
|
|
559cfff56a | ||
|
|
b353063720 | ||
|
|
26b41d74ee | ||
|
|
812f8ed1c7 | ||
|
|
75c5aa3d5d | ||
|
|
12e7cb1777 | ||
|
|
7f9e1ce4d8 | ||
|
|
d118f101d8 | ||
|
|
03a44a8c9c | ||
|
|
08b1398e7b | ||
|
|
dca3fb40a8 | ||
|
|
37eafa199d | ||
|
|
6729fa2a87 | ||
|
|
cd6bd154d9 | ||
|
|
92f2079a79 | ||
|
|
683231127c | ||
|
|
7733ac2806 | ||
|
|
1d83725249 |
51
CHANGELOG.md
51
CHANGELOG.md
@@ -458,8 +458,59 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
||||
|
||||
</details>
|
||||
|
||||
## 2026-05-05
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- LibreChat ([#14247](https://github.com/community-scripts/ProxmoxVE/pull/14247))
|
||||
- Matomo ([#14248](https://github.com/community-scripts/ProxmoxVE/pull/14248))
|
||||
- Storyteller ([#14122](https://github.com/community-scripts/ProxmoxVE/pull/14122))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- Fix container count message in update-apps.sh [@Quotacious](https://github.com/Quotacious) ([#14265](https://github.com/community-scripts/ProxmoxVE/pull/14265))
|
||||
|
||||
## 2026-05-04
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Databasus: move .env to filesystem root so service starts correctly [@Copilot](https://github.com/Copilot) ([#14252](https://github.com/community-scripts/ProxmoxVE/pull/14252))
|
||||
- Databasus: update mongo-tools fallback to 100.16.1 and use now pnpm instead of npm ci [@MickLesk](https://github.com/MickLesk) ([#14240](https://github.com/community-scripts/ProxmoxVE/pull/14240))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- tools.func get_latest_gh_tag - add pagination to find prefixed tags beyond first 50 [@MickLesk](https://github.com/MickLesk) ([#14241](https://github.com/community-scripts/ProxmoxVE/pull/14241))
|
||||
- tools.func: add GitLab release check/fetch/deploy helpers [@MickLesk](https://github.com/MickLesk) ([#14242](https://github.com/community-scripts/ProxmoxVE/pull/14242))
|
||||
|
||||
## 2026-05-03
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Hortusfox: fix update issues [@tomfrenzel](https://github.com/tomfrenzel) ([#14214](https://github.com/community-scripts/ProxmoxVE/pull/14214))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- Refactor: PeaNUT for v6 [@MickLesk](https://github.com/MickLesk) ([#14224](https://github.com/community-scripts/ProxmoxVE/pull/14224))
|
||||
- pangolin: pin version, drop manual SQL, use upstream migrator [@MickLesk](https://github.com/MickLesk) ([#14223](https://github.com/community-scripts/ProxmoxVE/pull/14223))
|
||||
|
||||
### 💾 Core
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- core: fix validate_bridge function [@MichaelOultram](https://github.com/MichaelOultram) ([#14206](https://github.com/community-scripts/ProxmoxVE/pull/14206))
|
||||
|
||||
### 🧰 Tools
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- pve/pbs scripts: guard sed against missing /etc/apt/sources.list [@MickLesk](https://github.com/MickLesk) ([#14222](https://github.com/community-scripts/ProxmoxVE/pull/14222))
|
||||
|
||||
## 2026-05-02
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
@@ -35,7 +35,8 @@ function update_script() {
|
||||
msg_ok "Stopped Databasus"
|
||||
|
||||
msg_info "Backing up Configuration"
|
||||
cp /opt/databasus/.env /opt/databasus.env.bak
|
||||
cp /.env /opt/databasus.env.bak
|
||||
chmod 600 /opt/databasus.env.bak
|
||||
msg_ok "Backed up Configuration"
|
||||
|
||||
msg_info "Ensuring Database Clients"
|
||||
@@ -46,7 +47,7 @@ function update_script() {
|
||||
# Install MongoDB Database Tools via direct .deb (no APT repo for Debian 13)
|
||||
if ! command -v mongodump &>/dev/null; then
|
||||
[[ "$(get_os_info id)" == "ubuntu" ]] && MONGO_DIST="ubuntu2204" || MONGO_DIST="debian12"
|
||||
fetch_and_deploy_from_url "https://fastdl.mongodb.org/tools/db/mongodb-database-tools-${MONGO_DIST}-x86_64-100.14.1.deb"
|
||||
fetch_and_deploy_from_url "https://fastdl.mongodb.org/tools/db/mongodb-database-tools-${MONGO_DIST}-x86_64-100.16.1.deb"
|
||||
fi
|
||||
[[ -f /usr/bin/mongodump ]] && ln -sf /usr/bin/mongodump /usr/local/mongodb-database-tools/bin/mongodump
|
||||
[[ -f /usr/bin/mongorestore ]] && ln -sf /usr/bin/mongorestore /usr/local/mongodb-database-tools/bin/mongorestore
|
||||
@@ -66,9 +67,12 @@ function update_script() {
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "databasus" "databasus/databasus" "tarball" "latest" "/opt/databasus"
|
||||
|
||||
msg_info "Updating Databasus"
|
||||
export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
|
||||
cd /opt/databasus/frontend
|
||||
$STD npm ci
|
||||
$STD npm run build
|
||||
$STD corepack enable
|
||||
$STD corepack prepare pnpm@latest --activate
|
||||
$STD pnpm install --frozen-lockfile
|
||||
$STD pnpm run build
|
||||
cd /opt/databasus/backend
|
||||
$STD go mod download
|
||||
$STD /root/go/bin/swag init -g cmd/main.go -o swagger
|
||||
@@ -81,11 +85,18 @@ function update_script() {
|
||||
msg_ok "Updated Databasus"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
cp /opt/databasus.env.bak /opt/databasus/.env
|
||||
cp /opt/databasus.env.bak /.env
|
||||
rm -f /opt/databasus.env.bak
|
||||
chown postgres:postgres /opt/databasus/.env
|
||||
chmod 600 /.env
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
if ! grep -q "EnvironmentFile=/.env" /etc/systemd/system/databasus.service; then
|
||||
msg_info "Updating Service"
|
||||
sed -i 's|EnvironmentFile=.*|EnvironmentFile=/.env|' /etc/systemd/system/databasus.service
|
||||
$STD systemctl daemon-reload
|
||||
msg_ok "Updated Service"
|
||||
fi
|
||||
|
||||
msg_info "Starting Databasus"
|
||||
$STD systemctl start databasus
|
||||
msg_ok "Started Databasus"
|
||||
|
||||
6
ct/headers/librechat
Normal file
6
ct/headers/librechat
Normal file
@@ -0,0 +1,6 @@
|
||||
__ _ __ ________ __
|
||||
/ / (_) /_ ________ / ____/ /_ ____ _/ /_
|
||||
/ / / / __ \/ ___/ _ \/ / / __ \/ __ `/ __/
|
||||
/ /___/ / /_/ / / / __/ /___/ / / / /_/ / /_
|
||||
/_____/_/_.___/_/ \___/\____/_/ /_/\__,_/\__/
|
||||
|
||||
6
ct/headers/matomo
Normal file
6
ct/headers/matomo
Normal file
@@ -0,0 +1,6 @@
|
||||
__ ___ __
|
||||
/ |/ /___ _/ /_____ ____ ___ ____
|
||||
/ /|_/ / __ `/ __/ __ \/ __ `__ \/ __ \
|
||||
/ / / / /_/ / /_/ /_/ / / / / / / /_/ /
|
||||
/_/ /_/\__,_/\__/\____/_/ /_/ /_/\____/
|
||||
|
||||
6
ct/headers/storyteller
Normal file
6
ct/headers/storyteller
Normal file
@@ -0,0 +1,6 @@
|
||||
_____ __ __ ____
|
||||
/ ___// /_____ _______ __/ /____ / / /__ _____
|
||||
\__ \/ __/ __ \/ ___/ / / / __/ _ \/ / / _ \/ ___/
|
||||
___/ / /_/ /_/ / / / /_/ / /_/ __/ / / __/ /
|
||||
/____/\__/\____/_/ \__, /\__/\___/_/_/\___/_/
|
||||
/____/
|
||||
@@ -38,13 +38,15 @@ function update_script() {
|
||||
mv /opt/hortusfox/ /opt/hortusfox-backup
|
||||
msg_ok "Backed up current HortusFox installation"
|
||||
|
||||
fetch_and_deploy_gh_release "hortusfox" "danielbrendel/hortusfox-web" "tarball"
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "hortusfox" "danielbrendel/hortusfox-web" "tarball"
|
||||
|
||||
msg_info "Updating HortusFox"
|
||||
cd /opt/hortusfox
|
||||
mv /opt/hortusfox-backup/.env /opt/hortusfox/.env
|
||||
cp /opt/hortusfox-backup/.env /opt/hortusfox/.env
|
||||
cp -a /opt/hortusfox-backup/public/img/. /opt/hortusfox/public/img/
|
||||
export COMPOSER_ALLOW_SUPERUSER=1
|
||||
$STD composer install --no-dev --optimize-autoloader
|
||||
$STD php asatru migrate --no-interaction
|
||||
$STD php asatru migrate:upgrade
|
||||
$STD php asatru plants:attributes
|
||||
$STD php asatru calendar:classes
|
||||
chown -R www-data:www-data /opt/hortusfox
|
||||
|
||||
101
ct/librechat.sh
Normal file
101
ct/librechat.sh
Normal file
@@ -0,0 +1,101 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/danny-avila/LibreChat
|
||||
|
||||
APP="LibreChat"
|
||||
var_tags="${var_tags:-ai;chat}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-6144}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/librechat ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_tag "librechat" "danny-avila/LibreChat" "v"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop librechat rag-api
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Backing up Configuration"
|
||||
cp /opt/librechat/.env /opt/librechat.env.bak
|
||||
msg_ok "Backed up Configuration"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_tag "librechat" "danny-avila/LibreChat"
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
cd /opt/librechat
|
||||
$STD npm ci
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
msg_info "Building Frontend"
|
||||
$STD npm run frontend
|
||||
$STD npm prune --production
|
||||
$STD npm cache clean --force
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
cp /opt/librechat.env.bak /opt/librechat/.env
|
||||
rm -f /opt/librechat.env.bak
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start rag-api librechat
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated LibreChat Successfully!"
|
||||
fi
|
||||
|
||||
if check_for_gh_release "rag-api" "danny-avila/rag_api"; then
|
||||
msg_info "Stopping RAG API"
|
||||
systemctl stop rag-api
|
||||
msg_ok "Stopped RAG API"
|
||||
|
||||
msg_info "Backing up RAG API Configuration"
|
||||
cp /opt/rag-api/.env /opt/rag-api.env.bak
|
||||
msg_ok "Backed up RAG API Configuration"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "rag-api" "danny-avila/rag_api" "tarball"
|
||||
|
||||
msg_info "Updating RAG API Dependencies"
|
||||
cd /opt/rag-api
|
||||
$STD .venv/bin/pip install -r requirements.lite.txt
|
||||
msg_ok "Updated RAG API Dependencies"
|
||||
|
||||
msg_info "Restoring RAG API Configuration"
|
||||
cp /opt/rag-api.env.bak /opt/rag-api/.env
|
||||
rm -f /opt/rag-api.env.bak
|
||||
msg_ok "Restored RAG API Configuration"
|
||||
|
||||
msg_info "Starting RAG API"
|
||||
systemctl start rag-api
|
||||
msg_ok "Started RAG API"
|
||||
msg_ok "Updated RAG API Successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3080${CL}"
|
||||
75
ct/matomo.sh
Normal file
75
ct/matomo.sh
Normal file
@@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://matomo.org/
|
||||
|
||||
APP="Matomo"
|
||||
var_tags="${var_tags:-analytics;tracking;privacy}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-16}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/matomo ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "matomo" "matomo-org/matomo"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop caddy
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
[[ -f /opt/matomo/config/config.ini.php ]] && cp /opt/matomo/config/config.ini.php /opt/matomo_config.bak
|
||||
[[ -d /opt/matomo/misc/user ]] && cp -r /opt/matomo/misc/user /opt/matomo_user_backup
|
||||
[[ -f /root/matomo.creds ]] && cp /root/matomo.creds /opt/matomo_db_creds.bak
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "matomo" "matomo-org/matomo" "prebuild" "latest" "/opt/matomo" "matomo-*.zip"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
if [[ -f /opt/matomo_config.bak ]]; then
|
||||
mkdir -p /opt/matomo/config
|
||||
cp /opt/matomo_config.bak /opt/matomo/config/config.ini.php
|
||||
fi
|
||||
if [[ -d /opt/matomo_user_backup ]]; then
|
||||
mkdir -p /opt/matomo/misc/user
|
||||
cp -r /opt/matomo_user_backup/. /opt/matomo/misc/user
|
||||
fi
|
||||
[[ -f /opt/matomo_db_creds.bak ]] && cp /opt/matomo_db_creds.bak /root/matomo.creds
|
||||
rm -f /opt/matomo_config.bak /opt/matomo_db_creds.bak
|
||||
rm -rf /opt/matomo_user_backup
|
||||
chown -R www-data:www-data /opt/matomo
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start caddy
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
|
||||
@@ -6,6 +6,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
|
||||
# Source: https://pangolin.net/ | Github: https://github.com/fosrl/pangolin
|
||||
|
||||
APP="Pangolin"
|
||||
PANGOLIN_VERSION="${PANGOLIN_VERSION:-1.18.2}"
|
||||
var_tags="${var_tags:-proxy}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
@@ -33,7 +34,7 @@ function update_script() {
|
||||
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
|
||||
if check_for_gh_release "pangolin" "fosrl/pangolin"; then
|
||||
if check_for_gh_release "pangolin" "fosrl/pangolin" "$PANGOLIN_VERSION" "Pinned to a tested release because Pangolin's schema changes have repeatedly broken unattended updates. To try a newer version at your own risk, run: 'export PANGOLIN_VERSION=<tag>' and re-run update. If it breaks, please open an issue at https://github.com/community-scripts/ProxmoxVE/issues with the error log."; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop pangolin
|
||||
systemctl stop gerbil
|
||||
@@ -41,9 +42,13 @@ function update_script() {
|
||||
|
||||
msg_info "Creating backup"
|
||||
tar -czf /opt/pangolin_config_backup.tar.gz -C /opt/pangolin config
|
||||
if [[ -f /opt/pangolin/config/db/db.sqlite ]]; then
|
||||
cp -a /opt/pangolin/config/db/db.sqlite \
|
||||
"/opt/pangolin/config/db/db.sqlite.pre-${PANGOLIN_VERSION}-$(date +%Y%m%d-%H%M%S).bak"
|
||||
fi
|
||||
msg_ok "Created backup"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "pangolin" "fosrl/pangolin" "tarball"
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "pangolin" "fosrl/pangolin" "tarball" "$PANGOLIN_VERSION"
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "gerbil" "fosrl/gerbil" "singlefile" "latest" "/usr/bin" "gerbil_linux_amd64"
|
||||
|
||||
msg_info "Updating Pangolin"
|
||||
@@ -67,36 +72,16 @@ function update_script() {
|
||||
rm -f /opt/pangolin_config_backup.tar.gz
|
||||
msg_ok "Restored config"
|
||||
|
||||
msg_info "Running database migrations"
|
||||
cd /opt/pangolin
|
||||
|
||||
# Pre-apply potentially destructive schema changes safely so drizzle-kit
|
||||
# does not recreate tables (which would delete all rows).
|
||||
local DB="/opt/pangolin/config/db/db.sqlite"
|
||||
if [[ -f "$DB" ]]; then
|
||||
sqlite3 "$DB" "ALTER TABLE 'orgs' ADD COLUMN 'settingsLogRetentionDaysConnection' integer DEFAULT 0 NOT NULL;" 2>/dev/null || true
|
||||
sqlite3 "$DB" "ALTER TABLE 'clientSitesAssociationsCache' ADD COLUMN 'isJitMode' integer DEFAULT 0 NOT NULL;" 2>/dev/null || true
|
||||
sqlite3 "$DB" "ALTER TABLE 'userOrgs' ADD COLUMN 'pamUsername' text;" 2>/dev/null || true
|
||||
|
||||
# Create new role-mapping tables and migrate data before drizzle-kit
|
||||
# drops the roleId columns from userOrgs and userInvites.
|
||||
sqlite3 "$DB" "CREATE TABLE IF NOT EXISTS 'userOrgRoles' (
|
||||
'userId' text NOT NULL REFERENCES 'user'('id') ON DELETE CASCADE,
|
||||
'orgId' text NOT NULL REFERENCES 'orgs'('orgId') ON DELETE CASCADE,
|
||||
'roleId' integer NOT NULL REFERENCES 'roles'('roleId') ON DELETE CASCADE,
|
||||
UNIQUE('userId', 'orgId', 'roleId')
|
||||
);" 2>/dev/null || true
|
||||
sqlite3 "$DB" "INSERT OR IGNORE INTO 'userOrgRoles' (userId, orgId, roleId) SELECT userId, orgId, roleId FROM 'userOrgs' WHERE roleId IS NOT NULL;" 2>/dev/null || true
|
||||
|
||||
sqlite3 "$DB" "CREATE TABLE IF NOT EXISTS 'userInviteRoles' (
|
||||
'inviteId' text NOT NULL REFERENCES 'userInvites'('inviteId') ON DELETE CASCADE,
|
||||
'roleId' integer NOT NULL REFERENCES 'roles'('roleId') ON DELETE CASCADE,
|
||||
PRIMARY KEY('inviteId', 'roleId')
|
||||
);" 2>/dev/null || true
|
||||
sqlite3 "$DB" "INSERT OR IGNORE INTO 'userInviteRoles' (inviteId, roleId) SELECT inviteId, roleId FROM 'userInvites' WHERE roleId IS NOT NULL;" 2>/dev/null || true
|
||||
if ! grep -q '^ExecStartPre=/usr/bin/node dist/migrations.mjs' /etc/systemd/system/pangolin.service 2>/dev/null; then
|
||||
msg_info "Adding migration step to pangolin.service"
|
||||
sed -i '/^ExecStart=\/usr\/bin\/node --enable-source-maps dist\/server.mjs/i ExecStartPre=/usr/bin/node dist/migrations.mjs' /etc/systemd/system/pangolin.service
|
||||
systemctl daemon-reload
|
||||
msg_ok "Updated pangolin.service"
|
||||
fi
|
||||
|
||||
ENVIRONMENT=prod $STD npx drizzle-kit push --force --config drizzle.sqlite.config.ts
|
||||
msg_info "Running database migrations"
|
||||
cd /opt/pangolin
|
||||
ENVIRONMENT=prod $STD node dist/migrations.mjs
|
||||
msg_ok "Ran database migrations"
|
||||
|
||||
msg_info "Updating Badger plugin version"
|
||||
|
||||
27
ct/peanut.sh
27
ct/peanut.sh
@@ -45,6 +45,33 @@ function update_script() {
|
||||
msg_ok "Fixed entrypoint"
|
||||
fi
|
||||
|
||||
if [[ ! -f /etc/peanut/peanut.env ]]; then
|
||||
msg_info "Migrating service to EnvironmentFile"
|
||||
mkdir -p /etc/peanut
|
||||
cat <<EOF >/etc/peanut/peanut.env
|
||||
NODE_ENV=production
|
||||
|
||||
#WEB_HOST=0.0.0.0
|
||||
#WEB_PORT=8080
|
||||
#NUT_HOST=localhost
|
||||
#NUT_PORT=3493
|
||||
|
||||
# Disable auth entirely:
|
||||
#AUTH_DISABLED=true
|
||||
|
||||
# Bootstrap initial account on first start (ignored afterwards):
|
||||
#WEB_USERNAME=admin
|
||||
#WEB_PASSWORD=changeme
|
||||
EOF
|
||||
chmod 600 /etc/peanut/peanut.env
|
||||
sed -i '/^Environment=/d' /etc/systemd/system/peanut.service
|
||||
if ! grep -q '^EnvironmentFile=/etc/peanut/peanut.env' /etc/systemd/system/peanut.service; then
|
||||
sed -i '/^Type=simple/a EnvironmentFile=/etc/peanut/peanut.env' /etc/systemd/system/peanut.service
|
||||
fi
|
||||
systemctl daemon-reload
|
||||
msg_ok "Migrated to /etc/peanut/peanut.env"
|
||||
fi
|
||||
|
||||
msg_info "Updating PeaNUT"
|
||||
cd /opt/peanut
|
||||
$STD pnpm i
|
||||
|
||||
@@ -48,8 +48,6 @@ function update_script() {
|
||||
msg_ok "Services started"
|
||||
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
85
ct/storyteller.sh
Normal file
85
ct/storyteller.sh
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://gitlab.com/storyteller-platform/storyteller
|
||||
|
||||
APP="Storyteller"
|
||||
var_tags="${var_tags:-media;ebook;audiobook}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-10240}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/storyteller ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gl_release "storyteller" "storyteller-platform/storyteller"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop storyteller
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp /opt/storyteller/.env /opt/storyteller_env.bak
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gl_release "storyteller" "storyteller-platform/storyteller" "tarball" "latest" "/opt/storyteller"
|
||||
|
||||
msg_info "Restoring Configuration"
|
||||
mv /opt/storyteller_env.bak /opt/storyteller/.env
|
||||
msg_ok "Restored Configuration"
|
||||
|
||||
msg_info "Rebuilding Storyteller"
|
||||
cd /opt/storyteller
|
||||
export NODE_OPTIONS="--max-old-space-size=4096"
|
||||
$STD yarn install --network-timeout 600000
|
||||
$STD gcc -g -fPIC -rdynamic -shared web/sqlite/uuid.c -o web/sqlite/uuid.c.so
|
||||
export CI=1
|
||||
export NODE_ENV=production
|
||||
export NEXT_TELEMETRY_DISABLED=1
|
||||
export SQLITE_NATIVE_BINDING=/opt/storyteller/node_modules/better-sqlite3/build/Release/better_sqlite3.node
|
||||
$STD yarn workspaces foreach -Rpt --from @storyteller-platform/web --exclude @storyteller-platform/eslint run build
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/.next/static
|
||||
cp -rT /opt/storyteller/web/.next/static /opt/storyteller/web/.next/standalone/web/.next/static
|
||||
if [[ -d /opt/storyteller/web/public ]]; then
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/public
|
||||
cp -rT /opt/storyteller/web/public /opt/storyteller/web/.next/standalone/web/public
|
||||
fi
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/migrations
|
||||
cp -rT /opt/storyteller/web/migrations /opt/storyteller/web/.next/standalone/web/migrations
|
||||
mkdir -p /opt/storyteller/web/.next/standalone/web/sqlite
|
||||
cp -rT /opt/storyteller/web/sqlite /opt/storyteller/web/.next/standalone/web/sqlite
|
||||
ln -sf /opt/storyteller/.env /opt/storyteller/web/.next/standalone/web/.env
|
||||
msg_ok "Rebuilt Storyteller"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start storyteller
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8001${CL}"
|
||||
@@ -32,7 +32,7 @@ for v in 12 13 14 15 16 18; do
|
||||
done
|
||||
# Install MongoDB Database Tools via direct .deb (no APT repo for Debian 13)
|
||||
[[ "$(get_os_info id)" == "ubuntu" ]] && MONGO_DIST="ubuntu2204" || MONGO_DIST="debian12"
|
||||
MONGO_VERSION=$(get_latest_gh_tag "mongodb/mongo-tools" "100." || echo "100.14.1")
|
||||
MONGO_VERSION=$(get_latest_gh_tag "mongodb/mongo-tools" "100." || echo "100.16.1")
|
||||
fetch_and_deploy_from_url "https://fastdl.mongodb.org/tools/db/mongodb-database-tools-${MONGO_DIST}-x86_64-${MONGO_VERSION}.deb" ""
|
||||
mkdir -p /usr/local/mongodb-database-tools/bin
|
||||
[[ -f /usr/bin/mongodump ]] && ln -sf /usr/bin/mongodump /usr/local/mongodb-database-tools/bin/mongodump
|
||||
@@ -52,9 +52,12 @@ msg_ok "Installed Database Clients"
|
||||
fetch_and_deploy_gh_release "databasus" "databasus/databasus" "tarball" "latest" "/opt/databasus"
|
||||
|
||||
msg_info "Building Databasus (Patience)"
|
||||
export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
|
||||
cd /opt/databasus/frontend
|
||||
$STD npm ci
|
||||
$STD npm run build
|
||||
$STD corepack enable
|
||||
$STD corepack prepare pnpm@latest --activate
|
||||
$STD pnpm install --frozen-lockfile
|
||||
$STD pnpm run build
|
||||
cd /opt/databasus/backend
|
||||
$STD go mod tidy
|
||||
$STD go mod download
|
||||
@@ -76,7 +79,7 @@ ENCRYPTION_KEY=$(openssl rand -hex 32)
|
||||
# Install goose for migrations
|
||||
$STD go install github.com/pressly/goose/v3/cmd/goose@latest
|
||||
ln -sf /root/go/bin/goose /usr/local/bin/goose
|
||||
cat <<EOF >/opt/databasus/.env
|
||||
cat <<EOF >/.env
|
||||
# Environment
|
||||
ENV_MODE=production
|
||||
|
||||
@@ -106,8 +109,7 @@ DATA_DIR=/databasus-data/data
|
||||
BACKUP_DIR=/databasus-data/backups
|
||||
LOG_DIR=/databasus-data/logs
|
||||
EOF
|
||||
chown postgres:postgres /opt/databasus/.env
|
||||
chmod 600 /opt/databasus/.env
|
||||
chmod 600 /.env
|
||||
msg_ok "Configured Databasus"
|
||||
|
||||
msg_info "Configuring Valkey"
|
||||
@@ -145,7 +147,7 @@ Requires=postgresql.service valkey.service
|
||||
[Service]
|
||||
Type=simple
|
||||
WorkingDirectory=/opt/databasus
|
||||
EnvironmentFile=/opt/databasus/.env
|
||||
EnvironmentFile=/.env
|
||||
ExecStart=/opt/databasus/databasus
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
139
install/librechat-install.sh
Normal file
139
install/librechat-install.sh
Normal file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/danny-avila/LibreChat
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
MONGO_VERSION="8.0" setup_mongodb
|
||||
setup_meilisearch
|
||||
PG_VERSION="17" PG_MODULES="pgvector" setup_postgresql
|
||||
PG_DB_NAME="ragapi" PG_DB_USER="ragapi" PG_DB_EXTENSIONS="vector" setup_postgresql_db
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
UV_PYTHON="3.12" setup_uv
|
||||
|
||||
fetch_and_deploy_gh_tag "librechat" "danny-avila/LibreChat"
|
||||
fetch_and_deploy_gh_release "rag-api" "danny-avila/rag_api" "tarball"
|
||||
|
||||
msg_info "Installing LibreChat Dependencies"
|
||||
cd /opt/librechat
|
||||
$STD npm ci
|
||||
msg_ok "Installed LibreChat Dependencies"
|
||||
|
||||
msg_info "Building Frontend"
|
||||
$STD npm run frontend
|
||||
$STD npm prune --production
|
||||
$STD npm cache clean --force
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Installing RAG API Dependencies"
|
||||
cd /opt/rag-api
|
||||
$STD uv venv --python 3.12 --seed .venv
|
||||
$STD .venv/bin/pip install -r requirements.lite.txt
|
||||
mkdir -p /opt/rag-api/uploads
|
||||
msg_ok "Installed RAG API Dependencies"
|
||||
|
||||
msg_info "Configuring LibreChat"
|
||||
JWT_SECRET=$(openssl rand -hex 32)
|
||||
JWT_REFRESH_SECRET=$(openssl rand -hex 32)
|
||||
CREDS_KEY=$(openssl rand -hex 32)
|
||||
CREDS_IV=$(openssl rand -hex 16)
|
||||
cat <<EOF >/opt/librechat/.env
|
||||
HOST=0.0.0.0
|
||||
PORT=3080
|
||||
MONGO_URI=mongodb://127.0.0.1:27017/LibreChat
|
||||
DOMAIN_CLIENT=http://${LOCAL_IP}:3080
|
||||
DOMAIN_SERVER=http://${LOCAL_IP}:3080
|
||||
NO_INDEX=true
|
||||
TRUST_PROXY=1
|
||||
JWT_SECRET=${JWT_SECRET}
|
||||
JWT_REFRESH_SECRET=${JWT_REFRESH_SECRET}
|
||||
SESSION_EXPIRY=1000 * 60 * 15
|
||||
REFRESH_TOKEN_EXPIRY=(1000 * 60 * 60 * 24) * 7
|
||||
CREDS_KEY=${CREDS_KEY}
|
||||
CREDS_IV=${CREDS_IV}
|
||||
ALLOW_EMAIL_LOGIN=true
|
||||
ALLOW_REGISTRATION=true
|
||||
ALLOW_SOCIAL_LOGIN=false
|
||||
ALLOW_SOCIAL_REGISTRATION=false
|
||||
ALLOW_PASSWORD_RESET=false
|
||||
ALLOW_UNVERIFIED_EMAIL_LOGIN=true
|
||||
SEARCH=true
|
||||
MEILI_NO_ANALYTICS=true
|
||||
MEILI_HOST=http://127.0.0.1:7700
|
||||
MEILI_MASTER_KEY=${MEILISEARCH_MASTER_KEY}
|
||||
RAG_PORT=8000
|
||||
RAG_API_URL=http://127.0.0.1:8000
|
||||
APP_TITLE=LibreChat
|
||||
ENDPOINTS=openAI,agents,assistants,anthropic,google
|
||||
# OPENAI_API_KEY=your-key-here
|
||||
# OPENAI_MODELS=
|
||||
# ANTHROPIC_API_KEY=your-key-here
|
||||
# GOOGLE_KEY=your-key-here
|
||||
EOF
|
||||
msg_ok "Configured LibreChat"
|
||||
|
||||
msg_info "Configuring RAG API"
|
||||
cat <<EOF >/opt/rag-api/.env
|
||||
VECTOR_DB_TYPE=pgvector
|
||||
DB_HOST=127.0.0.1
|
||||
DB_PORT=5432
|
||||
POSTGRES_DB=${PG_DB_NAME}
|
||||
POSTGRES_USER=${PG_DB_USER}
|
||||
POSTGRES_PASSWORD=${PG_DB_PASS}
|
||||
RAG_HOST=0.0.0.0
|
||||
RAG_PORT=8000
|
||||
JWT_SECRET=${JWT_SECRET}
|
||||
RAG_UPLOAD_DIR=/opt/rag-api/uploads/
|
||||
EOF
|
||||
msg_ok "Configured RAG API"
|
||||
|
||||
msg_info "Creating Services"
|
||||
cat <<EOF >/etc/systemd/system/librechat.service
|
||||
[Unit]
|
||||
Description=LibreChat
|
||||
After=network.target mongod.service meilisearch.service rag-api.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/librechat
|
||||
EnvironmentFile=/opt/librechat/.env
|
||||
ExecStart=/usr/bin/npm run backend
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
cat <<EOF >/etc/systemd/system/rag-api.service
|
||||
[Unit]
|
||||
Description=LibreChat RAG API
|
||||
After=network.target postgresql.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/rag-api
|
||||
EnvironmentFile=/opt/rag-api/.env
|
||||
ExecStart=/opt/rag-api/.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable -q --now rag-api librechat
|
||||
msg_ok "Created Services"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
66
install/matomo-install.sh
Normal file
66
install/matomo-install.sh
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://matomo.org/

# Load the shared install framework (colors, error traps, container helpers)
# that the caller passes in via FUNCTIONS_FILE_PATH.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

# Caddy is the web server; PHP-FPM wiring happens further down.
msg_info "Installing Dependencies"
$STD apt install -y caddy
msg_ok "Installed Dependencies"

mkdir -p /opt/matomo

# PHP 8.3 with FPM and the extensions Matomo requires, plus MariaDB with a
# dedicated "matomo" database/user (created by the setup_* helpers).
PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULES="pdo_mysql,gd,mbstring,xml,curl,intl,zip,ldap" setup_php
setup_mariadb
MARIADB_DB_NAME="matomo" MARIADB_DB_USER="matomo" setup_mariadb_db

# Also create/authorize the DB user for 127.0.0.1 so TCP connections work.
# NOTE(review): assumes MARIADB_DB_PASS is exported by setup_mariadb_db — confirm.
msg_info "Allowing Local TCP Database Access"
$STD mariadb -u root -e "CREATE USER IF NOT EXISTS '$MARIADB_DB_USER'@'127.0.0.1' IDENTIFIED BY '$MARIADB_DB_PASS';"
$STD mariadb -u root -e "ALTER USER '$MARIADB_DB_USER'@'127.0.0.1' IDENTIFIED BY '$MARIADB_DB_PASS';"
$STD mariadb -u root -e "GRANT ALL ON \`$MARIADB_DB_NAME\`.* TO '$MARIADB_DB_USER'@'127.0.0.1';"
$STD mariadb -u root -e "FLUSH PRIVILEGES;"
msg_ok "Allowed Local TCP Database Access"

# Download and unpack the latest prebuilt Matomo release archive.
fetch_and_deploy_gh_release "matomo" "matomo-org/matomo" "prebuild" "latest" "/opt/matomo" "matomo-*.zip"

msg_info "Setting up Matomo"
# The release zip may contain a nested "matomo/" directory; flatten it so the
# web root is /opt/matomo itself.
if [[ -d /opt/matomo/matomo ]]; then
  rm -rf /opt/matomo/tmp "/opt/matomo/How to install Matomo.html"
  find /opt/matomo/matomo -mindepth 1 -maxdepth 1 -exec mv -t /opt/matomo {} +
  rm -rf /opt/matomo/matomo
fi
mkdir -p /opt/matomo/tmp
chown -R www-data:www-data /opt/matomo
chmod -R 755 /opt/matomo/tmp
msg_ok "Set up Matomo"

msg_info "Configuring Caddy"
# Detect the installed PHP major.minor so the FPM socket path is correct.
PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION . "." . PHP_MINOR_VERSION;')
# The heredoc is unquoted on purpose: ${PHP_VER} is baked into the Caddyfile
# at install time. /config, /tmp and dotfiles are blocked from web access.
cat <<EOF >/etc/caddy/Caddyfile
:80 {
    root * /opt/matomo
    @blocked path /config /config/* /tmp /tmp/* /.* /.*/*
    respond @blocked 403
    php_fastcgi unix//run/php/php${PHP_VER}-fpm.sock
    file_server
    encode gzip
}
EOF
# Let the caddy user read files owned by www-data.
usermod -aG www-data caddy
msg_ok "Configured Caddy"

systemctl enable -q --now php${PHP_VER}-fpm
systemctl restart caddy

motd_ssh
customize
cleanup_lxc
|
||||
@@ -22,7 +22,8 @@ $STD apt install -y \
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
fetch_and_deploy_gh_release "pangolin" "fosrl/pangolin" "tarball"
|
||||
PANGOLIN_VERSION="${PANGOLIN_VERSION:-1.18.2}"
|
||||
fetch_and_deploy_gh_release "pangolin" "fosrl/pangolin" "tarball" "$PANGOLIN_VERSION"
|
||||
fetch_and_deploy_gh_release "gerbil" "fosrl/gerbil" "singlefile" "latest" "/usr/bin" "gerbil_linux_amd64"
|
||||
fetch_and_deploy_gh_release "traefik" "traefik/traefik" "prebuild" "latest" "/usr/bin" "traefik_v*_linux_amd64.tar.gz"
|
||||
|
||||
@@ -204,6 +205,7 @@ User=root
|
||||
Environment=NODE_ENV=production
|
||||
Environment=ENVIRONMENT=prod
|
||||
WorkingDirectory=/opt/pangolin
|
||||
ExecStartPre=/usr/bin/node dist/migrations.mjs
|
||||
ExecStart=/usr/bin/node --enable-source-maps dist/server.mjs
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
|
||||
@@ -29,13 +29,28 @@ cp -r .next/static .next/standalone/.next/
|
||||
mkdir -p /opt/peanut/.next/standalone/config
|
||||
mkdir -p /etc/peanut/
|
||||
ln -sf .next/standalone/server.js server.js
|
||||
cat <<EOF >/etc/peanut/settings.yml
|
||||
WEB_HOST: 0.0.0.0
|
||||
WEB_PORT: 8080
|
||||
NUT_HOST: 0.0.0.0
|
||||
NUT_PORT: 3493
|
||||
if [[ ! -f /etc/peanut/settings.yml ]]; then
|
||||
cat <<EOF >/etc/peanut/settings.yml
|
||||
NUT_SERVERS: []
|
||||
EOF
|
||||
fi
|
||||
ln -sf /etc/peanut/settings.yml /opt/peanut/.next/standalone/config/settings.yml
|
||||
cat <<EOF >/etc/peanut/peanut.env
|
||||
NODE_ENV=production
|
||||
|
||||
#WEB_HOST=0.0.0.0
|
||||
#WEB_PORT=8080
|
||||
#NUT_HOST=localhost
|
||||
#NUT_PORT=3493
|
||||
|
||||
# Disable auth entirely:
|
||||
#AUTH_DISABLED=true
|
||||
|
||||
# Bootstrap initial account on first start (ignored afterwards):
|
||||
#WEB_USERNAME=admin
|
||||
#WEB_PASSWORD=changeme
|
||||
EOF
|
||||
chmod 600 /etc/peanut/peanut.env
|
||||
msg_ok "Setup Peanut"
|
||||
|
||||
msg_info "Creating Service"
|
||||
@@ -48,11 +63,7 @@ SyslogIdentifier=peanut
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
Type=simple
|
||||
Environment="NODE_ENV=production"
|
||||
#Environment="NUT_HOST=localhost"
|
||||
#Environment="NUT_PORT=3493"
|
||||
#Environment="WEB_HOST=0.0.0.0"
|
||||
#Environment="WEB_PORT=8080"
|
||||
EnvironmentFile=/etc/peanut/peanut.env
|
||||
WorkingDirectory=/opt/peanut
|
||||
ExecStart=node /opt/peanut/entrypoint.mjs
|
||||
TimeoutStopSec=30
|
||||
|
||||
98
install/storyteller-install.sh
Normal file
98
install/storyteller-install.sh
Normal file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env bash

# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://gitlab.com/storyteller-platform/storyteller

# Load the shared install framework (colors, error traps, container helpers)
# that the caller passes in via FUNCTIONS_FILE_PATH.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

# Build toolchain + SQLite headers (a native extension is compiled below)
# and ffmpeg for media processing.
msg_info "Installing Dependencies"
$STD apt install -y \
  build-essential \
  git \
  pkg-config \
  libsqlite3-dev \
  sqlite3 \
  python3-setuptools \
  ffmpeg
msg_ok "Installed Dependencies"

NODE_VERSION="22" NODE_MODULE="yarn" setup_nodejs

# Readium CLI from GitHub; Storyteller sources from GitLab (tarball mode).
fetch_and_deploy_gh_release "readium" "readium/cli" "prebuild" "latest" "/opt/readium" "readium_linux_x86_64.tar.gz"
ln -sf /opt/readium/readium /usr/local/bin/readium
fetch_and_deploy_gl_release "storyteller" "storyteller-platform/storyteller" "tarball" "latest" "/opt/storyteller"

msg_info "Setting up Storyteller"
cd /opt/storyteller
$STD yarn install --network-timeout 600000
# Compile the project's SQLite uuid extension as a shared object.
$STD gcc -g -fPIC -rdynamic -shared web/sqlite/uuid.c -o web/sqlite/uuid.c.so
STORYTELLER_SECRET_KEY=$(openssl rand -base64 32)
cat <<EOF >/opt/storyteller/.env
STORYTELLER_SECRET_KEY=${STORYTELLER_SECRET_KEY}
STORYTELLER_DATA_DIR=/opt/storyteller/data
PORT=8001
HOSTNAME=0.0.0.0
READIUM_PORT=9000
NODE_ENV=production
NEXT_TELEMETRY_DISABLED=1
EOF
mkdir -p /opt/storyteller/data
# Persist the generated secret for the operator.
{
  echo "Storyteller Credentials"
  echo "======================="
  echo "Secret Key: ${STORYTELLER_SECRET_KEY}"
} >~/storyteller.creds
msg_ok "Set up Storyteller"

msg_info "Building Storyteller"
cd /opt/storyteller
export CI=1
export NODE_ENV=production
export NEXT_TELEMETRY_DISABLED=1
export SQLITE_NATIVE_BINDING=/opt/storyteller/node_modules/better-sqlite3/build/Release/better_sqlite3.node
# Build the web workspace and everything it depends on (eslint excluded).
$STD yarn workspaces foreach -Rpt --from @storyteller-platform/web --exclude @storyteller-platform/eslint run build
# Next.js "standalone" output does not include static assets, public files,
# migrations or the sqlite helpers — copy them in next to server.js.
mkdir -p /opt/storyteller/web/.next/standalone/web/.next/static
cp -rT /opt/storyteller/web/.next/static /opt/storyteller/web/.next/standalone/web/.next/static
if [[ -d /opt/storyteller/web/public ]]; then
  mkdir -p /opt/storyteller/web/.next/standalone/web/public
  cp -rT /opt/storyteller/web/public /opt/storyteller/web/.next/standalone/web/public
fi
mkdir -p /opt/storyteller/web/.next/standalone/web/migrations
cp -rT /opt/storyteller/web/migrations /opt/storyteller/web/.next/standalone/web/migrations
mkdir -p /opt/storyteller/web/.next/standalone/web/sqlite
cp -rT /opt/storyteller/web/sqlite /opt/storyteller/web/.next/standalone/web/sqlite
ln -sf /opt/storyteller/.env /opt/storyteller/web/.next/standalone/web/.env
msg_ok "Built Storyteller"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/storyteller.service
[Unit]
Description=Storyteller
After=network.target

[Service]
Type=simple
User=root
WorkingDirectory=/opt/storyteller/web/.next/standalone/web
EnvironmentFile=/opt/storyteller/.env
ExecStart=/usr/bin/node --enable-source-maps server.js
Restart=on-failure
RestartSec=5

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now storyteller
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
|
||||
@@ -513,7 +513,7 @@ validate_bridge() {
|
||||
[[ -z "$bridge" ]] && return 1
|
||||
|
||||
# Check if bridge interface exists
|
||||
if ! ip link show "$bridge" &>/dev/null; then
|
||||
if ! ip link show dev "$bridge" &>/dev/null; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
|
||||
786
misc/tools.func
786
misc/tools.func
@@ -2079,15 +2079,33 @@ get_latest_gh_tag() {
|
||||
local temp_file
|
||||
temp_file=$(mktemp)
|
||||
|
||||
if ! github_api_call "https://api.github.com/repos/${repo}/tags?per_page=50" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 22
|
||||
fi
|
||||
|
||||
local tag=""
|
||||
|
||||
if [[ -n "$prefix" ]]; then
|
||||
tag=$(jq -r --arg p "$prefix" '[.[] | select(.name | startswith($p))][0].name // empty' "$temp_file")
|
||||
# Use git/matching-refs API for server-side prefix filtering. This avoids
|
||||
# paging through unrelated tags (e.g. mongodb/mongo-tools where 100.x tags
|
||||
# only appear after page 4 of /tags). Returns ALL tags matching the prefix
|
||||
# in a single call, sorted lexicographically ascending; we pick the
|
||||
# highest version using `sort -V`.
|
||||
if ! github_api_call "https://api.github.com/repos/${repo}/git/matching-refs/tags/${prefix}" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 22
|
||||
fi
|
||||
|
||||
local count
|
||||
count=$(jq 'length' "$temp_file" 2>/dev/null || echo 0)
|
||||
if [[ "$count" -gt 0 ]]; then
|
||||
tag=$(jq -r '.[].ref' "$temp_file" \
|
||||
| sed 's|^refs/tags/||' \
|
||||
| sort -V \
|
||||
| tail -n1)
|
||||
fi
|
||||
else
|
||||
# No prefix: just take the first (newest) tag from /tags
|
||||
if ! github_api_call "https://api.github.com/repos/${repo}/tags?per_page=1" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 22
|
||||
fi
|
||||
tag=$(jq -r '.[0].name // empty' "$temp_file")
|
||||
fi
|
||||
|
||||
@@ -8665,3 +8683,759 @@ EOF
|
||||
$STD apt update
|
||||
return 0
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Get latest GitLab release version.
|
||||
# Usage: get_latest_gitlab_release "owner/repo" [strip_v]
|
||||
# ------------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------------
# Resolve the newest GitLab release tag for a project.
#
# Arguments: $1 - "owner/repo" project path
#            $2 - strip a leading 'v' from the tag (default: "true")
# Outputs:   the tag name on stdout
# Returns:   0 on success, 22 on API failure, 250 when no release exists
# ------------------------------------------------------------------------------
get_latest_gitlab_release() {
  local project="$1"
  local drop_v_prefix="${2:-true}"

  # GitLab's API addresses projects by URL-encoded "owner%2Frepo".
  local project_encoded="${project//\//%2F}"

  # Authenticate only when a token is present (private / rate-limited repos).
  local auth=()
  if [[ -n "${GITLAB_TOKEN:-}" ]]; then
    auth=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")
  fi

  local body_file
  body_file=$(mktemp)

  # Ask for only the single most recent release, newest first.
  local status
  status=$(curl --connect-timeout 10 --max-time 30 -sSL \
    -w "%{http_code}" -o "$body_file" \
    "${auth[@]}" \
    "https://gitlab.com/api/v4/projects/$project_encoded/releases?per_page=1&order_by=released_at&sort=desc" 2>/dev/null) || true

  if [[ "$status" != "200" ]]; then
    rm -f "$body_file"
    msg_warn "GitLab API call failed for ${project} (HTTP ${status})"
    return 22
  fi

  local tag
  tag=$(jq -r '.[0].tag_name // empty' "$body_file")
  rm -f "$body_file"

  if [[ -z "$tag" ]]; then
    msg_error "Could not determine latest version for ${project}"
    return 250
  fi

  # Normalize "v1.2.3" -> "1.2.3", but only when 'v' precedes a digit.
  if [[ "$drop_v_prefix" == "true" && "$tag" =~ ^v[0-9] ]]; then
    tag="${tag:1}"
  fi

  printf '%s\n' "$tag"
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Checks for new GitLab release (latest tag).
|
||||
#
|
||||
# Description:
|
||||
# - Queries the GitLab API for the latest release tag
|
||||
# - Compares it to a local cached version (~/.<app>)
|
||||
# - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
|
||||
#
|
||||
# Usage:
|
||||
# if check_for_gl_release "myapp" "owner/repo" [optional] "v1.2.3"; then
|
||||
# # trigger update...
|
||||
# fi
|
||||
# exit 0
|
||||
# } (end of update_script not from the function)
|
||||
#
|
||||
# Notes:
|
||||
# - Requires `jq` (auto-installed if missing)
|
||||
# - Supports GITLAB_TOKEN env var for private/rate-limited repos
|
||||
# - Does not modify anything, only checks version state
|
||||
# ------------------------------------------------------------------------------
|
||||
check_for_gl_release() {
  local app="$1"
  local source="$2"
  local pinned_version_in="${3:-}" # optional
  local pin_reason="${4:-}" # optional reason shown to user
  local app_lc="${app,,}"
  # Cached installed version lives in ~/.<app> (stored without 'v' prefix).
  local current_file="$HOME/.${app_lc}"

  msg_info "Checking for update: ${app}"

  # DNS check
  if ! getent hosts gitlab.com >/dev/null 2>&1; then
    msg_error "Network error: cannot resolve gitlab.com"
    return 6
  fi

  ensure_dependencies jq

  # GitLab addresses projects by URL-encoded "owner%2Frepo".
  local repo_encoded
  repo_encoded=$(printf '%s' "$source" | sed 's|/|%2F|g')

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  local releases_json="" http_code=""

  # For pinned versions, try to fetch the specific release tag first
  # (wrapped in [] so the later per-release parsing sees a JSON array).
  if [[ -n "$pinned_version_in" ]]; then
    local pinned_encoded="${pinned_version_in//\//%2F}"
    http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gl_check.json \
      "${header[@]}" \
      "https://gitlab.com/api/v4/projects/$repo_encoded/releases/$pinned_encoded" 2>/dev/null) || true
    if [[ "$http_code" == "200" ]] && [[ -s /tmp/gl_check.json ]]; then
      releases_json="[$(</tmp/gl_check.json)]"
    fi
    rm -f /tmp/gl_check.json
  fi

  # Fetch full releases list if needed
  if [[ -z "$releases_json" ]]; then
    http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gl_check.json \
      "${header[@]}" \
      "https://gitlab.com/api/v4/projects/$repo_encoded/releases?per_page=100&order_by=released_at&sort=desc" 2>/dev/null) || true

    # Map common HTTP failures to actionable messages and distinct return codes.
    if [[ "$http_code" == "200" ]] && [[ -s /tmp/gl_check.json ]]; then
      releases_json=$(</tmp/gl_check.json)
    elif [[ "$http_code" == "401" ]]; then
      msg_error "GitLab API authentication failed (HTTP 401)."
      if [[ -n "${GITLAB_TOKEN:-}" ]]; then
        msg_error "Your GITLAB_TOKEN appears to be invalid or expired."
      else
        msg_error "The repository may require authentication. Try: export GITLAB_TOKEN=\"glpat-your_token\""
      fi
      rm -f /tmp/gl_check.json
      return 22
    elif [[ "$http_code" == "404" ]]; then
      msg_error "GitLab project not found (HTTP 404). Ensure '${source}' is correct and publicly accessible."
      rm -f /tmp/gl_check.json
      return 22
    elif [[ "$http_code" == "429" ]]; then
      msg_error "GitLab API rate limit exceeded (HTTP 429)."
      msg_error "To increase the limit, export a GitLab token: export GITLAB_TOKEN=\"glpat-your_token_here\""
      rm -f /tmp/gl_check.json
      return 22
    elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
      msg_error "GitLab API connection failed (no response)."
      msg_error "Check your network/DNS: curl -sSL https://gitlab.com/api/v4/version"
      rm -f /tmp/gl_check.json
      return 7
    else
      msg_error "Unable to fetch releases for ${app} (HTTP ${http_code})"
      rm -f /tmp/gl_check.json
      return 22
    fi
    rm -f /tmp/gl_check.json
  fi

  # raw_tags keeps upstream tag names verbatim (used for CHECK_UPDATE_RELEASE);
  # clean_tags is the same list with the 'v' prefix normalized for comparison.
  mapfile -t raw_tags < <(jq -r '.[] | .tag_name' <<<"$releases_json")
  if ((${#raw_tags[@]} == 0)); then
    msg_error "No releases found for ${app} on GitLab"
    return 250
  fi

  local clean_tags=()
  for t in "${raw_tags[@]}"; do
    # Only strip leading 'v' when followed by a digit (e.g. v1.2.3)
    if [[ "$t" =~ ^v[0-9] ]]; then
      clean_tags+=("${t:1}")
    else
      clean_tags+=("$t")
    fi
  done

  local latest_raw="${raw_tags[0]}"
  local latest_clean="${clean_tags[0]}"

  # current installed (stored without v)
  local current=""
  if [[ -f "$current_file" ]]; then
    current="$(<"$current_file")"
  else
    # Migration: search for any /opt/*_version.txt
    # (legacy location; migrate only when exactly one candidate exists so we
    # never guess which app a stray file belongs to).
    local legacy_files
    mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null)
    if ((${#legacy_files[@]} == 1)); then
      current="$(<"${legacy_files[0]}")"
      echo "${current#v}" >"$current_file"
      rm -f "${legacy_files[0]}"
    fi
  fi
  if [[ "$current" =~ ^v[0-9] ]]; then
    current="${current:1}"
  fi

  # Pinned version handling
  if [[ -n "$pinned_version_in" ]]; then
    local pin_clean
    if [[ "$pinned_version_in" =~ ^v[0-9] ]]; then
      pin_clean="${pinned_version_in:1}"
    else
      pin_clean="$pinned_version_in"
    fi
    # Locate the upstream tag whose normalized form matches the pin.
    local match_raw=""
    for i in "${!clean_tags[@]}"; do
      if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then
        match_raw="${raw_tags[$i]}"
        break
      fi
    done

    if [[ -z "$match_raw" ]]; then
      msg_error "Pinned version ${pinned_version_in} not found upstream"
      return 250
    fi

    # Installed version differs from the pin -> signal an update to the pin.
    if [[ "$current" != "$pin_clean" ]]; then
      CHECK_UPDATE_RELEASE="$match_raw"
      msg_ok "Update available: ${app} ${current:-not installed} → ${pin_clean}"
      return 0
    fi

    if [[ -n "$pin_reason" ]]; then
      msg_ok "No update available: ${app} (${current}) - update held back: ${pin_reason}"
    else
      msg_ok "No update available: ${app} (${current}) - update temporarily held back due to issues with newer releases"
    fi
    return 1
  fi

  # No pinning → use latest
  if [[ -z "$current" || "$current" != "$latest_clean" ]]; then
    CHECK_UPDATE_RELEASE="$latest_raw"
    msg_ok "Update available: ${app} ${current:-not installed} → ${latest_clean}"
    return 0
  fi

  msg_ok "No update available: ${app} (${latest_clean})"
  return 1
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Scan older GitLab releases for a matching asset (fallback helper).
|
||||
#
|
||||
# Description:
|
||||
# When the latest release does not contain the expected asset
|
||||
# (e.g. .deb for the current arch, or a custom pattern), walks back
|
||||
# through up to 15 recent releases and returns the first release JSON
|
||||
# that has a matching asset. Used internally by fetch_and_deploy_gl_release.
|
||||
#
|
||||
# Usage (internal):
|
||||
# _gl_scan_older_releases "owner/repo" "owner%2Frepo" "https://gitlab.com" \
|
||||
# "binary|prebuild|singlefile" "$asset_pattern" "$skip_tag"
|
||||
#
|
||||
# Returns:
|
||||
# - stdout: JSON of the matching release (single object) on success
|
||||
# - 0 on success, 22 on API error, 250 if no match found
|
||||
# ------------------------------------------------------------------------------
|
||||
_gl_scan_older_releases() {
  local repo="$1"
  local repo_encoded="$2"
  local base_url="${3:-https://gitlab.com}"
  local mode="$4"          # binary | prebuild | singlefile — selects match rules
  local asset_pattern="$5" # shell glob matched against asset basenames
  local skip_tag="$6"      # tag already examined by the caller; skipped here

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  # Up to 15 most recent releases, newest first.
  local releases_list
  releases_list=$(curl --connect-timeout 10 --max-time 30 -fsSL \
    "${header[@]}" \
    "${base_url}/api/v4/projects/${repo_encoded}/releases?per_page=15&order_by=released_at&sort=desc" 2>/dev/null) || {
    msg_warn "Failed to fetch older releases for ${repo}"
    return 22
  }

  local count
  count=$(echo "$releases_list" | jq 'length' 2>/dev/null || echo 0)
  [[ "$count" -eq 0 ]] && return 250

  for ((i = 0; i < count; i++)); do
    local rel_tag
    rel_tag=$(echo "$releases_list" | jq -r ".[$i].tag_name")

    # Skip the tag we already checked
    [[ "$rel_tag" == "$skip_tag" ]] && continue

    # Asset URLs for this release (direct_asset_url preferred, fallback to url)
    local asset_urls
    asset_urls=$(echo "$releases_list" | jq -r ".[$i].assets.links // [] | .[] | .direct_asset_url // .url")
    [[ -z "$asset_urls" ]] && continue

    local has_match=false

    if [[ "$mode" == "binary" ]]; then
      # Normalize the local architecture to Debian-style names.
      local arch
      arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
      [[ "$arch" == "x86_64" ]] && arch="amd64"
      [[ "$arch" == "aarch64" ]] && arch="arm64"

      # Check with explicit pattern first, then arch heuristic, then any .deb
      if [[ -n "$asset_pattern" ]]; then
        # 'break' here leaves the read loop, not the case statement.
        while read -r u; do
          case "${u##*/}" in $asset_pattern)
            has_match=true
            break
            ;;
          esac
        done <<<"$asset_urls"
      fi
      if [[ "$has_match" != "true" ]]; then
        echo "$asset_urls" | grep -qE "($arch|amd64|x86_64|aarch64|arm64).*\.deb$" && has_match=true
      fi
      if [[ "$has_match" != "true" ]]; then
        echo "$asset_urls" | grep -qE '\.deb$' && has_match=true
      fi

    elif [[ "$mode" == "prebuild" || "$mode" == "singlefile" ]]; then
      # These modes only accept an explicit basename pattern match.
      while read -r u; do
        case "${u##*/}" in $asset_pattern)
          has_match=true
          break
          ;;
        esac
      done <<<"$asset_urls"
    fi

    if [[ "$has_match" == "true" ]]; then
      # Interactive sessions get a 60s veto prompt; non-interactive runs
      # (no tty on stdin) silently accept the fallback release.
      local use_fallback="y"
      if [[ -t 0 ]]; then
        msg_warn "Release ${skip_tag} has no matching asset. Previous release ${rel_tag} has a compatible asset."
        read -rp "Use version ${rel_tag} instead? [Y/n] (auto-yes in 60s): " -t 60 use_fallback || use_fallback="y"
        use_fallback="${use_fallback:-y}"
      fi

      if [[ "${use_fallback,,}" == "y" || "${use_fallback,,}" == "yes" ]]; then
        # Emit the chosen release object on stdout for the caller to parse.
        echo "$releases_list" | jq ".[$i]"
        return 0
      else
        return 250
      fi
    fi
  done

  return 250
}
|
||||
|
||||
# ------------------------------------------------------------------------------
# Fetch a GitLab release and deploy it to a target directory.
#
# Usage: fetch_and_deploy_gl_release APP "owner/repo" [MODE] [VERSION] [TARGET] [ASSET_PATTERN]
#   MODE: tarball|source (repo archive), binary (.deb install),
#         prebuild (archive asset), singlefile (single asset)
# Writes the deployed version to ~/.<app> and skips work when already current.
# Returns 0 on success / already-up-to-date, 1 on any failure.
# ------------------------------------------------------------------------------
function fetch_and_deploy_gl_release() {
  local app="$1"
  local repo="$2"
  local mode="${3:-tarball}"
  # var_appversion (if exported by the caller) overrides the positional version.
  local version="${var_appversion:-${4:-latest}}"
  local target="${5:-/opt/$app}"
  local asset_pattern="${6:-}"

  # Derive the app name from the repo path when not given explicitly.
  if [[ -z "$app" ]]; then
    app="${repo##*/}"
    if [[ -z "$app" ]]; then
      msg_error "fetch_and_deploy_gl_release requires app name or valid repo"
      return 1
    fi
  fi

  local app_lc=$(echo "${app,,}" | tr -d ' ')
  # Version cache file; used below to short-circuit when already up to date.
  local version_file="$HOME/.${app_lc}"

  local api_timeout="--connect-timeout 10 --max-time 60"
  local download_timeout="--connect-timeout 15 --max-time 900"

  local current_version=""
  [[ -f "$version_file" ]] && current_version=$(<"$version_file")

  ensure_dependencies jq

  # GitLab addresses projects by URL-encoded "owner%2Frepo".
  local repo_encoded
  repo_encoded=$(printf '%s' "$repo" | sed 's|/|%2F|g')

  local api_base="https://gitlab.com/api/v4/projects/$repo_encoded/releases"
  local api_url
  if [[ "$version" != "latest" ]]; then
    api_url="$api_base/$version"
  else
    api_url="$api_base?per_page=1&order_by=released_at&sort=desc"
  fi

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  # Fetch release metadata with retries and exponential backoff on HTTP 429.
  local max_retries=3 retry_delay=2 attempt=1 success=false http_code

  while ((attempt <= max_retries)); do
    http_code=$(curl $api_timeout -sSL -w "%{http_code}" -o /tmp/gl_rel.json "${header[@]}" "$api_url" 2>/dev/null) || true
    if [[ "$http_code" == "200" ]]; then
      success=true
      break
    elif [[ "$http_code" == "429" ]]; then
      if ((attempt < max_retries)); then
        msg_warn "GitLab API rate limit hit, retrying in ${retry_delay}s... (attempt $attempt/$max_retries)"
        sleep "$retry_delay"
        retry_delay=$((retry_delay * 2))
      fi
    else
      sleep "$retry_delay"
    fi
    ((attempt++))
  done

  # Map the final failure status to an actionable message.
  if ! $success; then
    if [[ "$http_code" == "401" ]]; then
      msg_error "GitLab API authentication failed (HTTP 401)."
      if [[ -n "${GITLAB_TOKEN:-}" ]]; then
        msg_error "Your GITLAB_TOKEN appears to be invalid or expired."
      else
        msg_error "The repository may require authentication. Try: export GITLAB_TOKEN=\"glpat-your_token\""
      fi
    elif [[ "$http_code" == "404" ]]; then
      msg_error "GitLab project or release not found (HTTP 404)."
      msg_error "Ensure '$repo' is correct and the project is accessible."
    elif [[ "$http_code" == "429" ]]; then
      msg_error "GitLab API rate limit exceeded (HTTP 429)."
      msg_error "To increase the limit, export a GitLab token before running the script:"
      msg_error " export GITLAB_TOKEN=\"glpat-your_token_here\""
    elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
      msg_error "GitLab API connection failed (no response)."
      msg_error "Check your network/DNS: curl -sSL https://gitlab.com/api/v4/version"
    else
      msg_error "Failed to fetch release metadata (HTTP $http_code)"
    fi
    return 1
  fi

  local json tag_name
  json=$(</tmp/gl_rel.json)

  # "latest" queries return an array; reduce it to the newest release object.
  if [[ "$version" == "latest" ]]; then
    json=$(echo "$json" | jq '.[0] // empty')
    if [[ -z "$json" || "$json" == "null" ]]; then
      msg_error "No releases found for $repo on GitLab"
      return 1
    fi
  fi

  tag_name=$(echo "$json" | jq -r '.tag_name // empty')
  if [[ -z "$tag_name" ]]; then
    msg_error "Could not determine tag name from release metadata"
    return 1
  fi
  # Normalize "v1.2.3" -> "1.2.3"; keep non-v-prefixed tags verbatim.
  [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
  # Slashes in tags would break filenames below.
  local version_safe="${version//\//-}"

  if [[ "$current_version" == "$version" ]]; then
    $STD msg_ok "$app is already up-to-date (v$version)"
    return 0
  fi

  local tmpdir
  tmpdir=$(mktemp -d) || return 1
  local filename=""

  msg_info "Fetching GitLab release: $app ($version)"

  # Helper: list asset URLs of a release JSON (direct_asset_url preferred).
  _gl_asset_urls() {
    local release_json="$1"
    echo "$release_json" | jq -r '
      (.assets.links // [])[] | .direct_asset_url // .url
    '
  }

  ### Tarball Mode ###
  if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
    # Repo archive endpoint — does not rely on release asset links.
    local direct_tarball_url="https://gitlab.com/$repo/-/archive/$tag_name/${app_lc}-${version_safe}.tar.gz"
    filename="${app_lc}-${version_safe}.tar.gz"

    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$direct_tarball_url" || {
      msg_error "Download failed: $direct_tarball_url"
      rm -rf "$tmpdir"
      return 1
    }

    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
      msg_error "Failed to extract tarball"
      rm -rf "$tmpdir"
      return 1
    }
    # GitLab archives unpack into a single "<repo>-<tag>/" directory.
    local unpack_dir
    unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

    shopt -s dotglob nullglob
    cp -r "$unpack_dir"/* "$target/"
    shopt -u dotglob nullglob

  ### Binary Mode ###
  elif [[ "$mode" == "binary" ]]; then
    # Normalize local architecture to Debian-style names for asset matching.
    local arch
    arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
    [[ "$arch" == "x86_64" ]] && arch="amd64"
    [[ "$arch" == "aarch64" ]] && arch="arm64"

    local assets url_match=""
    assets=$(_gl_asset_urls "$json")

    # Selection order: explicit pattern, then arch-specific .deb, then any .deb.
    if [[ -n "$asset_pattern" ]]; then
      for u in $assets; do
        case "${u##*/}" in
        $asset_pattern)
          url_match="$u"
          break
          ;;
        esac
      done
    fi

    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
          url_match="$u"
          break
        fi
      done
    fi

    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
      done
    fi

    # Nothing in this release: walk back through older releases.
    if [[ -z "$url_match" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "binary" "$asset_pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        assets=$(_gl_asset_urls "$json")
        if [[ -n "$asset_pattern" ]]; then
          for u in $assets; do
            case "${u##*/}" in $asset_pattern)
              url_match="$u"
              break
              ;;
            esac
          done
        fi
        if [[ -z "$url_match" ]]; then
          for u in $assets; do
            [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]] && url_match="$u" && break
          done
        fi
        if [[ -z "$url_match" ]]; then
          for u in $assets; do
            [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
          done
        fi
      fi
    fi

    if [[ -z "$url_match" ]]; then
      msg_error "No suitable .deb asset found for $app"
      rm -rf "$tmpdir"
      return 1
    fi

    filename="${url_match##*/}"
    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$url_match" || {
      msg_error "Download failed: $url_match"
      rm -rf "$tmpdir"
      return 1
    }

    chmod 644 "$tmpdir/$filename"
    # Honor caller's dpkg conffile policy; fall back to raw dpkg if apt fails.
    local dpkg_opts=""
    [[ "${DPKG_FORCE_CONFOLD:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confold"
    [[ "${DPKG_FORCE_CONFNEW:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confnew"
    DEBIAN_FRONTEND=noninteractive SYSTEMD_OFFLINE=1 $STD apt install -y $dpkg_opts "$tmpdir/$filename" || {
      SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
        msg_error "Both apt and dpkg installation failed"
        rm -rf "$tmpdir"
        return 1
      }
    }

  ### Prebuild Mode ###
  elif [[ "$mode" == "prebuild" ]]; then
    # Re-read $6 and strip accidental surrounding quotes from the pattern.
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    local asset_url=""
    for u in $(_gl_asset_urls "$json"); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    # No matching asset in this release: try older releases.
    if [[ -z "$asset_url" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "prebuild" "$pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        for u in $(_gl_asset_urls "$json"); do
          filename_candidate="${u##*/}"
          case "$filename_candidate" in $pattern)
            asset_url="$u"
            break
            ;;
          esac
        done
      fi
    fi

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    local unpack_tmp
    unpack_tmp=$(mktemp -d)
    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    # Extract by extension; anything else is rejected.
    if [[ "$filename" == *.zip ]]; then
      ensure_dependencies unzip
      unzip -q "$tmpdir/$filename" -d "$unpack_tmp" || {
        msg_error "Failed to extract ZIP archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    elif [[ "$filename" == *.tar.* || "$filename" == *.tgz || "$filename" == *.txz ]]; then
      tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
        msg_error "Failed to extract TAR archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    else
      msg_error "Unsupported archive format: $filename"
      rm -rf "$tmpdir" "$unpack_tmp"
      return 1
    fi

    # If the archive contains exactly one top-level directory, flatten it
    # (copy its contents); otherwise copy the unpacked tree as-is.
    local top_entries inner_dir
    top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1)
    if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then
      inner_dir="$top_entries"
      shopt -s dotglob nullglob
      if compgen -G "$inner_dir/*" >/dev/null; then
        cp -r "$inner_dir"/* "$target/" || {
          msg_error "Failed to copy contents from $inner_dir to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Inner directory is empty: $inner_dir"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    else
      shopt -s dotglob nullglob
      if compgen -G "$unpack_tmp/*" >/dev/null; then
        cp -r "$unpack_tmp"/* "$target/" || {
          msg_error "Failed to copy contents to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Unpacked archive is empty"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    fi

  ### Singlefile Mode ###
  elif [[ "$mode" == "singlefile" ]]; then
    # Re-read $6 and strip accidental surrounding quotes from the pattern.
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    local asset_url=""
    for u in $(_gl_asset_urls "$json"); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    # No matching asset in this release: try older releases.
    if [[ -z "$asset_url" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "singlefile" "$pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        for u in $(_gl_asset_urls "$json"); do
          filename_candidate="${u##*/}"
          case "$filename_candidate" in $pattern)
            asset_url="$u"
            break
            ;;
          esac
        done
      fi
    fi

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    mkdir -p "$target"

    # Download to "<target>/<app>" by default; keep the upstream filename
    # when USE_ORIGINAL_FILENAME=true.
    local use_filename="${USE_ORIGINAL_FILENAME:-false}"
    local target_file="$app"
    [[ "$use_filename" == "true" ]] && target_file="$filename"

    curl $download_timeout -fsSL "${header[@]}" -o "$target/$target_file" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    # Executables get +x; .jar files are left non-executable.
    if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
      chmod +x "$target/$target_file"
    fi

  else
    msg_error "Unknown mode: $mode"
    rm -rf "$tmpdir"
    return 1
  fi

  # Record the deployed version for the up-to-date check on the next run.
  echo "$version" >"$version_file"
  msg_ok "Deployed: $app ($version)"
  rm -rf "$tmpdir"
}
|
||||
|
||||
@@ -57,7 +57,9 @@ start_routines() {
|
||||
yes)
|
||||
msg_info "Switching to Debian 13 (Trixie) Sources"
|
||||
rm -f /etc/apt/sources.list.d/*.list
|
||||
sed -i '/proxmox/d;/bookworm/d' /etc/apt/sources.list || true
|
||||
if [ -f /etc/apt/sources.list ]; then
|
||||
sed -i '/proxmox/d;/bookworm/d' /etc/apt/sources.list
|
||||
fi
|
||||
cat >/etc/apt/sources.list.d/debian.sources <<EOF
|
||||
Types: deb
|
||||
URIs: http://deb.debian.org/debian
|
||||
|
||||
@@ -188,7 +188,9 @@ start_routines_4() {
|
||||
yes)
|
||||
msg_info "Correcting Debian Sources (deb822)"
|
||||
rm -f /etc/apt/sources.list.d/*.list
|
||||
sed -i '/proxmox/d;/bookworm/d' /etc/apt/sources.list || true
|
||||
if [ -f /etc/apt/sources.list ]; then
|
||||
sed -i '/proxmox/d;/bookworm/d' /etc/apt/sources.list
|
||||
fi
|
||||
cat >/etc/apt/sources.list.d/debian.sources <<EOF
|
||||
Types: deb
|
||||
URIs: http://deb.debian.org/debian/
|
||||
|
||||
@@ -251,8 +251,10 @@ start_routines_9() {
|
||||
msg_info "Correcting Proxmox VE Sources (deb822)"
|
||||
# remove all existing .list files
|
||||
rm -f /etc/apt/sources.list.d/*.list
|
||||
# remove bookworm and proxmox entries from sources.list
|
||||
sed -i '/proxmox/d;/bookworm/d' /etc/apt/sources.list || true
|
||||
# remove bookworm and proxmox entries from sources.list (if it exists)
|
||||
if [ -f /etc/apt/sources.list ]; then
|
||||
sed -i '/proxmox/d;/bookworm/d' /etc/apt/sources.list
|
||||
fi
|
||||
# Create new deb822 sources
|
||||
cat >/etc/apt/sources.list.d/debian.sources <<EOF
|
||||
Types: deb
|
||||
|
||||
@@ -199,7 +199,7 @@ while read -r container; do
|
||||
menu_items+=("$container_id" "$formatted_line" "OFF")
|
||||
fi
|
||||
done <<<"$containers"
|
||||
msg_ok "Loaded ${#menu_items[@]} containers"
|
||||
msg_ok "Loaded $((${#menu_items[@]} / 3)) containers"
|
||||
|
||||
# Determine container selection based on var_container
|
||||
if [[ -n "$var_container" ]]; then
|
||||
|
||||
Reference in New Issue
Block a user