Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2026-02-04 12:23:26 +01:00)

Compare commits: tools_func ... fix/debian
27 Commits
Commit SHAs in this comparison (author and date columns were not captured by the mirror):

- 397baf0a2c
- 07ad467f34
- ff4f5f6a0a
- ea116222f4
- fa00a51110
- a259ae2b3e
- 03e660fdef
- 3a5e2f9515
- 83b1a5e39b
- 01da983f72
- 3a04923479
- ebb48f697c
- 533ca924c9
- 3042162065
- ddd0164c54
- 279e33c3a5
- 6c1d1e1e71
- 0453673115
- 497cefa850
- ba279675a8
- 830c6923b5
- cc59d69cb7
- 74b06f82e4
- 436dc8568b
- 0e7e08579b
- 22bbba572c
- d789af9637
CHANGELOG.md (41 changed lines)
@@ -391,6 +391,36 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

</details>

## 2026-01-28

### 🚀 Updated Scripts

- #### ✨ New Features

- various scripts: use setup_meilisearch function [@MickLesk](https://github.com/MickLesk) ([#11259](https://github.com/community-scripts/ProxmoxVE/pull/11259))

- #### 🔧 Refactor

- Refactor: NPMPlus / Default Login [@MickLesk](https://github.com/MickLesk) ([#11262](https://github.com/community-scripts/ProxmoxVE/pull/11262))

### 💾 Core

- #### 🐞 Bug Fixes

- tools.func: prevent systemd-tmpfiles failure in unprivileged LXC during deb install [@MickLesk](https://github.com/MickLesk) ([#11271](https://github.com/community-scripts/ProxmoxVE/pull/11271))
- tools.func: fix php "wait_for" hint [@MickLesk](https://github.com/MickLesk) ([#11254](https://github.com/community-scripts/ProxmoxVE/pull/11254))

- #### ✨ New Features

- core: update dynamic values in LXC profile on update_motd_ip [@MickLesk](https://github.com/MickLesk) ([#11268](https://github.com/community-scripts/ProxmoxVE/pull/11268))
- tools.func: add new function - setup_meilisearch [@MickLesk](https://github.com/MickLesk) ([#11258](https://github.com/community-scripts/ProxmoxVE/pull/11258))

### 🌐 Website

- #### 📝 Script Information

- fix: homarr: conf location [@CrazyWolf13](https://github.com/CrazyWolf13) ([#11253](https://github.com/community-scripts/ProxmoxVE/pull/11253))

## 2026-01-27

### 🚀 Updated Scripts

@@ -398,12 +428,21 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

- #### 🐞 Bug Fixes

- Immich: update libraw [@vhsdream](https://github.com/vhsdream) ([#11233](https://github.com/community-scripts/ProxmoxVE/pull/11233))
- [FIX] Jotty: backup and restore custom config [@vhsdream](https://github.com/vhsdream) ([#11212](https://github.com/community-scripts/ProxmoxVE/pull/11212))

- #### ✨ New Features

- grist: enable optional enterprise features toggle [@MickLesk](https://github.com/MickLesk) ([#11239](https://github.com/community-scripts/ProxmoxVE/pull/11239))

- #### 🔧 Refactor

- Termix: use nginx.conf from upstream repo [@MickLesk](https://github.com/MickLesk) ([#11228](https://github.com/community-scripts/ProxmoxVE/pull/11228))

### 💾 Core

- #### ✨ New Features

- feat: add NVIDIA driver install prompt for GPU-enabled containers [@devdecrux](https://github.com/devdecrux) ([#11184](https://github.com/community-scripts/ProxmoxVE/pull/11184))

### 📚 Documentation

- doc setup_deb822_repo arg order [@chrnie](https://github.com/chrnie) ([#11215](https://github.com/community-scripts/ProxmoxVE/pull/11215))
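The `setup_meilisearch` helper added in #11258 and rolled out in #11259 is what the script hunks below switch to: it replaces the per-script pattern of deploying the Meilisearch binary, editing `/etc/meilisearch.toml`, and writing a systemd unit. A minimal usage sketch, inferred only from the call sites visible in this comparison; the helper's definition is not part of the shown hunks, so treat the exported variable names as observed usage rather than a documented interface:

```bash
# Sketch based on the call sites in this diff, not on the helper's definition.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

# Plain call, as used by the bar-assistant, karakeep and openarchiver scripts below.
setup_meilisearch

# Override the listen address, as the meilisearch install script below does:
MEILISEARCH_BIND="0.0.0.0:7700" setup_meilisearch

# The updated scripts then read these variables instead of a locally generated MASTER_KEY:
echo "${MEILISEARCH_MASTER_KEY}"  # master key written to the config
echo "${MEILISEARCH_API_KEY}"     # default API key
echo "${MEILISEARCH_API_KEY_UID}" # UID of that key
```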
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG
# Author: vhsdream
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://codeberg.org/gelbphoenix/autocaliweb
# Source: https://github.com/gelbphoenix/autocaliweb

APP="Autocaliweb"
var_tags="${var_tags:-ebooks}"
@@ -30,8 +30,8 @@ function update_script() {

setup_uv

RELEASE=$(get_latest_codeberg_release "gelbphoenix/autocaliweb")
if check_for_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb"; then
RELEASE=$(get_latest_github_release "gelbphoenix/autocaliweb")
if check_for_gh_release "autocaliweb" "gelbphoenix/autocaliweb"; then
msg_info "Stopping Services"
systemctl stop autocaliweb metadata-change-detector acw-ingest-service acw-auto-zipper
msg_ok "Stopped Services"
@@ -39,7 +39,7 @@ function update_script() {
INSTALL_DIR="/opt/autocaliweb"
export VIRTUAL_ENV="${INSTALL_DIR}/venv"
$STD tar -cf ~/autocaliweb_bkp.tar "$INSTALL_DIR"/{metadata_change_logs,dirs.json,.env,scripts/ingest_watcher.sh,scripts/auto_zipper_wrapper.sh,scripts/metadata_change_detector_wrapper.sh}
fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"
fetch_and_deploy_gh_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"

msg_info "Updating Autocaliweb"
cd "$INSTALL_DIR"
@@ -34,7 +34,7 @@ function update_script() {
systemctl stop nginx
msg_ok "Stopped nginx"

PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,opcache,redis,zip,pdo-sqlite,bcmath,pdo,curl,dom,fpm" setup_php
PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,redis,pdo-sqlite" setup_php

msg_info "Backing up Bar Assistant"
mv /opt/bar-assistant /opt/bar-assistant-backup
@@ -88,18 +88,8 @@ function update_script() {
msg_ok "Started nginx"
fi

if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
msg_info "Stopping Meilisearch"
systemctl stop meilisearch
msg_ok "Stopped Meilisearch"
setup_meilisearch

fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"

msg_info "Starting Meilisearch"
systemctl start meilisearch
msg_ok "Started Meilisearch"
msg_ok "Updated successfully!"
fi
exit
}
@@ -49,6 +49,7 @@ function update_script() {
cp /opt/grist_bak/landing.db /opt/grist/landing.db
cd /opt/grist
$STD yarn install
$STD yarn run install:ee
$STD yarn run build:prod
$STD yarn run install:python
msg_ok "Updated Grist"
@@ -61,6 +61,7 @@ function update_script() {
fi
MODULE_VERSION="$(jq -r '.packageManager | split("@")[1]' /opt/karakeep/package.json)"
NODE_VERSION="22" NODE_MODULE="pnpm@${MODULE_VERSION}" setup_nodejs
setup_meilisearch

msg_info "Updating Karakeep"
corepack enable
@@ -90,6 +91,7 @@ function update_script() {
msg_ok "Started Services"
msg_ok "Updated successfully!"
fi

exit
}
@@ -30,18 +30,7 @@ function update_script() {
3>&1 1>&2 2>&3)

if [ "$UPD" == "1" ]; then
if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
msg_info "Stopping Meilisearch"
systemctl stop meilisearch
msg_ok "Stopped Meilisearch"

fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"

msg_info "Starting Meilisearch"
systemctl start meilisearch
msg_ok "Started Meilisearch"
msg_ok "Updated successfully!"
fi
setup_meilisearch
exit
fi
@@ -28,6 +28,8 @@ function update_script() {
exit
fi

setup_meilisearch

if check_for_gh_release "openarchiver" "LogicLabs-OU/OpenArchiver"; then
msg_info "Stopping Services"
systemctl stop openarchiver
@@ -54,6 +56,7 @@ function update_script() {
msg_ok "Started Services"
msg_ok "Updated successfully!"
fi

exit
}
@@ -31,7 +31,7 @@ function update_script() {

CURRENT_PHP=$(php -v 2>/dev/null | awk '/^PHP/{print $2}' | cut -d. -f1,2)
if [[ "$CURRENT_PHP" != "8.3" ]]; then
PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULE="common,mysql,fpm,redis" setup_php
PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULE="mysql,redis" setup_php
setup_composer
sed -i 's|php8\.2-fpm\.sock|php8.3-fpm.sock|g' /etc/nginx/sites-available/paymenter.conf
$STD systemctl reload nginx
@@ -9,9 +9,9 @@
"updateable": true,
"privileged": false,
"interface_port": 8083,
"documentation": "https://codeberg.org/gelbphoenix/autocaliweb/wiki",
"documentation": "https://github.com/gelbphoenix/autocaliweb/wiki",
"config_path": "/etc/autocaliweb",
"website": "https://codeberg.org/gelbphoenix/autocaliweb",
"website": "https://github.com/gelbphoenix/autocaliweb",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/autocaliweb.webp",
"description": "A modern web management system for eBooks, eComics and PDFs",
"install_methods": [
@@ -12,7 +12,7 @@
"documentation": "https://homarr.dev/docs/getting-started/",
"website": "https://homarr.dev/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/homarr.webp",
"config_path": "/opt/homarr/.env",
"config_path": "/opt/homarr.env",
"description": "Homarr is a sleek, modern dashboard that puts all of your apps and services at your fingertips.",
"install_methods": [
{
@@ -39,8 +39,8 @@
}
],
"default_credentials": {
"username": "admin@example.org",
"password": null
"username": "admin@local.com",
"password": "helper-scripts.com"
},
"notes": [
{
@@ -48,11 +48,7 @@
"type": "info"
},
{
"text": "The initial starting process can be take 1-2min. ",
"type": "info"
},
{
"text": "Application credentials: `cat /opt/.npm_pwd` - if file not exist in LXC check docker logs for password with `docker logs npmplus`",
"text": "The initial starting process can take 1-2min. ",
"type": "info"
}
]
@@ -1,29 +1,234 @@
[
{ "name": "cockpit-project/cockpit", "version": "355", "date": "2026-01-28T11:14:44Z" },
{ "name": "go-vikunja/vikunja", "version": "v1.0.0", "date": "2026-01-28T11:12:59Z" },
{ "name": "glpi-project/glpi", "version": "11.0.5", "date": "2026-01-28T10:49:10Z" },
{ "name": "openobserve/openobserve", "version": "v0.60.0-rc1", "date": "2026-01-28T10:38:45Z" },
{ "name": "toeverything/AFFiNE", "version": "v2026.1.28-canary.910", "date": "2026-01-28T10:02:45Z" },
{ "name": "cross-seed/cross-seed", "version": "v7.0.0-7", "date": "2026-01-22T06:46:13Z" },
{ "name": "SigNoz/signoz", "version": "v0.109.2", "date": "2026-01-28T08:26:01Z" },
{ "name": "meilisearch/meilisearch", "version": "latest", "date": "2026-01-28T08:21:07Z" },
{ "name": "emqx/emqx", "version": "e5.10.3", "date": "2026-01-28T07:50:24Z" },
{ "name": "cloudreve/cloudreve", "version": "4.12.1", "date": "2026-01-28T07:29:16Z" },
{ "name": "connorgallopo/Tracearr", "version": "v1.4.10", "date": "2026-01-28T06:37:35Z" },
{ "name": "morpheus65535/bazarr", "version": "v1.5.4", "date": "2026-01-04T22:41:00Z" },
{ "name": "donetick/donetick", "version": "v0.1.64", "date": "2025-10-03T05:18:24Z" },
{ "name": "nickheyer/discopanel", "version": "v1.0.27", "date": "2026-01-28T04:32:34Z" },
{ "name": "gotson/komga", "version": "1.24.0", "date": "2026-01-28T04:19:22Z" },
{ "name": "BerriAI/litellm", "version": "v1.81.3.rc.3", "date": "2026-01-28T04:13:53Z" },
{ "name": "firefly-iii/firefly-iii", "version": "v6.4.16", "date": "2026-01-17T07:54:15Z" },
{ "name": "chrisbenincasa/tunarr", "version": "v1.2.0-dev.4", "date": "2026-01-28T02:15:09Z" },
{ "name": "release-argus/Argus", "version": "0.29.3", "date": "2026-01-28T02:07:24Z" },
{ "name": "jeedom/core", "version": "4.5.2", "date": "2026-01-28T00:27:07Z" },
{ "name": "steveiliop56/tinyauth", "version": "v4.1.0", "date": "2025-11-23T12:13:34Z" },
{ "name": "influxdata/influxdb", "version": "v2.8.0", "date": "2025-12-12T20:25:00Z" },
{ "name": "outline/outline", "version": "v1.4.0", "date": "2026-01-27T23:43:03Z" },
{ "name": "transmission/transmission", "version": "4.1.0", "date": "2026-01-27T23:30:41Z" },
{ "name": "immich-app/immich", "version": "v2.5.1", "date": "2026-01-27T23:10:13Z" },
{ "name": "moghtech/komodo", "version": "v1.19.5", "date": "2025-09-27T20:59:46Z" },
{ "name": "Infisical/infisical", "version": "v0.157.0", "date": "2026-01-27T21:58:06Z" },
{ "name": "tailscale/tailscale", "version": "v1.94.1", "date": "2026-01-27T21:35:08Z" },
{ "name": "grafana/grafana", "version": "v12.3.2", "date": "2026-01-27T20:59:58Z" },
{ "name": "autobrr/autobrr", "version": "v1.72.0", "date": "2026-01-27T20:53:54Z" },
{ "name": "autobrr/qui", "version": "v1.13.0", "date": "2026-01-27T20:32:22Z" },
{ "name": "Dokploy/dokploy", "version": "v0.26.6", "date": "2026-01-27T20:02:17Z" },
{ "name": "metabase/metabase", "version": "v0.58.x", "date": "2026-01-27T20:00:09Z" },
{ "name": "mongodb/mongo", "version": "r8.3.0-alpha3", "date": "2026-01-27T19:04:43Z" },
{ "name": "n8n-io/n8n", "version": "n8n@2.4.6", "date": "2026-01-23T15:32:50Z" },
{ "name": "keycloak/keycloak", "version": "26.5.2", "date": "2026-01-23T14:26:58Z" },
{ "name": "typesense/typesense", "version": "v30.0", "date": "2026-01-27T18:02:25Z" },
{ "name": "Dispatcharr/Dispatcharr", "version": "v0.18.1", "date": "2026-01-27T17:09:11Z" },
{ "name": "element-hq/synapse", "version": "v1.146.0", "date": "2026-01-27T16:09:34Z" },
{ "name": "heiher/hev-socks5-server", "version": "2.11.2", "date": "2026-01-27T16:07:21Z" },
{ "name": "thomiceli/opengist", "version": "v1.12.0", "date": "2026-01-27T15:31:57Z" },
{ "name": "OctoPrint/OctoPrint", "version": "1.11.6", "date": "2026-01-27T13:27:46Z" },
{ "name": "fuma-nama/fumadocs", "version": "@fumadocs/story@0.0.5", "date": "2026-01-27T13:13:44Z" },
{ "name": "cloudflare/cloudflared", "version": "2026.1.2", "date": "2026-01-27T11:40:42Z" },
{ "name": "Jackett/Jackett", "version": "v0.24.955", "date": "2026-01-27T11:04:42Z" },
{ "name": "grafana/grafana", "version": "v12.0.8+security-01", "date": "2026-01-27T10:54:20Z" },
{ "name": "mattermost/mattermost", "version": "@mattermost/client@11.3.0", "date": "2026-01-20T15:26:31Z" },
{ "name": "toeverything/AFFiNE", "version": "v2026.1.27-canary.909", "date": "2026-01-27T10:02:59Z" },
{ "name": "meilisearch/meilisearch", "version": "latest", "date": "2026-01-27T09:33:08Z" },
{ "name": "theonedev/onedev", "version": "v14.1.0",

@@ -44,41 +249,11 @@
"version": "v0.11.0", "date": "2026-01-27T06:23:53Z" },
{ "name": "morpheus65535/bazarr", "version": "v1.5.4", "date": "2026-01-04T22:41:00Z" },
{ "name": "metabase/metabase", "version": "v0.58.3", "date": "2026-01-27T06:04:07Z" },
{ "name": "emqx/emqx", "version": "e5.10.3-rc.4", "date": "2026-01-27T05:13:40Z" },
{ "name": "ollama/ollama", "version": "v0.15.2", "date": "2026-01-27T00:52:57Z" },
{ "name": "steveiliop56/tinyauth", "version": "v4.1.0", "date": "2025-11-23T12:13:34Z" },
{ "name": "jeedom/core", "version": "4.5.2", "date": "2026-01-27T00:27:06Z" },
{ "name": "transmission/transmission", "version": "4.1.0", "date": "2026-01-26T23:58:07Z" },
{ "name": "webmin/webmin", "version": "2.621",

@@ -99,51 +274,26 @@
"version": "v2.1.0", "date": "2025-08-29T12:56:13Z" },
{ "name": "chrisbenincasa/tunarr", "version": "v1.2.0-dev.3", "date": "2026-01-26T21:56:05Z" },
{ "name": "fccview/jotty", "version": "1.19.1", "date": "2026-01-26T21:30:39Z" },
{ "name": "keycloak/keycloak", "version": "26.5.2", "date": "2026-01-23T14:26:58Z" },
{ "name": "Sportarr/Sportarr", "version": "v4.0.983.1057", "date": "2026-01-26T18:54:50Z" },
{ "name": "BerriAI/litellm", "version": "v1.81.3.rc.2", "date": "2026-01-26T18:18:16Z" },
{ "name": "hargata/lubelog", "version": "v1.5.8", "date": "2026-01-26T18:18:03Z" },
{ "name": "connorgallopo/Tracearr", "version": "v1.4.8", "date": "2026-01-26T17:59:06Z" },
{ "name": "itskovacs/trip", "version": "1.36.1", "date": "2026-01-26T17:41:48Z" },
{ "name": "mongodb/mongo", "version": "r8.3.0-alpha2", "date": "2026-01-26T17:21:09Z" },
{ "name": "nzbgetcom/nzbget", "version": "v25.4",

@@ -164,21 +314,11 @@
"version": "v5.0.0", "date": "2026-01-26T15:58:00Z" },
{ "name": "n8n-io/n8n", "version": "n8n@2.4.6", "date": "2026-01-23T15:32:50Z" },
{ "name": "Athou/commafeed", "version": "6.1.1", "date": "2026-01-26T15:14:16Z" },
{ "name": "fuma-nama/fumadocs", "version": "@fumadocs/ui@16.4.9", "date": "2026-01-26T15:02:21Z" },
{ "name": "itsmng/itsm-ng", "version": "v2.1.2",

@@ -219,11 +359,6 @@
"version": "v7.5.3", "date": "2026-01-26T07:28:46Z" },
{ "name": "firefly-iii/firefly-iii", "version": "v6.4.16", "date": "2026-01-17T07:54:15Z" },
{ "name": "invoiceninja/invoiceninja", "version": "v5.12.50",

@@ -334,21 +469,11 @@
"version": "v4.7.1", "date": "2026-01-24T21:03:08Z" },
{ "name": "go-vikunja/vikunja", "version": "v1.0.0-rc0", "date": "2025-08-17T18:47:15Z" },
{ "name": "fosrl/pangolin", "version": "1.15.1", "date": "2026-01-24T20:34:24Z" },
{ "name": "moghtech/komodo", "version": "v1.19.5", "date": "2025-09-27T20:59:46Z" },
{ "name": "benzino77/tasmocompiler", "version": "v13.1.0",

@@ -389,11 +514,6 @@
"version": "4.9.3.0", "date": "2026-01-08T16:08:34Z" },
{ "name": "nickheyer/discopanel", "version": "v1.0.25", "date": "2026-01-24T05:56:58Z" },
{ "name": "TomBursch/kitchenowl", "version": "v0.7.6",

@@ -444,11 +564,6 @@
"version": "2.233.0", "date": "2026-01-23T18:41:45Z" },
{ "name": "openobserve/openobserve", "version": "v0.50.3", "date": "2026-01-23T16:09:10Z" },
{ "name": "zitadel/zitadel", "version": "v4.10.0",

@@ -479,11 +594,6 @@
"version": "v1.20.4", "date": "2026-01-23T01:29:26Z" },
{ "name": "tailscale/tailscale", "version": "v1.94.1", "date": "2026-01-22T19:07:16Z" },
{ "name": "redis/redis", "version": "8.4.0",

@@ -544,11 +654,6 @@
"version": "v0.27.1", "date": "2025-11-11T19:32:29Z" },
{ "name": "cross-seed/cross-seed", "version": "v7.0.0-7", "date": "2026-01-22T06:46:13Z" },
{ "name": "rabbitmq/rabbitmq-server", "version": "v4.2.3",

@@ -559,21 +664,11 @@
"version": "v1.25.4", "date": "2026-01-22T01:43:42Z" },
{ "name": "Infisical/infisical", "version": "v0.156.3", "date": "2026-01-22T00:44:20Z" },
{ "name": "grafana/loki", "version": "helm-loki-6.51.0", "date": "2026-01-21T21:31:29Z" },
{ "name": "influxdata/influxdb", "version": "v2.8.0", "date": "2025-12-12T20:25:00Z" },
{ "name": "LimeSurvey/LimeSurvey", "version": "7.0.0-beta1+260113",

@@ -594,16 +689,6 @@
"version": "v5.0.2", "date": "2026-01-21T07:42:38Z" },
{ "name": "SigNoz/signoz", "version": "v0.108.0", "date": "2026-01-21T06:45:16Z" },
{ "name": "donetick/donetick", "version": "v0.1.64", "date": "2025-10-03T05:18:24Z" },
{ "name": "paperless-ngx/paperless-ngx", "version": "v2.20.5",

@@ -629,11 +714,6 @@
"version": "mysql-cluster-8.0.45", "date": "2026-01-20T18:27:03Z" },
{ "name": "element-hq/synapse", "version": "v1.145.0", "date": "2026-01-13T16:49:51Z" },
{ "name": "chrisvel/tududi", "version": "v0.88.4",

@@ -654,11 +734,6 @@
"version": "v1.2.7", "date": "2026-01-20T11:59:40Z" },
{ "name": "cloudflare/cloudflared", "version": "2026.1.1", "date": "2026-01-20T11:22:06Z" },
{ "name": "HydroshieldMKII/Guardian", "version": "v1.3.4",

@@ -759,11 +834,6 @@
"version": "v7.14.2", "date": "2026-01-18T00:26:09Z" },
{ "name": "outline/outline", "version": "v1.3.0", "date": "2026-01-17T16:28:04Z" },
{ "name": "LogicLabs-OU/OpenArchiver", "version": "v0.4.1",

@@ -804,11 +874,6 @@
"version": "v11.10.1", "date": "2026-01-15T15:58:06Z" },
{ "name": "Dokploy/dokploy", "version": "v0.26.5", "date": "2026-01-15T15:32:35Z" },
{ "name": "slskd/slskd", "version": "0.24.3",

@@ -839,21 +904,11 @@
"version": "v3.6.7", "date": "2026-01-14T14:11:45Z" },
{ "name": "cloudreve/cloudreve", "version": "4.11.1", "date": "2026-01-14T08:40:47Z" },
{ "name": "NginxProxyManager/nginx-proxy-manager", "version": "v2.13.6", "date": "2026-01-14T05:04:11Z" },
{ "name": "Dispatcharr/Dispatcharr", "version": "v0.17.0", "date": "2026-01-13T22:51:49Z" },
{ "name": "C4illin/ConvertX", "version": "v0.17.0",

@@ -894,11 +949,6 @@
"version": "v1.7.10", "date": "2026-01-12T20:50:50Z" },
{ "name": "release-argus/Argus", "version": "0.29.2", "date": "2026-01-12T18:56:44Z" },
{ "name": "TryGhost/Ghost-CLI", "version": "v1.28.4",

@@ -1029,11 +1079,6 @@
"version": "1.0.1", "date": "2026-01-07T13:54:40Z" },
{ "name": "cockpit-project/cockpit", "version": "354", "date": "2026-01-07T11:16:12Z" },
{ "name": "Prowlarr/Prowlarr", "version": "v2.3.0.5236",

@@ -1084,11 +1129,6 @@
"version": "v5.2.0", "date": "2026-01-05T05:56:57Z" },
{ "name": "autobrr/qui", "version": "v1.12.0", "date": "2026-01-04T21:35:59Z" },
{ "name": "actualbudget/actual", "version": "v26.1.0",

@@ -1219,11 +1259,6 @@
"version": "v1.16.3", "date": "2025-12-19T17:45:42Z" },
{ "name": "immich-app/immich", "version": "v2.4.1", "date": "2025-12-19T15:50:12Z" },
{ "name": "jhuckaby/Cronicle", "version": "v0.9.102",

@@ -1254,11 +1289,6 @@
"version": "v4.3.1", "date": "2025-12-13T15:38:37Z" },
{ "name": "autobrr/autobrr", "version": "v1.71.0", "date": "2025-12-13T14:38:09Z" },
{ "name": "grokability/snipe-it", "version": "v8.3.7",

@@ -1344,21 +1374,11 @@
"version": "v1.8.3", "date": "2025-12-04T21:07:00Z" },
{ "name": "glpi-project/glpi", "version": "11.0.4", "date": "2025-12-04T09:26:37Z" },
{ "name": "WordPress/WordPress", "version": "6.9", "date": "2025-12-02T18:36:17Z" },
{ "name": "OctoPrint/OctoPrint", "version": "1.11.5", "date": "2025-12-01T12:58:46Z" },
{ "name": "photoprism/photoprism", "version": "251130-b3068414c",

@@ -1384,11 +1404,6 @@
"version": "v3.4.6", "date": "2025-11-29T02:43:00Z" },
{ "name": "gotson/komga", "version": "1.23.6", "date": "2025-11-28T03:52:50Z" },
{ "name": "phpipam/phpipam", "version": "v1.7.4",

@@ -1489,11 +1504,6 @@
"version": "3.5.1", "date": "2025-11-09T05:09:28Z" },
{ "name": "heiher/hev-socks5-server", "version": "2.11.1", "date": "2025-11-08T14:27:27Z" },
{ "name": "investbrainapp/investbrain", "version": "v1.2.4",

@@ -1589,11 +1599,6 @@
"version": "v2.2.2", "date": "2025-10-06T21:31:07Z" },
{ "name": "thomiceli/opengist", "version": "v1.11.1", "date": "2025-09-30T00:24:16Z" },
{ "name": "Pf2eToolsOrg/Pf2eTools", "version": "v0.10.1",

@@ -1709,11 +1714,6 @@
"version": "v2.1.1", "date": "2025-07-15T22:38:01Z" },
{ "name": "typesense/typesense", "version": "v29.0", "date": "2025-06-30T03:52:33Z" },
{ "name": "arunavo4/gitea-mirror", "version": "v2.18.0",
@@ -3,7 +3,7 @@
# Copyright (c) 2021-2026 community-scripts ORG
# Author: vhsdream
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://codeberg.org/gelbphoenix/autocaliweb
# Source: https://github.com/gelbphoenix/autocaliweb

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
@@ -56,7 +56,7 @@ msg_ok "Installed Calibre"

setup_uv

fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"
fetch_and_deploy_gh_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"

msg_info "Configuring Autocaliweb"
INSTALL_DIR="/opt/autocaliweb"
@@ -111,8 +111,8 @@ msg_info "Initializing databases"
KEPUBIFY_PATH=$(command -v kepubify 2>/dev/null || echo "/usr/bin/kepubify")
EBOOK_CONVERT_PATH=$(command -v ebook-convert 2>/dev/null || echo "/usr/bin/ebook-convert")
CALIBRE_BIN_DIR=$(dirname "$EBOOK_CONVERT_PATH")
curl -fsSL https://codeberg.org/gelbphoenix/autocaliweb/raw/branch/main/library/metadata.db -o "$CALIBRE_LIB_DIR"/metadata.db
curl -fsSL https://codeberg.org/gelbphoenix/autocaliweb/raw/branch/main/library/app.db -o "$CONFIG_DIR"/app.db
curl -fsSL https://github.com/gelbphoenix/autocaliweb/raw/refs/heads/main/library/metadata.db -o "$CALIBRE_LIB_DIR"/metadata.db
curl -fsSL https://github.com/gelbphoenix/autocaliweb/raw/refs/heads/main/library/app.db -o "$CONFIG_DIR"/app.db
sqlite3 "$CONFIG_DIR/app.db" <<EOS
UPDATE settings SET
config_kepubifypath='$KEPUBIFY_PATH',
@@ -23,10 +23,10 @@ $STD apt install -y \
libvips
msg_ok "Installed Dependencies"

PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,opcache,redis,zip,pdo-sqlite,bcmath,pdo,curl,dom,fpm" setup_php
PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,redis,pdo-sqlite" setup_php
setup_composer
NODE_VERSION="22" setup_nodejs
fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
setup_meilisearch
fetch_and_deploy_gh_release "bar-assistant" "karlomikus/bar-assistant" "tarball" "latest" "/opt/bar-assistant"
fetch_and_deploy_gh_release "vue-salt-rim" "karlomikus/vue-salt-rim" "tarball" "latest" "/opt/vue-salt-rim"

@@ -36,49 +36,16 @@ sed -i.bak -E 's/^\s*;?\s*ffi\.enable\s*=.*/ffi.enable=true/' /etc/php/${PHPVER}
$STD systemctl reload php${PHPVER}-fpm
msg_info "configured PHP"

msg_info "Configure MeiliSearch"
curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml
MASTER_KEY=$(openssl rand -base64 12)
sed -i \
-e 's|^env =.*|env = "production"|' \
-e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-e 's|^# no_analytics = true|no_analytics = true|' \
-e 's|^http_addr =.*|http_addr = "127.0.0.1:7700"|' \
/etc/meilisearch.toml
msg_ok "Configured MeiliSearch"

msg_info "Creating MeiliSearch service"
cat <<EOF >/etc/systemd/system/meilisearch.service
[Unit]
Description=Meilisearch
After=network.target

[Service]
ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
Restart=always

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now meilisearch
sleep 5
msg_ok "Created Service MeiliSearch"

msg_info "Installing Bar Assistant"
cd /opt/bar-assistant
cp /opt/bar-assistant/.env.dist /opt/bar-assistant/.env
mkdir -p /opt/bar-assistant/resources/data
curl -fsSL https://github.com/bar-assistant/data/archive/refs/heads/v5.tar.gz | tar -xz --strip-components=1 -C /opt/bar-assistant/resources/data
MeiliSearch_API_KEY=$(curl -s -X GET 'http://127.0.0.1:7700/keys' -H "Authorization: Bearer $MASTER_KEY" | grep -o '"key":"[^"]*"' | head -n 1 | sed 's/"key":"//;s/"//')
MeiliSearch_API_KEY_UID=$(curl -s -X GET 'http://127.0.0.1:7700/keys' -H "Authorization: Bearer $MASTER_KEY" | grep -o '"uid":"[^"]*"' | head -n 1 | sed 's/"uid":"//;s/"//')
sed -i -e "s|^APP_URL=|APP_URL=http://${LOCAL_IP}/bar/|" \
-e "s|^MEILISEARCH_HOST=|MEILISEARCH_HOST=http://127.0.0.1:7700|" \
-e "s|^MEILISEARCH_KEY=|MEILISEARCH_KEY=${MASTER_KEY}|" \
-e "s|^MEILISEARCH_API_KEY=|MEILISEARCH_API_KEY=${MeiliSearch_API_KEY}|" \
-e "s|^MEILISEARCH_API_KEY_UID=|MEILISEARCH_API_KEY_UID=${MeiliSearch_API_KEY_UID}|" \
-e "s|^MEILISEARCH_KEY=|MEILISEARCH_KEY=${MEILISEARCH_MASTER_KEY}|" \
-e "s|^MEILISEARCH_API_KEY=|MEILISEARCH_API_KEY=${MEILISEARCH_API_KEY}|" \
-e "s|^MEILISEARCH_API_KEY_UID=|MEILISEARCH_API_KEY_UID=${MEILISEARCH_API_KEY_UID}|" \
/opt/bar-assistant/.env
$STD composer install --no-interaction
$STD php artisan key:generate
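The hunk above also drops the inline derivation of the default search API key (two `curl` calls against `/keys` filtered with `grep`/`sed`) in favour of `${MEILISEARCH_API_KEY}` and `${MEILISEARCH_API_KEY_UID}`. For orientation, a hedged sketch of the equivalent lookup, assuming the helper obtains the values the same way the removed lines did; `jq` is used here purely for readability and is not taken from the diff:

```bash
# Assumption: the helper queries the Meilisearch key endpoint with the master
# key, much like the removed inline code, and exports the first key/uid pair.
keys_json=$(curl -fsSL -H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" \
  "http://127.0.0.1:7700/keys")
MEILISEARCH_API_KEY=$(jq -r '.results[0].key' <<<"$keys_json")
MEILISEARCH_API_KEY_UID=$(jq -r '.results[0].uid' <<<"$keys_json")
```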
@@ -27,6 +27,7 @@ export CYPRESS_INSTALL_BINARY=0
export NODE_OPTIONS="--max-old-space-size=2048"
cd /opt/grist
$STD yarn install
$STD yarn run install:ee
$STD yarn run build:prod
$STD yarn run install:python
cat <<EOF >/opt/grist/.env
@@ -13,6 +13,7 @@ setting_up_container
network_check
update_os

msg_custom "ℹ️" "${GN}" "If NVIDIA GPU passthrough is detected, you'll be asked whether to install drivers in the container"
setup_hwaccel

msg_info "Installing Jellyfin"
@@ -25,20 +25,7 @@ msg_ok "Installed Dependencies"

fetch_and_deploy_gh_release "monolith" "Y2Z/monolith" "singlefile" "latest" "/usr/bin" "monolith-gnu-linux-x86_64"
fetch_and_deploy_gh_release "yt-dlp" "yt-dlp/yt-dlp-nightly-builds" "singlefile" "latest" "/usr/bin" "yt-dlp_linux"
fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"

msg_info "Configuring Meilisearch"
curl -fsSL "https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml" -o "/etc/meilisearch.toml"
MASTER_KEY=$(openssl rand -base64 12)
sed -i \
-e 's|^env =.*|env = "production"|' \
-e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-e 's|^# no_analytics = true|no_analytics = true|' \
/etc/meilisearch.toml
msg_ok "Configured Meilisearch"
setup_meilisearch

fetch_and_deploy_gh_release "karakeep" "karakeep-app/karakeep" "tarball"
cd /opt/karakeep
@@ -70,7 +57,7 @@ NEXTAUTH_SECRET="$karakeep_SECRET"
NEXTAUTH_URL="http://localhost:3000"
DATA_DIR=${DATA_DIR}
MEILI_ADDR="http://127.0.0.1:7700"
MEILI_MASTER_KEY="$MASTER_KEY"
MEILI_MASTER_KEY="$MEILISEARCH_MASTER_KEY"
BROWSER_WEB_URL="http://127.0.0.1:9222"
DB_WAL_MODE=true

@@ -109,19 +96,6 @@ $STD pnpm migrate
msg_ok "Database Migration Completed"

msg_info "Creating Services"
cat <<EOF >/etc/systemd/system/meilisearch.service
[Unit]
Description=Meilisearch
After=network.target

[Service]
ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
Restart=always

[Install]
WantedBy=multi-user.target
EOF

cat <<EOF >/etc/systemd/system/karakeep-web.service
[Unit]
Description=karakeep Web
@@ -169,7 +143,7 @@ TimeoutStopSec=5
WantedBy=multi-user.target
EOF

systemctl enable -q --now meilisearch karakeep-browser karakeep-workers karakeep-web
systemctl enable -q --now karakeep-browser karakeep-workers karakeep-web
msg_ok "Created Services"

motd_ssh
@@ -13,21 +13,7 @@ setting_up_container
network_check
update_os

fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"

msg_info "Configuring ${APPLICATION}"
curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml
MASTER_KEY=$(openssl rand -base64 12)
sed -i \
-e 's|^env =.*|env = "production"|' \
-e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-e 's|^# no_analytics = true|no_analytics = true|' \
-e 's|^http_addr =.*|http_addr = "0.0.0.0:7700"|' \
/etc/meilisearch.toml
msg_ok "Configured ${APPLICATION}"
MEILISEARCH_BIND="0.0.0.0:7700" setup_meilisearch

read -r -p "${TAB3}Do you want add meilisearch-ui? [y/n]: " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
@@ -41,27 +27,11 @@ if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
cat <<EOF >/opt/meilisearch-ui/.env.local
VITE_SINGLETON_MODE=true
VITE_SINGLETON_HOST=http://${LOCAL_IP}:7700
VITE_SINGLETON_API_KEY=${MASTER_KEY}
VITE_SINGLETON_API_KEY=${MEILISEARCH_MASTER_KEY}
EOF
msg_ok "Configured ${APPLICATION}-ui"
fi

msg_info "Creating service"
cat <<EOF >/etc/systemd/system/meilisearch.service
[Unit]
Description=Meilisearch
After=network.target

[Service]
ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
Restart=always

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now meilisearch

if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Creating Meilisearch-UI service"
cat <<EOF >/etc/systemd/system/meilisearch-ui.service
[Unit]
Description=Meilisearch UI Service
@@ -82,8 +52,8 @@ SyslogIdentifier=meilisearch-ui
WantedBy=multi-user.target
EOF
systemctl enable -q --now meilisearch-ui
msg_ok "Created Meilisearch-UI service"
fi
msg_ok "Service created"

motd_ssh
customize
@@ -59,8 +59,8 @@ read -r -p "${TAB3}Enter your ACME Email: " ACME_EMAIL_INPUT

yq -i "
.services.npmplus.environment |=
(map(select(. != \"TZ=*\" and . != \"ACME_EMAIL=*\")) +
[\"TZ=$TZ_INPUT\", \"ACME_EMAIL=$ACME_EMAIL_INPUT\"])
(map(select(. != \"TZ=*\" and . != \"ACME_EMAIL=*\" and . != \"INITIAL_ADMIN_EMAIL=*\" and . != \"INITIAL_ADMIN_PASSWORD=*\")) +
[\"TZ=$TZ_INPUT\", \"ACME_EMAIL=$ACME_EMAIL_INPUT\", \"INITIAL_ADMIN_EMAIL=admin@local.com\", \"INITIAL_ADMIN_PASSWORD=helper-scripts.com\"])
" /opt/compose.yaml

msg_info "Building and Starting NPMplus (Patience)"
@@ -86,39 +86,3 @@ msg_ok "Builded and started NPMplus"

motd_ssh
customize

msg_info "Retrieving Default Login (Patience)"
PASSWORD_FOUND=0

for i in {1..60}; do
PASSWORD_LINE=$(
{ awk '/Creating a new user:/{print; exit}' < <(docker logs "$CONTAINER_ID" 2>&1); } || true
)

if [[ -n "${PASSWORD_LINE:-}" ]]; then
PASSWORD="${PASSWORD_LINE#*password: }"
printf 'username: admin@example.org\npassword: %s\n' "$PASSWORD" >/opt/.npm_pwd
msg_ok "Saved default login to /opt/.npm_pwd"
PASSWORD_FOUND=1
break
fi
sleep 2
done
if [[ $PASSWORD_FOUND -eq 0 ]]; then
PASSWORD_LINE=$(
timeout 30s bash -c '
docker logs -f --since=0s --tail=0 "$1" 2>&1 | awk "/Creating a new user:/{print; exit}"
' _ "$CONTAINER_ID" || true
)
if [[ -n "${PASSWORD_LINE:-}" ]]; then
PASSWORD="${PASSWORD_LINE#*password: }"
printf 'username: admin@example.org\npassword: %s\n' "$PASSWORD" >/opt/.npm_pwd
msg_ok "Saved default login to /opt/.npm_pwd (live)"
PASSWORD_FOUND=1
fi
fi

if [[ $PASSWORD_FOUND -eq 0 ]]; then
msg_error "Could not retrieve default login after 120s."
echo -e "\nYou can manually check the container logs with:\n docker logs $CONTAINER_ID | grep 'Creating a new user:'\n"
fi
@@ -21,40 +21,11 @@ NODE_VERSION="22" NODE_MODULE="pnpm" setup_nodejs
PG_VERSION="17" setup_postgresql
PG_DB_NAME="openarchiver_db" PG_DB_USER="openarchiver" setup_postgresql_db

fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
setup_meilisearch
fetch_and_deploy_gh_release "openarchiver" "LogicLabs-OU/OpenArchiver" "tarball"
JWT_KEY="$(openssl rand -hex 32)"
SECRET_KEY="$(openssl rand -hex 32)"

msg_info "Configuring MeiliSearch"
curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml
MASTER_KEY=$(openssl rand -base64 12)
sed -i \
-e 's|^env =.*|env = "production"|' \
-e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-e 's|^# no_analytics = true|no_analytics = true|' \
-e 's|^http_addr =.*|http_addr = "127.0.0.1:7700"|' \
/etc/meilisearch.toml

cat <<EOF >/etc/systemd/system/meilisearch.service
[Unit]
Description=Meilisearch
After=network.target

[Service]
ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
Restart=always

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now meilisearch
sleep 5
msg_ok "Configured MeiliSearch"

msg_info "Setting up Open Archiver"
mkdir -p /opt/openarchiver-data
cd /opt/openarchiver
@@ -65,7 +36,7 @@ sed -i "s|^POSTGRES_USER=.*|POSTGRES_USER=$PG_DB_USER|g" /opt/openarchiver/.env
sed -i "s|^POSTGRES_PASSWORD=.*|POSTGRES_PASSWORD=$PG_DB_PASS|g" /opt/openarchiver/.env
sed -i "s|^DATABASE_URL=.*|DATABASE_URL=\"postgresql://$PG_DB_USER:$PG_DB_PASS@localhost:5432/$PG_DB_NAME\"|g" /opt/openarchiver/.env
sed -i "s|^MEILI_HOST=.*|MEILI_HOST=http://localhost:7700|g" /opt/openarchiver/.env
sed -i "s|^MEILI_MASTER_KEY=.*|MEILI_MASTER_KEY=$MASTER_KEY|g" /opt/openarchiver/.env
sed -i "s|^MEILI_MASTER_KEY=.*|MEILI_MASTER_KEY=$MEILISEARCH_MASTER_KEY|g" /opt/openarchiver/.env
sed -i "s|^REDIS_HOST=.*|REDIS_HOST=localhost|g" /opt/openarchiver/.env
sed -i "s|^REDIS_PASSWORD=.*|REDIS_PASSWORD=|g" /opt/openarchiver/.env
sed -i "s|^STORAGE_LOCAL_ROOT_PATH=.*|STORAGE_LOCAL_ROOT_PATH=/opt/openarchiver-data|g" /opt/openarchiver/.env
@@ -19,7 +19,7 @@ $STD apt install -y \
openssl
msg_ok "Installed Dependencies"

PHP_VERSION="8.2" PHP_MODULE="common,fpm" setup_php
PHP_VERSION="8.2" PHP_FPM="YES" setup_php
create_self_signed_cert
fetch_and_deploy_gh_release "privatebin" "PrivateBin/PrivateBin" "tarball"
@@ -195,9 +195,11 @@ get_current_ip() {
#
# - Updates /etc/motd with current container IP
# - Removes old IP entries to avoid duplicates
# - Regenerates /etc/profile.d/00_lxc-details.sh with dynamic OS/IP info
# ------------------------------------------------------------------------------
update_motd_ip() {
MOTD_FILE="/etc/motd"
PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"

if [ -f "$MOTD_FILE" ]; then
# Remove existing IP Address lines to prevent duplication
@@ -207,6 +209,26 @@ update_motd_ip() {
# Add the new IP address
echo -e "${TAB}${NETWORK}${YW} IP Address: ${GN}${IP}${CL}" >>"$MOTD_FILE"
fi

# Update dynamic LXC details profile if values changed (e.g., after OS upgrade)
# Only update if file exists and is from community-scripts
if [ -f "$PROFILE_FILE" ] && grep -q "community-scripts" "$PROFILE_FILE" 2>/dev/null; then
# Get current values
local current_os="$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"') - Version: $(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')"
local current_hostname="$(hostname)"
local current_ip="$(hostname -I | awk '{print $1}')"

# Update only if values actually changed
if ! grep -q "OS:.*$current_os" "$PROFILE_FILE" 2>/dev/null; then
sed -i "s|OS:.*|OS: \${GN}$current_os\${CL}\\\"|" "$PROFILE_FILE"
fi
if ! grep -q "Hostname:.*$current_hostname" "$PROFILE_FILE" 2>/dev/null; then
sed -i "s|Hostname:.*|Hostname: \${GN}$current_hostname\${CL}\\\"|" "$PROFILE_FILE"
fi
if ! grep -q "IP Address:.*$current_ip" "$PROFILE_FILE" 2>/dev/null; then
sed -i "s|IP Address:.*|IP Address: \${GN}$current_ip\${CL}\\\"|" "$PROFILE_FILE"
fi
fi
}

# ------------------------------------------------------------------------------
@@ -3310,6 +3332,7 @@ start() {
set_std_mode
ensure_profile_loaded
update_script
update_motd_ip
cleanup_lxc
else
CHOICE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "${APP} LXC Update/Setting" --menu \
@@ -3336,6 +3359,7 @@ start() {
esac
ensure_profile_loaded
update_script
update_motd_ip
cleanup_lxc
fi
}

@@ -79,6 +79,13 @@ EOF
# ------------------------------------------------------------------------------
setting_up_container() {
msg_info "Setting up Container OS"

# Fix Debian 13 LXC template bug where / is owned by nobody
# Only attempt in privileged containers (unprivileged cannot chown /)
if [[ "$(stat -c '%U' /)" != "root" ]]; then
chown root:root / 2>/dev/null || true
fi

for ((i = RETRY_NUM; i > 0; i--)); do
if [ "$(hostname -I)" != "" ]; then
break
misc/tools.func (816 changed lines)
@@ -574,7 +574,8 @@ EOF
|
||||
msg_error "Failed to download PHP keyring"
|
||||
return 1
|
||||
}
|
||||
dpkg -i /tmp/debsuryorg-archive-keyring.deb >/dev/null 2>&1 || {
|
||||
# Don't use /dev/null redirection for dpkg as it may use background processes
|
||||
dpkg -i /tmp/debsuryorg-archive-keyring.deb >>"$(get_active_logfile)" 2>&1 || {
|
||||
msg_error "Failed to install PHP keyring"
|
||||
rm -f /tmp/debsuryorg-archive-keyring.deb
|
||||
return 1
|
||||
@@ -820,54 +821,6 @@ github_api_call() {
|
||||
return 1
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Codeberg API call with retry logic
|
||||
# ------------------------------------------------------------------------------
|
||||
codeberg_api_call() {
|
||||
local url="$1"
|
||||
local output_file="${2:-/dev/stdout}"
|
||||
local max_retries=3
|
||||
local retry_delay=2
|
||||
|
||||
for attempt in $(seq 1 $max_retries); do
|
||||
local http_code
|
||||
http_code=$(curl -fsSL -w "%{http_code}" -o "$output_file" \
|
||||
-H "Accept: application/json" \
|
||||
"$url" 2>/dev/null || echo "000")
|
||||
|
||||
case "$http_code" in
|
||||
200)
|
||||
return 0
|
||||
;;
|
||||
403)
|
||||
# Rate limit - retry
|
||||
if [[ $attempt -lt $max_retries ]]; then
|
||||
msg_warn "Codeberg API rate limit, waiting ${retry_delay}s... (attempt $attempt/$max_retries)"
|
||||
sleep "$retry_delay"
|
||||
retry_delay=$((retry_delay * 2))
|
||||
continue
|
||||
fi
|
||||
msg_error "Codeberg API rate limit exceeded."
|
||||
return 1
|
||||
;;
|
||||
404)
|
||||
msg_error "Codeberg API endpoint not found: $url"
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
if [[ $attempt -lt $max_retries ]]; then
|
||||
sleep "$retry_delay"
|
||||
continue
|
||||
fi
|
||||
msg_error "Codeberg API call failed with HTTP $http_code"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
should_upgrade() {
|
||||
local current="$1"
|
||||
local target="$2"
|
||||
@@ -1432,37 +1385,6 @@ get_latest_github_release() {
|
||||
echo "$version"
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Get latest Codeberg release version
|
||||
# ------------------------------------------------------------------------------
|
||||
get_latest_codeberg_release() {
|
||||
local repo="$1"
|
||||
local strip_v="${2:-true}"
|
||||
local temp_file=$(mktemp)
|
||||
|
||||
# Codeberg API: get all releases and pick the first non-draft/non-prerelease
|
||||
if ! codeberg_api_call "https://codeberg.org/api/v1/repos/${repo}/releases" "$temp_file"; then
|
||||
rm -f "$temp_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local version
|
||||
# Codeberg uses same JSON structure but releases endpoint returns array
|
||||
version=$(jq -r '[.[] | select(.draft==false and .prerelease==false)][0].tag_name // empty' "$temp_file")
|
||||
|
||||
if [[ "$strip_v" == "true" ]]; then
|
||||
version="${version#v}"
|
||||
fi
|
||||
|
||||
rm -f "$temp_file"
|
||||
|
||||
if [[ -z "$version" ]]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "$version"
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Debug logging (only if DEBUG=1)
|
||||
# ------------------------------------------------------------------------------
|
||||
@@ -1637,119 +1559,6 @@ check_for_gh_release() {
|
||||
return 1
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Checks for new Codeberg release (latest tag).
|
||||
#
|
||||
# Description:
|
||||
# - Queries the Codeberg API for the latest release tag
|
||||
# - Compares it to a local cached version (~/.<app>)
|
||||
# - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
|
||||
#
|
||||
# Usage:
|
||||
# if check_for_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" [optional] "v0.11.3"; then
|
||||
# # trigger update...
|
||||
# fi
|
||||
# exit 0
|
||||
# } (end of update_script not from the function)
|
||||
#
|
||||
# Notes:
|
||||
# - Requires `jq` (auto-installed if missing)
|
||||
# - Does not modify anything, only checks version state
|
||||
# - Does not support pre-releases
|
||||
# ------------------------------------------------------------------------------
|
||||
check_for_codeberg_release() {
|
||||
local app="$1"
|
||||
local source="$2"
|
||||
local pinned_version_in="${3:-}" # optional
|
||||
local app_lc="${app,,}"
|
||||
local current_file="$HOME/.${app_lc}"
|
||||
|
||||
msg_info "Checking for update: ${app}"
|
||||
|
||||
# DNS check
|
||||
if ! getent hosts codeberg.org >/dev/null 2>&1; then
|
||||
msg_error "Network error: cannot resolve codeberg.org"
|
||||
return 1
|
||||
fi
|
||||
|
||||
ensure_dependencies jq
|
||||
|
||||
# Fetch releases from Codeberg API
|
||||
local releases_json=""
|
||||
releases_json=$(curl -fsSL --max-time 20 \
|
||||
-H 'Accept: application/json' \
|
||||
"https://codeberg.org/api/v1/repos/${source}/releases" 2>/dev/null) || {
|
||||
msg_error "Unable to fetch releases for ${app}"
|
||||
return 1
|
||||
}
|
||||
|
||||
mapfile -t raw_tags < <(jq -r '.[] | select(.draft==false and .prerelease==false) | .tag_name' <<<"$releases_json")
|
||||
if ((${#raw_tags[@]} == 0)); then
|
||||
msg_error "No stable releases found for ${app}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local clean_tags=()
|
||||
for t in "${raw_tags[@]}"; do
|
||||
clean_tags+=("${t#v}")
|
||||
done
|
||||
|
||||
local latest_raw="${raw_tags[0]}"
|
||||
local latest_clean="${clean_tags[0]}"
|
||||
|
||||
# current installed (stored without v)
|
||||
local current=""
|
||||
if [[ -f "$current_file" ]]; then
|
||||
current="$(<"$current_file")"
|
||||
else
|
||||
# Migration: search for any /opt/*_version.txt
|
||||
local legacy_files
|
||||
mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null)
|
||||
if ((${#legacy_files[@]} == 1)); then
|
||||
current="$(<"${legacy_files[0]}")"
|
||||
echo "${current#v}" >"$current_file"
|
||||
rm -f "${legacy_files[0]}"
|
||||
fi
|
||||
fi
|
||||
current="${current#v}"
|
||||
|
||||
# Pinned version handling
|
||||
if [[ -n "$pinned_version_in" ]]; then
|
||||
local pin_clean="${pinned_version_in#v}"
|
||||
local match_raw=""
|
||||
for i in "${!clean_tags[@]}"; do
|
||||
if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then
|
||||
match_raw="${raw_tags[$i]}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -z "$match_raw" ]]; then
|
||||
msg_error "Pinned version ${pinned_version_in} not found upstream"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ "$current" != "$pin_clean" ]]; then
|
||||
CHECK_UPDATE_RELEASE="$match_raw"
|
||||
msg_ok "Update available: ${app} ${current:-not installed} → ${pin_clean}"
|
||||
return 0
|
||||
fi
|
||||
|
||||
msg_ok "No update available: ${app} is already on pinned version (${current})"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# No pinning → use latest
|
||||
if [[ -z "$current" || "$current" != "$latest_clean" ]]; then
|
||||
CHECK_UPDATE_RELEASE="$latest_raw"
|
||||
msg_ok "Update available: ${app} ${current:-not installed} → ${latest_clean}"
|
||||
return 0
|
||||
fi
|
||||
|
||||
msg_ok "No update available: ${app} (${latest_clean})"
|
||||
return 1
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Creates and installs self-signed certificates.
|
||||
#
|
||||
@@ -1839,440 +1648,6 @@ function ensure_usr_local_bin_persist() {
|
||||
fi
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Downloads and deploys latest Codeberg release (source, binary, tarball, asset).
|
||||
#
|
||||
# Description:
|
||||
# - Fetches latest release metadata from Codeberg API
|
||||
# - Supports the following modes:
|
||||
# - tarball: Source code tarball (default if omitted)
|
||||
# - source: Alias for tarball (same behavior)
|
||||
# - binary: .deb package install (arch-dependent)
|
||||
# - prebuild: Prebuilt .tar.gz archive (e.g. Go binaries)
|
||||
# - singlefile: Standalone binary (no archive, direct chmod +x install)
|
||||
# - tag: Direct tag download (bypasses Release API)
|
||||
# - Handles download, extraction/installation and version tracking in ~/.<app>
|
||||
#
|
||||
# Parameters:
|
||||
# $1 APP - Application name (used for install path and version file)
|
||||
# $2 REPO - Codeberg repository in form user/repo
|
||||
# $3 MODE - Release type:
|
||||
# tarball → source tarball (.tar.gz)
|
||||
# binary → .deb file (auto-arch matched)
|
||||
# prebuild → prebuilt archive (e.g. tar.gz)
|
||||
# singlefile→ standalone binary (chmod +x)
|
||||
# tag → direct tag (bypasses Release API)
|
||||
# $4 VERSION - Optional release tag (default: latest)
|
||||
# $5 TARGET_DIR - Optional install path (default: /opt/<app>)
|
||||
# $6 ASSET_FILENAME - Required for:
|
||||
# - prebuild → archive filename or pattern
|
||||
# - singlefile→ binary filename or pattern
|
||||
#
|
||||
# Examples:
|
||||
# # 1. Minimal: Fetch and deploy source tarball
|
||||
# fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb"
|
||||
#
|
||||
# # 2. Binary install via .deb asset (architecture auto-detected)
|
||||
# fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "binary"
|
||||
#
|
||||
# # 3. Prebuilt archive (.tar.gz) with asset filename match
|
||||
# fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "prebuild" "latest" "/opt/myapp" "myapp_Linux_x86_64.tar.gz"
|
||||
#
|
||||
# # 4. Single binary (chmod +x)
|
||||
# fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "singlefile" "v1.0.0" "/opt/myapp" "myapp-linux-amd64"
|
||||
#
|
||||
# # 5. Explicit tag version
|
||||
# fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" "tag" "v0.11.3" "/opt/autocaliweb"
|
||||
# ------------------------------------------------------------------------------
|
||||

function fetch_and_deploy_codeberg_release() {
local app="$1"
local repo="$2"
local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile | tag
local version="${4:-latest}"
local target="${5:-/opt/$app}"
local asset_pattern="${6:-}"

local app_lc=$(echo "${app,,}" | tr -d ' ')
local version_file="$HOME/.${app_lc}"

local api_timeout="--connect-timeout 10 --max-time 60"
local download_timeout="--connect-timeout 15 --max-time 900"

local current_version=""
[[ -f "$version_file" ]] && current_version=$(<"$version_file")

ensure_dependencies jq

### Tag Mode (bypass Release API) ###
if [[ "$mode" == "tag" ]]; then
if [[ "$version" == "latest" ]]; then
msg_error "Mode 'tag' requires explicit version (not 'latest')"
return 1
fi

local tag_name="$version"
[[ "$tag_name" =~ ^v ]] && version="${tag_name:1}" || version="$tag_name"

if [[ "$current_version" == "$version" ]]; then
$STD msg_ok "$app is already up-to-date (v$version)"
return 0
fi

# DNS check
if ! getent hosts "codeberg.org" &>/dev/null; then
msg_error "DNS resolution failed for codeberg.org – check /etc/resolv.conf or networking"
return 1
fi

local tmpdir
tmpdir=$(mktemp -d) || return 1

msg_info "Fetching Codeberg tag: $app ($tag_name)"

local safe_version="${version//@/_}"
safe_version="${safe_version//\//_}"
local filename="${app_lc}-${safe_version}.tar.gz"
local download_success=false

# Codeberg archive URL format: https://codeberg.org/{owner}/{repo}/archive/{tag}.tar.gz
local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz"
if curl $download_timeout -fsSL -o "$tmpdir/$filename" "$archive_url"; then
download_success=true
fi

if [[ "$download_success" != "true" ]]; then
msg_error "Download failed for $app ($tag_name)"
rm -rf "$tmpdir"
return 1
fi

mkdir -p "$target"
if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
rm -rf "${target:?}/"*
fi

tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
msg_error "Failed to extract tarball"
rm -rf "$tmpdir"
return 1
}

local unpack_dir
unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

shopt -s dotglob nullglob
cp -r "$unpack_dir"/* "$target/"
shopt -u dotglob nullglob

echo "$version" >"$version_file"
msg_ok "Deployed: $app ($version)"
rm -rf "$tmpdir"
return 0
fi

# Codeberg API: https://codeberg.org/api/v1/repos/{owner}/{repo}/releases
local api_url="https://codeberg.org/api/v1/repos/$repo/releases"
if [[ "$version" != "latest" ]]; then
# Get release by tag: /repos/{owner}/{repo}/releases/tags/{tag}
api_url="https://codeberg.org/api/v1/repos/$repo/releases/tags/$version"
fi

# dns pre check
if ! getent hosts "codeberg.org" &>/dev/null; then
msg_error "DNS resolution failed for codeberg.org – check /etc/resolv.conf or networking"
return 1
fi

local max_retries=3 retry_delay=2 attempt=1 success=false resp http_code

while ((attempt <= max_retries)); do
resp=$(curl $api_timeout -fsSL -w "%{http_code}" -o /tmp/codeberg_rel.json "$api_url") && success=true && break
sleep "$retry_delay"
((attempt++))
done

if ! $success; then
msg_error "Failed to fetch release metadata from $api_url after $max_retries attempts"
return 1
fi

http_code="${resp:(-3)}"
[[ "$http_code" != "200" ]] && {
msg_error "Codeberg API returned HTTP $http_code"
return 1
}

local json tag_name
json=$(</tmp/codeberg_rel.json)

# For "latest", the API returns an array - take the first (most recent) release
if [[ "$version" == "latest" ]]; then
json=$(echo "$json" | jq '.[0]')
fi

tag_name=$(echo "$json" | jq -r '.tag_name // .name // empty')
[[ "$tag_name" =~ ^v ]] && version="${tag_name:1}" || version="$tag_name"

if [[ "$current_version" == "$version" ]]; then
$STD msg_ok "$app is already up-to-date (v$version)"
return 0
fi

local tmpdir
tmpdir=$(mktemp -d) || return 1
local filename="" url=""

msg_info "Fetching Codeberg release: $app ($version)"

### Tarball Mode ###
if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
local safe_version="${version//@/_}"
safe_version="${safe_version//\//_}"
filename="${app_lc}-${safe_version}.tar.gz"
local download_success=false

# Codeberg archive URL format
local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz"
if curl $download_timeout -fsSL -o "$tmpdir/$filename" "$archive_url"; then
download_success=true
fi

if [[ "$download_success" != "true" ]]; then
msg_error "Download failed for $app ($tag_name)"
rm -rf "$tmpdir"
return 1
fi

mkdir -p "$target"
if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
rm -rf "${target:?}/"*
fi

tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
msg_error "Failed to extract tarball"
rm -rf "$tmpdir"
return 1
}
local unpack_dir
unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

shopt -s dotglob nullglob
cp -r "$unpack_dir"/* "$target/"
shopt -u dotglob nullglob

### Binary Mode ###
elif [[ "$mode" == "binary" ]]; then
local arch
arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
[[ "$arch" == "x86_64" ]] && arch="amd64"
[[ "$arch" == "aarch64" ]] && arch="arm64"

local assets url_match=""
# Codeberg assets are in .assets[].browser_download_url
assets=$(echo "$json" | jq -r '.assets[].browser_download_url')

# If explicit filename pattern is provided, match that first
if [[ -n "$asset_pattern" ]]; then
for u in $assets; do
case "${u##*/}" in
$asset_pattern)
url_match="$u"
break
;;
esac
done
fi

# Fall back to architecture heuristic
if [[ -z "$url_match" ]]; then
for u in $assets; do
if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
url_match="$u"
break
fi
done
fi

# Fallback: any .deb file
if [[ -z "$url_match" ]]; then
for u in $assets; do
[[ "$u" =~ \.deb$ ]] && url_match="$u" && break
done
fi

if [[ -z "$url_match" ]]; then
msg_error "No suitable .deb asset found for $app"
rm -rf "$tmpdir"
return 1
fi

filename="${url_match##*/}"
curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url_match" || {
msg_error "Download failed: $url_match"
rm -rf "$tmpdir"
return 1
}

chmod 644 "$tmpdir/$filename"
$STD apt install -y "$tmpdir/$filename" || {
$STD dpkg -i "$tmpdir/$filename" || {
msg_error "Both apt and dpkg installation failed"
rm -rf "$tmpdir"
return 1
}
}

### Prebuild Mode ###
elif [[ "$mode" == "prebuild" ]]; then
local pattern="${6%\"}"
pattern="${pattern#\"}"
[[ -z "$pattern" ]] && {
msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
rm -rf "$tmpdir"
return 1
}

local asset_url=""
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
filename_candidate="${u##*/}"
case "$filename_candidate" in
$pattern)
asset_url="$u"
break
;;
esac
done

[[ -z "$asset_url" ]] && {
msg_error "No asset matching '$pattern' found"
rm -rf "$tmpdir"
return 1
}

filename="${asset_url##*/}"
curl $download_timeout -fsSL -o "$tmpdir/$filename" "$asset_url" || {
msg_error "Download failed: $asset_url"
rm -rf "$tmpdir"
return 1
}

local unpack_tmp
unpack_tmp=$(mktemp -d)
mkdir -p "$target"
if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
rm -rf "${target:?}/"*
fi

if [[ "$filename" == *.zip ]]; then
ensure_dependencies unzip
unzip -q "$tmpdir/$filename" -d "$unpack_tmp" || {
msg_error "Failed to extract ZIP archive"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
}
elif [[ "$filename" == *.tar.* || "$filename" == *.tgz ]]; then
tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
msg_error "Failed to extract TAR archive"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
}
else
msg_error "Unsupported archive format: $filename"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
fi

local top_dirs
top_dirs=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d | wc -l)
local top_entries inner_dir
top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1)
if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then
inner_dir="$top_entries"
shopt -s dotglob nullglob
if compgen -G "$inner_dir/*" >/dev/null; then
cp -r "$inner_dir"/* "$target/" || {
msg_error "Failed to copy contents from $inner_dir to $target"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
}
else
msg_error "Inner directory is empty: $inner_dir"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
fi
shopt -u dotglob nullglob
else
shopt -s dotglob nullglob
if compgen -G "$unpack_tmp/*" >/dev/null; then
cp -r "$unpack_tmp"/* "$target/" || {
msg_error "Failed to copy contents to $target"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
}
else
msg_error "Unpacked archive is empty"
rm -rf "$tmpdir" "$unpack_tmp"
return 1
fi
shopt -u dotglob nullglob
fi

### Singlefile Mode ###
elif [[ "$mode" == "singlefile" ]]; then
local pattern="${6%\"}"
pattern="${pattern#\"}"
[[ -z "$pattern" ]] && {
msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
rm -rf "$tmpdir"
return 1
}

local asset_url=""
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
filename_candidate="${u##*/}"
case "$filename_candidate" in
$pattern)
asset_url="$u"
break
;;
esac
done

[[ -z "$asset_url" ]] && {
msg_error "No asset matching '$pattern' found"
rm -rf "$tmpdir"
return 1
}

filename="${asset_url##*/}"
mkdir -p "$target"

local use_filename="${USE_ORIGINAL_FILENAME:-false}"
local target_file="$app"
[[ "$use_filename" == "true" ]] && target_file="$filename"

curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
msg_error "Download failed: $asset_url"
rm -rf "$tmpdir"
return 1
}

if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
chmod +x "$target/$target_file"
fi

else
msg_error "Unknown mode: $mode"
rm -rf "$tmpdir"
return 1
fi

echo "$version" >"$version_file"
msg_ok "Deployed: $app ($version)"
rm -rf "$tmpdir"
}
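
# Note: ASSET_FILENAME is matched against asset names via a shell case pattern,
# so a glob works as well as a literal filename. Illustrative sketch with
# placeholder repo/asset names:
#
#   fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "prebuild" "latest" "/opt/myapp" "myapp_*_linux_amd64.tar.gz"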

# ------------------------------------------------------------------------------
# Downloads and deploys latest GitHub release (source, binary, tarball, asset).
#
@@ -2470,8 +1845,9 @@ function fetch_and_deploy_gh_release() {
}

chmod 644 "$tmpdir/$filename"
$STD apt install -y "$tmpdir/$filename" || {
$STD dpkg -i "$tmpdir/$filename" || {
# SYSTEMD_OFFLINE=1 prevents systemd-tmpfiles failures in unprivileged LXC (Debian 13+/systemd 257+)
SYSTEMD_OFFLINE=1 $STD apt install -y "$tmpdir/$filename" || {
SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
msg_error "Both apt and dpkg installation failed"
rm -rf "$tmpdir"
return 1
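
# Minimal sketch of the pattern in the hunk above (the helper name below is
# illustrative; the real code lives inside fetch_and_deploy_gh_release and
# wraps the calls in $STD with msg_error handling): setting SYSTEMD_OFFLINE=1
# for the .deb install keeps systemd-tmpfiles from aborting the package's
# postinst in unprivileged LXC containers (Debian 13+/systemd 257+), with
# dpkg -i as the fallback when apt fails.
#
#   install_deb_lxc_safe() {
#     local deb="$1"
#     SYSTEMD_OFFLINE=1 apt install -y "$deb" || SYSTEMD_OFFLINE=1 dpkg -i "$deb"
#   }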
@@ -3257,6 +2633,7 @@ function setup_hwaccel() {
# GPU Selection - Let user choose which GPU(s) to configure
# ═══════════════════════════════════════════════════════════════════════════
local -a SELECTED_INDICES=()
local install_nvidia_drivers="yes"

if [[ $gpu_count -eq 1 ]]; then
# Single GPU - auto-select
@@ -3318,6 +2695,30 @@ function setup_hwaccel() {
fi
fi

# Ask whether to install NVIDIA drivers in the container
local nvidia_selected="no"
for idx in "${SELECTED_INDICES[@]}"; do
if [[ "${GPU_TYPES[$idx]}" == "NVIDIA" ]]; then
nvidia_selected="yes"
break
fi
done

if [[ "$nvidia_selected" == "yes" ]]; then
if [[ -n "${INSTALL_NVIDIA_DRIVERS:-}" ]]; then
install_nvidia_drivers="${INSTALL_NVIDIA_DRIVERS}"
else
echo ""
msg_custom "🎮" "${GN}" "NVIDIA GPU passthrough detected"
local nvidia_reply=""
read -r -t 60 -p "${TAB3}⚙️ Install NVIDIA driver libraries in the container? [Y/n] (auto-yes in 60s): " nvidia_reply || nvidia_reply=""
case "${nvidia_reply,,}" in
n | no) install_nvidia_drivers="no" ;;
*) install_nvidia_drivers="yes" ;;
esac
fi
fi

# ═══════════════════════════════════════════════════════════════════════════
# OS Detection
# ═══════════════════════════════════════════════════════════════════════════
@@ -3378,7 +2779,11 @@ function setup_hwaccel() {
# NVIDIA GPUs
# ─────────────────────────────────────────────────────────────────────────
NVIDIA)
_setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
if [[ "$install_nvidia_drivers" == "yes" ]]; then
_setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
else
msg_warn "Skipping NVIDIA driver installation (user opted to install manually)"
fi
;;
esac
done
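
# Note: the prompt above can be pre-seeded for unattended runs by setting
# INSTALL_NVIDIA_DRIVERS before the hardware-acceleration setup runs
# (values other than "yes" skip the driver install). Illustrative calls,
# assuming setup_hwaccel is invoked directly:
#
#   INSTALL_NVIDIA_DRIVERS=no setup_hwaccel    # keep passthrough, skip driver libraries
#   INSTALL_NVIDIA_DRIVERS=yes setup_hwaccel   # install driver libraries without prompting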
@@ -5125,7 +4530,8 @@ EOF
# Ubuntu: Use ondrej/php PPA
msg_info "Adding ondrej/php PPA for Ubuntu"
$STD apt install -y software-properties-common
$STD add-apt-repository -y ppa:ondrej/php
# Don't use $STD for add-apt-repository as it uses background processes
add-apt-repository -y ppa:ondrej/php >>"$(get_active_logfile)" 2>&1
else
# Debian: Use Sury repository
manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
@@ -5155,6 +4561,14 @@ EOF

if [[ "$PHP_FPM" == "YES" ]]; then
MODULE_LIST+=" php${PHP_VERSION}-fpm"
# Create systemd override for PHP-FPM to fix runtime directory issues in LXC containers
mkdir -p /etc/systemd/system/php${PHP_VERSION}-fpm.service.d/
cat <<EOF >/etc/systemd/system/php${PHP_VERSION}-fpm.service.d/override.conf
[Service]
RuntimeDirectory=php
RuntimeDirectoryMode=0755
EOF
$STD systemctl daemon-reload
fi

# install apache2 with PHP support if requested
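
# Illustrative check (assumes the enclosing helper is invoked as setup_php,
# a name not visible in this hunk, and a placeholder PHP version): after
# installing with PHP_FPM enabled, the drop-in generated above should show
# up on the unit, e.g.
#
#   PHP_VERSION="8.4" PHP_FPM="YES" setup_php
#   systemctl cat php8.4-fpm.service   # drop-in override.conf lists RuntimeDirectory=php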
@@ -5722,6 +5136,146 @@ function setup_ruby() {
msg_ok "Setup Ruby $RUBY_VERSION"
}

# ------------------------------------------------------------------------------
# Installs or updates MeiliSearch search engine.
#
# Description:
# - Fresh install: Downloads binary, creates config/service, starts
# - Update: Checks for new release, updates binary if available
# - Waits for service to be ready before returning
# - Exports API keys for use by caller
#
# Variables:
# MEILISEARCH_BIND - Bind address (default: 127.0.0.1:7700)
# MEILISEARCH_ENV - Environment: production/development (default: production)
# MEILISEARCH_DB_PATH - Database path (default: /var/lib/meilisearch/data)
#
# Exports:
# MEILISEARCH_MASTER_KEY - The master key for admin access
# MEILISEARCH_API_KEY - The default search API key
# MEILISEARCH_API_KEY_UID - The UID of the default API key
#
# Example (install script):
# setup_meilisearch
#
# Example (CT update_script):
# setup_meilisearch
# ------------------------------------------------------------------------------

function setup_meilisearch() {
local MEILISEARCH_BIND="${MEILISEARCH_BIND:-127.0.0.1:7700}"
local MEILISEARCH_ENV="${MEILISEARCH_ENV:-production}"
local MEILISEARCH_DB_PATH="${MEILISEARCH_DB_PATH:-/var/lib/meilisearch/data}"
local MEILISEARCH_DUMP_DIR="${MEILISEARCH_DUMP_DIR:-/var/lib/meilisearch/dumps}"
local MEILISEARCH_SNAPSHOT_DIR="${MEILISEARCH_SNAPSHOT_DIR:-/var/lib/meilisearch/snapshots}"

# Get bind address for health checks
local MEILISEARCH_HOST="${MEILISEARCH_BIND%%:*}"
local MEILISEARCH_PORT="${MEILISEARCH_BIND##*:}"
[[ "$MEILISEARCH_HOST" == "0.0.0.0" ]] && MEILISEARCH_HOST="127.0.0.1"

# Update mode: MeiliSearch already installed
if [[ -f /usr/bin/meilisearch ]]; then
if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
msg_info "Updating MeiliSearch"
systemctl stop meilisearch
fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
systemctl start meilisearch
msg_ok "Updated MeiliSearch"
fi
return 0
fi

# Fresh install
msg_info "Setup MeiliSearch"

# Install binary
fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary" || {
msg_error "Failed to install MeiliSearch binary"
return 1
}

# Download default config
curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml || {
msg_error "Failed to download MeiliSearch config"
return 1
}

# Generate master key
MEILISEARCH_MASTER_KEY=$(openssl rand -base64 12)
export MEILISEARCH_MASTER_KEY

# Configure
sed -i \
-e "s|^env =.*|env = \"${MEILISEARCH_ENV}\"|" \
-e "s|^# master_key =.*|master_key = \"${MEILISEARCH_MASTER_KEY}\"|" \
-e "s|^db_path =.*|db_path = \"${MEILISEARCH_DB_PATH}\"|" \
-e "s|^dump_dir =.*|dump_dir = \"${MEILISEARCH_DUMP_DIR}\"|" \
-e "s|^snapshot_dir =.*|snapshot_dir = \"${MEILISEARCH_SNAPSHOT_DIR}\"|" \
-e 's|^# no_analytics = true|no_analytics = true|' \
-e "s|^http_addr =.*|http_addr = \"${MEILISEARCH_BIND}\"|" \
/etc/meilisearch.toml

# Create data directories
mkdir -p "${MEILISEARCH_DB_PATH}" "${MEILISEARCH_DUMP_DIR}" "${MEILISEARCH_SNAPSHOT_DIR}"

# Create systemd service
cat <<EOF >/etc/systemd/system/meilisearch.service
[Unit]
Description=Meilisearch
After=network.target

[Service]
ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
Restart=always

[Install]
WantedBy=multi-user.target
EOF

# Enable and start service
systemctl daemon-reload
systemctl enable -q --now meilisearch

# Wait for MeiliSearch to be ready (up to 30 seconds)
for i in {1..30}; do
if curl -s -o /dev/null -w "%{http_code}" "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/health" 2>/dev/null | grep -q "200"; then
break
fi
sleep 1
done

# Verify service is running
if ! systemctl is-active --quiet meilisearch; then
msg_error "MeiliSearch service failed to start"
return 1
fi

# Get API keys with retry logic
MEILISEARCH_API_KEY=""
for i in {1..10}; do
MEILISEARCH_API_KEY=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
-H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
grep -o '"key":"[^"]*"' | head -n 1 | sed 's/"key":"//;s/"//') || true
[[ -n "$MEILISEARCH_API_KEY" ]] && break
sleep 2
done

MEILISEARCH_API_KEY_UID=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
-H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
grep -o '"uid":"[^"]*"' | head -n 1 | sed 's/"uid":"//;s/"//') || true

export MEILISEARCH_API_KEY
export MEILISEARCH_API_KEY_UID

# Cache version
local MEILISEARCH_VERSION
MEILISEARCH_VERSION=$(/usr/bin/meilisearch --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1) || true
cache_installed_version "meilisearch" "${MEILISEARCH_VERSION:-unknown}"

msg_ok "Setup MeiliSearch ${MEILISEARCH_VERSION:-}"
}
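
# Example (illustrative; the credentials file path and app name are
# placeholders): callers can override the bind address and persist the
# exported keys right after setup, e.g.
#
#   MEILISEARCH_BIND="0.0.0.0:7700" setup_meilisearch
#   {
#     echo "MeiliSearch Master Key: ${MEILISEARCH_MASTER_KEY}"
#     echo "MeiliSearch API Key: ${MEILISEARCH_API_KEY}"
#   } >>~/myapp.creds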

# ------------------------------------------------------------------------------
# Installs or upgrades ClickHouse database server.
#