Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2026-02-04 12:23:26 +01:00)

Compare commits: 68 commits, fix/2fauth ... pr-update-
| Author | SHA1 | Date |
|---|---|---|
| | ef72f2a7de | |
| | e99702977c | |
| | 757a54e23a | |
| | 0029ad0dee | |
| | 65e50542b0 | |
| | ce22e8ae8b | |
| | 56e626c897 | |
| | 75e79b2100 | |
| | 057523aabb | |
| | 0b48fdf7fd | |
| | f9c5c1d0b4 | |
| | fb368bc2d8 | |
| | 18f6df752f | |
| | 45aa75afc0 | |
| | baabbc4d53 | |
| | 89e53f9245 | |
| | 7c0a812b3d | |
| | 6411ae1d37 | |
| | 6967029ae3 | |
| | 3621a4ef35 | |
| | 586a436f3d | |
| | 97e37cfb1f | |
| | 24b2a945d5 | |
| | 40780edbd2 | |
| | 3b1b703561 | |
| | 991b56d412 | |
| | 4b9adbb249 | |
| | 13b4094ff8 | |
| | 3d0243adb0 | |
| | c612bfefcd | |
| | d7872a8240 | |
| | abfd57f486 | |
| | 7c8abb5c68 | |
| | 474f1e8886 | |
| | 1b941f36f1 | |
| | 4e27213df1 | |
| | b0d9864ebd | |
| | 8fd1826e87 | |
| | ba2d3e5030 | |
| | dd240c4b3c | |
| | a9121c9572 | |
| | b86fabf8ab | |
| | 2be8b94176 | |
| | f3dad5163f | |
| | be42ee40fb | |
| | 632aa1991f | |
| | 7fc77fe5be | |
| | ac74b760f0 | |
| | f84c9960bb | |
| | 29e90da2bc | |
| | 0773114aeb | |
| | ea264f673b | |
| | 3f769c6492 | |
| | 742248dabd | |
| | 8226360700 | |
| | b706775a4b | |
| | 2f907cc4e0 | |
| | b1927d2678 | |
| | a35b0fbd79 | |
| | 4418ed4615 | |
| | 19d8592104 | |
| | 627587c54b | |
| | b36609dfd5 | |
| | 98dc00a1a0 | |
| | ff9f0ed21d | |
| | bc83a37124 | |
| | e0593caa02 | |
| | 99e19d4422 | |
.github/workflows/lock-issue.yaml: 79 changes (generated, vendored)
@@ -18,7 +18,6 @@ jobs:
with:
script: |
const daysBeforeLock = 3;
const cutoffDate = new Date('2026-01-27T00:00:00Z');
const lockDate = new Date();
lockDate.setDate(lockDate.getDate() - daysBeforeLock);

@@ -29,48 +28,50 @@ jobs:
/dependabot/i
];

// Search for closed, unlocked issues older than 3 days
const issues = await github.rest.search.issuesAndPullRequests({
q: `repo:${context.repo.owner}/${context.repo.repo} is:closed is:unlocked updated:<${lockDate.toISOString().split('T')[0]}`,
per_page: 50
});
// Search for closed, unlocked issues older than 3 days (paginated, oldest first)
let page = 1;
let totalLocked = 0;

console.log(`Found ${issues.data.items.length} issues/PRs to process`);

for (const item of issues.data.items) {
// Skip excluded items
const shouldExclude = excludePatterns.some(pattern => pattern.test(item.title));
if (shouldExclude) {
console.log(`Skipped #${item.number}: "${item.title}" (matches exclude pattern)`);
continue;
}
while (true) {
const issues = await github.rest.search.issuesAndPullRequests({
q: `repo:${context.repo.owner}/${context.repo.repo} is:closed is:unlocked updated:<${lockDate.toISOString().split('T')[0]}`,
sort: 'updated',
order: 'asc',
per_page: 100,
page: page
});

const createdAt = new Date(item.created_at);
const isNew = createdAt >= cutoffDate;
if (issues.data.items.length === 0) break;

try {
// Add comment only for new issues (created after 2026-01-27)
if (isNew) {
const comment = item.pull_request
? 'This pull request has been automatically locked. Please open a new issue for related bugs.'
: 'This issue has been automatically locked. Please open a new issue for related bugs and reference this issue if needed.';

await github.rest.issues.createComment({
...context.repo,
issue_number: item.number,
body: comment
});
console.log(`Page ${page}: ${issues.data.items.length} items (total available: ${issues.data.total_count})`);

for (const item of issues.data.items) {
// Skip excluded items
const shouldExclude = excludePatterns.some(pattern => pattern.test(item.title));
if (shouldExclude) {
console.log(`Skipped #${item.number}: "${item.title}" (matches exclude pattern)`);
continue;
}

// Lock the issue/PR
await github.rest.issues.lock({
...context.repo,
issue_number: item.number,
lock_reason: 'resolved'
});

console.log(`Locked #${item.number} (${item.pull_request ? 'PR' : 'Issue'})`);
} catch (error) {
console.log(`Failed to lock #${item.number}: ${error.message}`);
try {
// Lock the issue/PR silently
await github.rest.issues.lock({
...context.repo,
issue_number: item.number,
lock_reason: 'resolved'
});

totalLocked++;
console.log(`Locked #${item.number} (${item.pull_request ? 'PR' : 'Issue'})`);
} catch (error) {
console.log(`Failed to lock #${item.number}: ${error.message}`);
}
}

page++;

// GitHub search API limit: max 10000 results (100 pages * 100) - temporarily increased
if (page > 100) break;
}

console.log(`Total locked: ${totalLocked} issues/PRs`);
CHANGELOG.md: 74 changes
@@ -772,6 +772,76 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

</details>

## 2026-02-04

### 🆕 New Scripts

- writefreely ([#11524](https://github.com/community-scripts/ProxmoxVE/pull/11524))

## 2026-02-03

### 🆕 New Scripts

- Wealthfolio ([#11511](https://github.com/community-scripts/ProxmoxVE/pull/11511))

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

  - [FIX] Shelfmark: unpin Chromium version [@vhsdream](https://github.com/vhsdream) ([#11505](https://github.com/community-scripts/ProxmoxVE/pull/11505))

- #### ✨ New Features

  - [FEAT] Scanopy: automatically update integrated daemon [@vhsdream](https://github.com/vhsdream) ([#11506](https://github.com/community-scripts/ProxmoxVE/pull/11506))

### 💾 Core

- #### 🐞 Bug Fixes

  - [FIX] tools.func: trim spaces in app_lc when checking for gh release [@vhsdream](https://github.com/vhsdream) ([#11512](https://github.com/community-scripts/ProxmoxVE/pull/11512))

### 🌐 Website

- #### 🐞 Bug Fixes

  - fix(frontend): decouple table pagination from summary fetching [@ls-root](https://github.com/ls-root) ([#11495](https://github.com/community-scripts/ProxmoxVE/pull/11495))

## 2026-02-02

### 🆕 New Scripts

- rustypaste | Alpine-rustypaste ([#11457](https://github.com/community-scripts/ProxmoxVE/pull/11457))
- KitchenOwl ([#11453](https://github.com/community-scripts/ProxmoxVE/pull/11453))

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

  - Grist: Update dependencies [@tremor021](https://github.com/tremor021) ([#11489](https://github.com/community-scripts/ProxmoxVE/pull/11489))
  - Allow "downgrade" of libigdgmm12 [@vhsdream](https://github.com/vhsdream) ([#11478](https://github.com/community-scripts/ProxmoxVE/pull/11478))
  - Disable NPM install and update due to OpenResty SHA-1 signature issues [@MickLesk](https://github.com/MickLesk) ([#11471](https://github.com/community-scripts/ProxmoxVE/pull/11471))

- #### ✨ New Features

  - Refactor: Forgejo & readeck - migrate to codeberg functions [@MickLesk](https://github.com/MickLesk) ([#11460](https://github.com/community-scripts/ProxmoxVE/pull/11460))

- #### 💥 Breaking Changes

  - [FIX] Scanopy: remove daemon build [@vhsdream](https://github.com/vhsdream) ([#11444](https://github.com/community-scripts/ProxmoxVE/pull/11444))

- #### 🔧 Refactor

  - Refactor: Vaultwarden [@MickLesk](https://github.com/MickLesk) ([#11445](https://github.com/community-scripts/ProxmoxVE/pull/11445))
  - various scripts: use ensure_dependencies instead of apt [@MickLesk](https://github.com/MickLesk) ([#11463](https://github.com/community-scripts/ProxmoxVE/pull/11463))

### 🌐 Website

- cleanup(frontend): remove unused /category-view route [@ls-root](https://github.com/ls-root) ([#11461](https://github.com/community-scripts/ProxmoxVE/pull/11461))

- #### ✨ New Features

  - feat(frontend): preview tab [@ls-root](https://github.com/ls-root) ([#11475](https://github.com/community-scripts/ProxmoxVE/pull/11475))

## 2026-02-01

### 🚀 Updated Scripts

@@ -780,12 +850,16 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

- #### 🐞 Bug Fixes

  - 2fauth: export PHP_VERSION for nginx config [@MickLesk](https://github.com/MickLesk) ([#11441](https://github.com/community-scripts/ProxmoxVE/pull/11441))
  - Prometheus Paperless NGX Exporter: Set correct binary path in systemd unit file [@andygrunwald](https://github.com/andygrunwald) ([#11438](https://github.com/community-scripts/ProxmoxVE/pull/11438))
  - tracearr: install/update new prestart script from upstream [@durzo](https://github.com/durzo) ([#11433](https://github.com/community-scripts/ProxmoxVE/pull/11433))
  - n8n: Fix dependencies [@tremor021](https://github.com/tremor021) ([#11429](https://github.com/community-scripts/ProxmoxVE/pull/11429))
  - [Hotfix] Bunkerweb update [@vhsdream](https://github.com/vhsdream) ([#11402](https://github.com/community-scripts/ProxmoxVE/pull/11402))
  - [Hotfix] Immich: revert healthcheck feature [@vhsdream](https://github.com/vhsdream) ([#11427](https://github.com/community-scripts/ProxmoxVE/pull/11427))

- #### ✨ New Features

  - tools.func: add codeberg functions & autocaliweb: migrate from GitHub to Codeberg [@MickLesk](https://github.com/MickLesk) ([#11440](https://github.com/community-scripts/ProxmoxVE/pull/11440))
  - Immich Refactor #2 [@vhsdream](https://github.com/vhsdream) ([#11375](https://github.com/community-scripts/ProxmoxVE/pull/11375))

- #### 🔧 Refactor

@@ -27,10 +27,7 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
if ! command -v memcached >/dev/null 2>&1; then
$STD apt update
$STD apt install -y memcached libmemcached-tools
fi
ensure_dependencies memcached libmemcached-tools
if check_for_gh_release "adventurelog" "seanmorley15/adventurelog"; then
msg_info "Stopping Services"
systemctl stop adventurelog-backend
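The hunk above shows the dependency-handling pattern applied throughout this PR: inline dpkg/apt checks are collapsed into a single ensure_dependencies call. The helper itself lives in tools.func and is not part of this diff, so the block below is only a minimal sketch, assuming a Debian-based container and inferring the behaviour from the code it replaces; the real implementation may differ.

```bash
#!/usr/bin/env bash
# Hypothetical sketch of an ensure_dependencies-style helper (not the actual
# tools.func implementation): install each listed package only when it is missing.
ensure_dependencies() {
  local missing=() pkg
  for pkg in "$@"; do
    # dpkg -s exits non-zero when the package is not installed
    if ! dpkg -s "$pkg" >/dev/null 2>&1; then
      missing+=("$pkg")
    fi
  done
  if ((${#missing[@]})); then
    apt-get update -qq
    apt-get install -y "${missing[@]}"
  fi
}

# Usage, mirroring the adventurelog hunk above:
ensure_dependencies memcached libmemcached-tools
```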
ct/alpine-rustypaste.sh: 51 lines (new file)
@@ -0,0 +1,51 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/orhun/rustypaste

APP="Alpine-RustyPaste"
var_tags="${var_tags:-alpine;pastebin;storage}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-4}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.23}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources

  if ! apk info -e rustypaste >/dev/null 2>&1; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi

  msg_info "Updating RustyPaste"
  $STD apk update
  $STD apk upgrade rustypaste --repository=https://dl-cdn.alpinelinux.org/alpine/edge/community
  msg_ok "Updated RustyPaste"

  msg_info "Restarting Services"
  $STD rc-service rustypaste restart
  msg_ok "Restarted Services"
  msg_ok "Updated successfully!"
  exit
}

start
build_container
description

msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8000${CL}"
@@ -31,11 +31,7 @@ function update_script() {
NODE_VERSION="22" NODE_MODULE="@postlight/parser@latest,single-file-cli@latest" setup_nodejs
PYTHON_VERSION="3.13" setup_uv

if ! dpkg -l | grep -q "^ii chromium "; then
msg_info "Installing System Dependencies"
$STD apt-get install -y chromium
msg_ok "Installed System Dependencies"
fi
ensure_dependencies chromium

msg_info "Stopping Service"
systemctl stop archivebox
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG
# Author: vhsdream
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/gelbphoenix/autocaliweb
# Source: https://codeberg.org/gelbphoenix/autocaliweb

APP="Autocaliweb"
var_tags="${var_tags:-ebooks}"
@@ -30,8 +30,8 @@ function update_script() {

setup_uv

RELEASE=$(get_latest_github_release "gelbphoenix/autocaliweb")
if check_for_gh_release "autocaliweb" "gelbphoenix/autocaliweb"; then
RELEASE=$(get_latest_codeberg_release "gelbphoenix/autocaliweb")
if check_for_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb"; then
msg_info "Stopping Services"
systemctl stop autocaliweb metadata-change-detector acw-ingest-service acw-auto-zipper
msg_ok "Stopped Services"
@@ -39,7 +39,7 @@ function update_script() {
INSTALL_DIR="/opt/autocaliweb"
export VIRTUAL_ENV="${INSTALL_DIR}/venv"
$STD tar -cf ~/autocaliweb_bkp.tar "$INSTALL_DIR"/{metadata_change_logs,dirs.json,.env,scripts/ingest_watcher.sh,scripts/auto_zipper_wrapper.sh,scripts/metadata_change_detector_wrapper.sh}
fetch_and_deploy_gh_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"
fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"

msg_info "Updating Autocaliweb"
cd "$INSTALL_DIR"
@@ -29,12 +29,7 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
if ! dpkg -s libjpeg-dev >/dev/null 2>&1; then
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt-get update
|
||||
$STD apt-get install -y libjpeg-dev
|
||||
msg_ok "Updated Dependencies"
|
||||
fi
|
||||
ensure_dependencies libjpeg-dev
|
||||
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
|
||||
|
||||
@@ -34,12 +34,7 @@ function update_script() {
|
||||
systemctl stop commafeed
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
if ! [[ $(dpkg -s rsync 2>/dev/null) ]]; then
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt update
|
||||
$STD apt install -y rsync
|
||||
msg_ok "Installed Dependencies"
|
||||
fi
|
||||
ensure_dependencies rsync
|
||||
|
||||
if [ -d /opt/commafeed/data ] && [ "$(ls -A /opt/commafeed/data)" ]; then
|
||||
msg_info "Backing up existing data"
|
||||
|
||||
@@ -44,10 +44,7 @@ function update_script() {
|
||||
NODE_VERSION="22" setup_nodejs
|
||||
if check_for_gh_release "cronicle" "jhuckaby/Cronicle"; then
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y \
|
||||
git \
|
||||
build-essential \
|
||||
ca-certificates
|
||||
ensure_dependencies git build-essential ca-certificates
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
NODE_VERSION="22" setup_nodejs
|
||||
|
||||
@@ -38,9 +38,7 @@ function update_script() {
|
||||
systemctl reload nginx
|
||||
fi
|
||||
|
||||
if ! dpkg -s vlc-bin vlc-plugin-base &>/dev/null; then
|
||||
$STD apt update && $STD apt install -y vlc-bin vlc-plugin-base
|
||||
fi
|
||||
ensure_dependencies vlc-bin vlc-plugin-base
|
||||
|
||||
if check_for_gh_release "Dispatcharr" "Dispatcharr/Dispatcharr"; then
|
||||
msg_info "Stopping Services"
|
||||
|
||||
@@ -27,35 +27,28 @@ function update_script() {
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop forgejo
|
||||
msg_ok "Stopped Service"
|
||||
if check_for_codeberg_release "forgejo" "forgejo/forgejo"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop forgejo
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating ${APP}"
|
||||
RELEASE=$(curl -fsSL https://codeberg.org/api/v1/repos/forgejo/forgejo/releases/latest | grep -oP '"tag_name":\s*"\K[^"]+' | sed 's/^v//')
|
||||
curl -fsSL "https://codeberg.org/forgejo/forgejo/releases/download/v${RELEASE}/forgejo-${RELEASE}-linux-amd64" -o "forgejo-$RELEASE-linux-amd64"
|
||||
rm -rf /opt/forgejo/*
|
||||
cp -r forgejo-$RELEASE-linux-amd64 /opt/forgejo/forgejo-$RELEASE-linux-amd64
|
||||
chmod +x /opt/forgejo/forgejo-$RELEASE-linux-amd64
|
||||
ln -sf /opt/forgejo/forgejo-$RELEASE-linux-amd64 /usr/local/bin/forgejo
|
||||
msg_ok "Updated ${APP}"
|
||||
fetch_and_deploy_codeberg_release "forgejo" "forgejo/forgejo" "singlefile" "latest" "/opt/forgejo" "forgejo-*-linux-amd64"
|
||||
ln -sf /opt/forgejo/forgejo /usr/local/bin/forgejo
|
||||
|
||||
msg_info "Cleaning"
|
||||
rm -rf forgejo-$RELEASE-linux-amd64
|
||||
msg_ok "Cleaned"
|
||||
if grep -q "GITEA_WORK_DIR" /etc/systemd/system/forgejo.service; then
|
||||
msg_info "Updating Service File"
|
||||
sed -i "s/GITEA_WORK_DIR/FORGEJO_WORK_DIR/g" /etc/systemd/system/forgejo.service
|
||||
systemctl daemon-reload
|
||||
msg_ok "Updated Service File"
|
||||
fi
|
||||
|
||||
# Fix env var from older version of community script
|
||||
if grep -q "GITEA_WORK_DIR" /etc/systemd/system/forgejo.service; then
|
||||
msg_info "Updating Service File"
|
||||
sed -i "s/GITEA_WORK_DIR/FORGEJO_WORK_DIR/g" /etc/systemd/system/forgejo.service
|
||||
systemctl daemon-reload
|
||||
msg_ok "Updated Service File"
|
||||
msg_info "Starting Service"
|
||||
systemctl start forgejo
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at the latest version."
|
||||
fi
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start forgejo
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
exit
|
||||
}
|
||||
|
||||
|
||||
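The Forgejo hunk above (and the Readeck one further below) replaces hand-rolled curl calls against the Codeberg API with the new check_for_codeberg_release and fetch_and_deploy_codeberg_release helpers added to tools.func in #11440. Those helpers are not shown in this diff; the block below is only a sketch of how such a release check could work, reusing the Codeberg (Gitea) API endpoint and tag parsing that the removed Forgejo code used directly. The /opt/<app>_version.txt cache file is an assumption borrowed from other scripts in this repo (e.g. Ollama_version.txt); the actual helper may track installed versions differently.

```bash
#!/usr/bin/env bash
# Hypothetical sketch of a check_for_codeberg_release-style check; the real helper
# lives in tools.func and is not part of this diff.
check_for_codeberg_release() {
  local app="$1" repo="$2"
  local latest cached
  # Same endpoint and tag parsing as the removed Forgejo code above
  latest=$(curl -fsSL "https://codeberg.org/api/v1/repos/${repo}/releases/latest" |
    grep -oP '"tag_name":\s*"\K[^"]+' | sed 's/^v//')
  # Assumed cache location; other scripts in this repo use /opt/<App>_version.txt
  cached=$(cat "/opt/${app}_version.txt" 2>/dev/null || true)
  # Succeeds (returns 0) only when a tag was found and it differs from the cached one
  [[ -n "$latest" && "$latest" != "$cached" ]]
}

# Usage, mirroring the updated forgejo update_script:
if check_for_codeberg_release "forgejo" "forgejo/forgejo"; then
  echo "newer Forgejo release available"
fi
```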
@@ -43,9 +43,7 @@ function update_script() {
|
||||
msg_error "Project directory does not exist: $PROJECT_DIR"
|
||||
exit
|
||||
fi
|
||||
if ! command -v git &>/dev/null; then
|
||||
$STD apt install -y git
|
||||
fi
|
||||
ensure_dependencies git
|
||||
|
||||
msg_info "Stopping service $SERVICE_NAME"
|
||||
systemctl stop "$SERVICE_NAME"
|
||||
|
||||
@@ -45,7 +45,7 @@ function update_script() {
|
||||
curl -fsSL "https://packages.graylog2.org/repo/packages/graylog-7.0-repository_latest.deb" -o "graylog-7.0-repository_latest.deb"
|
||||
$STD dpkg -i graylog-7.0-repository_latest.deb
|
||||
$STD apt update
|
||||
$STD apt install -y graylog-server graylog-datanode
|
||||
ensure_dependencies graylog-server graylog-datanode
|
||||
rm -f graylog-7.0-repository_latest.deb
|
||||
msg_ok "Updated Graylog"
|
||||
elif dpkg --compare-versions "$CURRENT_VERSION" ge "7.0"; then
|
||||
|
||||
@@ -29,6 +29,8 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
ensure_dependencies git
|
||||
|
||||
if check_for_gh_release "grist" "gristlabs/grist-core"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop grist
|
||||
|
||||
ct/headers/alpine-rustypaste: 6 lines (new file)
@@ -0,0 +1,6 @@
|
||||
___ __ _ ____ __ ____ __
|
||||
/ | / /___ (_)___ ___ / __ \__ _______/ /___ __/ __ \____ ______/ /____
|
||||
/ /| | / / __ \/ / __ \/ _ \______/ /_/ / / / / ___/ __/ / / / /_/ / __ `/ ___/ __/ _ \
|
||||
/ ___ |/ / /_/ / / / / / __/_____/ _, _/ /_/ (__ ) /_/ /_/ / ____/ /_/ (__ ) /_/ __/
|
||||
/_/ |_/_/ .___/_/_/ /_/\___/ /_/ |_|\__,_/____/\__/\__, /_/ \__,_/____/\__/\___/
|
||||
/_/ /____/
|
||||
ct/headers/kitchenowl: 6 lines (new file)
@@ -0,0 +1,6 @@
|
||||
__ __ _ __ __ ____ __
|
||||
/ //_/(_) /______/ /_ ___ ____ / __ \_ __/ /
|
||||
/ ,< / / __/ ___/ __ \/ _ \/ __ \/ / / / | /| / / /
|
||||
/ /| |/ / /_/ /__/ / / / __/ / / / /_/ /| |/ |/ / /
|
||||
/_/ |_/_/\__/\___/_/ /_/\___/_/ /_/\____/ |__/|__/_/
|
||||
|
||||
ct/headers/rustypaste: 6 lines (new file)
@@ -0,0 +1,6 @@
|
||||
__ __
|
||||
_______ _______/ /___ ______ ____ ______/ /____
|
||||
/ ___/ / / / ___/ __/ / / / __ \/ __ `/ ___/ __/ _ \
|
||||
/ / / /_/ (__ ) /_/ /_/ / /_/ / /_/ (__ ) /_/ __/
|
||||
/_/ \__,_/____/\__/\__, / .___/\__,_/____/\__/\___/
|
||||
/____/_/
|
||||
ct/headers/wealthfolio: 6 lines (new file)
@@ -0,0 +1,6 @@
|
||||
_ __ ____ __ ____ ___
|
||||
| | / /__ ____ _/ / /_/ /_ / __/___ / (_)___
|
||||
| | /| / / _ \/ __ `/ / __/ __ \/ /_/ __ \/ / / __ \
|
||||
| |/ |/ / __/ /_/ / / /_/ / / / __/ /_/ / / / /_/ /
|
||||
|__/|__/\___/\__,_/_/\__/_/ /_/_/ \____/_/_/\____/
|
||||
|
||||
ct/headers/wishlist: 6 lines (new file)
@@ -0,0 +1,6 @@
|
||||
_ ___ __ ___ __
|
||||
| | / (_)____/ /_ / (_)____/ /_
|
||||
| | /| / / / ___/ __ \/ / / ___/ __/
|
||||
| |/ |/ / (__ ) / / / / (__ ) /_
|
||||
|__/|__/_/____/_/ /_/_/_/____/\__/
|
||||
|
||||
ct/headers/writefreely: 6 lines (new file)
@@ -0,0 +1,6 @@
|
||||
_ __ _ __ ______ __
|
||||
| | / /____(_) /____ / ____/_______ ___ / /_ __
|
||||
| | /| / / ___/ / __/ _ \/ /_ / ___/ _ \/ _ \/ / / / /
|
||||
| |/ |/ / / / / /_/ __/ __/ / / / __/ __/ / /_/ /
|
||||
|__/|__/_/ /_/\__/\___/_/ /_/ \___/\___/_/\__, /
|
||||
/____/
|
||||
@@ -30,14 +30,7 @@ function update_script() {
|
||||
|
||||
get_lxc_ip
|
||||
NODE_VERSION="22" NODE_MODULE="pnpm@latest" setup_nodejs
|
||||
if ! command -v jq &>/dev/null; then
|
||||
$STD msg_info "Installing jq..."
|
||||
$STD apt-get update -qq &>/dev/null
|
||||
$STD apt-get install -y jq &>/dev/null || {
|
||||
msg_error "Failed to install jq"
|
||||
exit
|
||||
}
|
||||
fi
|
||||
ensure_dependencies jq
|
||||
|
||||
if check_for_gh_release "homepage" "gethomepage/homepage"; then
|
||||
msg_info "Stopping service"
|
||||
|
||||
ct/immich.sh: 14 changes
@@ -67,8 +67,7 @@ EOF
|
||||
msg_info "Installing Mise"
|
||||
curl -fSs https://mise.jdx.dev/gpg-key.pub | tee /etc/apt/keyrings/mise-archive-keyring.pub 1>/dev/null
|
||||
echo "deb [signed-by=/etc/apt/keyrings/mise-archive-keyring.pub arch=amd64] https://mise.jdx.dev/deb stable main" >/etc/apt/sources.list.d/mise.list
|
||||
$STD apt update
|
||||
$STD apt install -y mise
|
||||
ensure_dependencies mise
|
||||
msg_ok "Installed Mise"
|
||||
fi
|
||||
|
||||
@@ -89,7 +88,7 @@ EOF
|
||||
curl -fsSLO "$url"
|
||||
done
|
||||
$STD apt-mark unhold libigdgmm12
|
||||
$STD apt install -y ./libigdgmm12*.deb
|
||||
$STD apt install -y --allow-downgrades ./libigdgmm12*.deb
|
||||
rm ./libigdgmm12*.deb
|
||||
$STD apt install -y ./*.deb
|
||||
rm ./*.deb
|
||||
@@ -134,9 +133,7 @@ EOF
|
||||
$STD sudo -u postgres psql -d immich -c "REINDEX INDEX face_index;"
|
||||
$STD sudo -u postgres psql -d immich -c "REINDEX INDEX clip_index;"
|
||||
fi
|
||||
if ! dpkg -l | grep -q ccache; then
|
||||
$STD apt install -yqq ccache
|
||||
fi
|
||||
ensure_dependencies ccache
|
||||
|
||||
INSTALL_DIR="/opt/${APP}"
|
||||
UPLOAD_DIR="$(sed -n '/^IMMICH_MEDIA_LOCATION/s/[^=]*=//p' /opt/immich/.env)"
|
||||
@@ -304,10 +301,7 @@ function compile_libjxl() {
|
||||
|
||||
function compile_libheif() {
|
||||
SOURCE=${SOURCE_DIR}/libheif
|
||||
if ! dpkg -l | grep -q libaom; then
|
||||
$STD apt install -y libaom-dev
|
||||
local update="required"
|
||||
fi
|
||||
ensure_dependencies libaom-dev
|
||||
: "${LIBHEIF_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libheif.json)}"
|
||||
if [[ "${update:-}" ]] || [[ "$LIBHEIF_REVISION" != "$(grep 'libheif' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
|
||||
msg_info "Recompiling libheif"
|
||||
|
||||
@@ -40,9 +40,7 @@ function update_script() {
|
||||
fi
|
||||
|
||||
msg_info "Updating Jellyfin"
|
||||
if ! dpkg -s libjemalloc2 >/dev/null 2>&1; then
|
||||
$STD apt install -y libjemalloc2
|
||||
fi
|
||||
ensure_dependencies libjemalloc2
|
||||
if [[ ! -f /usr/lib/libjemalloc.so ]]; then
|
||||
ln -sf /usr/lib/x86_64-linux-gnu/libjemalloc.so.2 /usr/lib/libjemalloc.so
|
||||
fi
|
||||
|
||||
@@ -38,7 +38,7 @@ function update_script() {
|
||||
msg_ok "Updated yt-dlp"
|
||||
|
||||
msg_info "Prepare update"
|
||||
$STD apt install -y graphicsmagick ghostscript
|
||||
ensure_dependencies graphicsmagick ghostscript
|
||||
if [[ -f /opt/karakeep/.env ]] && [[ ! -f /etc/karakeep/karakeep.env ]]; then
|
||||
mkdir -p /etc/karakeep
|
||||
mv /opt/karakeep/.env /etc/karakeep/karakeep.env
|
||||
|
||||
@@ -23,9 +23,7 @@ function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if ! command -v lsb_release; then
|
||||
apt install -y lsb-release
|
||||
fi
|
||||
ensure_dependencies lsb-release
|
||||
if [[ ! -d /opt/kimai ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
|
||||
ct/kitchenowl.sh: 79 lines (new file)
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: snazzybean
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/TomBursch/kitchenowl
|
||||
|
||||
APP="KitchenOwl"
|
||||
var_tags="${var_tags:-food;recipes}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-6}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/kitchenowl ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "kitchenowl" "TomBursch/kitchenowl"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop kitchenowl
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Creating Backup"
|
||||
mkdir -p /opt/kitchenowl_backup
|
||||
cp -r /opt/kitchenowl/data /opt/kitchenowl_backup/
|
||||
cp -f /opt/kitchenowl/kitchenowl.env /opt/kitchenowl_backup/
|
||||
msg_ok "Created Backup"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "kitchenowl" "TomBursch/kitchenowl" "tarball" "latest" "/opt/kitchenowl"
|
||||
rm -rf /opt/kitchenowl/web
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "kitchenowl-web" "TomBursch/kitchenowl" "prebuild" "latest" "/opt/kitchenowl/web" "kitchenowl_Web.tar.gz"
|
||||
|
||||
msg_info "Restoring data"
|
||||
sed -i 's/default=True/default=False/' /opt/kitchenowl/backend/wsgi.py
|
||||
cp -r /opt/kitchenowl_backup/data /opt/kitchenowl/
|
||||
cp -f /opt/kitchenowl_backup/kitchenowl.env /opt/kitchenowl/
|
||||
rm -rf /opt/kitchenowl_backup
|
||||
msg_ok "Restored data"
|
||||
|
||||
msg_info "Updating KitchenOwl"
|
||||
cd /opt/kitchenowl/backend
|
||||
$STD uv sync --frozen
|
||||
cd /opt/kitchenowl/backend
|
||||
set -a
|
||||
source /opt/kitchenowl/kitchenowl.env
|
||||
set +a
|
||||
$STD uv run flask db upgrade
|
||||
msg_ok "Updated KitchenOwl"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start kitchenowl
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:80${CL}"
|
||||
@@ -27,9 +27,8 @@ function update_script() {
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
if ! dpkg -l | grep -q temurin-21-jre; then
|
||||
JAVA_VERSION="21" setup_java
|
||||
fi
|
||||
JAVA_VERSION="21" setup_java
|
||||
|
||||
msg_info "Updating ${APP}"
|
||||
$STD apt update
|
||||
$STD apt -y upgrade
|
||||
|
||||
@@ -36,12 +36,7 @@ function update_script() {
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "scanopy" "scanopy/scanopy" "tarball" "latest" "/opt/scanopy"
|
||||
|
||||
if ! dpkg -l | grep -q "pkg-config"; then
|
||||
$STD apt install -y pkg-config
|
||||
fi
|
||||
if ! dpkg -l | grep -q "libssl-dev"; then
|
||||
$STD apt install -y libssl-dev
|
||||
fi
|
||||
ensure_dependencies pkg-config libssl-dev
|
||||
TOOLCHAIN="$(grep "channel" /opt/scanopy/backend/rust-toolchain.toml | awk -F\" '{print $2}')"
|
||||
RUST_TOOLCHAIN=$TOOLCHAIN setup_rust
|
||||
|
||||
|
||||
@@ -28,6 +28,12 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
msg_error "This script is currently disabled due to an external issue with the OpenResty APT repository."
|
||||
msg_error "The repository's GPG key uses SHA-1 signatures, which are no longer accepted by Debian as of February 1, 2026."
|
||||
msg_error "The issue is tracked in openresty/openresty#1097"
|
||||
msg_error "For more details, see: https://github.com/community-scripts/ProxmoxVE/issues/11406"
|
||||
exit 1
|
||||
|
||||
if [[ $(grep -E '^VERSION_ID=' /etc/os-release) == *"12"* ]]; then
|
||||
msg_error "Wrong Debian version detected!"
|
||||
msg_error "Please create a snapshot first. You must upgrade your LXC to Debian Trixie before updating. Visit: https://github.com/community-scripts/ProxmoxVE/discussions/7489"
|
||||
|
||||
@@ -28,8 +28,8 @@ function update_script() {
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
ensure_dependencies python3-lxml
|
||||
if ! [[ $(dpkg -s python3-lxml-html-clean 2>/dev/null) ]]; then
|
||||
$STD apt install python3-lxml
|
||||
curl -fsSL "http://archive.ubuntu.com/ubuntu/pool/universe/l/lxml-html-clean/python3-lxml-html-clean_0.1.1-1_all.deb" -o /opt/python3-lxml-html-clean.deb
|
||||
$STD dpkg -i /opt/python3-lxml-html-clean.deb
|
||||
rm -f /opt/python3-lxml-html-clean.deb
|
||||
|
||||
@@ -32,11 +32,7 @@ function update_script() {
|
||||
if [[ ! -f /opt/Ollama_version.txt ]]; then
|
||||
touch /opt/Ollama_version.txt
|
||||
fi
|
||||
if ! command -v zstd &>/dev/null; then
|
||||
msg_info "Installing zstd"
|
||||
$STD apt install -y zstd
|
||||
msg_ok "Installed zstd"
|
||||
fi
|
||||
ensure_dependencies zstd
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop ollama
|
||||
msg_ok "Services Stopped"
|
||||
|
||||
@@ -92,11 +92,7 @@ EOF
|
||||
OLLAMA_VERSION=$(ollama -v | awk '{print $NF}')
|
||||
RELEASE=$(curl -s https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
|
||||
if [ "$OLLAMA_VERSION" != "$RELEASE" ]; then
|
||||
if ! command -v zstd &>/dev/null; then
|
||||
msg_info "Installing zstd"
|
||||
$STD apt install -y zstd
|
||||
msg_ok "Installed zstd"
|
||||
fi
|
||||
ensure_dependencies zstd
|
||||
msg_info "Ollama update available: v$OLLAMA_VERSION -> v$RELEASE"
|
||||
msg_info "Downloading Ollama v$RELEASE \n"
|
||||
curl -fS#LO https://github.com/ollama/ollama/releases/download/v${RELEASE}/ollama-linux-amd64.tar.zst
|
||||
|
||||
@@ -69,7 +69,7 @@ function update_script() {
|
||||
if [ "$VERSION_CODENAME" = "bookworm" ]; then
|
||||
setup_gs
|
||||
else
|
||||
$STD apt install -y ghostscript
|
||||
ensure_dependencies ghostscript
|
||||
fi
|
||||
|
||||
msg_info "Updating Paperless-ngx"
|
||||
@@ -145,7 +145,7 @@ function update_script() {
|
||||
setup_gs
|
||||
else
|
||||
msg_info "Installing Ghostscript"
|
||||
$STD apt install -y ghostscript
|
||||
ensure_dependencies ghostscript
|
||||
msg_ok "Installed Ghostscript"
|
||||
fi
|
||||
|
||||
|
||||
@@ -45,7 +45,7 @@ function update_script() {
|
||||
LIBHEIF_URL=$(curl -fsSL "https://dl.photoprism.app/dist/libheif/" | grep -oP "libheif-bookworm-amd64-v[0-9\.]+\.tar\.gz" | sort -V | tail -n 1)
|
||||
if [[ "${LIBHEIF_URL}" != "$(cat ~/.photoprism_libheif 2>/dev/null)" ]] || [[ ! -f ~/.photoprism_libheif ]]; then
|
||||
msg_info "Updating PhotoPrism LibHeif"
|
||||
$STD apt install -y libvips42
|
||||
ensure_dependencies libvips42
|
||||
curl -fsSL "https://dl.photoprism.app/dist/libheif/$LIBHEIF_URL" -o /tmp/libheif.tar.gz
|
||||
tar -xzf /tmp/libheif.tar.gz -C /usr/local
|
||||
ldconfig
|
||||
|
||||
@@ -41,7 +41,7 @@ function update_script() {
|
||||
cp -R /opt/rdtc-backup/appsettings.json /opt/rdtc/
|
||||
if dpkg-query -W dotnet-sdk-8.0 >/dev/null 2>&1; then
|
||||
$STD apt remove --purge -y dotnet-sdk-8.0
|
||||
$STD apt install -y aspnetcore-runtime-9.0
|
||||
ensure_dependencies aspnetcore-runtime-9.0
|
||||
fi
|
||||
rm -rf /opt/rdtc-backup
|
||||
|
||||
|
||||
@@ -27,22 +27,20 @@ function update_script() {
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop readeck
|
||||
msg_ok "Stopped Service"
|
||||
if check_for_codeberg_release "readeck" "readeck/readeck"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop readeck
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating Readeck"
|
||||
LATEST=$(curl -fsSL https://codeberg.org/readeck/readeck/releases/ | grep -oP '/releases/tag/\K\d+\.\d+\.\d+' | head -1)
|
||||
rm -rf /opt/readeck/readeck
|
||||
cd /opt/readeck
|
||||
curl -fsSL "https://codeberg.org/readeck/readeck/releases/download/${LATEST}/readeck-${LATEST}-linux-amd64" -o "readeck"
|
||||
chmod a+x readeck
|
||||
msg_ok "Updated Readeck"
|
||||
fetch_and_deploy_codeberg_release "readeck" "readeck/readeck" "singlefile" "latest" "/opt/readeck" "readeck-*-linux-amd64"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start readeck
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
msg_info "Starting Service"
|
||||
systemctl start readeck
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at the latest version."
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
|
||||
ct/rustypaste.sh: 69 lines (new file)
@@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: GoldenSpringness
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/orhun/rustypaste
|
||||
|
||||
APP="rustypaste"
|
||||
var_tags="${var_tags:-pastebin;storage}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -f /opt/rustypaste/rustypaste ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "rustypaste" "orhun/rustypaste"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop rustypaste
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Creating Backup"
|
||||
tar -czf "/opt/rustypaste_backup_$(date +%F).tar.gz" /opt/rustypaste/upload 2>/dev/null || true
|
||||
cp /opt/rustypaste/config.toml /tmp/rustypaste_config.toml.bak
|
||||
msg_ok "Backup Created"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "rustypaste" "orhun/rustypaste" "prebuild" "latest" "/opt/rustypaste" "*x86_64-unknown-linux-gnu.tar.gz"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
mv /tmp/rustypaste_config.toml.bak /opt/rustypaste/config.toml
|
||||
tar -xzf "/opt/rustypaste_backup_$(date +%F).tar.gz" -C /opt/rustypaste/upload 2>/dev/null || true
|
||||
rm -rf /opt/rustypaste_backup_$(date +%F).tar.gz
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start rustypaste
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
|
||||
if check_for_gh_release "rustypaste-cli" "orhun/rustypaste-cli"; then
|
||||
fetch_and_deploy_gh_release "rustypaste-cli" "orhun/rustypaste-cli" "prebuild" "latest" "/usr/local/bin" "*x86_64-unknown-linux-gnu.tar.gz"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed successfully!\n"
|
||||
echo -e "${CREATING}${GN}rustypaste setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8000${CL}"
|
||||
@@ -42,12 +42,7 @@ function update_script() {
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "scanopy" "scanopy/scanopy" "tarball" "latest" "/opt/scanopy"
|
||||
|
||||
if ! dpkg -l | grep -q "pkg-config"; then
|
||||
$STD apt install -y pkg-config
|
||||
fi
|
||||
if ! dpkg -l | grep -q "libssl-dev"; then
|
||||
$STD apt install -y libssl-dev
|
||||
fi
|
||||
ensure_dependencies pkg-config libssl-dev
|
||||
TOOLCHAIN="$(grep "channel" /opt/scanopy/backend/rust-toolchain.toml | awk -F\" '{print $2}')"
|
||||
RUST_TOOLCHAIN=$TOOLCHAIN setup_rust
|
||||
|
||||
@@ -72,10 +67,13 @@ function update_script() {
|
||||
mv ./target/release/server /usr/bin/scanopy-server
|
||||
msg_ok "Built scanopy-server"
|
||||
|
||||
msg_info "Building scanopy-daemon"
|
||||
$STD cargo build --release --bin daemon
|
||||
cp ./target/release/daemon /usr/bin/scanopy-daemon
|
||||
msg_ok "Built scanopy-daemon"
|
||||
[[ -f /etc/systemd/system/scanopy-daemon.service ]] &&
|
||||
fetch_and_deploy_gh_release "scanopy" "scanopy/scanopy" "singlefile" "latest" "/usr/local/bin" "scanopy-daemon-linux-amd64" &&
|
||||
rm -f /usr/bin/scanopy-daemon ~/configure_daemon.sh &&
|
||||
sed -i -e 's|usr/bin|usr/local/bin|' \
|
||||
-e 's/push/daemon_poll/' \
|
||||
-e 's/pull/server_poll/' /etc/systemd/system/scanopy-daemon.service &&
|
||||
systemctl daemon-reload
|
||||
|
||||
msg_info "Starting services"
|
||||
systemctl start scanopy-server
|
||||
|
||||
@@ -30,6 +30,55 @@ function update_script() {
|
||||
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
|
||||
msg_info "Updating prestart script"
|
||||
cat <<EOF >/data/tracearr/prestart.sh
|
||||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# Tune PostgreSQL for available resources (runs every startup)
|
||||
# =============================================================================
|
||||
# timescaledb-tune automatically optimizes PostgreSQL settings based on
|
||||
# available RAM and CPU. Safe to run repeatedly - recalculates if resources change.
|
||||
if command -v timescaledb-tune &> /dev/null; then
|
||||
total_ram_kb=\$(grep MemTotal /proc/meminfo | awk '{print \$2}')
|
||||
ram_for_tsdb=\$((total_ram_kb / 1024 / 2))
|
||||
timescaledb-tune -yes -memory "\$ram_for_tsdb"MB --quiet 2>/dev/null \
|
||||
|| echo "Warning: timescaledb-tune failed (non-fatal)"
|
||||
fi
|
||||
# =============================================================================
|
||||
# Ensure required PostgreSQL settings for Tracearr
|
||||
# =============================================================================
|
||||
pg_config_file="/etc/postgresql/18/main/postgresql.conf"
|
||||
if [ -f \$pg_config_file ]; then
|
||||
# Ensure max_tuples_decompressed_per_dml_transaction is set
|
||||
if grep -q "^timescaledb\.max_tuples_decompressed_per_dml_transaction" \$pg_config_file; then
|
||||
# Setting exists (uncommented) - update if not 0
|
||||
current_value=\$(grep "^timescaledb\.max_tuples_decompressed_per_dml_transaction" \$pg_config_file | grep -oE '[0-9]+' | head -1)
|
||||
if [ -n "\$current_value" ] && [ "\$current_value" -ne 0 ]; then
|
||||
sed -i "s/^timescaledb\.max_tuples_decompressed_per_dml_transaction.*/timescaledb.max_tuples_decompressed_per_dml_transaction = 0/" \$pg_config_file
|
||||
fi
|
||||
elif ! grep -q "^timescaledb\.max_tuples_decompressed_per_dml_transaction" \$pg_config_file; then
|
||||
echo "" >> \$pg_config_file
|
||||
echo "# Allow unlimited tuple decompression for migrations on compressed hypertables" >> \$pg_config_file
|
||||
echo "timescaledb.max_tuples_decompressed_per_dml_transaction = 0" >> \$pg_config_file
|
||||
fi
|
||||
# Ensure max_locks_per_transaction is set (for existing databases)
|
||||
if grep -q "^max_locks_per_transaction" \$pg_config_file; then
|
||||
# Setting exists (uncommented) - update if below 4096
|
||||
current_value=\$(grep "^max_locks_per_transaction" \$pg_config_file | grep -oE '[0-9]+' | head -1)
|
||||
if [ -n "\$current_value" ] && [ "\$current_value" -lt 4096 ]; then
|
||||
sed -i "s/^max_locks_per_transaction.*/max_locks_per_transaction = 4096/" \$pg_config_file
|
||||
fi
|
||||
elif ! grep -q "^max_locks_per_transaction" \$pg_config_file; then
|
||||
echo "" >> \$pg_config_file
|
||||
echo "# Increase lock table size for TimescaleDB hypertables with many chunks" >> \$pg_config_file
|
||||
echo "max_locks_per_transaction = 4096" >> \$pg_config_file
|
||||
fi
|
||||
fi
|
||||
systemctl restart postgresql
|
||||
EOF
|
||||
chmod +x /data/tracearr/prestart.sh
|
||||
msg_ok "Updated prestart script"
|
||||
|
||||
if check_for_gh_release "tracearr" "connorgallopo/Tracearr"; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop tracearr postgresql redis
|
||||
@@ -74,10 +123,15 @@ function update_script() {
|
||||
chown -R tracearr:tracearr /data/tracearr
|
||||
msg_ok "Configured Tracearr"
|
||||
|
||||
msg_info "Starting Services"
|
||||
msg_info "Starting services"
|
||||
systemctl start postgresql redis tracearr
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Started services"
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
# no new release, just restart service to apply prestart changes
|
||||
msg_info "Restarting service"
|
||||
systemctl restart tracearr
|
||||
msg_ok "Restarted service"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
@@ -30,8 +30,7 @@ function update_script() {
|
||||
fi
|
||||
|
||||
msg_info "Updating ${APP}"
|
||||
$STD apt update
|
||||
$STD apt install -yq twingate-connector
|
||||
ensure_dependencies twingate-connector
|
||||
$STD systemctl restart twingate-connector
|
||||
msg_ok "Updated successfully!"
|
||||
exit
|
||||
|
||||
@@ -32,7 +32,7 @@ function update_script() {
|
||||
|
||||
msg_info "Updating ${APP}"
|
||||
$STD apt update --allow-releaseinfo-change
|
||||
$STD apt install -y unifi
|
||||
ensure_dependencies unifi
|
||||
msg_ok "Updated successfully!"
|
||||
exit
|
||||
}
|
||||
|
||||
@@ -30,12 +30,9 @@ function update_script() {
|
||||
|
||||
NODE_VERSION="22" setup_nodejs
|
||||
|
||||
if ! dpkg -s chromium >/dev/null 2>&1; then
|
||||
msg_info "Installing Chromium"
|
||||
$STD apt update
|
||||
$STD apt install -y chromium
|
||||
ensure_dependencies chromium
|
||||
if [[ ! -L /opt/uptime-kuma/chromium ]]; then
|
||||
ln -s /usr/bin/chromium /opt/uptime-kuma/chromium
|
||||
msg_ok "Installed Chromium"
|
||||
fi
|
||||
|
||||
if check_for_gh_release "uptime-kuma" "louislam/uptime-kuma"; then
|
||||
|
||||
@@ -28,12 +28,8 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
VAULT=$(curl -fsSL https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest |
|
||||
grep "tag_name" |
|
||||
awk '{print substr($2, 2, length($2)-3) }')
|
||||
WVRELEASE=$(curl -fsSL https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest |
|
||||
grep "tag_name" |
|
||||
awk '{print substr($2, 2, length($2)-3) }')
|
||||
VAULT=$(get_latest_github_release "dani-garcia/vaultwarden")
|
||||
WVRELEASE=$(get_latest_github_release "dani-garcia/bw_web_builds")
|
||||
|
||||
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 3 \
|
||||
"1" "VaultWarden $VAULT" ON \
|
||||
@@ -42,57 +38,70 @@ function update_script() {
|
||||
3>&1 1>&2 2>&3)
|
||||
|
||||
if [ "$UPD" == "1" ]; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop vaultwarden
|
||||
msg_ok "Stopped Service"
|
||||
if check_for_gh_release "vaultwarden" "dani-garcia/vaultwarden"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop vaultwarden
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating VaultWarden to $VAULT (Patience)"
|
||||
cd ~ && rm -rf vaultwarden
|
||||
$STD git clone https://github.com/dani-garcia/vaultwarden
|
||||
cd vaultwarden
|
||||
$STD cargo build --features "sqlite,mysql,postgresql" --release
|
||||
DIR=/usr/bin/vaultwarden
|
||||
if [ -d "$DIR" ]; then
|
||||
cp target/release/vaultwarden /usr/bin/
|
||||
fetch_and_deploy_gh_release "vaultwarden" "dani-garcia/vaultwarden" "tarball" "latest" "/tmp/vaultwarden-src"
|
||||
|
||||
msg_info "Updating VaultWarden to $VAULT (Patience)"
|
||||
cd /tmp/vaultwarden-src
|
||||
$STD cargo build --features "sqlite,mysql,postgresql" --release
|
||||
if [[ -f /usr/bin/vaultwarden ]]; then
|
||||
cp target/release/vaultwarden /usr/bin/
|
||||
else
|
||||
cp target/release/vaultwarden /opt/vaultwarden/bin/
|
||||
fi
|
||||
cd ~ && rm -rf /tmp/vaultwarden-src
|
||||
msg_ok "Updated VaultWarden to ${VAULT}"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start vaultwarden
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
cp target/release/vaultwarden /opt/vaultwarden/bin/
|
||||
msg_ok "VaultWarden is already up-to-date"
|
||||
fi
|
||||
cd ~ && rm -rf vaultwarden
|
||||
msg_ok "Updated VaultWarden"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start vaultwarden
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ "$UPD" == "2" ]; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop vaultwarden
|
||||
msg_ok "Stopped Service"
|
||||
if check_for_gh_release "vaultwarden_webvault" "dani-garcia/bw_web_builds"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop vaultwarden
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating Web-Vault to $WVRELEASE"
|
||||
$STD curl -fsSLO https://github.com/dani-garcia/bw_web_builds/releases/download/"$WVRELEASE"/bw_web_"$WVRELEASE".tar.gz
|
||||
$STD tar -zxf bw_web_"$WVRELEASE".tar.gz -C /opt/vaultwarden/
|
||||
rm bw_web_"$WVRELEASE".tar.gz
|
||||
msg_ok "Updated Web-Vault"
|
||||
msg_info "Updating Web-Vault to $WVRELEASE"
|
||||
rm -rf /opt/vaultwarden/web-vault
|
||||
mkdir -p /opt/vaultwarden/web-vault
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start vaultwarden
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fetch_and_deploy_gh_release "vaultwarden_webvault" "dani-garcia/bw_web_builds" "prebuild" "latest" "/opt/vaultwarden/web-vault" "bw_web_*.tar.gz"
|
||||
|
||||
chown -R root:root /opt/vaultwarden/web-vault/
|
||||
msg_ok "Updated Web-Vault to ${WVRELEASE}"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start vaultwarden
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
else
|
||||
msg_ok "Web-Vault is already up-to-date"
|
||||
fi
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ "$UPD" == "3" ]; then
|
||||
if NEWTOKEN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "Set the ADMIN_TOKEN" 10 58 3>&1 1>&2 2>&3); then
|
||||
if [[ -z "$NEWTOKEN" ]]; then exit; fi
|
||||
if ! command -v argon2 >/dev/null 2>&1; then $STD apt-get install -y argon2; fi
|
||||
ensure_dependencies argon2
|
||||
TOKEN=$(echo -n "${NEWTOKEN}" | argon2 "$(openssl rand -base64 32)" -t 2 -m 16 -p 4 -l 64 -e)
|
||||
sed -i "s|ADMIN_TOKEN=.*|ADMIN_TOKEN='${TOKEN}'|" /opt/vaultwarden/.env
|
||||
if [[ -f /opt/vaultwarden/data/config.json ]]; then
|
||||
sed -i "s|\"admin_token\":.*|\"admin_token\": \"${TOKEN}\"|" /opt/vaultwarden/data/config.json
|
||||
fi
|
||||
systemctl restart vaultwarden
|
||||
msg_ok "Admin token updated"
|
||||
fi
|
||||
exit
|
||||
fi
|
||||
|
||||
@@ -27,10 +27,7 @@ function update_script() {
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
if ! [[ $(dpkg -s zstd 2>/dev/null) ]]; then
|
||||
$STD apt update
|
||||
$STD apt install -y zstd
|
||||
fi
|
||||
ensure_dependencies zstd
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/matze/wastebin/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
# Dirty-Fix 03/2025 for missing APP_version.txt on old installations, set to pre-latest release
|
||||
msg_info "Running Migration"
|
||||
|
||||
ct/wealthfolio.sh: 86 lines (new file)
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: CrazyWolf13
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://wealthfolio.app/
|
||||
|
||||
APP="Wealthfolio"
|
||||
var_tags="${var_tags:-finance;portfolio}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-10}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/wealthfolio ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "wealthfolio" "afadil/wealthfolio"; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop wealthfolio
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp -r /opt/wealthfolio_data /opt/wealthfolio_data_backup
|
||||
cp /opt/wealthfolio/.env /opt/wealthfolio_env_backup
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "wealthfolio" "afadil/wealthfolio" "tarball"
|
||||
|
||||
msg_info "Building Frontend (patience)"
|
||||
cd /opt/wealthfolio
|
||||
$STD pnpm install --frozen-lockfile
|
||||
$STD pnpm tsc
|
||||
$STD pnpm vite build
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Building Backend (patience)"
|
||||
cd /opt/wealthfolio/src-server
|
||||
source ~/.cargo/env
|
||||
$STD cargo build --release --manifest-path Cargo.toml
|
||||
cp /opt/wealthfolio/src-server/target/release/wealthfolio-server /usr/local/bin/wealthfolio-server
|
||||
chmod +x /usr/local/bin/wealthfolio-server
|
||||
msg_ok "Built Backend"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
cp -r /opt/wealthfolio_data_backup/. /opt/wealthfolio_data
|
||||
cp /opt/wealthfolio_env_backup /opt/wealthfolio/.env
|
||||
rm -rf /opt/wealthfolio_data_backup /opt/wealthfolio_env_backup
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -rf /opt/wealthfolio/src-server/target
|
||||
rm -rf /root/.cargo/registry
|
||||
rm -rf /opt/wealthfolio/node_modules
|
||||
msg_ok "Cleaned Up"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start wealthfolio
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
|
||||
ct/wishlist.sh: 82 lines (new file)
@@ -0,0 +1,82 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: Dunky13
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/cmintey/wishlist
|
||||
|
||||
APP="Wishlist"
|
||||
var_tags="${var_tags:-sharing}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-5}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-13}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/wishlist ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if check_for_gh_release "wishlist" "cmintey/wishlist"; then
|
||||
NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
|
||||
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop wishlist
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Creating Backup"
|
||||
mkdir -p /opt/wishlist-backup
|
||||
cp /opt/wishlist/.env /opt/wishlist-backup/.env
|
||||
cp -a /opt/wishlist/uploads /opt/wishlist-backup
|
||||
cp -a /opt/wishlist/data /opt/wishlist-backup
|
||||
msg_ok "Created Backup"
|
||||
|
||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "wishlist" "cmintey/wishlist" "tarball"
|
||||
LATEST_APP_VERSION=$(get_latest_github_release "cmintey/wishlist")
|
||||
|
||||
msg_info "Updating Wishlist"
|
||||
cd /opt/wishlist
|
||||
$STD pnpm install
|
||||
$STD pnpm svelte-kit sync
|
||||
$STD pnpm prisma generate
|
||||
sed -i 's|/usr/src/app/|/opt/wishlist/|g' $(grep -rl '/usr/src/app/' /opt/wishlist)
|
||||
export VERSION="v${LATEST_APP_VERSION}"
|
||||
export SHA="v${LATEST_APP_VERSION}"
|
||||
$STD pnpm run build
|
||||
$STD pnpm prune --prod
|
||||
chmod +x /opt/wishlist/entrypoint.sh
|
||||
|
||||
msg_info "Restoring Backup"
|
||||
cp /opt/wishlist-backup/.env /opt/wishlist/.env
|
||||
cp -a /opt/wishlist-backup/uploads /opt/wishlist
|
||||
cp -a /opt/wishlist-backup/data /opt/wishlist
|
||||
rm -rf /opt/wishlist-backup
|
||||
msg_ok "Restored Backup"
|
||||
|
||||
msg_ok "Updated Wishlist"
|
||||
msg_info "Starting Service"
|
||||
systemctl start wishlist
|
||||
msg_ok "Started Service"
|
||||
msg_ok "Updated successfully!"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3280${CL}"
|
||||
ct/writefreely.sh: 72 lines (new file)
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: StellaeAlis
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/writefreely/writefreely

APP="WriteFreely"
var_tags="${var_tags:-writing}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources

  if [[ ! -d /opt/writefreely ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi

  if check_for_gh_release "writefreely" "writefreely/writefreely"; then
    msg_info "Stopping Services"
    systemctl stop writefreely
    msg_ok "Stopped Services"

    msg_info "Creating Backup"
    mkdir -p /tmp/writefreely_backup
    cp /opt/writefreely/keys /tmp/writefreely_backup/ 2>/dev/null
    cp /opt/writefreely/config.ini /tmp/writefreely_backup/ 2>/dev/null
    msg_ok "Created Backup"

    CLEAN_INSTALL=1 fetch_and_deploy_gh_release "writefreely" "writefreely/writefreely" "prebuild" "latest" "/opt/writefreely" "writefreely_*_linux_amd64.tar.gz"

    msg_info "Restoring Data"
    cp /tmp/writefreely_backup/config.ini /opt/writefreely/ 2>/dev/null
    cp /tmp/writefreely_backup/keys/* /opt/writefreely/keys/ 2>/dev/null
    rm -rf /tmp/writefreely_backup
    msg_ok "Restored Data"

    msg_info "Running Post-Update Tasks"
    cd /opt/writefreely
    $STD ./writefreely db migrate
    ln -s /opt/writefreely/writefreely /usr/local/bin/writefreely
    msg_ok "Ran Post-Update Tasks"

    msg_info "Starting Services"
    systemctl start writefreely
    msg_ok "Started Services"
    msg_ok "Updated successfully!"
  fi
  exit
}

start
build_container
description

msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
@@ -9,9 +9,9 @@
  "updateable": true,
  "privileged": false,
  "interface_port": 8083,
  "documentation": "https://github.com/gelbphoenix/autocaliweb/wiki",
  "documentation": "https://codeberg.org/gelbphoenix/autocaliweb/wiki",
  "config_path": "/etc/autocaliweb",
  "website": "https://github.com/gelbphoenix/autocaliweb",
  "website": "https://codeberg.org/gelbphoenix/autocaliweb",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/autocaliweb.webp",
  "description": "A modern web management system for eBooks, eComics and PDFs",
  "install_methods": [
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"generated": "2026-02-01T18:07:47Z",
|
||||
"generated": "2026-02-04T06:18:03Z",
|
||||
"versions": [
|
||||
{
|
||||
"slug": "2fauth",
|
||||
@@ -95,9 +95,9 @@
|
||||
{
|
||||
"slug": "bar-assistant",
|
||||
"repo": "karlomikus/bar-assistant",
|
||||
"version": "v5.13.0",
|
||||
"version": "v5.13.1",
|
||||
"pinned": false,
|
||||
"date": "2026-02-01T15:49:21Z"
|
||||
"date": "2026-02-02T18:47:43Z"
|
||||
},
|
||||
{
|
||||
"slug": "bazarr",
|
||||
@@ -109,16 +109,16 @@
|
||||
{
|
||||
"slug": "bentopdf",
|
||||
"repo": "alam00000/bentopdf",
|
||||
"version": "v2.0.0",
|
||||
"version": "v2.1.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-31T10:13:47Z"
|
||||
"date": "2026-02-02T14:30:55Z"
|
||||
},
|
||||
{
|
||||
"slug": "beszel",
|
||||
"repo": "henrygd/beszel",
|
||||
"version": "v0.18.2",
|
||||
"version": "v0.18.3",
|
||||
"pinned": false,
|
||||
"date": "2026-01-12T23:58:00Z"
|
||||
"date": "2026-02-01T19:02:42Z"
|
||||
},
|
||||
{
|
||||
"slug": "bitmagnet",
|
||||
@@ -158,9 +158,9 @@
|
||||
{
|
||||
"slug": "bytestash",
|
||||
"repo": "jordan-dalby/ByteStash",
|
||||
"version": "v1.5.10",
|
||||
"version": "v1.5.11",
|
||||
"pinned": false,
|
||||
"date": "2026-01-26T14:07:59Z"
|
||||
"date": "2026-02-03T22:12:19Z"
|
||||
},
|
||||
{
|
||||
"slug": "caddy",
|
||||
@@ -186,9 +186,9 @@
|
||||
{
|
||||
"slug": "comfyui",
|
||||
"repo": "comfyanonymous/ComfyUI",
|
||||
"version": "v0.11.1",
|
||||
"version": "v0.12.2",
|
||||
"pinned": false,
|
||||
"date": "2026-01-29T07:52:21Z"
|
||||
"date": "2026-02-04T06:09:31Z"
|
||||
},
|
||||
{
|
||||
"slug": "commafeed",
|
||||
@@ -242,9 +242,9 @@
|
||||
{
|
||||
"slug": "discopanel",
|
||||
"repo": "nickheyer/discopanel",
|
||||
"version": "v1.0.32",
|
||||
"version": "v1.0.35",
|
||||
"pinned": false,
|
||||
"date": "2026-01-31T22:24:53Z"
|
||||
"date": "2026-02-02T05:20:12Z"
|
||||
},
|
||||
{
|
||||
"slug": "dispatcharr",
|
||||
@@ -256,9 +256,9 @@
|
||||
{
|
||||
"slug": "docmost",
|
||||
"repo": "docmost/docmost",
|
||||
"version": "v0.24.1",
|
||||
"version": "v0.25.0",
|
||||
"pinned": false,
|
||||
"date": "2025-12-14T13:49:16Z"
|
||||
"date": "2026-02-04T00:33:45Z"
|
||||
},
|
||||
{
|
||||
"slug": "domain-locker",
|
||||
@@ -291,9 +291,9 @@
|
||||
{
|
||||
"slug": "elementsynapse",
|
||||
"repo": "etkecc/synapse-admin",
|
||||
"version": "v0.11.1-etke52",
|
||||
"version": "v0.11.1-etke53",
|
||||
"pinned": false,
|
||||
"date": "2026-01-09T08:41:29Z"
|
||||
"date": "2026-02-03T20:38:15Z"
|
||||
},
|
||||
{
|
||||
"slug": "emby",
|
||||
@@ -312,9 +312,9 @@
|
||||
{
|
||||
"slug": "ersatztv",
|
||||
"repo": "ErsatzTV/ErsatzTV",
|
||||
"version": "v26.1.1",
|
||||
"version": "v26.2.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-08T22:02:15Z"
|
||||
"date": "2026-02-02T20:54:26Z"
|
||||
},
|
||||
{
|
||||
"slug": "excalidraw",
|
||||
@@ -375,9 +375,9 @@
|
||||
{
|
||||
"slug": "ghostfolio",
|
||||
"repo": "ghostfolio/ghostfolio",
|
||||
"version": "2.234.0",
|
||||
"version": "2.235.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-30T19:00:22Z"
|
||||
"date": "2026-02-03T19:27:17Z"
|
||||
},
|
||||
{
|
||||
"slug": "gitea",
|
||||
@@ -487,9 +487,9 @@
|
||||
{
|
||||
"slug": "homebox",
|
||||
"repo": "sysadminsmedia/homebox",
|
||||
"version": "v0.23.0",
|
||||
"version": "v0.23.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-30T17:41:01Z"
|
||||
"date": "2026-02-01T22:53:32Z"
|
||||
},
|
||||
{
|
||||
"slug": "homepage",
|
||||
@@ -515,9 +515,9 @@
|
||||
{
|
||||
"slug": "huntarr",
|
||||
"repo": "plexguide/Huntarr.io",
|
||||
"version": "9.1.5",
|
||||
"version": "9.1.9.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-31T22:55:29Z"
|
||||
"date": "2026-02-04T01:08:22Z"
|
||||
},
|
||||
{
|
||||
"slug": "inspircd",
|
||||
@@ -536,16 +536,16 @@
|
||||
{
|
||||
"slug": "invoiceninja",
|
||||
"repo": "invoiceninja/invoiceninja",
|
||||
"version": "v5.12.52",
|
||||
"version": "v5.12.53",
|
||||
"pinned": false,
|
||||
"date": "2026-02-01T02:08:10Z"
|
||||
"date": "2026-02-04T00:52:01Z"
|
||||
},
|
||||
{
|
||||
"slug": "jackett",
|
||||
"repo": "Jackett/Jackett",
|
||||
"version": "v0.24.1003",
|
||||
"version": "v0.24.1027",
|
||||
"pinned": false,
|
||||
"date": "2026-02-01T05:55:30Z"
|
||||
"date": "2026-02-04T05:56:22Z"
|
||||
},
|
||||
{
|
||||
"slug": "joplin-server",
|
||||
@@ -596,6 +596,13 @@
|
||||
"pinned": false,
|
||||
"date": "2026-01-31T18:10:59Z"
|
||||
},
|
||||
{
|
||||
"slug": "kitchenowl",
|
||||
"repo": "TomBursch/kitchenowl",
|
||||
"version": "v0.7.6",
|
||||
"pinned": false,
|
||||
"date": "2026-01-24T01:21:14Z"
|
||||
},
|
||||
{
|
||||
"slug": "koel",
|
||||
"repo": "koel/koel",
|
||||
@@ -662,9 +669,9 @@
|
||||
{
|
||||
"slug": "libretranslate",
|
||||
"repo": "LibreTranslate/LibreTranslate",
|
||||
"version": "v1.8.3",
|
||||
"version": "v1.8.4",
|
||||
"pinned": false,
|
||||
"date": "2025-12-04T21:07:00Z"
|
||||
"date": "2026-02-02T17:45:16Z"
|
||||
},
|
||||
{
|
||||
"slug": "lidarr",
|
||||
@@ -739,9 +746,9 @@
|
||||
{
|
||||
"slug": "mealie",
|
||||
"repo": "mealie-recipes/mealie",
|
||||
"version": "v3.9.2",
|
||||
"version": "v3.10.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-02T19:40:09Z"
|
||||
"date": "2026-02-03T01:04:38Z"
|
||||
},
|
||||
{
|
||||
"slug": "mediamanager",
|
||||
@@ -760,9 +767,9 @@
|
||||
{
|
||||
"slug": "meilisearch",
|
||||
"repo": "riccox/meilisearch-ui",
|
||||
"version": "v0.15.0",
|
||||
"version": "v0.15.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-29T03:54:27Z"
|
||||
"date": "2026-02-04T03:56:59Z"
|
||||
},
|
||||
{
|
||||
"slug": "memos",
|
||||
@@ -774,9 +781,9 @@
|
||||
{
|
||||
"slug": "metube",
|
||||
"repo": "alexta69/metube",
|
||||
"version": "2026.02.01",
|
||||
"version": "2026.02.03",
|
||||
"pinned": false,
|
||||
"date": "2026-02-01T00:20:00Z"
|
||||
"date": "2026-02-03T21:49:49Z"
|
||||
},
|
||||
{
|
||||
"slug": "miniflux",
|
||||
@@ -816,16 +823,16 @@
|
||||
{
|
||||
"slug": "navidrome",
|
||||
"repo": "navidrome/navidrome",
|
||||
"version": "v0.59.0",
|
||||
"version": "v0.60.0",
|
||||
"pinned": false,
|
||||
"date": "2025-12-06T18:08:42Z"
|
||||
"date": "2026-02-03T18:57:04Z"
|
||||
},
|
||||
{
|
||||
"slug": "netbox",
|
||||
"repo": "netbox-community/netbox",
|
||||
"version": "v4.5.1",
|
||||
"version": "v4.5.2",
|
||||
"pinned": false,
|
||||
"date": "2026-01-20T19:45:05Z"
|
||||
"date": "2026-02-03T13:54:26Z"
|
||||
},
|
||||
{
|
||||
"slug": "nocodb",
|
||||
@@ -872,9 +879,9 @@
|
||||
{
|
||||
"slug": "opengist",
|
||||
"repo": "thomiceli/opengist",
|
||||
"version": "v1.12.0",
|
||||
"version": "v1.12.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-27T15:31:57Z"
|
||||
"date": "2026-02-03T09:00:43Z"
|
||||
},
|
||||
{
|
||||
"slug": "ots",
|
||||
@@ -1047,9 +1054,9 @@
|
||||
{
|
||||
"slug": "prometheus-alertmanager",
|
||||
"repo": "prometheus/alertmanager",
|
||||
"version": "v0.30.1",
|
||||
"version": "v0.31.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-12T23:30:06Z"
|
||||
"date": "2026-02-02T13:34:15Z"
|
||||
},
|
||||
{
|
||||
"slug": "prometheus-blackbox-exporter",
|
||||
@@ -1177,6 +1184,13 @@
|
||||
"pinned": false,
|
||||
"date": "2026-01-12T05:38:30Z"
|
||||
},
|
||||
{
|
||||
"slug": "rustypaste",
|
||||
"repo": "orhun/rustypaste",
|
||||
"version": "v0.16.1",
|
||||
"pinned": false,
|
||||
"date": "2025-03-21T20:44:47Z"
|
||||
},
|
||||
{
|
||||
"slug": "sabnzbd",
|
||||
"repo": "sabnzbd/sabnzbd",
|
||||
@@ -1187,9 +1201,9 @@
|
||||
{
|
||||
"slug": "scanopy",
|
||||
"repo": "scanopy/scanopy",
|
||||
"version": "v0.14.0",
|
||||
"version": "v0.14.3",
|
||||
"pinned": false,
|
||||
"date": "2026-02-01T17:02:37Z"
|
||||
"date": "2026-02-04T01:41:01Z"
|
||||
},
|
||||
{
|
||||
"slug": "scraparr",
|
||||
@@ -1257,16 +1271,16 @@
|
||||
{
|
||||
"slug": "speedtest-tracker",
|
||||
"repo": "alexjustesen/speedtest-tracker",
|
||||
"version": "v1.13.5",
|
||||
"version": "v1.13.6",
|
||||
"pinned": false,
|
||||
"date": "2026-01-08T22:35:28Z"
|
||||
"date": "2026-02-03T21:20:51Z"
|
||||
},
|
||||
{
|
||||
"slug": "spoolman",
|
||||
"repo": "Donkie/Spoolman",
|
||||
"version": "v0.23.0",
|
||||
"version": "v0.23.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-23T20:42:34Z"
|
||||
"date": "2026-02-03T19:03:55Z"
|
||||
},
|
||||
{
|
||||
"slug": "sportarr",
|
||||
@@ -1341,9 +1355,9 @@
|
||||
{
|
||||
"slug": "thingsboard",
|
||||
"repo": "thingsboard/thingsboard",
|
||||
"version": "v4.3",
|
||||
"version": "v4.3.0.1",
|
||||
"pinned": false,
|
||||
"date": "2026-01-20T14:27:07Z"
|
||||
"date": "2026-02-03T12:39:14Z"
|
||||
},
|
||||
{
|
||||
"slug": "threadfin",
|
||||
@@ -1411,9 +1425,9 @@
|
||||
{
|
||||
"slug": "tunarr",
|
||||
"repo": "chrisbenincasa/tunarr",
|
||||
"version": "v1.1.11",
|
||||
"version": "v1.1.12",
|
||||
"pinned": false,
|
||||
"date": "2026-01-30T22:34:30Z"
|
||||
"date": "2026-02-03T20:19:00Z"
|
||||
},
|
||||
{
|
||||
"slug": "uhf",
|
||||
@@ -1457,12 +1471,19 @@
|
||||
"pinned": false,
|
||||
"date": "2025-10-22T17:03:54Z"
|
||||
},
|
||||
{
|
||||
"slug": "vaultwarden",
|
||||
"repo": "dani-garcia/vaultwarden",
|
||||
"version": "1.35.2",
|
||||
"pinned": false,
|
||||
"date": "2026-01-09T18:37:04Z"
|
||||
},
|
||||
{
|
||||
"slug": "victoriametrics",
|
||||
"repo": "VictoriaMetrics/VictoriaMetrics",
|
||||
"version": "v1.134.0",
|
||||
"version": "v1.135.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-19T13:29:43Z"
|
||||
"date": "2026-02-02T14:20:15Z"
|
||||
},
|
||||
{
|
||||
"slug": "vikunja",
|
||||
@@ -1488,9 +1509,9 @@
|
||||
{
|
||||
"slug": "wanderer",
|
||||
"repo": "meilisearch/meilisearch",
|
||||
"version": "v1.34.3",
|
||||
"version": "v1.35.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-28T17:52:24Z"
|
||||
"date": "2026-02-02T09:57:03Z"
|
||||
},
|
||||
{
|
||||
"slug": "warracker",
|
||||
@@ -1520,6 +1541,13 @@
|
||||
"pinned": false,
|
||||
"date": "2025-12-31T16:53:34Z"
|
||||
},
|
||||
{
|
||||
"slug": "wealthfolio",
|
||||
"repo": "afadil/wealthfolio",
|
||||
"version": "v2.1.0",
|
||||
"pinned": false,
|
||||
"date": "2025-12-01T21:57:36Z"
|
||||
},
|
||||
{
|
||||
"slug": "web-check",
|
||||
"repo": "CrazyWolf13/web-check",
|
||||
@@ -1558,9 +1586,9 @@
|
||||
{
|
||||
"slug": "zigbee2mqtt",
|
||||
"repo": "Koenkk/zigbee2mqtt",
|
||||
"version": "2.7.2",
|
||||
"version": "2.8.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-01T13:43:47Z"
|
||||
"date": "2026-02-01T19:27:25Z"
|
||||
},
|
||||
{
|
||||
"slug": "zipline",
|
||||
@@ -1586,9 +1614,9 @@
|
||||
{
|
||||
"slug": "zwave-js-ui",
|
||||
"repo": "zwave-js/zwave-js-ui",
|
||||
"version": "v11.10.1",
|
||||
"version": "v11.11.0",
|
||||
"pinned": false,
|
||||
"date": "2026-01-15T15:58:06Z"
|
||||
"date": "2026-02-03T13:13:05Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
35 frontend/public/json/kitchenowl.json Normal file
@@ -0,0 +1,35 @@
{
  "name": "KitchenOwl",
  "slug": "kitchenowl",
  "categories": [
    13
  ],
  "date_created": "2026-02-02",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 80,
  "documentation": "https://docs.kitchenowl.org/",
  "website": "https://kitchenowl.org/",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/kitchenowl.webp",
  "config_path": "/opt/kitchenowl/kitchenowl.env",
  "description": "KitchenOwl is a smart self-hosted grocery list and recipe manager with real-time synchronization, recipe management, meal planning, and expense tracking.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/kitchenowl.sh",
      "resources": {
        "cpu": 1,
        "ram": 2048,
        "hdd": 6,
        "os": "Debian",
        "version": "13"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": []
}
@@ -13,6 +13,8 @@
  "website": "https://nginxproxymanager.com/",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/nginx-proxy-manager.webp",
  "config_path": "",
  "disable": true,
  "disable_description": "This script is temporarily disabled due to an external issue with the OpenResty APT repository. The repository's GPG key uses SHA-1 signatures, which are no longer accepted by Debian as of February 1, 2026. This causes installation to fail with APT errors. The issue is tracked in openresty/openresty#1097. A workaround exists but requires manual configuration. The script will be re-enabled once OpenResty updates their repository signing key. For more details, see: https://github.com/community-scripts/ProxmoxVE/issues/11406",
  "description": "Nginx Proxy Manager is a tool that provides a web-based interface to manage Nginx reverse proxies. It enables users to easily and securely expose their services to the internet by providing features such as HTTPS encryption, domain mapping, and access control. It eliminates the need for manual configuration of Nginx reverse proxies, making it easy for users to quickly and securely expose their services to the public.",
  "install_methods": [
    {
51 frontend/public/json/rustypaste.json Normal file
@@ -0,0 +1,51 @@
{
  "name": "RustyPaste",
  "slug": "rustypaste",
  "categories": [
    11
  ],
  "date_created": "2026-02-02",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 8000,
  "documentation": "https://github.com/orhun/rustypaste",
  "config_path": "/opt/rustypaste/config.toml",
  "website": "https://github.com/orhun/rustypaste",
  "logo": "https://github.com/orhun/rustypaste/raw/master/img/rustypaste_logo.png",
  "description": "Rustypaste is a minimal file upload/pastebin service.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/rustypaste.sh",
      "resources": {
        "cpu": 1,
        "ram": 1024,
        "hdd": 20,
        "os": "Debian",
        "version": "13"
      }
    },
    {
      "type": "alpine",
      "script": "ct/alpine-rustypaste.sh",
      "resources": {
        "cpu": 1,
        "ram": 256,
        "hdd": 4,
        "os": "Alpine",
        "version": "3.23"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": [
    {
      "text": "When updating the script it will backup the whole project including all the uploaded files, make sure to extract it to a safe location or remove",
      "type": "info"
    }
  ]
}
@@ -37,7 +37,7 @@
      "type": "info"
    },
    {
      "text": "The integrated daemon config is located at `/root/.config/daemon/config.json`",
      "text": "The integrated daemon config is located at `/root/.config/daemon/`",
      "type": "info"
    }
  ]
40 frontend/public/json/wealthfolio.json Normal file
@@ -0,0 +1,40 @@
{
  "name": "Wealthfolio",
  "slug": "wealthfolio",
  "categories": [
    23
  ],
  "date_created": "2026-02-03",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 8080,
  "documentation": "https://wealthfolio.app/docs/introduction/",
  "config_path": "/opt/wealthfolio/.env",
  "website": "https://wealthfolio.app/",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/wealthfolio.webp",
  "description": "Wealthfolio is a beautiful, privacy-focused investment tracker with local data storage. Track your portfolio across multiple accounts and asset types with detailed performance analytics, goal planning, and multi-currency support.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/wealthfolio.sh",
      "resources": {
        "cpu": 4,
        "ram": 4096,
        "hdd": 10,
        "os": "Debian",
        "version": "13"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": "See ~/wealthfolio.creds"
  },
  "notes": [
    {
      "text": "Login password is stored in ~/wealthfolio.creds",
      "type": "info"
    }
  ]
}
35 frontend/public/json/wishlist.json Normal file
@@ -0,0 +1,35 @@
{
  "name": "Wishlist",
  "slug": "wishlist",
  "categories": [
    12
  ],
  "date_created": "2026-02-04",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 3280,
  "documentation": "https://github.com/cmintey/wishlist/blob/main/README.md#getting-started",
  "config_path": "/opt/wishlist/.env",
  "website": "https://github.com/cmintey/wishlist",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/cmintey-wishlist.webp",
  "description": "Wishlist is a self-hosted wishlist application that you can share with your friends and family. You no longer have to wonder what to get your family for the holidays, simply check their wishlist and claim any available item!",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/wishlist.sh",
      "resources": {
        "cpu": 2,
        "ram": 2048,
        "hdd": 5,
        "os": "Debian",
        "version": "13"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": []
}
40 frontend/public/json/writefreely.json Normal file
@@ -0,0 +1,40 @@
{
  "name": "WriteFreely",
  "slug": "writefreely",
  "categories": [
    12
  ],
  "date_created": "2026-02-04",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 80,
  "documentation": "https://writefreely.org/docs",
  "config_path": "/opt/writefreely/config.ini",
  "website": "https://writefreely.org/",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/writefreely-light.webp",
  "description": "WriteFreely is free and open source software for easily publishing writing on the web with support for the ActivityPub protocol. Use it to start a personal blog — or an entire community.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/writefreely.sh",
      "resources": {
        "cpu": 2,
        "ram": 1024,
        "hdd": 4,
        "os": "Debian",
        "version": "13"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": [
    {
      "text": "After installation execute `writefreely user create --admin <username>:<password>` to create your user.",
      "type": "info"
    }
  ]
}
@@ -1,320 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { ChevronLeft, ChevronRight } from "lucide-react";
|
||||
import React, { useEffect, useState } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
|
||||
import type { Category } from "@/lib/types";
|
||||
|
||||
import { Card, CardContent } from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
|
||||
const defaultLogo = "/default-logo.png"; // Fallback logo path
|
||||
const MAX_DESCRIPTION_LENGTH = 100; // Set max length for description
|
||||
const MAX_LOGOS = 5; // Max logos to display at once
|
||||
|
||||
function formattedBadge(type: string) {
|
||||
switch (type) {
|
||||
case "vm":
|
||||
return <Badge className="text-blue-500/75 border-blue-500/75 badge">VM</Badge>;
|
||||
case "ct":
|
||||
return <Badge className="text-yellow-500/75 border-yellow-500/75 badge">LXC</Badge>;
|
||||
case "pve":
|
||||
return <Badge className="text-orange-500/75 border-orange-500/75 badge">PVE</Badge>;
|
||||
case "addon":
|
||||
return <Badge className="text-green-500/75 border-green-500/75 badge">ADDON</Badge>;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function CategoryView() {
|
||||
const [categories, setCategories] = useState<Category[]>([]);
|
||||
const [selectedCategoryIndex, setSelectedCategoryIndex] = useState<number | null>(null);
|
||||
const [currentScripts, setCurrentScripts] = useState<any[]>([]);
|
||||
const [logoIndices, setLogoIndices] = useState<{ [key: string]: number }>({});
|
||||
const router = useRouter();
|
||||
|
||||
useEffect(() => {
|
||||
const fetchCategories = async () => {
|
||||
try {
|
||||
// eslint-disable-next-line node/no-process-env
|
||||
const basePath = process.env.NODE_ENV === "production" ? "/ProxmoxVE" : "";
|
||||
const response = await fetch(`${basePath}/api/categories`);
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to fetch categories");
|
||||
}
|
||||
const data = await response.json();
|
||||
setCategories(data);
|
||||
|
||||
// Initialize logo indices
|
||||
const initialLogoIndices: { [key: string]: number } = {};
|
||||
data.forEach((category: any) => {
|
||||
initialLogoIndices[category.name] = 0;
|
||||
});
|
||||
setLogoIndices(initialLogoIndices);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Error fetching categories:", error);
|
||||
}
|
||||
};
|
||||
|
||||
fetchCategories();
|
||||
}, []);
|
||||
|
||||
const handleCategoryClick = (index: number) => {
|
||||
setSelectedCategoryIndex(index);
|
||||
setCurrentScripts(categories[index]?.scripts || []); // Update scripts for the selected category
|
||||
};
|
||||
|
||||
const handleBackClick = () => {
|
||||
setSelectedCategoryIndex(null);
|
||||
setCurrentScripts([]); // Clear scripts when going back
|
||||
};
|
||||
|
||||
const handleScriptClick = (scriptSlug: string) => {
|
||||
// Include category context when navigating to scripts
|
||||
const categoryName = selectedCategoryIndex !== null ? categories[selectedCategoryIndex]?.name : null;
|
||||
const queryParams = new URLSearchParams({ id: scriptSlug });
|
||||
if (categoryName) {
|
||||
queryParams.append("category", categoryName);
|
||||
}
|
||||
router.push(`/scripts?${queryParams.toString()}`);
|
||||
};
|
||||
|
||||
const navigateCategory = (direction: "prev" | "next") => {
|
||||
if (selectedCategoryIndex !== null) {
|
||||
const newIndex
|
||||
= direction === "prev"
|
||||
? (selectedCategoryIndex - 1 + categories.length) % categories.length
|
||||
: (selectedCategoryIndex + 1) % categories.length;
|
||||
setSelectedCategoryIndex(newIndex);
|
||||
setCurrentScripts(categories[newIndex]?.scripts || []); // Update scripts for the new category
|
||||
}
|
||||
};
|
||||
|
||||
const switchLogos = (categoryName: string, direction: "prev" | "next") => {
|
||||
setLogoIndices((prev) => {
|
||||
const currentIndex = prev[categoryName] || 0;
|
||||
const category = categories.find(cat => cat.name === categoryName);
|
||||
if (!category || !category.scripts)
|
||||
return prev;
|
||||
|
||||
const totalLogos = category.scripts.length;
|
||||
const newIndex
|
||||
= direction === "prev"
|
||||
? (currentIndex - MAX_LOGOS + totalLogos) % totalLogos
|
||||
: (currentIndex + MAX_LOGOS) % totalLogos;
|
||||
|
||||
return { ...prev, [categoryName]: newIndex };
|
||||
});
|
||||
};
|
||||
|
||||
const truncateDescription = (text: string) => {
|
||||
return text.length > MAX_DESCRIPTION_LENGTH ? `${text.slice(0, MAX_DESCRIPTION_LENGTH)}...` : text;
|
||||
};
|
||||
|
||||
const renderResources = (script: any) => {
|
||||
const cpu = script.install_methods[0]?.resources.cpu;
|
||||
const ram = script.install_methods[0]?.resources.ram;
|
||||
const hdd = script.install_methods[0]?.resources.hdd;
|
||||
|
||||
const resourceParts = [];
|
||||
if (cpu) {
|
||||
resourceParts.push(
|
||||
<span key="cpu">
|
||||
<b>CPU:</b>
|
||||
{" "}
|
||||
{cpu}
|
||||
vCPU
|
||||
</span>,
|
||||
);
|
||||
}
|
||||
if (ram) {
|
||||
resourceParts.push(
|
||||
<span key="ram">
|
||||
<b>RAM:</b>
|
||||
{" "}
|
||||
{ram}
|
||||
MB
|
||||
</span>,
|
||||
);
|
||||
}
|
||||
if (hdd) {
|
||||
resourceParts.push(
|
||||
<span key="hdd">
|
||||
<b>HDD:</b>
|
||||
{" "}
|
||||
{hdd}
|
||||
GB
|
||||
</span>,
|
||||
);
|
||||
}
|
||||
|
||||
return resourceParts.length > 0
|
||||
? (
|
||||
<div className="text-sm text-gray-400">
|
||||
{resourceParts.map((part, index) => (
|
||||
<React.Fragment key={index}>
|
||||
{part}
|
||||
{index < resourceParts.length - 1 && " | "}
|
||||
</React.Fragment>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
: null;
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="p-6 mt-20">
|
||||
{categories.length === 0 && (
|
||||
<p className="text-center text-gray-500">No categories available. Please check the API endpoint.</p>
|
||||
)}
|
||||
{selectedCategoryIndex !== null
|
||||
? (
|
||||
<div>
|
||||
{/* Header with Navigation */}
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => navigateCategory("prev")}
|
||||
className="p-2 transition-transform duration-300 hover:scale-105"
|
||||
>
|
||||
<ChevronLeft className="h-6 w-6" />
|
||||
</Button>
|
||||
<h2 className="text-3xl font-semibold transition-opacity duration-300 hover:opacity-90">
|
||||
{categories[selectedCategoryIndex].name}
|
||||
</h2>
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => navigateCategory("next")}
|
||||
className="p-2 transition-transform duration-300 hover:scale-105"
|
||||
>
|
||||
<ChevronRight className="h-6 w-6" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Scripts Grid */}
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 gap-6">
|
||||
{currentScripts
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
.map(script => (
|
||||
<Card
|
||||
key={script.name}
|
||||
className="p-4 cursor-pointer hover:shadow-md transition-shadow duration-300"
|
||||
onClick={() => handleScriptClick(script.slug)}
|
||||
>
|
||||
<CardContent className="flex flex-col gap-4">
|
||||
<h3 className="text-lg font-bold script-text text-center hover:text-blue-600 transition-colors duration-300">
|
||||
{script.name}
|
||||
</h3>
|
||||
<img
|
||||
src={script.logo || defaultLogo}
|
||||
alt={script.name || "Script logo"}
|
||||
className="h-12 w-12 object-contain mx-auto"
|
||||
/>
|
||||
<p className="text-sm text-gray-500 text-center">
|
||||
<b>Created at:</b>
|
||||
{" "}
|
||||
{script.date_created || "No date available"}
|
||||
</p>
|
||||
<p
|
||||
className="text-sm text-gray-700 hover:text-gray-900 text-center transition-colors duration-300"
|
||||
title={script.description || "No description available."}
|
||||
>
|
||||
{truncateDescription(script.description || "No description available.")}
|
||||
</p>
|
||||
{renderResources(script)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Back to Categories Button */}
|
||||
<div className="mt-8 text-center">
|
||||
<Button
|
||||
variant="default"
|
||||
onClick={handleBackClick}
|
||||
className="px-6 py-2 text-white bg-blue-600 hover:bg-blue-700 rounded-lg shadow-md transition-transform duration-300 hover:scale-105"
|
||||
>
|
||||
Back to Categories
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
: (
|
||||
<div>
|
||||
{/* Categories Grid */}
|
||||
<div className="flex justify-between items-center mb-8">
|
||||
<h1 className="text-3xl font-semibold mb-4">Categories</h1>
|
||||
<p className="text-sm text-gray-500">
|
||||
{categories.reduce((total, category) => total + (category.scripts?.length || 0), 0)}
|
||||
{" "}
|
||||
Total scripts
|
||||
</p>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 gap-8">
|
||||
{categories.map((category, index) => (
|
||||
<Card
|
||||
key={category.name}
|
||||
onClick={() => handleCategoryClick(index)}
|
||||
className="cursor-pointer hover:shadow-lg flex flex-col items-center justify-center py-6 transition-shadow duration-300"
|
||||
>
|
||||
<CardContent className="flex flex-col items-center">
|
||||
<h3 className="text-xl font-bold mb-4 category-title transition-colors duration-300 hover:text-blue-600">
|
||||
{category.name}
|
||||
</h3>
|
||||
<div className="flex justify-center items-center gap-2 mb-4">
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
switchLogos(category.name, "prev");
|
||||
}}
|
||||
className="p-1 transition-transform duration-300 hover:scale-110"
|
||||
>
|
||||
<ChevronLeft className="h-4 w-4" />
|
||||
</Button>
|
||||
{category.scripts
|
||||
&& category.scripts
|
||||
.slice(logoIndices[category.name] || 0, (logoIndices[category.name] || 0) + MAX_LOGOS)
|
||||
.map((script, i) => (
|
||||
<div key={i} className="flex flex-col items-center">
|
||||
<img
|
||||
src={script.logo || defaultLogo}
|
||||
alt={script.name || "Script logo"}
|
||||
title={script.name}
|
||||
className="h-8 w-8 object-contain cursor-pointer"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleScriptClick(script.slug);
|
||||
}}
|
||||
/>
|
||||
{formattedBadge(script.type)}
|
||||
</div>
|
||||
))}
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
switchLogos(category.name, "next");
|
||||
}}
|
||||
className="p-1 transition-transform duration-300 hover:scale-110"
|
||||
>
|
||||
<ChevronRight className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
<p className="text-sm text-gray-400 text-center">
|
||||
{(category as any).description || "No description available."}
|
||||
</p>
|
||||
</CardContent>
|
||||
</Card>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default CategoryView;
|
||||
@@ -94,7 +94,8 @@ const chartConfigApps = {
|
||||
export default function DataPage() {
|
||||
const [data, setData] = useState<DataModel[]>([]);
|
||||
const [summary, setSummary] = useState<SummaryData | null>(null);
|
||||
const [loading, setLoading] = useState<boolean>(true);
|
||||
const [summaryLoading, setSummaryLoading] = useState<boolean>(true);
|
||||
const [dataLoading, setDataLoading] = useState<boolean>(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
const [itemsPerPage, setItemsPerPage] = useState(25);
|
||||
@@ -105,35 +106,40 @@ export default function DataPage() {
|
||||
|
||||
const nf = new Intl.NumberFormat("en-US", { maximumFractionDigits: 0 });
|
||||
|
||||
// Fetch summary only once on mount
|
||||
useEffect(() => {
|
||||
const fetchData = async () => {
|
||||
setLoading(true);
|
||||
const fetchSummary = async () => {
|
||||
try {
|
||||
const [summaryRes, dataRes] = await Promise.all([
|
||||
fetch("https://api.htl-braunau.at/data/summary"),
|
||||
fetch(
|
||||
`https://api.htl-braunau.at/data/paginated?page=${currentPage}&limit=${
|
||||
itemsPerPage === 0 ? "" : itemsPerPage
|
||||
}`,
|
||||
),
|
||||
]);
|
||||
|
||||
const summaryRes = await fetch("https://api.htl-braunau.at/data/summary");
|
||||
if (!summaryRes.ok) {
|
||||
throw new Error(`Failed to fetch summary: ${summaryRes.statusText}`);
|
||||
}
|
||||
const summaryData: SummaryData = await summaryRes.json();
|
||||
setSummary(summaryData);
|
||||
} catch (err) {
|
||||
setError((err as Error).message);
|
||||
} finally {
|
||||
setSummaryLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
fetchSummary();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchData = async () => {
|
||||
setDataLoading(true);
|
||||
try {
|
||||
const dataRes = await fetch(`https://api.htl-braunau.at/data/paginated?page=${currentPage}&limit=${itemsPerPage}`);
|
||||
if (!dataRes.ok) {
|
||||
throw new Error(`Failed to fetch data: ${dataRes.statusText}`);
|
||||
}
|
||||
|
||||
const summaryData: SummaryData = await summaryRes.json();
|
||||
const pageData: DataModel[] = await dataRes.json();
|
||||
|
||||
setSummary(summaryData);
|
||||
setData(pageData);
|
||||
} catch (err) {
|
||||
setError((err as Error).message);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
setDataLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -306,7 +312,7 @@ export default function DataPage() {
|
||||
</CardHeader>
|
||||
<CardContent className="pl-2">
|
||||
<div className="h-[300px] w-full">
|
||||
{loading ? (
|
||||
{summaryLoading ? (
|
||||
<div className="flex h-full w-full items-center justify-center">
|
||||
<Loader2 className="h-8 w-8 animate-spin text-muted-foreground" />
|
||||
</div>
|
||||
@@ -411,7 +417,7 @@ export default function DataPage() {
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{loading ? (
|
||||
{dataLoading ? (
|
||||
<TableRow>
|
||||
<TableCell colSpan={8} className="h-24 text-center">
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
@@ -478,7 +484,7 @@ export default function DataPage() {
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => setCurrentPage((prev) => Math.max(prev - 1, 1))}
|
||||
disabled={currentPage === 1 || loading}
|
||||
disabled={currentPage === 1 || dataLoading}
|
||||
>
|
||||
<ChevronLeft className="mr-2 h-4 w-4" />
|
||||
Previous
|
||||
@@ -488,7 +494,7 @@ export default function DataPage() {
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => setCurrentPage((prev) => prev + 1)}
|
||||
disabled={loading || sortedData.length < itemsPerPage}
|
||||
disabled={dataLoading || sortedData.length < itemsPerPage}
|
||||
>
|
||||
Next
|
||||
<ChevronRight className="ml-2 h-4 w-4" />
|
||||
|
||||
@@ -105,7 +105,7 @@ function Note({
|
||||
const addNote = useCallback(() => {
|
||||
setScript({
|
||||
...script,
|
||||
notes: [...script.notes, { text: "", type: "" }],
|
||||
notes: [...script.notes, { text: "", type: "info" }],
|
||||
});
|
||||
}, [script, setScript]);
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { z } from "zod";
|
||||
import { AlertColors } from "@/config/site-config";
|
||||
|
||||
export const InstallMethodSchema = z.object({
|
||||
type: z.enum(["default", "alpine"], {
|
||||
@@ -16,7 +17,9 @@ export const InstallMethodSchema = z.object({
|
||||
|
||||
const NoteSchema = z.object({
|
||||
text: z.string().min(1, "Note text cannot be empty"),
|
||||
type: z.string().min(1, "Note type cannot be empty"),
|
||||
type: z.enum(Object.keys(AlertColors) as [keyof typeof AlertColors, ...(keyof typeof AlertColors)[]], {
|
||||
message: `Type must be one of: ${Object.keys(AlertColors).join(", ")}`,
|
||||
}),
|
||||
});
|
||||
|
||||
export const ScriptSchema = z.object({
|
||||
@@ -42,7 +45,7 @@ export const ScriptSchema = z.object({
|
||||
username: z.string().nullable(),
|
||||
password: z.string().nullable(),
|
||||
}),
|
||||
notes: z.array(NoteSchema),
|
||||
notes: z.array(NoteSchema).optional().default([]),
|
||||
}).refine((data) => {
|
||||
if (data.disable === true && !data.disable_description) {
|
||||
return false;
|
||||
|
||||
@@ -18,6 +18,7 @@ import { Button } from "@/components/ui/button";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import { fetchCategories } from "@/lib/data";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
@@ -30,6 +31,7 @@ import Note from "./_components/note";
|
||||
|
||||
import { nord } from "react-syntax-highlighter/dist/esm/styles/hljs";
|
||||
import SyntaxHighlighter from "react-syntax-highlighter";
|
||||
import { ScriptItem } from "../scripts/_components/script-item";
|
||||
|
||||
const initialScript: Script = {
|
||||
name: "",
|
||||
@@ -60,6 +62,7 @@ export default function JSONGenerator() {
|
||||
const [isCopied, setIsCopied] = useState(false);
|
||||
const [isValid, setIsValid] = useState(false);
|
||||
const [categories, setCategories] = useState<Category[]>([]);
|
||||
const [currentTab, setCurrentTab] = useState<"json" | "preview">("json");
|
||||
const [zodErrors, setZodErrors] = useState<z.ZodError | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -68,6 +71,13 @@ export default function JSONGenerator() {
|
||||
.catch((error) => console.error("Error fetching categories:", error));
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isValid && currentTab === "preview") {
|
||||
setCurrentTab("json");
|
||||
toast.error("Switched to JSON tab due to invalid configuration.");
|
||||
}
|
||||
}, [isValid, currentTab]);
|
||||
|
||||
const updateScript = useCallback((key: keyof Script, value: Script[keyof Script]) => {
|
||||
setScript((prev) => {
|
||||
const updated = { ...prev, [key]: value };
|
||||
@@ -196,7 +206,7 @@ export default function JSONGenerator() {
|
||||
<Input
|
||||
placeholder="Path to config file"
|
||||
value={script.config_path || ""}
|
||||
onChange={(e) => updateScript("config_path", e.target.value || null)}
|
||||
onChange={(e) => updateScript("config_path", e.target.value || "")}
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
@@ -323,25 +333,41 @@ export default function JSONGenerator() {
|
||||
</form>
|
||||
</div>
|
||||
<div className="w-1/2 p-4 bg-background overflow-y-auto">
|
||||
{validationAlert}
|
||||
<div className="relative">
|
||||
<div className="absolute right-2 top-2 flex gap-1">
|
||||
<Button size="icon" variant="outline" onClick={handleCopy}>
|
||||
{isCopied ? <Check className="h-4 w-4" /> : <Clipboard className="h-4 w-4" />}
|
||||
</Button>
|
||||
<Button size="icon" variant="outline" onClick={handleDownload}>
|
||||
<Download className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
<Tabs
|
||||
defaultValue="json"
|
||||
className="w-full"
|
||||
onValueChange={(value) => setCurrentTab(value as "json" | "preview")}
|
||||
value={currentTab}
|
||||
>
|
||||
<TabsList className="grid w-full grid-cols-2">
|
||||
<TabsTrigger value="json">JSON</TabsTrigger>
|
||||
<TabsTrigger disabled={!isValid} value="preview">Preview</TabsTrigger>
|
||||
</TabsList>
|
||||
<TabsContent value="json" className="h-full w-full">
|
||||
{validationAlert}
|
||||
<div className="relative">
|
||||
<div className="absolute right-2 top-2 flex gap-1">
|
||||
<Button size="icon" variant="outline" onClick={handleCopy}>
|
||||
{isCopied ? <Check className="h-4 w-4" /> : <Clipboard className="h-4 w-4" />}
|
||||
</Button>
|
||||
<Button size="icon" variant="outline" onClick={handleDownload}>
|
||||
<Download className="h-4 w-4" />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<SyntaxHighlighter
|
||||
language="json"
|
||||
style={nord}
|
||||
className="mt-4 p-4 bg-secondary rounded shadow overflow-x-scroll"
|
||||
>
|
||||
{JSON.stringify(script, null, 2)}
|
||||
</SyntaxHighlighter>
|
||||
</div>
|
||||
<SyntaxHighlighter
|
||||
language="json"
|
||||
style={nord}
|
||||
className="mt-4 p-4 bg-secondary rounded shadow overflow-x-scroll"
|
||||
>
|
||||
{JSON.stringify(script, null, 2)}
|
||||
</SyntaxHighlighter>
|
||||
</div>
|
||||
</TabsContent>
|
||||
<TabsContent value="preview" className="h-full w-full">
|
||||
<ScriptItem item={script} />
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -4,7 +4,8 @@ import { X, HelpCircle } from "lucide-react";
|
||||
import { Suspense } from "react";
|
||||
import Image from "next/image";
|
||||
|
||||
import type { AppVersion, Script } from "@/lib/types";
|
||||
import type { AppVersion } from "@/lib/types";
|
||||
import type { Script } from "@/app/json-editor/_schemas/schemas";
|
||||
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
|
||||
@@ -26,7 +27,6 @@ import Alerts from "./script-items/alerts";
|
||||
|
||||
type ScriptItemProps = {
|
||||
item: Script;
|
||||
setSelectedScript: (script: string | null) => void;
|
||||
};
|
||||
|
||||
function ScriptHeader({ item }: { item: Script }) {
|
||||
@@ -135,25 +135,10 @@ function VersionInfo({ item }: { item: Script }) {
|
||||
);
|
||||
}
|
||||
|
||||
export function ScriptItem({ item, setSelectedScript }: ScriptItemProps) {
|
||||
const closeScript = () => {
|
||||
window.history.pushState({}, document.title, window.location.pathname);
|
||||
setSelectedScript(null);
|
||||
};
|
||||
|
||||
export function ScriptItem({ item }: ScriptItemProps) {
|
||||
return (
|
||||
<div className="w-full mx-auto">
|
||||
<div className="flex w-full flex-col">
|
||||
<div className="mb-3 flex items-center justify-between">
|
||||
<h2 className="text-2xl font-semibold tracking-tight text-foreground/90">Selected Script</h2>
|
||||
<button
|
||||
onClick={closeScript}
|
||||
className="rounded-full p-2 text-muted-foreground hover:bg-card/50 transition-colors"
|
||||
>
|
||||
<X className="h-5 w-5" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="rounded-xl border border-border bg-accent/30 backdrop-blur-sm shadow-sm">
|
||||
<div className="p-6 space-y-6">
|
||||
<Suspense fallback={<div className="animate-pulse h-32 bg-accent/20 rounded-xl" />}>
|
||||
@@ -162,7 +147,7 @@ export function ScriptItem({ item, setSelectedScript }: ScriptItemProps) {
|
||||
|
||||
{item.disable && item.disable_description && (
|
||||
<DisableDescription item={item} />
|
||||
) }
|
||||
)}
|
||||
|
||||
{!item.disable && (
|
||||
<>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
import { Suspense, useEffect, useState } from "react";
|
||||
import { Loader2 } from "lucide-react";
|
||||
import { Loader2, X } from "lucide-react";
|
||||
import { useQueryState } from "nuqs";
|
||||
|
||||
import type { Category, Script } from "@/lib/types";
|
||||
@@ -20,6 +20,11 @@ function ScriptContent() {
|
||||
const [item, setItem] = useState<Script>();
|
||||
const [latestPage, setLatestPage] = useState(1);
|
||||
|
||||
const closeScript = () => {
|
||||
window.history.pushState({}, document.title, window.location.pathname);
|
||||
setSelectedScript(null);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (selectedScript && links.length > 0) {
|
||||
const script = links
|
||||
@@ -53,7 +58,18 @@ function ScriptContent() {
|
||||
<div className="px-4 w-full sm:max-w-[calc(100%-350px-16px)]">
|
||||
{selectedScript && item
|
||||
? (
|
||||
<ScriptItem item={item} setSelectedScript={setSelectedScript} />
|
||||
<div className="flex w-full flex-col">
|
||||
<div className="mb-3 flex items-center justify-between">
|
||||
<h2 className="text-2xl font-semibold tracking-tight text-foreground/90">Selected Script</h2>
|
||||
<button
|
||||
onClick={closeScript}
|
||||
className="rounded-full p-2 text-muted-foreground hover:bg-card/50 transition-colors"
|
||||
>
|
||||
<X className="h-5 w-5" />
|
||||
</button>
|
||||
</div>
|
||||
<ScriptItem item={item} />
|
||||
</div>
|
||||
)
|
||||
: (
|
||||
<div className="flex w-full flex-col gap-5">
|
||||
|
||||
@@ -119,7 +119,6 @@ function MobileSidebar() {
|
||||
<p className="text-sm font-medium">Last Viewed</p>
|
||||
<ScriptItem
|
||||
item={lastViewedScript}
|
||||
setSelectedScript={isOnScriptsPage ? setSelectedScript : setTempSelectedScript}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
@@ -131,3 +130,4 @@ function MobileSidebar() {
|
||||
}
|
||||
|
||||
export default MobileSidebar;
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ export type Script = {
|
||||
slug: string;
|
||||
categories: number[];
|
||||
date_created: string;
|
||||
type: "vm" | "ct" | "pve" | "addon";
|
||||
type: "vm" | "ct" | "pve" | "addon" | "turnkey";
|
||||
updateable: boolean;
|
||||
privileged: boolean;
|
||||
interface_port: number | null;
|
||||
@@ -31,12 +31,10 @@ export type Script = {
|
||||
username: string | null;
|
||||
password: string | null;
|
||||
};
|
||||
notes: [
|
||||
{
|
||||
text: string;
|
||||
type: keyof typeof AlertColors;
|
||||
},
|
||||
];
|
||||
notes: {
|
||||
text: string;
|
||||
type: keyof typeof AlertColors;
|
||||
}[];
|
||||
};
|
||||
|
||||
export type Category = {
|
||||
|
||||
55 install/alpine-rustypaste-install.sh Normal file
@@ -0,0 +1,55 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/orhun/rustypaste

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing RustyPaste"
$STD apk add --no-cache rustypaste --repository=https://dl-cdn.alpinelinux.org/alpine/edge/community
msg_ok "Installed RustyPaste"

msg_info "Configuring RustyPaste"
mkdir -p /var/lib/rustypaste
sed -i 's|^address = ".*"|address = "0.0.0.0:8000"|' /etc/rustypaste/config.toml
msg_ok "Configured RustyPaste"

msg_info "Creating Service"
cat <<'EOF' >/etc/init.d/rustypaste
#!/sbin/openrc-run

name="rustypaste"
description="RustyPaste - A minimal file upload/pastebin service"
command="/usr/bin/rustypaste"
command_args=""
command_user="root"
command_background=true
pidfile="/run/${RC_SVCNAME}.pid"
directory="/var/lib/rustypaste"

depend() {
  need net
  after firewall
}

start_pre() {
  export CONFIG=/etc/rustypaste/config.toml
  checkpath --directory --owner root:root --mode 0755 /var/lib/rustypaste
}
EOF
chmod +x /etc/init.d/rustypaste
$STD rc-update add rustypaste default
$STD rc-service rustypaste start
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
@@ -3,7 +3,7 @@
|
||||
# Copyright (c) 2021-2026 community-scripts ORG
|
||||
# Author: vhsdream
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/gelbphoenix/autocaliweb
|
||||
# Source: https://codeberg.org/gelbphoenix/autocaliweb
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
@@ -56,7 +56,7 @@ msg_ok "Installed Calibre"
|
||||
|
||||
setup_uv
|
||||
|
||||
fetch_and_deploy_gh_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"
|
||||
fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"
|
||||
|
||||
msg_info "Configuring Autocaliweb"
|
||||
INSTALL_DIR="/opt/autocaliweb"
|
||||
@@ -111,8 +111,8 @@ msg_info "Initializing databases"
|
||||
KEPUBIFY_PATH=$(command -v kepubify 2>/dev/null || echo "/usr/bin/kepubify")
|
||||
EBOOK_CONVERT_PATH=$(command -v ebook-convert 2>/dev/null || echo "/usr/bin/ebook-convert")
|
||||
CALIBRE_BIN_DIR=$(dirname "$EBOOK_CONVERT_PATH")
|
||||
curl -fsSL https://github.com/gelbphoenix/autocaliweb/raw/refs/heads/main/library/metadata.db -o "$CALIBRE_LIB_DIR"/metadata.db
|
||||
curl -fsSL https://github.com/gelbphoenix/autocaliweb/raw/refs/heads/main/library/app.db -o "$CONFIG_DIR"/app.db
|
||||
curl -fsSL https://codeberg.org/gelbphoenix/autocaliweb/raw/branch/main/library/metadata.db -o "$CALIBRE_LIB_DIR"/metadata.db
|
||||
curl -fsSL https://codeberg.org/gelbphoenix/autocaliweb/raw/branch/main/library/app.db -o "$CONFIG_DIR"/app.db
|
||||
sqlite3 "$CONFIG_DIR/app.db" <<EOS
|
||||
UPDATE settings SET
|
||||
config_kepubifypath='$KEPUBIFY_PATH',
|
||||
|
||||
@@ -14,17 +14,13 @@ network_check
|
||||
update_os
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt-get install -y git
|
||||
$STD apt-get install -y git-lfs
|
||||
$STD apt install -y \
|
||||
git \
|
||||
git-lfs
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
msg_info "Installing Forgejo"
|
||||
mkdir -p /opt/forgejo
|
||||
RELEASE=$(curl -fsSL https://codeberg.org/api/v1/repos/forgejo/forgejo/releases/latest | grep -oP '"tag_name":\s*"\K[^"]+' | sed 's/^v//')
|
||||
curl -fsSL "https://codeberg.org/forgejo/forgejo/releases/download/v${RELEASE}/forgejo-${RELEASE}-linux-amd64" -o "/opt/forgejo/forgejo-$RELEASE-linux-amd64"
|
||||
chmod +x /opt/forgejo/forgejo-$RELEASE-linux-amd64
|
||||
ln -sf /opt/forgejo/forgejo-$RELEASE-linux-amd64 /usr/local/bin/forgejo
|
||||
msg_ok "Installed Forgejo"
|
||||
fetch_and_deploy_codeberg_release "forgejo" "forgejo/forgejo" "singlefile" "latest" "/opt/forgejo" "forgejo-*-linux-amd64"
|
||||
ln -sf /opt/forgejo/forgejo /usr/local/bin/forgejo
|
||||
|
||||
msg_info "Setting up Forgejo"
|
||||
$STD adduser --system --shell /bin/bash --gecos 'Git Version Control' --group --disabled-password --home /home/git git
|
||||
|
||||
@@ -17,7 +17,8 @@ msg_info "Installing Dependencies"
|
||||
$STD apt install -y \
|
||||
make \
|
||||
ca-certificates \
|
||||
python3-venv
|
||||
python3-venv \
|
||||
git
|
||||
msg_ok "Installed Dependencies"
|
||||
NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
|
||||
fetch_and_deploy_gh_release "grist" "gristlabs/grist-core" "tarball"
|
||||
|
||||
145
install/kitchenowl-install.sh
Normal file
145
install/kitchenowl-install.sh
Normal file
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: snazzybean
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/TomBursch/kitchenowl
|
||||
|
||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||
color
|
||||
verb_ip6
|
||||
catch_errors
|
||||
setting_up_container
|
||||
network_check
|
||||
update_os
|
||||
|
||||
msg_info "Installing Dependencies"
|
||||
$STD apt install -y \
|
||||
nginx \
|
||||
build-essential \
|
||||
gfortran \
|
||||
pkg-config \
|
||||
ninja-build \
|
||||
autoconf \
|
||||
automake \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libssl-dev \
|
||||
libpcre2-dev \
|
||||
libre2-dev \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
libopenblas-dev \
|
||||
liblapack-dev \
|
||||
zlib1g-dev \
|
||||
libjpeg62-turbo-dev \
|
||||
libsqlite3-dev \
|
||||
libexpat1-dev \
|
||||
libicu-dev
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
PYTHON_VERSION="3.14" setup_uv
|
||||
fetch_and_deploy_gh_release "kitchenowl" "TomBursch/kitchenowl" "tarball" "latest" "/opt/kitchenowl"
|
||||
rm -rf /opt/kitchenowl/web
|
||||
fetch_and_deploy_gh_release "kitchenowl-web" "TomBursch/kitchenowl" "prebuild" "latest" "/opt/kitchenowl/web" "kitchenowl_Web.tar.gz"
|
||||
|
||||
msg_info "Setting up KitchenOwl"
|
||||
cd /opt/kitchenowl/backend
|
||||
$STD uv sync --no-dev
|
||||
sed -i 's/default=True/default=False/' /opt/kitchenowl/backend/wsgi.py
|
||||
mkdir -p /nltk_data
|
||||
$STD uv run python -m nltk.downloader -d /nltk_data averaged_perceptron_tagger_eng
|
||||
JWT_SECRET=$(openssl rand -hex 32)
|
||||
mkdir -p /opt/kitchenowl/data
|
||||
cat <<EOF >/opt/kitchenowl/kitchenowl.env
|
||||
STORAGE_PATH=/opt/kitchenowl/data
|
||||
JWT_SECRET_KEY=${JWT_SECRET}
|
||||
NLTK_DATA=/nltk_data
|
||||
FRONT_URL=http://${LOCAL_IP}
|
||||
FLASK_APP=wsgi.py
|
||||
FLASK_ENV=production
|
||||
EOF
|
||||
set -a
|
||||
source /opt/kitchenowl/kitchenowl.env
|
||||
set +a
|
||||
$STD uv run flask db upgrade
|
||||
msg_ok "Set up KitchenOwl"
|
||||
|
||||
msg_info "Creating Systemd Service"
|
||||
cat <<EOF >/etc/systemd/system/kitchenowl.service
|
||||
[Unit]
|
||||
Description=KitchenOwl Backend
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/kitchenowl/backend
|
||||
EnvironmentFile=/opt/kitchenowl/kitchenowl.env
|
||||
ExecStart=/usr/local/bin/uv run wsgi.py
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
systemctl enable -q --now kitchenowl
|
||||
msg_ok "Created and Started Service"
|
||||
|
||||
msg_info "Configuring Nginx"
|
||||
rm -f /etc/nginx/sites-enabled/default
|
||||
cat <<'EOF' >/etc/nginx/sites-available/kitchenowl.conf
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
|
||||
root /opt/kitchenowl/web;
|
||||
index index.html;
|
||||
|
||||
client_max_body_size 100M;
|
||||
|
||||
# Security Headers
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /api {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 60s;
|
||||
proxy_read_timeout 60s;
|
||||
}
|
||||
|
||||
location /socket.io {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
# WebSocket Timeouts - allow long-lived connections
|
||||
proxy_read_timeout 86400s;
|
||||
proxy_send_timeout 86400s;
|
||||
}
|
||||
}
|
||||
EOF
|
||||
ln -sf /etc/nginx/sites-available/kitchenowl.conf /etc/nginx/sites-enabled/
|
||||
rm -f /etc/nginx/sites-enabled/default
|
||||
$STD systemctl reload nginx
|
||||
msg_ok "Configured Nginx"
|
||||
|
||||
motd_ssh
|
||||
customize
|
||||
cleanup_lxc
|
||||
@@ -31,7 +31,7 @@ After=network-online.target
User=root
Restart=always
Type=simple
ExecStart=/usr/local/bin/prometheus-paperless-exporter \
ExecStart=/usr/bin/prometheus-paperless-exporter \
  --paperless_url=http://paperless.example.org \
  --paperless_auth_token_file=/etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file
ExecReload=/bin/kill -HUP \$MAINPID
@@ -13,13 +13,7 @@ setting_up_container
network_check
update_os

msg_info "Installing Readeck"
LATEST=$(curl -fsSL https://codeberg.org/readeck/readeck/releases/ | grep -oP '/releases/tag/\K\d+\.\d+\.\d+' | head -1)
mkdir -p /opt/readeck
cd /opt/readeck
curl -fsSL "https://codeberg.org/readeck/readeck/releases/download/${LATEST}/readeck-${LATEST}-linux-amd64" -o "readeck"
chmod a+x readeck
msg_ok "Installed Readeck"
fetch_and_deploy_codeberg_release "readeck" "readeck/readeck" "singlefile" "latest" "/opt/readeck" "readeck-*-linux-amd64"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/readeck.service
43
install/rustypaste-install.sh
Normal file
@@ -0,0 +1,43 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2026 community-scripts ORG
# Author: GoldenSpringness | MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/orhun/rustypaste

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

fetch_and_deploy_gh_release "rustypaste" "orhun/rustypaste" "prebuild" "latest" "/opt/rustypaste" "*x86_64-unknown-linux-gnu.tar.gz"
fetch_and_deploy_gh_release "rustypaste-cli" "orhun/rustypaste-cli" "prebuild" "latest" "/usr/local/bin" "*x86_64-unknown-linux-gnu.tar.gz"

msg_info "Setting up RustyPaste"
cd /opt/rustypaste
sed -i 's|^address = ".*"|address = "0.0.0.0:8000"|' config.toml
msg_ok "Set up RustyPaste"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/rustypaste.service
[Unit]
Description=rustypaste Service
After=network.target

[Service]
WorkingDirectory=/opt/rustypaste
ExecStart=/opt/rustypaste/rustypaste
Restart=always

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now rustypaste
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
@@ -41,39 +41,34 @@ $STD cargo build --release --bin server
mv ./target/release/server /usr/bin/scanopy-server
msg_ok "Built scanopy-server"

msg_info "Building scanopy-daemon"
$STD cargo build --release --bin daemon
cp ./target/release/daemon /usr/bin/scanopy-daemon
msg_ok "Built scanopy-daemon"

msg_info "Configuring server for first-run"
cat <<EOF >/opt/scanopy/.env
### - SERVER
scanopy_DATABASE_URL=postgresql://$PG_DB_USER:$PG_DB_PASS@localhost:5432/$PG_DB_NAME
scanopy_WEB_EXTERNAL_PATH="/opt/scanopy/ui/build"
scanopy_PUBLIC_URL=http://${LOCAL_IP}:60072
scanopy_SERVER_PORT=60072
scanopy_LOG_LEVEL=info
scanopy_INTEGRATED_DAEMON_URL=http://127.0.0.1:60073
SCANOPY_DATABASE_URL=postgresql://$PG_DB_USER:$PG_DB_PASS@localhost:5432/$PG_DB_NAME
SCANOPY_WEB_EXTERNAL_PATH="/opt/scanopy/ui/build"
SCANOPY_PUBLIC_URL=http://${LOCAL_IP}:60072
SCANOPY_SERVER_PORT=60072
SCANOPY_LOG_LEVEL=info
SCANOPY_INTEGRATED_DAEMON_URL=http://127.0.0.1:60073
## - uncomment to disable signups
# scanopy_DISABLE_REGISTRATION=true
# SCANOPY_DISABLE_REGISTRATION=true
## - uncomment when using TLS
# scanopy_USE_SECURE_SESSION_COOKIES=true
# SCANOPY_USE_SECURE_SESSION_COOKIES=true
## - see https://github.com/imbolc/axum-client-ip?tab=readme-ov-file#configurable-vs-specific-extractors
## - before uncommenting the below
# scanopy_CLIENT_IP_SOURCE=
# SCANOPY_CLIENT_IP_SOURCE=

### - SMTP (password reset and notifications - optional)
# scanopy_SMTP_RELAY=smtp.gmail.com:587
# scanopy_SMTP_USERNAME=your-email@gmail.com
# scanopy_SMTP_PASSWORD=your-app-password
# scanopy_SMTP_EMAIL=scanopy@yourdomain.tld
# SCANOPY_SMTP_RELAY=smtp.gmail.com:587
# SCANOPY_SMTP_USERNAME=your-email@gmail.com
# SCANOPY_SMTP_PASSWORD=your-app-password
# SCANOPY_SMTP_EMAIL=scanopy@yourdomain.tld

### - INTEGRATED DAEMON
scanopy_SERVER_URL=http://127.0.0.1:60072
scanopy_BIND_ADDRESS=0.0.0.0
scanopy_NAME="scanopy-daemon"
scanopy_HEARTBEAT_INTERVAL=30
SCANOPY_SERVER_URL=http://127.0.0.1:60072
SCANOPY_BIND_ADDRESS=0.0.0.0
SCANOPY_NAME="scanopy-daemon"
SCANOPY_HEARTBEAT_INTERVAL=30

### - see https://github.com/scanopy/scanopy/blob/main/docs/CONFIGURATION.md for more options
EOF
@@ -105,14 +105,13 @@ elif [[ "$DEPLOYMENT_TYPE" == "4" ]]; then
  sed -i '/_BYPASS=/s/true/false/' /etc/shelfmark/.env
else
  DEPLOYMENT_TYPE="1"
  CHROME_VERSION=$(curl -fsSL https://raw.githubusercontent.com/calibrain/shelfmark/refs/heads/main/Dockerfile | sed -n '/chromium=/s/[^=]*=//p' | awk '{print $1}')
  msg_info "Installing internal bypasser dependencies"
  $STD apt install -y --no-install-recommends \
    xvfb \
    ffmpeg \
    chromium-common=${CHROME_VERSION} \
    chromium=${CHROME_VERSION} \
    chromium-driver=${CHROME_VERSION} \
    chromium-common \
    chromium \
    chromium-driver \
    python3-tk
  msg_ok "Installed internal bypasser dependencies"
fi
@@ -109,18 +109,34 @@ if command -v timescaledb-tune &> /dev/null; then
|| echo "Warning: timescaledb-tune failed (non-fatal)"
fi
# =============================================================================
# Ensure TimescaleDB decompression limit is set (for existing databases)
# Ensure required PostgreSQL settings for Tracearr
# =============================================================================
# This setting allows migrations to modify compressed hypertable data.
# Without it, bulk UPDATEs on compressed sessions will fail with
# "tuple decompression limit exceeded" errors.
pg_config_file="/etc/postgresql/18/main/postgresql.conf"
if [ -f \$pg_config_file ]; then
if ! grep -q "max_tuples_decompressed_per_dml_transaction" \$pg_config_file; then
# Ensure max_tuples_decompressed_per_dml_transaction is set
if grep -q "^timescaledb\.max_tuples_decompressed_per_dml_transaction" \$pg_config_file; then
# Setting exists (uncommented) - update if not 0
current_value=\$(grep "^timescaledb\.max_tuples_decompressed_per_dml_transaction" \$pg_config_file | grep -oE '[0-9]+' | head -1)
if [ -n "\$current_value" ] && [ "\$current_value" -ne 0 ]; then
sed -i "s/^timescaledb\.max_tuples_decompressed_per_dml_transaction.*/timescaledb.max_tuples_decompressed_per_dml_transaction = 0/" \$pg_config_file
fi
elif ! grep -q "^timescaledb\.max_tuples_decompressed_per_dml_transaction" \$pg_config_file; then
echo "" >> \$pg_config_file
echo "# Allow unlimited tuple decompression for migrations on compressed hypertables" >> \$pg_config_file
echo "timescaledb.max_tuples_decompressed_per_dml_transaction = 0" >> \$pg_config_file
fi
# Ensure max_locks_per_transaction is set (for existing databases)
if grep -q "^max_locks_per_transaction" \$pg_config_file; then
# Setting exists (uncommented) - update if below 4096
current_value=\$(grep "^max_locks_per_transaction" \$pg_config_file | grep -oE '[0-9]+' | head -1)
if [ -n "\$current_value" ] && [ "\$current_value" -lt 4096 ]; then
sed -i "s/^max_locks_per_transaction.*/max_locks_per_transaction = 4096/" \$pg_config_file
fi
elif ! grep -q "^max_locks_per_transaction" \$pg_config_file; then
echo "" >> \$pg_config_file
echo "# Increase lock table size for TimescaleDB hypertables with many chunks" >> \$pg_config_file
echo "max_locks_per_transaction = 4096" >> \$pg_config_file
fi
fi
systemctl restart postgresql
EOF
@@ -14,7 +14,7 @@ network_check
update_os

msg_info "Installing Dependencies"
$STD apt install -y git \
$STD apt install -y \
  build-essential \
  pkgconf \
  libssl-dev \
@@ -24,34 +24,25 @@ $STD apt install -y git \
  ssl-cert
msg_ok "Installed Dependencies"

WEBVAULT=$(curl -fsSL https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
VAULT=$(curl -fsSL https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
setup_rust
fetch_and_deploy_gh_release "vaultwarden" "dani-garcia/vaultwarden" "tarball" "latest" "/tmp/vaultwarden-src"

msg_info "Installing Rust"
curl -fsSL https://sh.rustup.rs -o rustup-init.sh
$STD bash rustup-init.sh -y --profile minimal
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >>~/.bashrc
export PATH="$HOME/.cargo/bin:$PATH"
rm rustup-init.sh
msg_ok "Installed Rust"

msg_info "Building Vaultwarden ${VAULT} (Patience)"
$STD git clone https://github.com/dani-garcia/vaultwarden
cd vaultwarden
msg_info "Building Vaultwarden (Patience)"
cd /tmp/vaultwarden-src
$STD cargo build --features "sqlite,mysql,postgresql" --release
msg_ok "Built Vaultwarden ${VAULT}"
msg_ok "Built Vaultwarden"

msg_info "Setting up Vaultwarden"
$STD addgroup --system vaultwarden
$STD adduser --system --home /opt/vaultwarden --shell /usr/sbin/nologin --no-create-home --gecos 'vaultwarden' --ingroup vaultwarden --disabled-login --disabled-password vaultwarden
mkdir -p /opt/vaultwarden/bin
mkdir -p /opt/vaultwarden/data
mkdir -p /opt/vaultwarden/{bin,data,web-vault}
cp target/release/vaultwarden /opt/vaultwarden/bin/
cd ~ && rm -rf /tmp/vaultwarden-src
msg_ok "Set up Vaultwarden"

msg_info "Downloading Web-Vault ${WEBVAULT}"
$STD curl -fsSLO https://github.com/dani-garcia/bw_web_builds/releases/download/"$WEBVAULT"/bw_web_"$WEBVAULT".tar.gz
$STD tar -xzf bw_web_"$WEBVAULT".tar.gz -C /opt/vaultwarden/
msg_ok "Downloaded Web-Vault ${WEBVAULT}"
fetch_and_deploy_gh_release "vaultwarden_webvault" "dani-garcia/bw_web_builds" "prebuild" "latest" "/opt/vaultwarden/web-vault" "bw_web_*.tar.gz"

msg_info "Configuring Vaultwarden"
cat <<EOF >/opt/vaultwarden/.env
ADMIN_TOKEN=''
ROCKET_ADDRESS=0.0.0.0
@@ -61,22 +52,23 @@ DATABASE_MAX_CONNS=10
WEB_VAULT_FOLDER=/opt/vaultwarden/web-vault
WEB_VAULT_ENABLED=true
EOF

mv /etc/ssl/certs/ssl-cert-snakeoil.pem /opt/vaultwarden/
mv /etc/ssl/private/ssl-cert-snakeoil.key /opt/vaultwarden/

msg_info "Creating Service"
chown -R vaultwarden:vaultwarden /opt/vaultwarden/
chown root:root /opt/vaultwarden/bin/vaultwarden
chmod +x /opt/vaultwarden/bin/vaultwarden
chown -R root:root /opt/vaultwarden/web-vault/
chmod +r /opt/vaultwarden/.env
msg_ok "Configured Vaultwarden"

service_path="/etc/systemd/system/vaultwarden.service"
echo "[Unit]
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/vaultwarden.service
[Unit]
Description=Bitwarden Server (Powered by Vaultwarden)
Documentation=https://github.com/dani-garcia/vaultwarden
After=network.target

[Service]
User=vaultwarden
Group=vaultwarden
@@ -99,10 +91,11 @@ LockPersonality=yes
WorkingDirectory=/opt/vaultwarden
ReadWriteDirectories=/opt/vaultwarden/data
AmbientCapabilities=CAP_NET_BIND_SERVICE

[Install]
WantedBy=multi-user.target" >$service_path
systemctl daemon-reload
$STD systemctl enable --now vaultwarden
WantedBy=multi-user.target
EOF
systemctl enable -q --now vaultwarden
msg_ok "Created Service"

motd_ssh
89
install/wealthfolio-install.sh
Normal file
@@ -0,0 +1,89 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2026 community-scripts ORG
# Author: CrazyWolf13
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://wealthfolio.app/

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing Dependencies"
$STD apt install -y \
  pkg-config \
  libssl-dev \
  build-essential \
  libsqlite3-dev \
  argon2
msg_ok "Installed Dependencies"

setup_rust
NODE_MODULE="pnpm" setup_nodejs
fetch_and_deploy_gh_release "wealthfolio" "afadil/wealthfolio" "tarball"

msg_info "Building Frontend (patience)"
cd /opt/wealthfolio
$STD pnpm install --frozen-lockfile
$STD pnpm tsc
$STD pnpm vite build
msg_ok "Built Frontend"

msg_info "Building Backend (patience)"
cd /opt/wealthfolio/src-server
$STD cargo build --release --manifest-path Cargo.toml
cp /opt/wealthfolio/src-server/target/release/wealthfolio-server /usr/local/bin/wealthfolio-server
chmod +x /usr/local/bin/wealthfolio-server
msg_ok "Built Backend"

msg_info "Configuring Wealthfolio"
mkdir -p /opt/wealthfolio_data
SECRET_KEY=$(openssl rand -base64 32)
WF_PASSWORD=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | cut -c1-16)
WF_PASSWORD_HASH=$(echo -n "$WF_PASSWORD" | argon2 "$(openssl rand -base64 16)" -id -e)
cat <<EOF >/opt/wealthfolio/.env
WF_LISTEN_ADDR=0.0.0.0:8080
WF_DB_PATH=/opt/wealthfolio_data/wealthfolio.db
WF_SECRET_KEY=${SECRET_KEY}
WF_AUTH_PASSWORD_HASH=${WF_PASSWORD_HASH}
WF_STATIC_DIR=/opt/wealthfolio/dist
WF_CORS_ALLOW_ORIGINS=*
WF_REQUEST_TIMEOUT_MS=30000
EOF
echo "WF_PASSWORD=${WF_PASSWORD}" >~/wealthfolio.creds
msg_ok "Configured Wealthfolio"

msg_info "Cleaning Up"
rm -rf /opt/wealthfolio/src-server/target
rm -rf /root/.cargo/registry
rm -rf /opt/wealthfolio/node_modules
msg_ok "Cleaned Up"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/wealthfolio.service
[Unit]
Description=Wealthfolio Investment Tracker
After=network.target

[Service]
Type=simple
User=root
WorkingDirectory=/opt/wealthfolio
EnvironmentFile=/opt/wealthfolio/.env
ExecStart=/usr/local/bin/wealthfolio-server
Restart=on-failure
RestartSec=5

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now wealthfolio
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
66
install/wishlist-install.sh
Normal file
@@ -0,0 +1,66 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2026 community-scripts ORG
# Author: Dunky13
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/cmintey/wishlist

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing dependencies"
$STD apt install -y \
  build-essential \
  openssl \
  caddy
msg_ok "Installed dependencies"

NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
fetch_and_deploy_gh_release "wishlist" "cmintey/wishlist" "tarball"
LATEST_APP_VERSION=$(get_latest_github_release "cmintey/wishlist")

msg_info "Installing Wishlist"
cd /opt/wishlist
cp .env.example .env
sed -i "s|^ORIGIN=.*|ORIGIN=http://${LOCAL_IP}:3280|" /opt/wishlist/.env
echo "" >>/opt/wishlist/.env
echo "NODE_ENV=production" >>/opt/wishlist/.env
$STD pnpm install
$STD pnpm svelte-kit sync
$STD pnpm prisma generate
sed -i 's|/usr/src/app/|/opt/wishlist/|g' $(grep -rl '/usr/src/app/' /opt/wishlist)
export VERSION="v${LATEST_APP_VERSION}"
export SHA="v${LATEST_APP_VERSION}"
$STD pnpm run build
$STD pnpm prune --prod
chmod +x /opt/wishlist/entrypoint.sh
mkdir -p /opt/wishlist/uploads
mkdir -p /opt/wishlist/data
msg_ok "Installed Wishlist"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/wishlist.service
[Unit]
Description=Wishlist Service
After=network.target

[Service]
WorkingDirectory=/opt/wishlist
EnvironmentFile=/opt/wishlist/.env
ExecStart=/usr/bin/env sh -c './entrypoint.sh'
Restart=on-failure

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now wishlist
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
63
install/writefreely-install.sh
Normal file
@@ -0,0 +1,63 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2026 community-scripts ORG
# Author: StellaeAlis
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/writefreely/writefreely

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing Dependencies"
$STD apt install -y crudini
msg_ok "Installed Dependencies"

setup_mariadb
MARIADB_DB_NAME="writefreely" MARIADB_DB_USER="writefreely" setup_mariadb_db
fetch_and_deploy_gh_release "writefreely" "writefreely/writefreely" "prebuild" "latest" "/opt/writefreely" "writefreely_*_linux_amd64.tar.gz"

msg_info "Setting up WriteFreely"
cd /opt/writefreely
$STD ./writefreely config generate
$STD ./writefreely keys generate
msg_ok "Setup WriteFreely"

msg_info "Configuring WriteFreely"
$STD crudini --set config.ini server port 80
$STD crudini --set config.ini server bind $LOCAL_IP
$STD crudini --set config.ini database username $MARIADB_DB_USER
$STD crudini --set config.ini database password $MARIADB_DB_PASS
$STD crudini --set config.ini database database $MARIADB_DB_NAME
$STD crudini --set config.ini app host http://$LOCAL_IP:80
$STD ./writefreely db init
ln -s /opt/writefreely/writefreely /usr/local/bin/writefreely
msg_ok "Configured WriteFreely"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/writefreely.service
[Unit]
Description=WriteFreely Service
After=syslog.target network.target

[Service]
Type=simple
User=root
WorkingDirectory=/opt/writefreely
ExecStart=/opt/writefreely/writefreely
Restart=on-failure
RestartSec=5

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now writefreely
msg_ok "Created Service"

motd_ssh
customize
cleanup_lxc
629
misc/tools.func
@@ -821,6 +821,54 @@ github_api_call() {
  return 1
}

# ------------------------------------------------------------------------------
# Codeberg API call with retry logic
# ------------------------------------------------------------------------------
codeberg_api_call() {
  local url="$1"
  local output_file="${2:-/dev/stdout}"
  local max_retries=3
  local retry_delay=2

  for attempt in $(seq 1 $max_retries); do
    local http_code
    http_code=$(curl -fsSL -w "%{http_code}" -o "$output_file" \
      -H "Accept: application/json" \
      "$url" 2>/dev/null || echo "000")

    case "$http_code" in
    200)
      return 0
      ;;
    403)
      # Rate limit - retry
      if [[ $attempt -lt $max_retries ]]; then
        msg_warn "Codeberg API rate limit, waiting ${retry_delay}s... (attempt $attempt/$max_retries)"
        sleep "$retry_delay"
        retry_delay=$((retry_delay * 2))
        continue
      fi
      msg_error "Codeberg API rate limit exceeded."
      return 1
      ;;
    404)
      msg_error "Codeberg API endpoint not found: $url"
      return 1
      ;;
    *)
      if [[ $attempt -lt $max_retries ]]; then
        sleep "$retry_delay"
        continue
      fi
      msg_error "Codeberg API call failed with HTTP $http_code"
      return 1
      ;;
    esac
  done

  return 1
}
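# Example (minimal sketch; "someuser/someapp" is a placeholder repository): fetch the
# release list into a temp file and read the newest tag, assuming jq is available.
#   tmp_json=$(mktemp)
#   if codeberg_api_call "https://codeberg.org/api/v1/repos/someuser/someapp/releases" "$tmp_json"; then
#     jq -r '.[0].tag_name // empty' "$tmp_json"
#   fi
#   rm -f "$tmp_json"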

should_upgrade() {
  local current="$1"
  local target="$2"
@@ -1385,6 +1433,37 @@ get_latest_github_release() {
  echo "$version"
}

# ------------------------------------------------------------------------------
# Get latest Codeberg release version
# ------------------------------------------------------------------------------
get_latest_codeberg_release() {
  local repo="$1"
  local strip_v="${2:-true}"
  local temp_file=$(mktemp)

  # Codeberg API: get all releases and pick the first non-draft/non-prerelease
  if ! codeberg_api_call "https://codeberg.org/api/v1/repos/${repo}/releases" "$temp_file"; then
    rm -f "$temp_file"
    return 1
  fi

  local version
  # Codeberg uses same JSON structure but releases endpoint returns array
  version=$(jq -r '[.[] | select(.draft==false and .prerelease==false)][0].tag_name // empty' "$temp_file")

  if [[ "$strip_v" == "true" ]]; then
    version="${version#v}"
  fi

  rm -f "$temp_file"

  if [[ -z "$version" ]]; then
    return 1
  fi

  echo "$version"
}
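# Example (minimal sketch; repository name is a placeholder): capture the latest stable
# version string, or bail out if none could be determined.
#   latest=$(get_latest_codeberg_release "someuser/someapp") || exit 1
#   echo "Latest stable release: ${latest}"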

# ------------------------------------------------------------------------------
# Debug logging (only if DEBUG=1)
# ------------------------------------------------------------------------------
@@ -1452,7 +1531,8 @@ check_for_gh_release() {
  local app="$1"
  local source="$2"
  local pinned_version_in="${3:-}" # optional
  local app_lc="${app,,}"
  local app_lc=""
  app_lc="$(echo "${app,,}" | tr -d ' ')"
  local current_file="$HOME/.${app_lc}"

  msg_info "Checking for update: ${app}"
@@ -1559,6 +1639,119 @@ check_for_gh_release() {
  return 1
}

# ------------------------------------------------------------------------------
# Checks for new Codeberg release (latest tag).
#
# Description:
#   - Queries the Codeberg API for the latest release tag
#   - Compares it to a local cached version (~/.<app>)
#   - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
#
# Usage:
#   if check_for_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" [optional] "v0.11.3"; then
#     # trigger update...
#   fi
#   exit 0
# } (end of update_script not from the function)
#
# Notes:
#   - Requires `jq` (auto-installed if missing)
#   - Does not modify anything, only checks version state
#   - Does not support pre-releases
# ------------------------------------------------------------------------------
check_for_codeberg_release() {
  local app="$1"
  local source="$2"
  local pinned_version_in="${3:-}" # optional
  local app_lc="${app,,}"
  local current_file="$HOME/.${app_lc}"

  msg_info "Checking for update: ${app}"

  # DNS check
  if ! getent hosts codeberg.org >/dev/null 2>&1; then
    msg_error "Network error: cannot resolve codeberg.org"
    return 1
  fi

  ensure_dependencies jq

  # Fetch releases from Codeberg API
  local releases_json=""
  releases_json=$(curl -fsSL --max-time 20 \
    -H 'Accept: application/json' \
    "https://codeberg.org/api/v1/repos/${source}/releases" 2>/dev/null) || {
    msg_error "Unable to fetch releases for ${app}"
    return 1
  }

  mapfile -t raw_tags < <(jq -r '.[] | select(.draft==false and .prerelease==false) | .tag_name' <<<"$releases_json")
  if ((${#raw_tags[@]} == 0)); then
    msg_error "No stable releases found for ${app}"
    return 1
  fi

  local clean_tags=()
  for t in "${raw_tags[@]}"; do
    clean_tags+=("${t#v}")
  done

  local latest_raw="${raw_tags[0]}"
  local latest_clean="${clean_tags[0]}"

  # current installed (stored without v)
  local current=""
  if [[ -f "$current_file" ]]; then
    current="$(<"$current_file")"
  else
    # Migration: search for any /opt/*_version.txt
    local legacy_files
    mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null)
    if ((${#legacy_files[@]} == 1)); then
      current="$(<"${legacy_files[0]}")"
      echo "${current#v}" >"$current_file"
      rm -f "${legacy_files[0]}"
    fi
  fi
  current="${current#v}"

  # Pinned version handling
  if [[ -n "$pinned_version_in" ]]; then
    local pin_clean="${pinned_version_in#v}"
    local match_raw=""
    for i in "${!clean_tags[@]}"; do
      if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then
        match_raw="${raw_tags[$i]}"
        break
      fi
    done

    if [[ -z "$match_raw" ]]; then
      msg_error "Pinned version ${pinned_version_in} not found upstream"
      return 1
    fi

    if [[ "$current" != "$pin_clean" ]]; then
      CHECK_UPDATE_RELEASE="$match_raw"
      msg_ok "Update available: ${app} ${current:-not installed} → ${pin_clean}"
      return 0
    fi

    msg_ok "No update available: ${app} is already on pinned version (${current})"
    return 1
  fi

  # No pinning → use latest
  if [[ -z "$current" || "$current" != "$latest_clean" ]]; then
    CHECK_UPDATE_RELEASE="$latest_raw"
    msg_ok "Update available: ${app} ${current:-not installed} → ${latest_clean}"
    return 0
  fi

  msg_ok "No update available: ${app} (${latest_clean})"
  return 1
}
# ------------------------------------------------------------------------------
# Creates and installs self-signed certificates.
#
@@ -1648,6 +1841,440 @@ function ensure_usr_local_bin_persist() {
  fi
}

# ------------------------------------------------------------------------------
# Downloads and deploys latest Codeberg release (source, binary, tarball, asset).
#
# Description:
#   - Fetches latest release metadata from Codeberg API
#   - Supports the following modes:
#     - tarball: Source code tarball (default if omitted)
#     - source: Alias for tarball (same behavior)
#     - binary: .deb package install (arch-dependent)
#     - prebuild: Prebuilt .tar.gz archive (e.g. Go binaries)
#     - singlefile: Standalone binary (no archive, direct chmod +x install)
#     - tag: Direct tag download (bypasses Release API)
#   - Handles download, extraction/installation and version tracking in ~/.<app>
#
# Parameters:
#   $1 APP - Application name (used for install path and version file)
#   $2 REPO - Codeberg repository in form user/repo
#   $3 MODE - Release type:
#     tarball → source tarball (.tar.gz)
#     binary → .deb file (auto-arch matched)
#     prebuild → prebuilt archive (e.g. tar.gz)
#     singlefile→ standalone binary (chmod +x)
#     tag → direct tag (bypasses Release API)
#   $4 VERSION - Optional release tag (default: latest)
#   $5 TARGET_DIR - Optional install path (default: /opt/<app>)
#   $6 ASSET_FILENAME - Required for:
#     - prebuild → archive filename or pattern
#     - singlefile→ binary filename or pattern
#
# Examples:
#   # 1. Minimal: Fetch and deploy source tarball
#   fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb"
#
#   # 2. Binary install via .deb asset (architecture auto-detected)
#   fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "binary"
#
#   # 3. Prebuilt archive (.tar.gz) with asset filename match
#   fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "prebuild" "latest" "/opt/myapp" "myapp_Linux_x86_64.tar.gz"
#
#   # 4. Single binary (chmod +x)
#   fetch_and_deploy_codeberg_release "myapp" "myuser/myapp" "singlefile" "v1.0.0" "/opt/myapp" "myapp-linux-amd64"
#
#   # 5. Explicit tag version
#   fetch_and_deploy_codeberg_release "autocaliweb" "gelbphoenix/autocaliweb" "tag" "v0.11.3" "/opt/autocaliweb"
# ------------------------------------------------------------------------------

function fetch_and_deploy_codeberg_release() {
  local app="$1"
  local repo="$2"
  local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile | tag
  local version="${4:-latest}"
  local target="${5:-/opt/$app}"
  local asset_pattern="${6:-}"

  local app_lc=$(echo "${app,,}" | tr -d ' ')
  local version_file="$HOME/.${app_lc}"

  local api_timeout="--connect-timeout 10 --max-time 60"
  local download_timeout="--connect-timeout 15 --max-time 900"

  local current_version=""
  [[ -f "$version_file" ]] && current_version=$(<"$version_file")

  ensure_dependencies jq

  ### Tag Mode (bypass Release API) ###
  if [[ "$mode" == "tag" ]]; then
    if [[ "$version" == "latest" ]]; then
      msg_error "Mode 'tag' requires explicit version (not 'latest')"
      return 1
    fi

    local tag_name="$version"
    [[ "$tag_name" =~ ^v ]] && version="${tag_name:1}" || version="$tag_name"

    if [[ "$current_version" == "$version" ]]; then
      $STD msg_ok "$app is already up-to-date (v$version)"
      return 0
    fi

    # DNS check
    if ! getent hosts "codeberg.org" &>/dev/null; then
      msg_error "DNS resolution failed for codeberg.org – check /etc/resolv.conf or networking"
      return 1
    fi

    local tmpdir
    tmpdir=$(mktemp -d) || return 1

    msg_info "Fetching Codeberg tag: $app ($tag_name)"

    local safe_version="${version//@/_}"
    safe_version="${safe_version//\//_}"
    local filename="${app_lc}-${safe_version}.tar.gz"
    local download_success=false

    # Codeberg archive URL format: https://codeberg.org/{owner}/{repo}/archive/{tag}.tar.gz
    local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz"
    if curl $download_timeout -fsSL -o "$tmpdir/$filename" "$archive_url"; then
      download_success=true
    fi

    if [[ "$download_success" != "true" ]]; then
      msg_error "Download failed for $app ($tag_name)"
      rm -rf "$tmpdir"
      return 1
    fi

    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
      msg_error "Failed to extract tarball"
      rm -rf "$tmpdir"
      return 1
    }

    local unpack_dir
    unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

    shopt -s dotglob nullglob
    cp -r "$unpack_dir"/* "$target/"
    shopt -u dotglob nullglob

    echo "$version" >"$version_file"
    msg_ok "Deployed: $app ($version)"
    rm -rf "$tmpdir"
    return 0
  fi

  # Codeberg API: https://codeberg.org/api/v1/repos/{owner}/{repo}/releases
  local api_url="https://codeberg.org/api/v1/repos/$repo/releases"
  if [[ "$version" != "latest" ]]; then
    # Get release by tag: /repos/{owner}/{repo}/releases/tags/{tag}
    api_url="https://codeberg.org/api/v1/repos/$repo/releases/tags/$version"
  fi

  # dns pre check
  if ! getent hosts "codeberg.org" &>/dev/null; then
    msg_error "DNS resolution failed for codeberg.org – check /etc/resolv.conf or networking"
    return 1
  fi

  local max_retries=3 retry_delay=2 attempt=1 success=false resp http_code

  while ((attempt <= max_retries)); do
    resp=$(curl $api_timeout -fsSL -w "%{http_code}" -o /tmp/codeberg_rel.json "$api_url") && success=true && break
    sleep "$retry_delay"
    ((attempt++))
  done

  if ! $success; then
    msg_error "Failed to fetch release metadata from $api_url after $max_retries attempts"
    return 1
  fi

  http_code="${resp:(-3)}"
  [[ "$http_code" != "200" ]] && {
    msg_error "Codeberg API returned HTTP $http_code"
    return 1
  }

  local json tag_name
  json=$(</tmp/codeberg_rel.json)

  # For "latest", the API returns an array - take the first (most recent) release
  if [[ "$version" == "latest" ]]; then
    json=$(echo "$json" | jq '.[0]')
  fi

  tag_name=$(echo "$json" | jq -r '.tag_name // .name // empty')
  [[ "$tag_name" =~ ^v ]] && version="${tag_name:1}" || version="$tag_name"

  if [[ "$current_version" == "$version" ]]; then
    $STD msg_ok "$app is already up-to-date (v$version)"
    return 0
  fi

  local tmpdir
  tmpdir=$(mktemp -d) || return 1
  local filename="" url=""

  msg_info "Fetching Codeberg release: $app ($version)"

  ### Tarball Mode ###
  if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
    local safe_version="${version//@/_}"
    safe_version="${safe_version//\//_}"
    filename="${app_lc}-${safe_version}.tar.gz"
    local download_success=false

    # Codeberg archive URL format
    local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz"
    if curl $download_timeout -fsSL -o "$tmpdir/$filename" "$archive_url"; then
      download_success=true
    fi

    if [[ "$download_success" != "true" ]]; then
      msg_error "Download failed for $app ($tag_name)"
      rm -rf "$tmpdir"
      return 1
    fi

    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
      msg_error "Failed to extract tarball"
      rm -rf "$tmpdir"
      return 1
    }
    local unpack_dir
    unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

    shopt -s dotglob nullglob
    cp -r "$unpack_dir"/* "$target/"
    shopt -u dotglob nullglob

  ### Binary Mode ###
  elif [[ "$mode" == "binary" ]]; then
    local arch
    arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
    [[ "$arch" == "x86_64" ]] && arch="amd64"
    [[ "$arch" == "aarch64" ]] && arch="arm64"

    local assets url_match=""
    # Codeberg assets are in .assets[].browser_download_url
    assets=$(echo "$json" | jq -r '.assets[].browser_download_url')

    # If explicit filename pattern is provided, match that first
    if [[ -n "$asset_pattern" ]]; then
      for u in $assets; do
        case "${u##*/}" in
        $asset_pattern)
          url_match="$u"
          break
          ;;
        esac
      done
    fi

    # Fall back to architecture heuristic
    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
          url_match="$u"
          break
        fi
      done
    fi

    # Fallback: any .deb file
    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
      done
    fi

    if [[ -z "$url_match" ]]; then
      msg_error "No suitable .deb asset found for $app"
      rm -rf "$tmpdir"
      return 1
    fi

    filename="${url_match##*/}"
    curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url_match" || {
      msg_error "Download failed: $url_match"
      rm -rf "$tmpdir"
      return 1
    }

    chmod 644 "$tmpdir/$filename"
    $STD apt install -y "$tmpdir/$filename" || {
      $STD dpkg -i "$tmpdir/$filename" || {
        msg_error "Both apt and dpkg installation failed"
        rm -rf "$tmpdir"
        return 1
      }
    }

  ### Prebuild Mode ###
  elif [[ "$mode" == "prebuild" ]]; then
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    local asset_url=""
    for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    curl $download_timeout -fsSL -o "$tmpdir/$filename" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    local unpack_tmp
    unpack_tmp=$(mktemp -d)
    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    if [[ "$filename" == *.zip ]]; then
      ensure_dependencies unzip
      unzip -q "$tmpdir/$filename" -d "$unpack_tmp" || {
        msg_error "Failed to extract ZIP archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    elif [[ "$filename" == *.tar.* || "$filename" == *.tgz ]]; then
      tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
        msg_error "Failed to extract TAR archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    else
      msg_error "Unsupported archive format: $filename"
      rm -rf "$tmpdir" "$unpack_tmp"
      return 1
    fi

    local top_dirs
    top_dirs=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d | wc -l)
    local top_entries inner_dir
    top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1)
    if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then
      inner_dir="$top_entries"
      shopt -s dotglob nullglob
      if compgen -G "$inner_dir/*" >/dev/null; then
        cp -r "$inner_dir"/* "$target/" || {
          msg_error "Failed to copy contents from $inner_dir to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Inner directory is empty: $inner_dir"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    else
      shopt -s dotglob nullglob
      if compgen -G "$unpack_tmp/*" >/dev/null; then
        cp -r "$unpack_tmp"/* "$target/" || {
          msg_error "Failed to copy contents to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Unpacked archive is empty"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    fi

  ### Singlefile Mode ###
  elif [[ "$mode" == "singlefile" ]]; then
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    local asset_url=""
    for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    mkdir -p "$target"

    local use_filename="${USE_ORIGINAL_FILENAME:-false}"
    local target_file="$app"
    [[ "$use_filename" == "true" ]] && target_file="$filename"

    curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
      chmod +x "$target/$target_file"
    fi

  else
    msg_error "Unknown mode: $mode"
    rm -rf "$tmpdir"
    return 1
  fi

  echo "$version" >"$version_file"
  msg_ok "Deployed: $app ($version)"
  rm -rf "$tmpdir"
}

# ------------------------------------------------------------------------------
# Downloads and deploys latest GitHub release (source, binary, tarball, asset).
#