Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2026-02-04 12:23:26 +01:00)

Compare commits: 103 commits, tools_func...fix/php-mo
Commits (103; author and date columns were empty in this view):

eb8691493b, 715159895b, eb5de9173a, 8dc80b2b7e, e2abb46c86, ab5e8cbd7c, 69e563afb5, 22acb9d728, 6fc6bab1bf, c1ad36718e,
e3b796b842, d2f02c2ba0, 94b9190e07, 09b343d150, c464b95fa3, 24ddcb9d97, a5aa55ffad, 2412a45a20, 1b87ec7bfd, 84966410ed,
cc70f84d27, 1c4c95723b, b67a82123e, 33a531960e, 8ab722fb7c, 3294c58713, 845aebd654, dd2def9384, 82740302bc, 610509e834,
aed241fdc4, e0fb69b6c3, c3c8384f69, 99d13903f3, 25ecb12060, 5a5dd8ae87, 42a3a68ce2, 44f5656a56, 27bcc09b0c, 47d63e92bf,
03216e05ac, 5ba96bb3f2, 8e7dc349ac, e1f02bfa77, b013dcb1e3, 89645dcd92, 718219fec3, 97138713b4, 620bf7ee66, f52c90b26b,
af53a669c7, cca0d9e584, 56e97764ac, bac7f07a74, ce4d2350ef, 90f7020826, 393bc4a7e4, 89cbd21e17, eb6bd65415, 5623969bb9,
a6e7939fce, fd3e6196cc, 910723c745, 6267250e49, b35437c391, 032dfacce2, 0ace1009ad, 2938bb29f5, ba9618eabd, c7669c39c3,
2434e0ab3b, f76080e9db, c83076dbc4, 9b3786fc26, 49c1eef653, f939170d47, 21d09cfb17, 07ad467f34, ff4f5f6a0a, ea116222f4,
fa00a51110, a259ae2b3e, 03e660fdef, 3a5e2f9515, 83b1a5e39b, 01da983f72, 3a04923479, ebb48f697c, 533ca924c9, 3042162065,
ddd0164c54, 279e33c3a5, 6c1d1e1e71, 0453673115, 497cefa850, ba279675a8, 830c6923b5, cc59d69cb7, 74b06f82e4, 436dc8568b,
0e7e08579b, 22bbba572c, d789af9637
@@ -28,16 +28,12 @@ jobs:
       const matched = patterns.some((regex) => regex.test(content));

       if (matched) {
-        const message = `👋 Hello!
-
-It looks like you are referencing a **container creation issue with a Debian 13 template** (e.g. \`debian-13-standard_13.x-x_amd64.tar.zst\`).
-
-We receive many similar reports about this, and it’s not related to the scripts themselves but to **a Proxmox base template bug**.
-
-Please refer to [discussion #8126](https://github.com/community-scripts/ProxmoxVE/discussions/8126) for details.
-If your issue persists after following the guidance there, feel free to reopen this issue.
-
-_This issue was automatically closed by a bot._`;
+        const message = "👋 Hello!\n\n" +
+          "It looks like you are referencing a **container creation issue with a Debian 13 template** (e.g. `debian-13-standard_13.x-x_amd64.tar.zst`).\n\n" +
+          "We receive many similar reports about this, and it's not related to the scripts themselves but to **a Proxmox base template bug**.\n\n" +
+          "Please refer to [discussion #8126](https://github.com/community-scripts/ProxmoxVE/discussions/8126) for details.\n" +
+          "If your issue persists after following the guidance there, feel free to reopen this issue.\n\n" +
+          "_This issue was automatically closed by a bot._";

         await github.rest.issues.createComment({
           ...context.repo,
0  .github/workflows/crawl-versions.yaml → .github/workflows/bak/crawl-versions.yaml (generated, vendored)
0  .github/workflows/script-test.yml → .github/workflows/bak/script-test.yml (generated, vendored)
0  .github/workflows/script_format.yml → .github/workflows/bak/script_format.yml (generated, vendored)
4  .github/workflows/close-discussion.yml (generated, vendored)
@@ -103,7 +103,7 @@ jobs:
             }
           `;

-          //
+          let discussionQLId;
           try {
             const discussionResponse = await graphqlWithAuth(discussionQuery, {
               owner,
@@ -111,7 +111,7 @@ jobs:
               number: parseInt(discussionNumber, 10),
             });

-            const discussionQLId = discussionResponse.repository.discussion.id;
+            discussionQLId = discussionResponse.repository.discussion.id;
             if (!discussionQLId) {
               console.log("Failed to fetch discussion GraphQL ID.");
               return;
0  .github/workflows/close-ttek-issues.yaml → .github/workflows/close-tteck-issues.yaml (generated, vendored)
76  .github/workflows/lock-issue.yaml (generated, vendored, new file)

name: Lock closed issues

on:
  schedule:
    - cron: "0 0 * * *" # Run daily at midnight
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - name: Lock old issues and PRs
        uses: actions/github-script@v7
        with:
          script: |
            const daysBeforeLock = 3;
            const cutoffDate = new Date('2026-01-27T00:00:00Z');
            const lockDate = new Date();
            lockDate.setDate(lockDate.getDate() - daysBeforeLock);

            // Exclude patterns (case-insensitive)
            const excludePatterns = [
              /automated pr/i,
              /\[bot\]/i,
              /dependabot/i
            ];

            // Search for closed, unlocked issues older than 3 days
            const issues = await github.rest.search.issuesAndPullRequests({
              q: `repo:${context.repo.owner}/${context.repo.repo} is:closed is:unlocked updated:<${lockDate.toISOString().split('T')[0]}`,
              per_page: 50
            });

            console.log(`Found ${issues.data.items.length} issues/PRs to process`);

            for (const item of issues.data.items) {
              // Skip excluded items
              const shouldExclude = excludePatterns.some(pattern => pattern.test(item.title));
              if (shouldExclude) {
                console.log(`Skipped #${item.number}: "${item.title}" (matches exclude pattern)`);
                continue;
              }

              const createdAt = new Date(item.created_at);
              const isNew = createdAt >= cutoffDate;

              try {
                // Add comment only for new issues (created after 2026-01-27)
                if (isNew) {
                  const comment = item.pull_request
                    ? 'This pull request has been automatically locked. Please open a new issue for related bugs.'
                    : 'This issue has been automatically locked. Please open a new issue for related bugs and reference this issue if needed.';

                  await github.rest.issues.createComment({
                    ...context.repo,
                    issue_number: item.number,
                    body: comment
                  });
                }

                // Lock the issue/PR
                await github.rest.issues.lock({
                  ...context.repo,
                  issue_number: item.number,
                  lock_reason: 'resolved'
                });

                console.log(`Locked #${item.number} (${item.pull_request ? 'PR' : 'Issue'})`);
              } catch (error) {
                console.log(`Failed to lock #${item.number}: ${error.message}`);
              }
            }
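The per-item gating rules in the github-script step above can be summarized as follows; this is a minimal TypeScript sketch with the dates and regexes copied from the workflow, and the function names are illustrative only, not part of the repository:

```ts
// Sketch of the lock workflow's per-item decision rules (names are illustrative).
const excludePatterns: RegExp[] = [/automated pr/i, /\[bot\]/i, /dependabot/i];
const cutoffDate = new Date("2026-01-27T00:00:00Z");

// Items whose title matches an exclude pattern are never locked.
function shouldSkip(title: string): boolean {
  return excludePatterns.some((pattern) => pattern.test(title));
}

// Only items created on or after the cutoff receive the "automatically locked"
// comment; older closed items are locked silently.
function shouldComment(createdAt: string): boolean {
  return new Date(createdAt) >= cutoffDate;
}
```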
230  .github/workflows/update-versions-github.yml (generated, vendored, new file)

name: Update GitHub Versions (New)

on:
  workflow_dispatch:
  schedule:
    # Runs 4x daily: 00:00, 06:00, 12:00, 18:00 UTC
    - cron: "0 0,6,12,18 * * *"

permissions:
  contents: write
  pull-requests: write

env:
  VERSIONS_FILE: frontend/public/json/github-versions.json
  BRANCH_NAME: automated/update-github-versions
  AUTOMATED_PR_LABEL: "automated pr"

jobs:
  update-github-versions:
    if: github.repository == 'community-scripts/ProxmoxVE'
    runs-on: ubuntu-latest

    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ vars.APP_ID }}
          private-key: ${{ secrets.APP_PRIVATE_KEY }}

      - name: Generate a token for PR approval and merge
        id: generate-token-merge
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
          private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}

      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          ref: main

      - name: Extract GitHub versions from install scripts
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail

          echo "========================================="
          echo " Extracting GitHub versions from scripts"
          echo "========================================="

          # Initialize versions array
          versions_json="[]"

          # Function to add a version entry
          add_version() {
            local slug="$1"
            local repo="$2"
            local version="$3"
            local pinned="$4"
            local date="$5"

            versions_json=$(echo "$versions_json" | jq \
              --arg slug "$slug" \
              --arg repo "$repo" \
              --arg version "$version" \
              --argjson pinned "$pinned" \
              --arg date "$date" \
              '. += [{"slug": $slug, "repo": $repo, "version": $version, "pinned": $pinned, "date": $date}]')
          }

          # Get list of slugs from JSON files
          echo ""
          echo "=== Scanning JSON files for slugs ==="

          for json_file in frontend/public/json/*.json; do
            [[ ! -f "$json_file" ]] && continue

            # Skip non-app JSON files
            basename_file=$(basename "$json_file")
            case "$basename_file" in
              metadata.json|versions.json|github-versions.json|dependency-check.json|update-apps.json)
                continue
                ;;
            esac

            # Extract slug from JSON
            slug=$(jq -r '.slug // empty' "$json_file" 2>/dev/null)
            [[ -z "$slug" ]] && continue

            # Find corresponding install script
            install_script="install/${slug}-install.sh"
            [[ ! -f "$install_script" ]] && continue

            # Look for fetch_and_deploy_gh_release calls
            # Pattern: fetch_and_deploy_gh_release "app" "owner/repo" ["mode"] ["version"]
            while IFS= read -r line; do
              # Skip commented lines
              [[ "$line" =~ ^[[:space:]]*# ]] && continue

              # Extract repo and version from fetch_and_deploy_gh_release
              if [[ "$line" =~ fetch_and_deploy_gh_release[[:space:]]+\"[^\"]*\"[[:space:]]+\"([^\"]+)\"([[:space:]]+\"([^\"]+)\")?([[:space:]]+\"([^\"]+)\")? ]]; then
                repo="${BASH_REMATCH[1]}"
                mode="${BASH_REMATCH[3]:-tarball}"
                pinned_version="${BASH_REMATCH[5]:-latest}"

                # Check if version is pinned (not "latest" and not empty)
                is_pinned=false
                target_version=""

                if [[ -n "$pinned_version" && "$pinned_version" != "latest" ]]; then
                  is_pinned=true
                  target_version="$pinned_version"
                fi

                # Fetch version from GitHub
                if [[ "$is_pinned" == "true" ]]; then
                  # For pinned versions, verify it exists and get date
                  response=$(gh api "repos/${repo}/releases/tags/${target_version}" 2>/dev/null || echo '{}')
                  if echo "$response" | jq -e '.tag_name' > /dev/null 2>&1; then
                    version=$(echo "$response" | jq -r '.tag_name')
                    date=$(echo "$response" | jq -r '.published_at // empty')
                    add_version "$slug" "$repo" "$version" "true" "$date"
                    echo "[$slug] ✓ $version (pinned)"
                  else
                    echo "[$slug] ⚠ pinned version $target_version not found"
                  fi
                else
                  # Fetch latest release
                  response=$(gh api "repos/${repo}/releases/latest" 2>/dev/null || echo '{}')
                  if echo "$response" | jq -e '.tag_name' > /dev/null 2>&1; then
                    version=$(echo "$response" | jq -r '.tag_name')
                    date=$(echo "$response" | jq -r '.published_at // empty')
                    add_version "$slug" "$repo" "$version" "false" "$date"
                    echo "[$slug] ✓ $version"
                  else
                    # Try tags as fallback
                    version=$(gh api "repos/${repo}/tags" --jq '.[0].name // empty' 2>/dev/null || echo "")
                    if [[ -n "$version" ]]; then
                      add_version "$slug" "$repo" "$version" "false" ""
                      echo "[$slug] ✓ $version (from tags)"
                    else
                      echo "[$slug] ⚠ no version found"
                    fi
                  fi
                fi

                break # Only first match per script
              fi
            done < <(grep 'fetch_and_deploy_gh_release' "$install_script" 2>/dev/null || true)

          done

          # Save versions file
          echo "$versions_json" | jq --arg date "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
            '{generated: $date, versions: (. | sort_by(.slug))}' > "$VERSIONS_FILE"

          total=$(echo "$versions_json" | jq 'length')
          echo ""
          echo "========================================="
          echo " Total versions extracted: $total"
          echo "========================================="

      - name: Check for changes
        id: check-changes
        run: |
          # Check if file is new (untracked) or has changes
          if [[ ! -f "$VERSIONS_FILE" ]]; then
            echo "changed=false" >> "$GITHUB_OUTPUT"
            echo "Versions file was not created"
          elif ! git ls-files --error-unmatch "$VERSIONS_FILE" &>/dev/null; then
            # File exists but is not tracked - it's new
            echo "changed=true" >> "$GITHUB_OUTPUT"
            echo "New file created: $VERSIONS_FILE"
          elif git diff --quiet "$VERSIONS_FILE" 2>/dev/null; then
            echo "changed=false" >> "$GITHUB_OUTPUT"
            echo "No changes detected"
          else
            echo "changed=true" >> "$GITHUB_OUTPUT"
            echo "Changes detected:"
            git diff --stat "$VERSIONS_FILE" 2>/dev/null || true
          fi

      - name: Commit and push changes
        if: steps.check-changes.outputs.changed == 'true'
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"
          git add "$VERSIONS_FILE"
          git commit -m "chore: update github-versions.json"
          git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
          git push origin $BRANCH_NAME --force

      - name: Create pull request if not exists
        if: steps.check-changes.outputs.changed == 'true'
        env:
          GH_TOKEN: ${{ steps.generate-token.outputs.token }}
        run: |
          PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
          if [ -z "$PR_EXISTS" ]; then
            gh pr create --title "[Github Action] Update github-versions.json" \
              --body "This PR is auto-generated by a Github Action to update the github-versions.json file." \
              --head $BRANCH_NAME \
              --base main \
              --label "$AUTOMATED_PR_LABEL"
          fi

      - name: Approve pull request
        if: steps.check-changes.outputs.changed == 'true'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
          if [ -n "$PR_NUMBER" ]; then
            gh pr review $PR_NUMBER --approve
          fi

      - name: Approve pull request and merge
        if: steps.check-changes.outputs.changed == 'true'
        env:
          GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
        run: |
          git config --global user.name "github-actions-automege[bot]"
          git config --global user.email "github-actions-automege[bot]@users.noreply.github.com"
          PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
          if [ -n "$PR_NUMBER" ]; then
            gh pr review $PR_NUMBER --approve
            gh pr merge $PR_NUMBER --squash --admin
          fi
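For orientation, the jq pipeline above emits a JSON document of roughly the following shape. The field names come directly from the add_version call; the TypeScript interface names mirror the GitHubVersionsResponse type that the frontend route imports later in this diff, but the exact type definition shown here is an assumption, not taken from the repository, and the example values are purely illustrative:

```ts
// Assumed shape of frontend/public/json/github-versions.json as written by the
// workflow above; field names match the add_version jq call.
interface GitHubVersionEntry {
  slug: string;     // app slug read from frontend/public/json/<app>.json
  repo: string;     // "owner/repo" parsed from fetch_and_deploy_gh_release
  version: string;  // latest release tag, or the pinned tag
  pinned: boolean;  // true when the install script pins a specific version
  date: string;     // release published_at; may be empty for the tags fallback
}

interface GitHubVersionsResponse {
  generated: string;              // UTC timestamp added by the final jq wrap
  versions: GitHubVersionEntry[]; // sorted by slug
}

// Illustrative example only — values are not taken from the real file:
const example: GitHubVersionsResponse = {
  generated: "2026-01-29T00:00:00Z",
  versions: [
    { slug: "immich", repo: "immich-app/immich", version: "v2.5.2", pinned: true, date: "2026-01-20T10:00:00Z" },
  ],
};
```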
102  CHANGELOG.md

@@ -391,6 +391,97 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit

 </details>

+## 2026-01-30
+
+## 2026-01-29
+
+### 🆕 New Scripts
+
+- Alpine-Valkey [@MickLesk](https://github.com/MickLesk) ([#11320](https://github.com/community-scripts/ProxmoxVE/pull/11320))
+
+### 🚀 Updated Scripts
+
+- #### 🐞 Bug Fixes
+
+  - Immich: Pin version to 2.5.2 [@vhsdream](https://github.com/vhsdream) ([#11335](https://github.com/community-scripts/ProxmoxVE/pull/11335))
+  - Kollection: Update to php 8.5 [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#11315](https://github.com/community-scripts/ProxmoxVE/pull/11315))
+  - Notifiarr: change installation check from apt to systemd service [@MickLesk](https://github.com/MickLesk) ([#11319](https://github.com/community-scripts/ProxmoxVE/pull/11319))
+
+- #### ✨ New Features
+
+  - [FEAT] Immich: Enable Maintenance Mode before update [@vhsdream](https://github.com/vhsdream) ([#11342](https://github.com/community-scripts/ProxmoxVE/pull/11342))
+  - jellyfin: add logrotate instead of reducing log level [@MickLesk](https://github.com/MickLesk) ([#11326](https://github.com/community-scripts/ProxmoxVE/pull/11326))
+  - core: Add config file handling options | Fix Vikunja update with interactive overwrite [@MickLesk](https://github.com/MickLesk) ([#11317](https://github.com/community-scripts/ProxmoxVE/pull/11317))
+  - Immich: v2.5.0 [@vhsdream](https://github.com/vhsdream) ([#11240](https://github.com/community-scripts/ProxmoxVE/pull/11240))
+
+- #### 💥 Breaking Changes
+
+  - fix: vikunja v1 [@CrazyWolf13](https://github.com/CrazyWolf13) ([#11308](https://github.com/community-scripts/ProxmoxVE/pull/11308))
+
+- #### 🔧 Refactor
+
+  - Refactor: Byparr [@vhsdream](https://github.com/vhsdream) ([#11338](https://github.com/community-scripts/ProxmoxVE/pull/11338))
+  - cloudflare: Remove deprecated DNS-over-HTTPS proxy option [@MickLesk](https://github.com/MickLesk) ([#11068](https://github.com/community-scripts/ProxmoxVE/pull/11068))
+
+### 💾 Core
+
+- #### 🐞 Bug Fixes
+
+  - build.func: Replace storage variable with searchdomain variable [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#11322](https://github.com/community-scripts/ProxmoxVE/pull/11322))
+
+### 📂 Github
+
+- Add workflow to lock closed issues [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#11316](https://github.com/community-scripts/ProxmoxVE/pull/11316))
+
+## 2026-01-28
+
+### 🆕 New Scripts
+
+- nodecast-tv ([#11287](https://github.com/community-scripts/ProxmoxVE/pull/11287))
+
+### 🚀 Updated Scripts
+
+- #### 🐞 Bug Fixes
+
+  - Ubuntu 25.04 VM - Change default start from yes to no [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#11292](https://github.com/community-scripts/ProxmoxVE/pull/11292))
+
+- #### ✨ New Features
+
+  - various scripts: use setup_meilisearch function [@MickLesk](https://github.com/MickLesk) ([#11259](https://github.com/community-scripts/ProxmoxVE/pull/11259))
+
+- #### 🔧 Refactor
+
+  - Refactor: NPMPlus / Default Login [@MickLesk](https://github.com/MickLesk) ([#11262](https://github.com/community-scripts/ProxmoxVE/pull/11262))
+
+### 💾 Core
+
+- #### 🐞 Bug Fixes
+
+  - core: sed patch for ram [@lavacano](https://github.com/lavacano) ([#11285](https://github.com/community-scripts/ProxmoxVE/pull/11285))
+  - Fix installer loop caused by invalid whiptail menu separator [@Mesteriis](https://github.com/Mesteriis) ([#11237](https://github.com/community-scripts/ProxmoxVE/pull/11237))
+  - core: fix Debian 13 LXC template root ownership bug [@MickLesk](https://github.com/MickLesk) ([#11277](https://github.com/community-scripts/ProxmoxVE/pull/11277))
+  - tools.func: prevent systemd-tmpfiles failure in unprivileged LXC during deb install [@MickLesk](https://github.com/MickLesk) ([#11271](https://github.com/community-scripts/ProxmoxVE/pull/11271))
+  - tools.func: fix php "wait_for" hint [@MickLesk](https://github.com/MickLesk) ([#11254](https://github.com/community-scripts/ProxmoxVE/pull/11254))
+
+- #### ✨ New Features
+
+  - core: update dynamic values in LXC profile on update_motd_ip [@MickLesk](https://github.com/MickLesk) ([#11268](https://github.com/community-scripts/ProxmoxVE/pull/11268))
+  - tools.func: add new function - setup_meilisearch [@MickLesk](https://github.com/MickLesk) ([#11258](https://github.com/community-scripts/ProxmoxVE/pull/11258))
+
+### 📂 Github
+
+- github: add GitHub-based versions.json updater [@MickLesk](https://github.com/MickLesk) ([#10021](https://github.com/community-scripts/ProxmoxVE/pull/10021))
+
+### 🌐 Website
+
+- #### ✨ New Features
+
+  - Frontend: use github-versions.json for version display [@MickLesk](https://github.com/MickLesk) ([#11281](https://github.com/community-scripts/ProxmoxVE/pull/11281))
+
+- #### 📝 Script Information
+
+  - fix: homarr: conf location [@CrazyWolf13](https://github.com/CrazyWolf13) ([#11253](https://github.com/community-scripts/ProxmoxVE/pull/11253))
+
 ## 2026-01-27

 ### 🚀 Updated Scripts
@@ -398,12 +489,21 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
 - #### 🐞 Bug Fixes

   - Immich: update libraw [@vhsdream](https://github.com/vhsdream) ([#11233](https://github.com/community-scripts/ProxmoxVE/pull/11233))
-  - [FIX] Jotty: backup and restore custom config [@vhsdream](https://github.com/vhsdream) ([#11212](https://github.com/community-scripts/ProxmoxVE/pull/11212))
+
+- #### ✨ New Features
+
+  - grist: enable optional enterprise features toggle [@MickLesk](https://github.com/MickLesk) ([#11239](https://github.com/community-scripts/ProxmoxVE/pull/11239))

 - #### 🔧 Refactor

   - Termix: use nginx.conf from upstream repo [@MickLesk](https://github.com/MickLesk) ([#11228](https://github.com/community-scripts/ProxmoxVE/pull/11228))

+### 💾 Core
+
+- #### ✨ New Features
+
+  - feat: add NVIDIA driver install prompt for GPU-enabled containers [@devdecrux](https://github.com/devdecrux) ([#11184](https://github.com/community-scripts/ProxmoxVE/pull/11184))
+
 ### 📚 Documentation

 - doc setup_deb822_repo arg order [@chrnie](https://github.com/chrnie) ([#11215](https://github.com/community-scripts/ProxmoxVE/pull/11215))
73  ct/alpine-valkey.sh (new file)

#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: pshankinclarke (lazarillo)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://valkey.io/

APP="Alpine-Valkey"
var_tags="${var_tags:-alpine;database}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-1}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.23}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  if ! apk -e info newt >/dev/null 2>&1; then
    apk add -q newt
  fi
  LXCIP=$(ip a s dev eth0 | awk '/inet / {print $2}' | cut -d/ -f1)
  while true; do
    CHOICE=$(
      whiptail --backtitle "Proxmox VE Helper Scripts" --title "Valkey Management" --menu "Select option" 11 58 3 \
        "1" "Update Valkey" \
        "2" "Allow 0.0.0.0 for listening" \
        "3" "Allow only ${LXCIP} for listening" 3>&2 2>&1 1>&3
    )
    exit_status=$?
    if [ $exit_status == 1 ]; then
      clear
      exit-script
    fi
    header_info
    case $CHOICE in
      1)
        msg_info "Updating Valkey"
        apk update && apk upgrade valkey
        rc-service valkey restart
        msg_ok "Updated Valkey"
        msg_ok "Updated successfully!"
        exit
        ;;
      2)
        msg_info "Setting Valkey to listen on all interfaces"
        sed -i 's/^bind .*/bind 0.0.0.0/' /etc/valkey/valkey.conf
        rc-service valkey restart
        msg_ok "Valkey now listens on all interfaces!"
        exit
        ;;
      3)
        msg_info "Setting Valkey to listen only on ${LXCIP}"
        sed -i "s/^bind .*/bind ${LXCIP}/" /etc/valkey/valkey.conf
        rc-service valkey restart
        msg_ok "Valkey now listens only on ${LXCIP}!"
        exit
        ;;
    esac
  done
}

start
build_container
description

msg_ok "Completed successfully!\n"
echo -e "${APP} should be reachable on port 6379.
${BL}valkey-cli -h ${IP} -p 6379${CL} \n"
@@ -34,7 +34,7 @@ function update_script() {
   systemctl stop nginx
   msg_ok "Stopped nginx"

-  PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,opcache,redis,zip,pdo-sqlite,bcmath,pdo,curl,dom,fpm" setup_php
+  PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,redis,pdo-sqlite" setup_php

   msg_info "Backing up Bar Assistant"
   mv /opt/bar-assistant /opt/bar-assistant-backup
@@ -88,18 +88,8 @@ function update_script() {
     msg_ok "Started nginx"
   fi

-  if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
-    msg_info "Stopping Meilisearch"
-    systemctl stop meilisearch
-    msg_ok "Stopped Meilisearch"
-
-    fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
-
-    msg_info "Starting Meilisearch"
-    systemctl start meilisearch
-    msg_ok "Started Meilisearch"
-    msg_ok "Updated successfully!"
-  fi
+  setup_meilisearch
+
   exit
 }
48  ct/byparr.sh

@@ -35,6 +35,54 @@ function update_script() {

   CLEAN_INSTALL=1 fetch_and_deploy_gh_release "Byparr" "ThePhaseless/Byparr" "tarball" "latest"

+  if ! dpkg -l | grep -q ffmpeg; then
+    msg_info "Installing dependencies"
+    $STD apt install -y --no-install-recommends \
+      ffmpeg \
+      libatk1.0-0 \
+      libcairo-gobject2 \
+      libcairo2 \
+      libdbus-glib-1-2 \
+      libfontconfig1 \
+      libfreetype6 \
+      libgdk-pixbuf-xlib-2.0-0 \
+      libglib2.0-0 \
+      libgtk-3-0 \
+      libpango-1.0-0 \
+      libpangocairo-1.0-0 \
+      libpangoft2-1.0-0 \
+      libx11-6 \
+      libx11-xcb1 \
+      libxcb-shm0 \
+      libxcb1 \
+      libxcomposite1 \
+      libxcursor1 \
+      libxdamage1 \
+      libxext6 \
+      libxfixes3 \
+      libxi6 \
+      libxrender1 \
+      libxt6 \
+      libxtst6 \
+      xvfb \
+      fonts-noto-color-emoji \
+      fonts-unifont \
+      xfonts-cyrillic \
+      xfonts-scalable \
+      fonts-liberation \
+      fonts-ipafont-gothic \
+      fonts-wqy-zenhei \
+      fonts-tlwg-loma-otf
+    $STD apt autoremove -y chromium
+    msg_ok "Installed dependencies"
+  fi
+
+  msg_info "Configuring Byparr"
+  cd /opt/Byparr
+  $STD uv sync --link-mode copy
+  $STD uv run camoufox fetch
+  msg_ok "Configured Byparr"
+
   msg_info "Starting Service"
   systemctl start byparr
   msg_ok "Started Service"
@@ -49,6 +49,7 @@ function update_script() {
     cp /opt/grist_bak/landing.db /opt/grist/landing.db
     cd /opt/grist
     $STD yarn install
+    $STD yarn run install:ee
     $STD yarn run build:prod
     $STD yarn run install:python
     msg_ok "Updated Grist"
6  ct/headers/alpine-valkey (new file): figlet-style ASCII banner reading "Alpine-Valkey"
6  ct/headers/nodecast-tv (new file): figlet-style ASCII banner reading "nodecast-tv"
33  ct/immich.sh

@@ -68,7 +68,7 @@ EOF
 if [[ ! -f /etc/apt/sources.list.d/mise.list ]]; then
   msg_info "Installing Mise"
   curl -fSs https://mise.jdx.dev/gpg-key.pub | tee /etc/apt/keyrings/mise-archive-keyring.pub 1>/dev/null
-  echo "deb [signed-by=/etc/apt/keyrings/mise-archive-keyring.pub arch=amd64] https://mise.jdx.dev/deb stable main" | tee /etc/apt/sources.list.d/mise.list
+  echo "deb [signed-by=/etc/apt/keyrings/mise-archive-keyring.pub arch=amd64] https://mise.jdx.dev/deb stable main" >/etc/apt/sources.list.d/mise.list
   $STD apt update
   $STD apt install -y mise
   msg_ok "Installed Mise"
@@ -112,8 +112,16 @@ EOF
   msg_ok "Image-processing libraries up to date"
 fi

-RELEASE="2.4.1"
+RELEASE="2.5.2"
 if check_for_gh_release "immich" "immich-app/immich" "${RELEASE}"; then
+  if [[ $(cat ~/.immich) > "2.5.1" ]]; then
+    msg_info "Enabling Maintenance Mode"
+    cd /opt/immich/app/bin
+    $STD bash ./immich-admin enable-maintenance-mode
+    export MAINT_MODE=1
+    $STD cd -
+    msg_ok "Enabled Maintenance Mode"
+  fi
   msg_info "Stopping Services"
   systemctl stop immich-web
   systemctl stop immich-ml
@@ -167,7 +175,7 @@ EOF

   CLEAN_INSTALL=1 fetch_and_deploy_gh_release "immich" "immich-app/immich" "tarball" "v${RELEASE}" "$SRC_DIR"

-  msg_info "Updating ${APP} web and microservices"
+  msg_info "Updating Immich web and microservices"
   cd "$SRC_DIR"/server
   export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
   export CI=1
@@ -209,7 +217,7 @@ EOF
   mkdir -p "$PLUGIN_DIR"
   cp -r ./dist "$PLUGIN_DIR"/dist
   cp ./manifest.json "$PLUGIN_DIR"
-  msg_ok "Updated ${APP} server, web, cli and plugins"
+  msg_ok "Updated Immich server, web, cli and plugins"

   cd "$SRC_DIR"/machine-learning
   mkdir -p "$ML_DIR" && chown -R immich:immich "$ML_DIR"
@@ -217,12 +225,13 @@ EOF
   export VIRTUAL_ENV="${ML_DIR}"/ml-venv
   if [[ -f ~/.openvino ]]; then
     msg_info "Updating HW-accelerated machine-learning"
-    $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra openvino --active -n -p python3.11 --managed-python
-    patchelf --clear-execstack "${VIRTUAL_ENV}/lib/python3.11/site-packages/onnxruntime/capi/onnxruntime_pybind11_state.cpython-311-x86_64-linux-gnu.so"
+    $STD uv add --no-sync --optional openvino onnxruntime-openvino==1.20.0 --active -n -p python3.12 --managed-python
+    $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra openvino --no-dev --active --link-mode copy -n -p python3.12 --managed-python
+    patchelf --clear-execstack "${VIRTUAL_ENV}/lib/python3.12/site-packages/onnxruntime/capi/onnxruntime_pybind11_state.cpython-312-x86_64-linux-gnu.so"
     msg_ok "Updated HW-accelerated machine-learning"
   else
     msg_info "Updating machine-learning"
-    $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra cpu --active -n -p python3.11 --managed-python
+    $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra cpu --no-dev --active --link-mode copy -n -p python3.11 --managed-python
     msg_ok "Updated machine-learning"
   fi
   cd "$SRC_DIR"
@@ -241,8 +250,16 @@ EOF
   ln -s "$GEO_DIR" "$APP_DIR"

   chown -R immich:immich "$INSTALL_DIR"
-  msg_ok "Updated ${APP} to v${RELEASE}"
+  if [[ "$MAINT_MODE" == 1 ]]; then
+    msg_info "Disabling Maintenance Mode"
+    cd /opt/immich/app/bin
+    $STD bash ./immich-admin disable-maintenance-mode
+    unset MAINT_MODE
+    $STD cd -
+    msg_ok "Disabled Maintenance Mode"
+  fi
   systemctl restart immich-ml immich-web
+  msg_ok "Updated successfully!"
 fi
 exit
 }
@@ -61,6 +61,7 @@ function update_script() {
   fi
   MODULE_VERSION="$(jq -r '.packageManager | split("@")[1]' /opt/karakeep/package.json)"
   NODE_VERSION="22" NODE_MODULE="pnpm@${MODULE_VERSION}" setup_nodejs
+  setup_meilisearch

   msg_info "Updating Karakeep"
   corepack enable
@@ -90,6 +91,7 @@ function update_script() {
   msg_ok "Started Services"
   msg_ok "Updated successfully!"
 fi
+
 exit
 }
@@ -32,6 +32,8 @@ function update_script() {
   systemctl stop apache2
   msg_ok "Stopped Service"

+  PHP_VERSION="8.5" PHP_APACHE="YES" PHP_MODULE="apcu,ctype,dom,fileinfo,iconv,pgsql" setup_php
+
   msg_info "Creating a backup"
   mv /opt/koillection/ /opt/koillection-backup
   msg_ok "Backup created"
@@ -30,18 +30,7 @@ function update_script() {
   3>&1 1>&2 2>&3)

   if [ "$UPD" == "1" ]; then
-    if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
-      msg_info "Stopping Meilisearch"
-      systemctl stop meilisearch
-      msg_ok "Stopped Meilisearch"
-
-      fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
-
-      msg_info "Starting Meilisearch"
-      systemctl start meilisearch
-      msg_ok "Started Meilisearch"
-      msg_ok "Updated successfully!"
-    fi
+    setup_meilisearch
     exit
   fi
60  ct/nodecast-tv.sh (new file)

#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: luismco
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/technomancer702/nodecast-tv

APP="nodecast-tv"
var_tags="${var_tags:-media}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
var_gpu="${var_gpu:-yes}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /opt/nodecast-tv ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi

  if check_for_gh_release "nodecast-tv" "technomancer702/nodecast-tv"; then
    msg_info "Stopping Service"
    systemctl stop nodecast-tv
    msg_ok "Stopped Service"

    fetch_and_deploy_gh_release "nodecast-tv" "technomancer702/nodecast-tv"

    msg_info "Updating Modules"
    cd /opt/nodecast-tv
    $STD npm install
    msg_ok "Updated Modules"

    msg_info "Starting Service"
    systemctl start nodecast-tv
    msg_ok "Started Service"
    msg_ok "Updated successfully!"
  fi
  exit
}

start
build_container
description

msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
@@ -23,7 +23,7 @@ function update_script() {
   header_info
   check_container_storage
   check_container_resources
-  if [[ ! -f /etc/apt/sources.list.d/golift.list ]]; then
+  if [[ ! -f /usr/lib/systemd/system/notifiarr.service ]]; then
     msg_error "No ${APP} Installation Found!"
     exit
   fi
@@ -28,6 +28,8 @@ function update_script() {
     exit
   fi

+  setup_meilisearch
+
   if check_for_gh_release "openarchiver" "LogicLabs-OU/OpenArchiver"; then
     msg_info "Stopping Services"
     systemctl stop openarchiver
@@ -54,6 +56,7 @@ function update_script() {
     msg_ok "Started Services"
     msg_ok "Updated successfully!"
   fi
+
   exit
 }
@@ -31,7 +31,7 @@ function update_script() {

   CURRENT_PHP=$(php -v 2>/dev/null | awk '/^PHP/{print $2}' | cut -d. -f1,2)
   if [[ "$CURRENT_PHP" != "8.3" ]]; then
-    PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULE="common,mysql,fpm,redis" setup_php
+    PHP_VERSION="8.3" PHP_FPM="YES" PHP_MODULE="mysql,redis" setup_php
     setup_composer
     sed -i 's|php8\.2-fpm\.sock|php8.3-fpm.sock|g' /etc/nginx/sites-available/paymenter.conf
     $STD systemctl reload nginx
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
 # Copyright (c) 2021-2026 community-scripts ORG
-# Author: MickLesk (Canbiz)
+# Author: MickLesk (Canbiz) | Co-Author: CrazyWolf13
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://vikunja.io/

@@ -27,30 +27,51 @@ function update_script() {
     msg_error "No ${APP} Installation Found!"
     exit
   fi
-  RELEASE=$(curl -fsSL https://dl.vikunja.io/vikunja/ | grep -oP 'href="/vikunja/\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n 1)
-  if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+  RELEASE="$( [[ -f "$HOME/.vikunja" ]] && cat "$HOME/.vikunja" 2>/dev/null || [[ -f /opt/Vikunja_version ]] && cat /opt/Vikunja_version 2>/dev/null || true)"
+  if [[ -z "$RELEASE" ]] || [[ "$RELEASE" == "unstable" ]] || dpkg --compare-versions "${RELEASE:-0.0.0}" lt "1.0.0"; then
+    msg_warn "You are upgrading from Vikunja '$RELEASE'."
+    msg_warn "This requires MANUAL config changes in /etc/vikunja/config.yml."
+    msg_warn "See: https://vikunja.io/changelog/whats-new-in-vikunja-1.0.0/#config-changes"
+
+    read -rp "Continue with update? (y to proceed): " -t 30 CONFIRM1 || exit 1
+    [[ "$CONFIRM1" =~ ^[yY]$ ]] || exit 0
+
+    echo
+    msg_warn "Vikunja may not start after the update until you manually adjust the config."
+    msg_warn "Details: https://vikunja.io/changelog/whats-new-in-vikunja-1.0.0/#config-changes"
+
+    read -rp "Acknowledge and continue? (y): " -t 30 CONFIRM2 || exit 1
+    [[ "$CONFIRM2" =~ ^[yY]$ ]] || exit 0
+  fi
+
+  if check_for_gh_release "vikunja" "go-vikunja/vikunja"; then
+    echo
+    msg_warn "The package update may include config file changes."
+    echo -e "${TAB}${YW}How do you want to handle /etc/vikunja/config.yml?${CL}"
+    echo -e "${TAB} 1) Keep your current config"
+    echo -e "${TAB} 2) Install the new package maintainer's config"
+    read -rp " Choose [1/2] (default: 1): " -t 60 CONFIG_CHOICE || CONFIG_CHOICE="1"
+    [[ -z "$CONFIG_CHOICE" ]] && CONFIG_CHOICE="1"
+
+    if [[ "$CONFIG_CHOICE" == "2" ]]; then
+      export DPKG_FORCE_CONFNEW="1"
+    else
+      export DPKG_FORCE_CONFOLD="1"
+    fi
+
     msg_info "Stopping Service"
     systemctl stop vikunja
     msg_ok "Stopped Service"

-    msg_info "Updating ${APP} to ${RELEASE}"
-    cd /opt
-    rm -rf /opt/vikunja/vikunja
-    curl -fsSL "https://dl.vikunja.io/vikunja/$RELEASE/vikunja-$RELEASE-amd64.deb" -o $(basename "https://dl.vikunja.io/vikunja/$RELEASE/vikunja-$RELEASE-amd64.deb")
-    export DEBIAN_FRONTEND=noninteractive
-    $STD dpkg -i vikunja-"$RELEASE"-amd64.deb
-    rm -rf /opt/vikunja-"$RELEASE"-amd64.deb
-    echo "${RELEASE}" >/opt/${APP}_version.txt
-    msg_ok "Updated ${APP}"
+    fetch_and_deploy_gh_release "vikunja" "go-vikunja/vikunja" "binary"

     msg_info "Starting Service"
     systemctl start vikunja
     msg_ok "Started Service"
     msg_ok "Updated successfully!"
-  else
-    msg_ok "No update required. ${APP} is already at ${RELEASE}"
   fi
-  exit
+  exit 0
 }

 start
@@ -33,7 +33,11 @@
   },
   "notes": [
     {
-      "text": "With an option to configure cloudflared as a DNS-over-HTTPS (DoH) proxy",
+      "text": "After install, run: cloudflared tunnel login && cloudflared tunnel create <NAME>",
+      "type": "info"
+    },
+    {
+      "text": "Or create tunnel via Cloudflare Zero Trust Dashboard",
       "type": "info"
     }
   ]
1587  frontend/public/json/github-versions.json (new file) — diff suppressed because it is too large
@@ -12,7 +12,7 @@
   "documentation": "https://homarr.dev/docs/getting-started/",
   "website": "https://homarr.dev/",
   "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/homarr.webp",
-  "config_path": "/opt/homarr/.env",
+  "config_path": "/opt/homarr.env",
   "description": "Homarr is a sleek, modern dashboard that puts all of your apps and services at your fingertips.",
   "install_methods": [
     {
@@ -43,6 +43,10 @@
     {
       "text": "For NVIDIA graphics cards, you'll need to install the same drivers in the container that you did on the host. In the container, run the driver installation script and add the CLI arg --no-kernel-module",
       "type": "info"
+    },
+    {
+      "text": "Log rotation is configured in /etc/logrotate.d/jellyfin. To reduce verbosity, change MinimumLevel in /etc/jellyfin/logging.json to Warning or Error (disables fail2ban auth logging).",
+      "type": "info"
     }
   ]
 }
35  frontend/public/json/nodecast-tv.json (new file)

{
  "name": "nodecast-tv",
  "slug": "nodecast-tv",
  "categories": [
    13
  ],
  "date_created": "2026-01-28",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 3000,
  "documentation": "https://github.com/technomancer702/nodecast-tv/blob/main/README.md",
  "website": "https://github.com/technomancer702/nodecast-tv",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/nodecast-tv.webp",
  "config_path": "",
  "description": "nodecast-tv is a modern, web-based IPTV player featuring Live TV, EPG, Movies (VOD), and Series support. Built with performance and user experience in mind.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/nodecast-tv.sh",
      "resources": {
        "cpu": 2,
        "ram": 2048,
        "hdd": 4,
        "os": "debian",
        "version": "13"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": []
}
@@ -39,8 +39,8 @@
     }
   ],
   "default_credentials": {
-    "username": "admin@example.org",
-    "password": null
+    "username": "admin@local.com",
+    "password": "helper-scripts.com"
   },
   "notes": [
     {
@@ -48,11 +48,7 @@
       "type": "info"
     },
     {
-      "text": "The initial starting process can be take 1-2min. ",
-      "type": "info"
-    },
-    {
-      "text": "Application credentials: `cat /opt/.npm_pwd` - if file not exist in LXC check docker logs for password with `docker logs npmplus`",
+      "text": "The initial starting process can take 1-2min. ",
       "type": "info"
     }
   ]
@@ -25,6 +25,17 @@
         "os": "Debian",
         "version": "13"
       }
+    },
+    {
+      "type": "alpine",
+      "script": "ct/alpine-valkey.sh",
+      "resources": {
+        "cpu": 1,
+        "ram": 256,
+        "hdd": 1,
+        "os": "alpine",
+        "version": "3.23"
+      }
     }
   ],
   "default_credentials": {
@@ -35,6 +46,10 @@
     {
       "text": "Show Login Credentials, type `cat ~/valkey.creds` in the LXC console",
       "type": "info"
+    },
+    {
+      "text": "Alpines Valkey package is compiled without TLS support. For TLS, use the Debian-based valkey script instead.",
+      "type": "info"
     }
   ]
 }
(one further file's diff suppressed because it is too large)

36  frontend/src/app/api/github-versions/route.ts (new file)

import { NextResponse } from "next/server";
import { promises as fs } from "node:fs";
import path from "node:path";

import type { GitHubVersionsResponse } from "@/lib/types";

export const dynamic = "force-static";

const jsonDir = "public/json";
const versionsFileName = "github-versions.json";
const encoding = "utf-8";

async function getVersions(): Promise<GitHubVersionsResponse> {
  const filePath = path.resolve(jsonDir, versionsFileName);
  const fileContent = await fs.readFile(filePath, encoding);
  const data: GitHubVersionsResponse = JSON.parse(fileContent);
  return data;
}

export async function GET() {
  try {
    const versions = await getVersions();
    return NextResponse.json(versions);
  }
  catch (error) {
    console.error(error);
    const err = error as globalThis.Error;
    return NextResponse.json({
      generated: "",
      versions: [],
      error: err.message || "An unexpected error occurred",
    }, {
      status: 500,
    });
  }
}
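A minimal consumption sketch for this static route, assuming the same GitHubVersionsResponse type from "@/lib/types"; the real frontend goes through the useVersions hook, whose implementation is not part of this diff, and the endpoint path here is simply inferred from the route's folder name:

```ts
import type { GitHubVersionsResponse } from "@/lib/types";

// Hypothetical client-side helper; path mirrors the route folder above.
async function fetchGitHubVersions(): Promise<GitHubVersionsResponse> {
  const res = await fetch("/api/github-versions");
  if (!res.ok) {
    throw new Error(`Failed to load github-versions.json: ${res.status}`);
  }
  return (await res.json()) as GitHubVersionsResponse;
}
```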
@@ -3,18 +3,22 @@ import { NextResponse } from "next/server";
 import { promises as fs } from "node:fs";
 import path from "node:path";

-import type { AppVersion } from "@/lib/types";
-
 export const dynamic = "force-static";

 const jsonDir = "public/json";
 const versionsFileName = "versions.json";
 const encoding = "utf-8";

+interface LegacyVersion {
+  name: string;
+  version: string;
+  date: string;
+}
+
 async function getVersions() {
   const filePath = path.resolve(jsonDir, versionsFileName);
   const fileContent = await fs.readFile(filePath, encoding);
-  const versions: AppVersion[] = JSON.parse(fileContent);
+  const versions: LegacyVersion[] = JSON.parse(fileContent);

   const modifiedVersions = versions.map((version) => {
     let newName = version.name;
@@ -1,13 +1,13 @@
 "use client";
 
-import { X } from "lucide-react";
+import { X, HelpCircle } from "lucide-react";
 import { Suspense } from "react";
 import Image from "next/image";
 
 import type { AppVersion, Script } from "@/lib/types";
 
-import { cleanSlug } from "@/lib/utils/resource-utils";
 import { Separator } from "@/components/ui/separator";
+import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
 import { useVersions } from "@/hooks/use-versions";
 import { basePath } from "@/config/site-config";
 import { extractDate } from "@/lib/time";
@@ -108,18 +108,31 @@ function VersionInfo({ item }: { item: Script }) {
   const { data: versions = [], isLoading } = useVersions();
 
   if (isLoading || versions.length === 0) {
-    return <p className="text-sm text-muted-foreground">Loading versions...</p>;
+    return null;
   }
 
-  const matchedVersion = versions.find((v: AppVersion) => {
-    const cleanName = v.name.replace(/[^a-z0-9]/gi, "").toLowerCase();
-    return cleanName === cleanSlug(item.slug) || cleanName.includes(cleanSlug(item.slug));
-  });
+  const matchedVersion = versions.find((v: AppVersion) => v.slug === item.slug);
 
   if (!matchedVersion)
     return null;
 
-  return <span className="font-medium text-sm">{matchedVersion.version}</span>;
+  return (
+    <span className="font-medium text-sm flex items-center gap-1">
+      {matchedVersion.version}
+      {matchedVersion.pinned && (
+        <TooltipProvider>
+          <Tooltip>
+            <TooltipTrigger asChild>
+              <HelpCircle className="h-3.5 w-3.5 text-muted-foreground cursor-help" />
+            </TooltipTrigger>
+            <TooltipContent className="max-w-xs">
+              <p>This version is pinned. We test each update for breaking changes before releasing new versions.</p>
+            </TooltipContent>
+          </Tooltip>
+        </TooltipProvider>
+      )}
+    </span>
+  );
 }
 
 export function ScriptItem({ item, setSelectedScript }: ScriptItemProps) {
@@ -2,7 +2,7 @@
 
 import { useQuery } from "@tanstack/react-query";
 
-import type { AppVersion } from "@/lib/types";
+import type { AppVersion, GitHubVersionsResponse } from "@/lib/types";
 
 import { fetchVersions } from "@/lib/data";
 
@@ -10,14 +10,8 @@ export function useVersions() {
   return useQuery<AppVersion[]>({
     queryKey: ["versions"],
     queryFn: async () => {
-      const fetchedVersions = await fetchVersions();
-      if (Array.isArray(fetchedVersions)) {
-        return fetchedVersions;
-      }
-      if (fetchedVersions && typeof fetchedVersions === "object") {
-        return [fetchedVersions];
-      }
-      return [];
+      const response: GitHubVersionsResponse = await fetchVersions();
+      return response.versions ?? [];
     },
   });
 }
@@ -10,7 +10,7 @@ export async function fetchCategories() {
 }
 
 export async function fetchVersions() {
-  const response = await fetch(`/ProxmoxVE/api/versions`);
+  const response = await fetch(`/ProxmoxVE/api/github-versions`);
   if (!response.ok) {
     throw new Error(`Failed to fetch versions: ${response.statusText}`);
   }
@@ -63,7 +63,14 @@ export type OperatingSystem = {
 };
 
 export type AppVersion = {
-  name: string;
+  slug: string;
+  repo: string;
   version: string;
-  date: Date;
+  pinned: boolean;
+  date: string;
+};
+
+export type GitHubVersionsResponse = {
+  generated: string;
+  versions: AppVersion[];
 };
install/alpine-valkey-install.sh (new file, 45 lines)
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021-2026 community-scripts ORG
+# Author: pshankinclarke (lazarillo)
+# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+# Source: https://valkey.io/
+
+source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
+color
+verb_ip6
+catch_errors
+setting_up_container
+network_check
+update_os
+
+msg_info "Installing Valkey"
+$STD apk add valkey valkey-openrc valkey-cli
+sed -i 's/^bind .*/bind 0.0.0.0/' /etc/valkey/valkey.conf
+
+PASS="$(head -c 100 /dev/urandom | tr -dc 'a-zA-Z0-9' | head -c32)"
+echo "requirepass $PASS" >>/etc/valkey/valkey.conf
+echo "$PASS" >~/valkey.creds
+chmod 600 ~/valkey.creds
+
+MEMTOTAL_MB=$(free -m | grep ^Mem: | awk '{print $2}')
+MAXMEMORY_MB=$((MEMTOTAL_MB * 75 / 100))
+
+{
+  echo ""
+  echo "# Memory-optimized settings for small-scale deployments"
+  echo "maxmemory ${MAXMEMORY_MB}mb"
+  echo "maxmemory-policy allkeys-lru"
+  echo "maxmemory-samples 10"
+} >>/etc/valkey/valkey.conf
+msg_ok "Installed Valkey"
+
+# Note: Alpine's valkey package is compiled without TLS support
+# For TLS, use the Debian-based valkey script instead
+
+$STD rc-update add valkey default
+$STD rc-service valkey start
+
+motd_ssh
+customize
+cleanup_lxc
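For reference, the generated credentials and memory cap written above can be checked from inside the new Alpine container with the valkey-cli that this script installs:

PASS="$(cat ~/valkey.creds)"
valkey-cli -a "$PASS" ping                  # should answer PONG
valkey-cli -a "$PASS" config get maxmemory  # reflects the 75%-of-RAM cap appended to /etc/valkey/valkey.conf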
@@ -23,10 +23,10 @@ $STD apt install -y \
   libvips
 msg_ok "Installed Dependencies"
 
-PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,opcache,redis,zip,pdo-sqlite,bcmath,pdo,curl,dom,fpm" setup_php
+PHP_VERSION="8.4" PHP_FPM=YES PHP_MODULE="ffi,redis,pdo-sqlite" setup_php
 setup_composer
 NODE_VERSION="22" setup_nodejs
-fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
+setup_meilisearch
 fetch_and_deploy_gh_release "bar-assistant" "karlomikus/bar-assistant" "tarball" "latest" "/opt/bar-assistant"
 fetch_and_deploy_gh_release "vue-salt-rim" "karlomikus/vue-salt-rim" "tarball" "latest" "/opt/vue-salt-rim"
 
@@ -36,49 +36,16 @@ sed -i.bak -E 's/^\s*;?\s*ffi\.enable\s*=.*/ffi.enable=true/' /etc/php/${PHPVER}
 $STD systemctl reload php${PHPVER}-fpm
 msg_info "configured PHP"
 
-msg_info "Configure MeiliSearch"
-curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml
-MASTER_KEY=$(openssl rand -base64 12)
-sed -i \
-  -e 's|^env =.*|env = "production"|' \
-  -e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-  -e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-  -e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-  -e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-  -e 's|^# no_analytics = true|no_analytics = true|' \
-  -e 's|^http_addr =.*|http_addr = "127.0.0.1:7700"|' \
-  /etc/meilisearch.toml
-msg_ok "Configured MeiliSearch"
-
-msg_info "Creating MeiliSearch service"
-cat <<EOF >/etc/systemd/system/meilisearch.service
-[Unit]
-Description=Meilisearch
-After=network.target
-
-[Service]
-ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
-systemctl enable -q --now meilisearch
-sleep 5
-msg_ok "Created Service MeiliSearch"
-
 msg_info "Installing Bar Assistant"
 cd /opt/bar-assistant
 cp /opt/bar-assistant/.env.dist /opt/bar-assistant/.env
 mkdir -p /opt/bar-assistant/resources/data
 curl -fsSL https://github.com/bar-assistant/data/archive/refs/heads/v5.tar.gz | tar -xz --strip-components=1 -C /opt/bar-assistant/resources/data
-MeiliSearch_API_KEY=$(curl -s -X GET 'http://127.0.0.1:7700/keys' -H "Authorization: Bearer $MASTER_KEY" | grep -o '"key":"[^"]*"' | head -n 1 | sed 's/"key":"//;s/"//')
-MeiliSearch_API_KEY_UID=$(curl -s -X GET 'http://127.0.0.1:7700/keys' -H "Authorization: Bearer $MASTER_KEY" | grep -o '"uid":"[^"]*"' | head -n 1 | sed 's/"uid":"//;s/"//')
 sed -i -e "s|^APP_URL=|APP_URL=http://${LOCAL_IP}/bar/|" \
   -e "s|^MEILISEARCH_HOST=|MEILISEARCH_HOST=http://127.0.0.1:7700|" \
-  -e "s|^MEILISEARCH_KEY=|MEILISEARCH_KEY=${MASTER_KEY}|" \
+  -e "s|^MEILISEARCH_KEY=|MEILISEARCH_KEY=${MEILISEARCH_MASTER_KEY}|" \
-  -e "s|^MEILISEARCH_API_KEY=|MEILISEARCH_API_KEY=${MeiliSearch_API_KEY}|" \
+  -e "s|^MEILISEARCH_API_KEY=|MEILISEARCH_API_KEY=${MEILISEARCH_API_KEY}|" \
-  -e "s|^MEILISEARCH_API_KEY_UID=|MEILISEARCH_API_KEY_UID=${MeiliSearch_API_KEY_UID}|" \
+  -e "s|^MEILISEARCH_API_KEY_UID=|MEILISEARCH_API_KEY_UID=${MEILISEARCH_API_KEY_UID}|" \
   /opt/bar-assistant/.env
 $STD composer install --no-interaction
 $STD php artisan key:generate
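The rewritten sed block expects MEILISEARCH_MASTER_KEY, MEILISEARCH_API_KEY and MEILISEARCH_API_KEY_UID in the environment; exporting them is presumably the job of the new setup_meilisearch helper, which is not shown in this diff. Purely as an illustration (not part of the change), a guard placed before the sed call could fail fast if those exports are missing:

# illustrative only - not part of the diff
for v in MEILISEARCH_MASTER_KEY MEILISEARCH_API_KEY MEILISEARCH_API_KEY_UID; do
  [[ -n "${!v:-}" ]] || { msg_error "setup_meilisearch did not export $v"; exit 1; }
done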
@@ -14,17 +14,52 @@ network_check
 update_os
 
 msg_info "Installing Dependencies"
-$STD apt -y install \
-  xauth \
-  xvfb \
-  scrot \
-  chromium \
-  chromium-driver \
-  ca-certificates
+$STD apt -y install --no-install-recommends \
+  ffmpeg \
+  libatk1.0-0 \
+  libcairo-gobject2 \
+  libcairo2 \
+  libdbus-glib-1-2 \
+  libfontconfig1 \
+  libfreetype6 \
+  libgdk-pixbuf-xlib-2.0-0 \
+  libglib2.0-0 \
+  libgtk-3-0 \
+  libpango-1.0-0 \
+  libpangocairo-1.0-0 \
+  libpangoft2-1.0-0 \
+  libx11-6 \
+  libx11-xcb1 \
+  libxcb-shm0 \
+  libxcb1 \
+  libxcomposite1 \
+  libxcursor1 \
+  libxdamage1 \
+  libxext6 \
+  libxfixes3 \
+  libxi6 \
+  libxrender1 \
+  libxt6 \
+  libxtst6 \
+  xvfb \
+  fonts-noto-color-emoji \
+  fonts-unifont \
+  xfonts-cyrillic \
+  xfonts-scalable \
+  fonts-liberation \
+  fonts-ipafont-gothic \
+  fonts-wqy-zenhei \
+  fonts-tlwg-loma-otf
 msg_ok "Installed Dependencies"
 
-fetch_and_deploy_gh_release "Byparr" "ThePhaseless/Byparr" "tarball" "latest"
 setup_uv
+fetch_and_deploy_gh_release "Byparr" "ThePhaseless/Byparr" "tarball" "latest"
+
+msg_info "Configuring Byparr"
+cd /opt/Byparr
+$STD uv sync --link-mode copy
+$STD uv run camoufox fetch
+msg_ok "Configured Byparr"
 
 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/byparr.service
@@ -23,41 +23,6 @@ setup_deb822_repo \
 $STD apt install -y cloudflared
 msg_ok "Installed Cloudflared"
 
-read -r -p "${TAB3}Would you like to configure cloudflared as a DNS-over-HTTPS (DoH) proxy? <y/N> " prompt
-if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
-  msg_info "Creating Service"
-  cat <<EOF >/usr/local/etc/cloudflared/config.yml
-proxy-dns: true
-proxy-dns-address: 0.0.0.0
-proxy-dns-port: 53
-proxy-dns-max-upstream-conns: 5
-proxy-dns-upstream:
-- https://1.1.1.1/dns-query
-- https://1.0.0.1/dns-query
-#- https://8.8.8.8/dns-query
-#- https://8.8.4.4/dns-query
-#- https://9.9.9.9/dns-query
-#- https://149.112.112.112/dns-query
-EOF
-  cat <<EOF >/etc/systemd/system/cloudflared.service
-[Unit]
-Description=cloudflared DNS-over-HTTPS (DoH) proxy
-After=syslog.target network-online.target
-
-[Service]
-Type=simple
-ExecStart=/usr/local/bin/cloudflared --config /usr/local/etc/cloudflared/config.yml
-Restart=on-failure
-RestartSec=10
-KillMode=process
-
-[Install]
-WantedBy=multi-user.target
-EOF
-  systemctl enable -q --now cloudflared
-  msg_ok "Created Service"
-fi
-
 motd_ssh
 customize
 cleanup_lxc
@@ -27,6 +27,7 @@ export CYPRESS_INSTALL_BINARY=0
 export NODE_OPTIONS="--max-old-space-size=2048"
 cd /opt/grist
 $STD yarn install
+$STD yarn run install:ee
 $STD yarn run build:prod
 $STD yarn run install:python
 cat <<EOF >/opt/grist/.env
@@ -13,6 +13,42 @@ setting_up_container
 network_check
 update_os
 
+echo ""
+echo ""
+echo -e "🤖 ${BL}Immich Machine Learning Options${CL}"
+echo "─────────────────────────────────────────"
+echo "Please choose your machine-learning type:"
+echo ""
+echo " 1) CPU only (default)"
+echo " 2) Intel OpenVINO (requires GPU passthrough)"
+echo ""
+
+read -r -p "${TAB3}Select machine-learning type [1]: " ML_TYPE
+ML_TYPE="${ML_TYPE:-1}"
+if [[ "$ML_TYPE" == "2" ]]; then
+  msg_info "Installing OpenVINO dependencies"
+  touch ~/.openvino
+  $STD apt install -y --no-install-recommends patchelf
+  tmp_dir=$(mktemp -d)
+  $STD pushd "$tmp_dir"
+  curl -fsSLO https://raw.githubusercontent.com/immich-app/base-images/refs/heads/main/server/Dockerfile
+  readarray -t INTEL_URLS < <(
+    sed -n "/intel-[igc|opencl]/p" ./Dockerfile | awk '{print $2}'
+    sed -n "/libigdgmm12/p" ./Dockerfile | awk '{print $3}'
+  )
+  for url in "${INTEL_URLS[@]}"; do
+    curl -fsSLO "$url"
+  done
+  $STD apt install -y ./libigdgmm12*.deb
+  rm ./libigdgmm12*.deb
+  $STD apt install -y ./*.deb
+  $STD apt-mark hold libigdgmm12
+  $STD popd
+  rm -rf "$tmp_dir"
+  dpkg-query -W -f='${Version}\n' intel-opencl-icd >~/.intel_version
+  msg_ok "Installed OpenVINO dependencies"
+fi
+
 setup_uv
 
 msg_info "Installing dependencies"
@@ -86,36 +122,11 @@ msg_ok "Dependencies Installed"
 
 msg_info "Installing Mise"
 curl -fSs https://mise.jdx.dev/gpg-key.pub | tee /etc/apt/keyrings/mise-archive-keyring.pub 1>/dev/null
-echo "deb [signed-by=/etc/apt/keyrings/mise-archive-keyring.pub arch=amd64] https://mise.jdx.dev/deb stable main" | tee /etc/apt/sources.list.d/mise.list
+echo "deb [signed-by=/etc/apt/keyrings/mise-archive-keyring.pub arch=amd64] https://mise.jdx.dev/deb stable main" >/etc/apt/sources.list.d/mise.list
 $STD apt update
 $STD apt install -y mise
 msg_ok "Installed Mise"
 
-read -r -p "${TAB3}Install OpenVINO dependencies for Intel HW-accelerated machine-learning? y/N " prompt
-if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
-  msg_info "Installing OpenVINO dependencies"
-  touch ~/.openvino
-  $STD apt install -y --no-install-recommends patchelf
-  tmp_dir=$(mktemp -d)
-  $STD pushd "$tmp_dir"
-  curl -fsSLO https://raw.githubusercontent.com/immich-app/base-images/refs/heads/main/server/Dockerfile
-  readarray -t INTEL_URLS < <(
-    sed -n "/intel-[igc|opencl]/p" ./Dockerfile | awk '{print $2}'
-    sed -n "/libigdgmm12/p" ./Dockerfile | awk '{print $3}'
-  )
-  for url in "${INTEL_URLS[@]}"; do
-    curl -fsSLO "$url"
-  done
-  $STD apt install -y ./libigdgmm12*.deb
-  rm ./libigdgmm12*.deb
-  $STD apt install -y ./*.deb
-  $STD apt-mark hold libigdgmm12
-  $STD popd
-  rm -rf "$tmp_dir"
-  dpkg-query -W -f='${Version}\n' intel-opencl-icd >~/.intel_version
-  msg_ok "Installed OpenVINO dependencies"
-fi
-
 msg_info "Configuring Debian Testing Repo"
 sed -i 's/ trixie-updates/ trixie-updates testing/g' /etc/apt/sources.list.d/debian.sources
 cat <<EOF >/etc/apt/preferences.d/preferences
@@ -137,28 +148,17 @@ PNPM_VERSION="$(curl -fsSL "https://raw.githubusercontent.com/immich-app/immich/
 NODE_VERSION="24" NODE_MODULE="pnpm@${PNPM_VERSION}" setup_nodejs
 PG_VERSION="16" PG_MODULES="pgvector" setup_postgresql
 
-msg_info "Setting up Postgresql Database"
 VCHORD_RELEASE="0.5.3"
+msg_info "Installing Vectorchord v${VCHORD_RELEASE}"
 curl -fsSL "https://github.com/tensorchord/VectorChord/releases/download/${VCHORD_RELEASE}/postgresql-16-vchord_${VCHORD_RELEASE}-1_amd64.deb" -o vchord.deb
 $STD apt install -y ./vchord.deb
 rm vchord.deb
 echo "$VCHORD_RELEASE" >~/.vchord_version
-DB_NAME="immich"
-DB_USER="immich"
-DB_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c18)
+msg_ok "Installed Vectorchord v${VCHORD_RELEASE}"
+
 sed -i -e "/^#shared_preload/s/^#//;/^shared_preload/s/''/'vchord.so'/" /etc/postgresql/16/main/postgresql.conf
 systemctl restart postgresql.service
-$STD sudo -u postgres psql -c "CREATE USER $DB_USER WITH ENCRYPTED PASSWORD '$DB_PASS';"
-$STD sudo -u postgres psql -c "CREATE DATABASE $DB_NAME WITH OWNER $DB_USER ENCODING 'UTF8' TEMPLATE template0;"
-$STD sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $DB_NAME to $DB_USER;"
-$STD sudo -u postgres psql -c "ALTER USER $DB_USER WITH SUPERUSER;"
-{
-  echo "${APPLICATION} DB Credentials"
-  echo "Database User: $DB_USER"
-  echo "Database Password: $DB_PASS"
-  echo "Database Name: $DB_NAME"
-} >>~/"$APPLICATION".creds
-msg_ok "Set up Postgresql Database"
+PG_DB_NAME="immich" PG_DB_USER="immich" PG_DB_GRANT_SUPERUSER="true" PG_DB_SKIP_ALTER_ROLE="true" setup_postgresql_db
 
 msg_info "Compiling Custom Photo-processing Library (extreme patience)"
 LD_LIBRARY_PATH=/usr/local/lib
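The PG_DB_* values consumed later in Immich's .env are presumably exported by setup_postgresql_db itself; the helper is driven entirely by environment variables, as in the call above. A sketch of that convention (PG_DB_PASS is assumed to be generated by the helper, it does not appear in this diff):

PG_DB_NAME="immich" PG_DB_USER="immich" PG_DB_GRANT_SUPERUSER="true" PG_DB_SKIP_ALTER_ROLE="true" setup_postgresql_db
# afterwards $PG_DB_USER, $PG_DB_NAME and (assumed) $PG_DB_PASS are reused when writing ${INSTALL_DIR}/.env below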
@@ -296,9 +296,9 @@ GEO_DIR="${INSTALL_DIR}/geodata"
 mkdir -p "$INSTALL_DIR"
 mkdir -p {"${APP_DIR}","${UPLOAD_DIR}","${GEO_DIR}","${INSTALL_DIR}"/cache}
 
-fetch_and_deploy_gh_release "immich" "immich-app/immich" "tarball" "v2.4.1" "$SRC_DIR"
+fetch_and_deploy_gh_release "immich" "immich-app/immich" "tarball" "v2.5.2" "$SRC_DIR"
 
-msg_info "Installing ${APPLICATION} (patience)"
+msg_info "Installing Immich (patience)"
 
 cd "$SRC_DIR"/server
 export COREPACK_ENABLE_DOWNLOAD_PROMPT=0
@@ -347,12 +347,13 @@ mkdir -p "$ML_DIR" && chown -R immich:immich "$INSTALL_DIR"
 export VIRTUAL_ENV="${ML_DIR}/ml-venv"
 if [[ -f ~/.openvino ]]; then
   msg_info "Installing HW-accelerated machine-learning"
-  $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra openvino --active -n -p python3.11 --managed-python
-  patchelf --clear-execstack "${VIRTUAL_ENV}/lib/python3.11/site-packages/onnxruntime/capi/onnxruntime_pybind11_state.cpython-311-x86_64-linux-gnu.so"
+  $STD uv add --no-sync --optional openvino onnxruntime-openvino==1.20.0 --active -n -p python3.12 --managed-python
+  $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra openvino --no-dev --active --link-mode copy -n -p python3.12 --managed-python
+  patchelf --clear-execstack "${VIRTUAL_ENV}/lib/python3.12/site-packages/onnxruntime/capi/onnxruntime_pybind11_state.cpython-312-x86_64-linux-gnu.so"
   msg_ok "Installed HW-accelerated machine-learning"
 else
   msg_info "Installing machine-learning"
-  $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra cpu --active -n -p python3.11 --managed-python
+  $STD sudo --preserve-env=VIRTUAL_ENV -nu immich uv sync --extra cpu --no-dev --active --link-mode copy -n -p python3.11 --managed-python
   msg_ok "Installed machine-learning"
 fi
 cd "$SRC_DIR"
@@ -384,7 +385,7 @@ msg_ok "Installed GeoNames data"
 
 mkdir -p /var/log/immich
 touch /var/log/immich/{web.log,ml.log}
-msg_ok "Installed ${APPLICATION}"
+msg_ok "Installed Immich"
 
 msg_info "Modifying user, creating env file, scripts & services"
 usermod -aG video,render immich
@@ -393,11 +394,12 @@ cat <<EOF >"${INSTALL_DIR}"/.env
 TZ=$(cat /etc/timezone)
 IMMICH_VERSION=release
 NODE_ENV=production
+IMMICH_ALLOW_SETUP=true
 
 DB_HOSTNAME=127.0.0.1
-DB_USERNAME=${DB_USER}
+DB_USERNAME=${PG_DB_USER}
-DB_PASSWORD=${DB_PASS}
+DB_PASSWORD=${PG_DB_PASS}
-DB_DATABASE_NAME=${DB_NAME}
+DB_DATABASE_NAME=${PG_DB_NAME}
 DB_VECTOR_EXTENSION=vectorchord
 
 REDIS_HOSTNAME=127.0.0.1
@@ -13,6 +13,7 @@ setting_up_container
 network_check
 update_os
 
+msg_custom "ℹ️" "${GN}" "If NVIDIA GPU passthrough is detected, you'll be asked whether to install drivers in the container"
 setup_hwaccel
 
 msg_info "Installing Jellyfin"
@@ -38,8 +39,19 @@ EOF
 
 $STD apt update
 $STD apt install -y jellyfin
-sed -i 's/"MinimumLevel": "Information"/"MinimumLevel": "Error"/g' /etc/jellyfin/logging.json
+# Configure log rotation to prevent disk fill (keeps fail2ban compatibility) (PR: #1690 / Issue: #11224)
+cat <<EOF >/etc/logrotate.d/jellyfin
+/var/log/jellyfin/*.log {
+  daily
+  rotate 3
+  maxsize 100M
+  missingok
+  notifempty
+  compress
+  delaycompress
+  copytruncate
+}
+EOF
 chown -R jellyfin:adm /etc/jellyfin
 sleep 10
 systemctl restart jellyfin
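The logrotate policy added above can be verified with a dry run from inside the container; -d only simulates rotation:

logrotate -d /etc/logrotate.d/jellyfin   # shows which /var/log/jellyfin/*.log files would rotate and why, without touching them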
@@ -25,20 +25,7 @@ msg_ok "Installed Dependencies"
 
 fetch_and_deploy_gh_release "monolith" "Y2Z/monolith" "singlefile" "latest" "/usr/bin" "monolith-gnu-linux-x86_64"
 fetch_and_deploy_gh_release "yt-dlp" "yt-dlp/yt-dlp-nightly-builds" "singlefile" "latest" "/usr/bin" "yt-dlp_linux"
-fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
-
-msg_info "Configuring Meilisearch"
-curl -fsSL "https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml" -o "/etc/meilisearch.toml"
-MASTER_KEY=$(openssl rand -base64 12)
-sed -i \
-  -e 's|^env =.*|env = "production"|' \
-  -e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-  -e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-  -e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-  -e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-  -e 's|^# no_analytics = true|no_analytics = true|' \
-  /etc/meilisearch.toml
-msg_ok "Configured Meilisearch"
+setup_meilisearch
 
 fetch_and_deploy_gh_release "karakeep" "karakeep-app/karakeep" "tarball"
 cd /opt/karakeep
@@ -70,7 +57,7 @@ NEXTAUTH_SECRET="$karakeep_SECRET"
 NEXTAUTH_URL="http://localhost:3000"
 DATA_DIR=${DATA_DIR}
 MEILI_ADDR="http://127.0.0.1:7700"
-MEILI_MASTER_KEY="$MASTER_KEY"
+MEILI_MASTER_KEY="$MEILISEARCH_MASTER_KEY"
 BROWSER_WEB_URL="http://127.0.0.1:9222"
 DB_WAL_MODE=true
 
@@ -109,19 +96,6 @@ $STD pnpm migrate
 msg_ok "Database Migration Completed"
 
 msg_info "Creating Services"
-cat <<EOF >/etc/systemd/system/meilisearch.service
-[Unit]
-Description=Meilisearch
-After=network.target
-
-[Service]
-ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
-
 cat <<EOF >/etc/systemd/system/karakeep-web.service
 [Unit]
 Description=karakeep Web
@@ -169,7 +143,7 @@ TimeoutStopSec=5
 WantedBy=multi-user.target
 EOF
 
-systemctl enable -q --now meilisearch karakeep-browser karakeep-workers karakeep-web
+systemctl enable -q --now karakeep-browser karakeep-workers karakeep-web
 msg_ok "Created Services"
 
 motd_ssh
@@ -15,7 +15,7 @@ update_os
 
 NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
 PG_VERSION="16" setup_postgresql
-PHP_VERSION="8.4" PHP_APACHE="YES" PHP_MODULE="apcu,ctype,dom,fileinfo,iconv,pgsql" setup_php
+PHP_VERSION="8.5" PHP_APACHE="YES" PHP_MODULE="apcu,ctype,dom,fileinfo,iconv,pgsql" setup_php
 setup_composer
 
 msg_info "Setting up PostgreSQL"
@@ -13,21 +13,7 @@ setting_up_container
 network_check
 update_os
 
-fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
-
-msg_info "Configuring ${APPLICATION}"
-curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml
-MASTER_KEY=$(openssl rand -base64 12)
-sed -i \
-  -e 's|^env =.*|env = "production"|' \
-  -e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-  -e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-  -e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-  -e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-  -e 's|^# no_analytics = true|no_analytics = true|' \
-  -e 's|^http_addr =.*|http_addr = "0.0.0.0:7700"|' \
-  /etc/meilisearch.toml
-msg_ok "Configured ${APPLICATION}"
+MEILISEARCH_BIND="0.0.0.0:7700" setup_meilisearch
 
 read -r -p "${TAB3}Do you want add meilisearch-ui? [y/n]: " prompt
 if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
@@ -41,27 +27,11 @@ if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
   cat <<EOF >/opt/meilisearch-ui/.env.local
 VITE_SINGLETON_MODE=true
 VITE_SINGLETON_HOST=http://${LOCAL_IP}:7700
-VITE_SINGLETON_API_KEY=${MASTER_KEY}
+VITE_SINGLETON_API_KEY=${MEILISEARCH_MASTER_KEY}
 EOF
   msg_ok "Configured ${APPLICATION}-ui"
-fi
 
-msg_info "Creating service"
-cat <<EOF >/etc/systemd/system/meilisearch.service
-[Unit]
-Description=Meilisearch
-After=network.target
-
-[Service]
-ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
-systemctl enable -q --now meilisearch
-
-if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
+  msg_info "Creating Meilisearch-UI service"
   cat <<EOF >/etc/systemd/system/meilisearch-ui.service
 [Unit]
 Description=Meilisearch UI Service
@@ -82,8 +52,8 @@ SyslogIdentifier=meilisearch-ui
 WantedBy=multi-user.target
 EOF
   systemctl enable -q --now meilisearch-ui
+  msg_ok "Created Meilisearch-UI service"
 fi
-msg_ok "Service created"
 
 motd_ssh
 customize
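With the bind address now passed via MEILISEARCH_BIND and the key exposed as MEILISEARCH_MASTER_KEY, a quick reachability check against the running instance could use Meilisearch's standard /version endpoint (variable names follow the usage above):

curl -fsS -H "Authorization: Bearer $MEILISEARCH_MASTER_KEY" "http://${LOCAL_IP}:7700/version"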
install/nodecast-tv-install.sh (new file, 50 lines)
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2021-2026 community-scripts ORG
+# Author: luismco
+# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
+# Source: https://github.com/technomancer702/nodecast-tv
+
+source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
+color
+verb_ip6
+catch_errors
+setting_up_container
+network_check
+update_os
+
+fetch_and_deploy_gh_release "nodecast-tv" "technomancer702/nodecast-tv"
+setup_nodejs
+
+msg_info "Installing Dependencies"
+$STD apt install -y ffmpeg
+msg_ok "Installed Dependencies"
+
+msg_info "Installing Modules"
+cd /opt/nodecast-tv
+$STD npm install
+msg_ok "Installed Modules"
+
+msg_info "Creating Service"
+cat <<EOF >/etc/systemd/system/nodecast-tv.service
+[Unit]
+Description=nodecast-tv
+After=network.target
+Wants=network.target
+
+[Service]
+Type=simple
+WorkingDirectory=/opt/nodecast-tv
+ExecStart=/bin/npm run dev
+Restart=on-failure
+RestartSec=10
+
+[Install]
+WantedBy=multi-user.target
+EOF
+systemctl enable -q --now nodecast-tv
+msg_ok "Created Service"
+
+motd_ssh
+customize
+cleanup_lxc
@@ -59,8 +59,8 @@ read -r -p "${TAB3}Enter your ACME Email: " ACME_EMAIL_INPUT
 
 yq -i "
   .services.npmplus.environment |=
-  (map(select(. != \"TZ=*\" and . != \"ACME_EMAIL=*\")) +
-  [\"TZ=$TZ_INPUT\", \"ACME_EMAIL=$ACME_EMAIL_INPUT\"])
+  (map(select(. != \"TZ=*\" and . != \"ACME_EMAIL=*\" and . != \"INITIAL_ADMIN_EMAIL=*\" and . != \"INITIAL_ADMIN_PASSWORD=*\")) +
+  [\"TZ=$TZ_INPUT\", \"ACME_EMAIL=$ACME_EMAIL_INPUT\", \"INITIAL_ADMIN_EMAIL=admin@local.com\", \"INITIAL_ADMIN_PASSWORD=helper-scripts.com\"])
 " /opt/compose.yaml
 
 msg_info "Building and Starting NPMplus (Patience)"
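After the yq edit above, the admin bootstrap values live in the compose environment rather than being scraped from container logs. They can be inspected with the same yq binary the script already uses:

yq '.services.npmplus.environment' /opt/compose.yaml
# should now list INITIAL_ADMIN_EMAIL=admin@local.com and INITIAL_ADMIN_PASSWORD=helper-scripts.com next to TZ/ACME_EMAIL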
@@ -86,39 +86,3 @@ msg_ok "Builded and started NPMplus"
 
 motd_ssh
 customize
-
-msg_info "Retrieving Default Login (Patience)"
-PASSWORD_FOUND=0
-
-for i in {1..60}; do
-  PASSWORD_LINE=$(
-    { awk '/Creating a new user:/{print; exit}' < <(docker logs "$CONTAINER_ID" 2>&1); } || true
-  )
-
-  if [[ -n "${PASSWORD_LINE:-}" ]]; then
-    PASSWORD="${PASSWORD_LINE#*password: }"
-    printf 'username: admin@example.org\npassword: %s\n' "$PASSWORD" >/opt/.npm_pwd
-    msg_ok "Saved default login to /opt/.npm_pwd"
-    PASSWORD_FOUND=1
-    break
-  fi
-  sleep 2
-done
-if [[ $PASSWORD_FOUND -eq 0 ]]; then
-  PASSWORD_LINE=$(
-    timeout 30s bash -c '
-      docker logs -f --since=0s --tail=0 "$1" 2>&1 | awk "/Creating a new user:/{print; exit}"
-    ' _ "$CONTAINER_ID" || true
-  )
-  if [[ -n "${PASSWORD_LINE:-}" ]]; then
-    PASSWORD="${PASSWORD_LINE#*password: }"
-    printf 'username: admin@example.org\npassword: %s\n' "$PASSWORD" >/opt/.npm_pwd
-    msg_ok "Saved default login to /opt/.npm_pwd (live)"
-    PASSWORD_FOUND=1
-  fi
-fi
-
-if [[ $PASSWORD_FOUND -eq 0 ]]; then
-  msg_error "Could not retrieve default login after 120s."
-  echo -e "\nYou can manually check the container logs with:\n  docker logs $CONTAINER_ID | grep 'Creating a new user:'\n"
-fi
@@ -21,40 +21,11 @@ NODE_VERSION="22" NODE_MODULE="pnpm" setup_nodejs
 PG_VERSION="17" setup_postgresql
 PG_DB_NAME="openarchiver_db" PG_DB_USER="openarchiver" setup_postgresql_db
 
-fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
+setup_meilisearch
 fetch_and_deploy_gh_release "openarchiver" "LogicLabs-OU/OpenArchiver" "tarball"
 JWT_KEY="$(openssl rand -hex 32)"
 SECRET_KEY="$(openssl rand -hex 32)"
 
-msg_info "Configuring MeiliSearch"
-curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml
-MASTER_KEY=$(openssl rand -base64 12)
-sed -i \
-  -e 's|^env =.*|env = "production"|' \
-  -e "s|^# master_key =.*|master_key = \"$MASTER_KEY\"|" \
-  -e 's|^db_path =.*|db_path = "/var/lib/meilisearch/data"|' \
-  -e 's|^dump_dir =.*|dump_dir = "/var/lib/meilisearch/dumps"|' \
-  -e 's|^snapshot_dir =.*|snapshot_dir = "/var/lib/meilisearch/snapshots"|' \
-  -e 's|^# no_analytics = true|no_analytics = true|' \
-  -e 's|^http_addr =.*|http_addr = "127.0.0.1:7700"|' \
-  /etc/meilisearch.toml
-
-cat <<EOF >/etc/systemd/system/meilisearch.service
-[Unit]
-Description=Meilisearch
-After=network.target
-
-[Service]
-ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
-EOF
-systemctl enable -q --now meilisearch
-sleep 5
-msg_ok "Configured MeiliSearch"
-
 msg_info "Setting up Open Archiver"
 mkdir -p /opt/openarchiver-data
 cd /opt/openarchiver
@@ -65,7 +36,7 @@ sed -i "s|^POSTGRES_USER=.*|POSTGRES_USER=$PG_DB_USER|g" /opt/openarchiver/.env
 sed -i "s|^POSTGRES_PASSWORD=.*|POSTGRES_PASSWORD=$PG_DB_PASS|g" /opt/openarchiver/.env
 sed -i "s|^DATABASE_URL=.*|DATABASE_URL=\"postgresql://$PG_DB_USER:$PG_DB_PASS@localhost:5432/$PG_DB_NAME\"|g" /opt/openarchiver/.env
 sed -i "s|^MEILI_HOST=.*|MEILI_HOST=http://localhost:7700|g" /opt/openarchiver/.env
-sed -i "s|^MEILI_MASTER_KEY=.*|MEILI_MASTER_KEY=$MASTER_KEY|g" /opt/openarchiver/.env
+sed -i "s|^MEILI_MASTER_KEY=.*|MEILI_MASTER_KEY=$MEILISEARCH_MASTER_KEY|g" /opt/openarchiver/.env
 sed -i "s|^REDIS_HOST=.*|REDIS_HOST=localhost|g" /opt/openarchiver/.env
 sed -i "s|^REDIS_PASSWORD=.*|REDIS_PASSWORD=|g" /opt/openarchiver/.env
 sed -i "s|^STORAGE_LOCAL_ROOT_PATH=.*|STORAGE_LOCAL_ROOT_PATH=/opt/openarchiver-data|g" /opt/openarchiver/.env
@@ -19,7 +19,7 @@ $STD apt install -y \
   openssl
 msg_ok "Installed Dependencies"
 
-PHP_VERSION="8.2" PHP_MODULE="common,fpm" setup_php
+PHP_VERSION="8.2" PHP_FPM="YES" setup_php
 create_self_signed_cert
 fetch_and_deploy_gh_release "privatebin" "PrivateBin/PrivateBin" "tarball"
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
 # Copyright (c) 2021-2026 community-scripts ORG
-# Author: MickLesk (Canbiz)
+# Author: MickLesk (Canbiz) | Co-Author: CrazyWolf13
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://vikunja.io/
 
@@ -13,22 +13,14 @@ setting_up_container
 network_check
 update_os
 
-msg_info "Installing Dependencies"
-$STD apt install -y make
-msg_ok "Installed Dependencies"
+fetch_and_deploy_gh_release "vikunja" "go-vikunja/vikunja" "binary"
 
-msg_info "Setup Vikunja (Patience)"
-cd /opt
-RELEASE=$(curl -fsSL https://dl.vikunja.io/vikunja/ | grep -oP 'href="/vikunja/\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n 1)
-curl -fsSL "https://dl.vikunja.io/vikunja/$RELEASE/vikunja-$RELEASE-amd64.deb" -o vikunja-"$RELEASE"-amd64.deb
-$STD dpkg -i vikunja-"$RELEASE"-amd64.deb
-sed -i 's|^ timezone: .*| timezone: UTC|' /etc/vikunja/config.yml
-sed -i 's|"./vikunja.db"|"/etc/vikunja/vikunja.db"|' /etc/vikunja/config.yml
-sed -i 's|./files|/etc/vikunja/files|' /etc/vikunja/config.yml
-systemctl start vikunja.service
-rm -rf /opt/vikunja-"$RELEASE"-amd64.deb
-echo "${RELEASE}" >/opt/"${APPLICATION}"_version.txt
-msg_ok "Installed Vikunja"
+msg_info "Setting up Vikunja"
+sed -i 's|^# \(service:\)|\1|' /etc/vikunja/config.yml
+sed -i "s|^ # \(publicurl: \).*| \1\"http://$LOCAL_IP\"|" /etc/vikunja/config.yml
+sed -i "0,/^ # \(timezone: \).*/s|| \1${tz}|" /etc/vikunja/config.yml
+systemctl enable -q --now vikunja
+msg_ok "Set up Vikunja"
 
 motd_ssh
 customize
@@ -195,9 +195,11 @@ get_current_ip() {
 #
 # - Updates /etc/motd with current container IP
 # - Removes old IP entries to avoid duplicates
+# - Regenerates /etc/profile.d/00_lxc-details.sh with dynamic OS/IP info
 # ------------------------------------------------------------------------------
 update_motd_ip() {
   MOTD_FILE="/etc/motd"
+  PROFILE_FILE="/etc/profile.d/00_lxc-details.sh"
+
   if [ -f "$MOTD_FILE" ]; then
     # Remove existing IP Address lines to prevent duplication
@@ -207,6 +209,26 @@ update_motd_ip() {
     # Add the new IP address
     echo -e "${TAB}${NETWORK}${YW} IP Address: ${GN}${IP}${CL}" >>"$MOTD_FILE"
   fi
+
+  # Update dynamic LXC details profile if values changed (e.g., after OS upgrade)
+  # Only update if file exists and is from community-scripts
+  if [ -f "$PROFILE_FILE" ] && grep -q "community-scripts" "$PROFILE_FILE" 2>/dev/null; then
+    # Get current values
+    local current_os="$(grep ^NAME /etc/os-release | cut -d= -f2 | tr -d '"') - Version: $(grep ^VERSION_ID /etc/os-release | cut -d= -f2 | tr -d '"')"
+    local current_hostname="$(hostname)"
+    local current_ip="$(hostname -I | awk '{print $1}')"
+
+    # Update only if values actually changed
+    if ! grep -q "OS:.*$current_os" "$PROFILE_FILE" 2>/dev/null; then
+      sed -i "s|OS:.*|OS: \${GN}$current_os\${CL}\\\"|" "$PROFILE_FILE"
+    fi
+    if ! grep -q "Hostname:.*$current_hostname" "$PROFILE_FILE" 2>/dev/null; then
+      sed -i "s|Hostname:.*|Hostname: \${GN}$current_hostname\${CL}\\\"|" "$PROFILE_FILE"
+    fi
+    if ! grep -q "IP Address:.*$current_ip" "$PROFILE_FILE" 2>/dev/null; then
+      sed -i "s|IP Address:.*|IP Address: \${GN}$current_ip\${CL}\\\"|" "$PROFILE_FILE"
+    fi
+  fi
 }
 
 # ------------------------------------------------------------------------------
@@ -968,7 +990,7 @@ base_settings() {
   fi
 
   MTU=${var_mtu:-""}
-  SD=${var_storage:-""}
+  SD=${var_searchdomain:-""}
   NS=${var_ns:-""}
   MAC=${var_mac:-""}
   VLAN=${var_vlan:-""}
@@ -1795,7 +1817,7 @@ advanced_settings() {
   if [[ -n "$BRIDGES" ]]; then
     while IFS= read -r bridge; do
       if [[ -n "$bridge" ]]; then
-        local description=$(grep -A 10 "iface $bridge" /etc/network/interfaces 2>/dev/null | grep '^#' | head -n1 | sed 's/^#\s*//')
+        local description=$(grep -A 10 "iface $bridge" /etc/network/interfaces 2>/dev/null | grep '^#' | head -n1 | sed 's/^#\s*//;s/^[- ]*//')
         BRIDGE_MENU_OPTIONS+=("$bridge" "${description:- }")
       fi
     done <<<"$BRIDGES"
@@ -2027,6 +2049,10 @@ advanced_settings() {
       "${BRIDGE_MENU_OPTIONS[@]}" \
       3>&1 1>&2 2>&3); then
       local bridge_test="${result:-vmbr0}"
+      # Skip separator entries (e.g., __other__) - re-display menu
+      if [[ "$bridge_test" == "__other__" || "$bridge_test" == -* ]]; then
+        continue
+      fi
       if validate_bridge "$bridge_test"; then
         _bridge="$bridge_test"
         ((STEP++))
@@ -3310,6 +3336,7 @@ start() {
     set_std_mode
     ensure_profile_loaded
     update_script
+    update_motd_ip
     cleanup_lxc
   else
     CHOICE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "${APP} LXC Update/Setting" --menu \
@@ -3336,6 +3363,7 @@ start() {
     esac
     ensure_profile_loaded
     update_script
+    update_motd_ip
     cleanup_lxc
   fi
 }
@@ -3854,6 +3882,17 @@ EOF
 
   fix_gpu_gids
 
+  # Fix Debian 13 LXC template bug where / is owned by nobody:nogroup
+  # This must be done from the host as unprivileged containers cannot chown /
+  local rootfs
+  rootfs=$(pct config "$CTID" | grep -E '^rootfs:' | sed 's/rootfs: //' | cut -d',' -f1)
+  if [[ -n "$rootfs" ]]; then
+    local mount_point="/var/lib/lxc/${CTID}/rootfs"
+    if [[ -d "$mount_point" ]] && [[ "$(stat -c '%U' "$mount_point")" != "root" ]]; then
+      chown root:root "$mount_point" 2>/dev/null || true
+    fi
+  fi
+
   # Continue with standard container setup
   msg_info "Customizing LXC Container"
 
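The ownership problem this block works around can also be checked by hand on the Proxmox host for any existing container (the container ID below is just an example):

CTID=101   # example container ID
stat -c '%U:%G' "/var/lib/lxc/${CTID}/rootfs"   # affected Debian 13 templates report nobody:nogroup here instead of root:root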
@@ -79,6 +79,13 @@ EOF
 # ------------------------------------------------------------------------------
 setting_up_container() {
   msg_info "Setting up Container OS"
+
+  # Fix Debian 13 LXC template bug where / is owned by nobody
+  # Only attempt in privileged containers (unprivileged cannot chown /)
+  if [[ "$(stat -c '%U' /)" != "root" ]]; then
+    (chown root:root / 2>/dev/null) || true
+  fi
+
   for ((i = RETRY_NUM; i > 0; i--)); do
     if [ "$(hostname -I)" != "" ]; then
       break
misc/tools.func (331 lines changed)
@@ -574,7 +574,8 @@ EOF
     msg_error "Failed to download PHP keyring"
     return 1
   }
-  dpkg -i /tmp/debsuryorg-archive-keyring.deb >/dev/null 2>&1 || {
+  # Don't use /dev/null redirection for dpkg as it may use background processes
+  dpkg -i /tmp/debsuryorg-archive-keyring.deb >>"$(get_active_logfile)" 2>&1 || {
     msg_error "Failed to install PHP keyring"
     rm -f /tmp/debsuryorg-archive-keyring.deb
     return 1
@@ -1844,8 +1845,13 @@ function fetch_and_deploy_gh_release() {
     }
 
     chmod 644 "$tmpdir/$filename"
-    $STD apt install -y "$tmpdir/$filename" || {
-      $STD dpkg -i "$tmpdir/$filename" || {
+    # SYSTEMD_OFFLINE=1 prevents systemd-tmpfiles failures in unprivileged LXC (Debian 13+/systemd 257+)
+    # Support DPKG_CONFOLD/DPKG_CONFNEW env vars for config file handling during .deb upgrades
+    local dpkg_opts=""
+    [[ "${DPKG_FORCE_CONFOLD:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confold"
+    [[ "${DPKG_FORCE_CONFNEW:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confnew"
+    DEBIAN_FRONTEND=noninteractive SYSTEMD_OFFLINE=1 $STD apt install -y $dpkg_opts "$tmpdir/$filename" || {
+      SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
         msg_error "Both apt and dpkg installation failed"
         rm -rf "$tmpdir"
         return 1
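The new switches are opt-in environment variables; a caller that wants dpkg to keep existing configuration files during a .deb upgrade would presumably invoke the helper like this (the vikunja call is taken verbatim from elsewhere in this changeset):

DPKG_FORCE_CONFOLD=1 fetch_and_deploy_gh_release "vikunja" "go-vikunja/vikunja" "binary"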
@@ -2631,6 +2637,7 @@ function setup_hwaccel() {
   # GPU Selection - Let user choose which GPU(s) to configure
   # ═══════════════════════════════════════════════════════════════════════════
   local -a SELECTED_INDICES=()
+  local install_nvidia_drivers="yes"
 
   if [[ $gpu_count -eq 1 ]]; then
     # Single GPU - auto-select
@@ -2692,6 +2699,30 @@ function setup_hwaccel() {
     fi
   fi
 
+  # Ask whether to install NVIDIA drivers in the container
+  local nvidia_selected="no"
+  for idx in "${SELECTED_INDICES[@]}"; do
+    if [[ "${GPU_TYPES[$idx]}" == "NVIDIA" ]]; then
+      nvidia_selected="yes"
+      break
+    fi
+  done
+
+  if [[ "$nvidia_selected" == "yes" ]]; then
+    if [[ -n "${INSTALL_NVIDIA_DRIVERS:-}" ]]; then
+      install_nvidia_drivers="${INSTALL_NVIDIA_DRIVERS}"
+    else
+      echo ""
+      msg_custom "🎮" "${GN}" "NVIDIA GPU passthrough detected"
+      local nvidia_reply=""
+      read -r -t 60 -p "${TAB3}⚙️ Install NVIDIA driver libraries in the container? [Y/n] (auto-yes in 60s): " nvidia_reply || nvidia_reply=""
+      case "${nvidia_reply,,}" in
+        n | no) install_nvidia_drivers="no" ;;
+        *) install_nvidia_drivers="yes" ;;
+      esac
+    fi
+  fi
+
   # ═══════════════════════════════════════════════════════════════════════════
   # OS Detection
   # ═══════════════════════════════════════════════════════════════════════════
@@ -2752,7 +2783,11 @@ function setup_hwaccel() {
     # NVIDIA GPUs
     # ─────────────────────────────────────────────────────────────────────────
     NVIDIA)
-      _setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
+      if [[ "$install_nvidia_drivers" == "yes" ]]; then
+        _setup_nvidia_gpu "$os_id" "$os_codename" "$os_version"
+      else
+        msg_warn "Skipping NVIDIA driver installation (user opted to install manually)"
+      fi
       ;;
     esac
   done
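For unattended runs, the prompt can be bypassed through the INSTALL_NVIDIA_DRIVERS variable the function now checks, e.g.:

INSTALL_NVIDIA_DRIVERS=no setup_hwaccel   # keep GPU passthrough, but skip installing NVIDIA driver libraries in the container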
@@ -4425,6 +4460,8 @@ function setup_nodejs() {
 # - Adds Sury PHP repo if needed
 # - Installs default and user-defined modules
 # - Patches php.ini for CLI, Apache, and FPM as needed
+# - Handles built-in modules gracefully (e.g., opcache in PHP 8.5+)
+# - Skips unavailable packages without failing
 #
 # Variables:
 # PHP_VERSION - PHP version to install (default: 8.4)
@@ -4435,6 +4472,17 @@ function setup_nodejs() {
|
|||||||
# PHP_UPLOAD_MAX_FILESIZE - (default: 128M)
|
# PHP_UPLOAD_MAX_FILESIZE - (default: 128M)
|
||||||
# PHP_POST_MAX_SIZE - (default: 128M)
|
# PHP_POST_MAX_SIZE - (default: 128M)
|
||||||
# PHP_MAX_EXECUTION_TIME - (default: 300)
|
# PHP_MAX_EXECUTION_TIME - (default: 300)
|
||||||
|
#
|
||||||
|
# Notes on modules:
|
||||||
|
# - Base modules (always installed): bcmath, cli, curl, gd, intl, mbstring,
|
||||||
|
# readline, xml, zip, common
|
||||||
|
# - Extended modules (commonly needed): mysql, sqlite3, pgsql, redis,
|
||||||
|
# imagick, bz2, ldap, soap, imap, gmp, apcu
|
||||||
|
# - Some modules are built-in depending on PHP version:
|
||||||
|
# * PHP 8.5+: opcache is built-in (no separate package)
|
||||||
|
# * All versions: ctype, fileinfo, iconv, tokenizer, phar, posix, etc.
|
||||||
|
# are part of php-common
|
||||||
|
# - Unavailable modules are skipped with a warning, not an error
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
|
|
||||||
function setup_php() {
|
function setup_php() {
|
||||||
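
Based on the variables documented above, a typical install-script invocation might look like this (all values illustrative):

    # PHP 8.4 with FPM and a few extra modules on top of the base set;
    # built-in modules (e.g. opcache on PHP 8.5+) are filtered out automatically.
    PHP_VERSION="8.4" \
    PHP_FPM="YES" \
    PHP_MODULE="ldap,soap,gmp" \
    PHP_MEMORY_LIMIT="256M" \
      setup_php
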
@@ -4446,23 +4494,69 @@ function setup_php() {
   DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
   DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)
 
-  local DEFAULT_MODULES="bcmath,cli,curl,gd,intl,mbstring,opcache,readline,xml,zip"
-  local COMBINED_MODULES
+  # Parse version for compatibility checks
+  local PHP_MAJOR="${PHP_VERSION%%.*}"
+  local PHP_MINOR="${PHP_VERSION#*.}"
+  PHP_MINOR="${PHP_MINOR%%.*}"
+
+  # Modules that are ALWAYS part of php-common (no separate package needed)
+  # These are either built-in or virtual packages provided by php-common
+  local BUILTIN_MODULES="calendar,ctype,exif,ffi,fileinfo,ftp,gettext,iconv,pdo,phar,posix,shmop,sockets,sysvmsg,sysvsem,sysvshm,tokenizer"
+
+  # Modules that became built-in in specific PHP versions
+  # PHP 8.5+: opcache is now part of the core
+  local BUILTIN_85=""
+  if [[ "$PHP_MAJOR" -gt 8 ]] || [[ "$PHP_MAJOR" -eq 8 && "$PHP_MINOR" -ge 5 ]]; then
+    BUILTIN_85="opcache"
+  fi
+
+  # Base modules - essential for most PHP applications
+  # Note: 'common' provides many built-in extensions
+  local BASE_MODULES="cli,common,bcmath,curl,gd,intl,mbstring,readline,xml,zip"
+
+  # Add opcache only for PHP < 8.5 (it's built-in starting from 8.5)
+  if [[ "$PHP_MAJOR" -lt 8 ]] || [[ "$PHP_MAJOR" -eq 8 && "$PHP_MINOR" -lt 5 ]]; then
+    BASE_MODULES="${BASE_MODULES},opcache"
+  fi
+
+  # Extended default modules - commonly needed by web applications
+  # These cover ~90% of typical use cases without bloat
+  local EXTENDED_MODULES="mysql,sqlite3,pgsql,redis,imagick,bz2,apcu"
+
+  local COMBINED_MODULES="${BASE_MODULES},${EXTENDED_MODULES}"
 
   local PHP_MEMORY_LIMIT="${PHP_MEMORY_LIMIT:-512M}"
   local PHP_UPLOAD_MAX_FILESIZE="${PHP_UPLOAD_MAX_FILESIZE:-128M}"
   local PHP_POST_MAX_SIZE="${PHP_POST_MAX_SIZE:-128M}"
   local PHP_MAX_EXECUTION_TIME="${PHP_MAX_EXECUTION_TIME:-300}"
 
-  # Merge default + user-defined modules
+  # Merge with user-defined modules
   if [[ -n "$PHP_MODULE" ]]; then
-    COMBINED_MODULES="${DEFAULT_MODULES},${PHP_MODULE}"
-  else
-    COMBINED_MODULES="${DEFAULT_MODULES}"
+    COMBINED_MODULES="${COMBINED_MODULES},${PHP_MODULE}"
   fi
 
+  # Filter out built-in modules (they don't have separate packages)
+  local FILTERED_MODULES=""
+  IFS=',' read -ra ALL_MODULES <<<"$COMBINED_MODULES"
+  for mod in "${ALL_MODULES[@]}"; do
+    mod=$(echo "$mod" | tr -d '[:space:]')
+    [[ -z "$mod" ]] && continue
+
+    # Skip if it's a known built-in module
+    if echo ",$BUILTIN_MODULES,$BUILTIN_85," | grep -qi ",$mod,"; then
+      continue
+    fi
+
+    # Add to filtered list
+    if [[ -z "$FILTERED_MODULES" ]]; then
+      FILTERED_MODULES="$mod"
+    else
+      FILTERED_MODULES="${FILTERED_MODULES},$mod"
+    fi
+  done
+
   # Deduplicate
-  COMBINED_MODULES=$(echo "$COMBINED_MODULES" | tr ',' '\n' | awk '!seen[$0]++' | paste -sd, -)
+  COMBINED_MODULES=$(echo "$FILTERED_MODULES" | tr ',' '\n' | awk '!seen[$0]++' | paste -sd, -)
 
   # Get current PHP-CLI version
   local CURRENT_PHP=""
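
The built-in check wraps both the list and the candidate in commas so that only whole module names match. A small standalone illustration of the same test (the function name is hypothetical):

    BUILTIN="calendar,ctype,exif,pdo,phar,tokenizer"
    is_builtin() { echo ",$BUILTIN," | grep -qi ",$1,"; }

    is_builtin "pdo" && echo "pdo: built-in, no separate package"   # matches ,pdo,
    is_builtin "gd"  || echo "gd: needs a phpX.Y-gd package"        # no ,gd, in the list
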
@@ -4499,7 +4593,8 @@ EOF
     # Ubuntu: Use ondrej/php PPA
     msg_info "Adding ondrej/php PPA for Ubuntu"
     $STD apt install -y software-properties-common
-    $STD add-apt-repository -y ppa:ondrej/php
+    # Don't use $STD for add-apt-repository as it uses background processes
+    add-apt-repository -y ppa:ondrej/php >>"$(get_active_logfile)" 2>&1
   else
     # Debian: Use Sury repository
     manage_tool_repository "php" "$PHP_VERSION" "" "https://packages.sury.org/debsuryorg-archive-keyring.deb" || {
@@ -4519,16 +4614,49 @@ EOF
     return 1
   fi
 
-  # Build module list - without version pinning (preferences.d handles it)
+  # Build module list - verify each package exists before adding
   local MODULE_LIST="php${PHP_VERSION}"
+  local SKIPPED_MODULES=""
+
   IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
   for mod in "${MODULES[@]}"; do
-    MODULE_LIST+=" php${PHP_VERSION}-${mod}"
+    mod=$(echo "$mod" | tr -d '[:space:]')
+    [[ -z "$mod" ]] && continue
+
+    local pkg_name="php${PHP_VERSION}-${mod}"
+
+    # Check if package exists in repository
+    if apt-cache show "$pkg_name" &>/dev/null; then
+      MODULE_LIST+=" $pkg_name"
+    else
+      # Package doesn't exist - could be built-in or renamed
+      if [[ -z "$SKIPPED_MODULES" ]]; then
+        SKIPPED_MODULES="$mod"
+      else
+        SKIPPED_MODULES="${SKIPPED_MODULES}, $mod"
+      fi
+    fi
   done
+
+  # Log skipped modules (informational, not an error)
+  if [[ -n "$SKIPPED_MODULES" ]]; then
+    msg_info "Skipping unavailable/built-in modules: $SKIPPED_MODULES"
+  fi
+
   if [[ "$PHP_FPM" == "YES" ]]; then
-    MODULE_LIST+=" php${PHP_VERSION}-fpm"
+    if apt-cache show "php${PHP_VERSION}-fpm" &>/dev/null; then
+      MODULE_LIST+=" php${PHP_VERSION}-fpm"
+    else
+      msg_warn "php${PHP_VERSION}-fpm not available"
+    fi
+    # Create systemd override for PHP-FPM to fix runtime directory issues in LXC containers
+    mkdir -p /etc/systemd/system/php${PHP_VERSION}-fpm.service.d/
+    cat <<EOF >/etc/systemd/system/php${PHP_VERSION}-fpm.service.d/override.conf
+[Service]
+RuntimeDirectory=php
+RuntimeDirectoryMode=0755
+EOF
+    $STD systemctl daemon-reload
   fi
 
   # install apache2 with PHP support if requested
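
Once the drop-in above is in place, it can be verified with systemd's own tooling, for example assuming PHP 8.4:

    # Show the merged unit including the override.conf drop-in
    systemctl cat php8.4-fpm

    # Confirm the runtime directory settings are active
    systemctl show php8.4-fpm -p RuntimeDirectory -p RuntimeDirectoryMode
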
@@ -4547,38 +4675,31 @@ EOF
 
   # Install PHP packages (pinning via preferences.d ensures correct version)
   msg_info "Installing PHP ${PHP_VERSION} packages"
-  if ! install_packages_with_retry $MODULE_LIST; then
-    msg_warn "Failed to install PHP packages, attempting individual installation"
+  # First attempt: Install all verified packages at once
+  if ! $STD apt install -y $MODULE_LIST 2>/dev/null; then
+    msg_warn "Bulk installation failed, attempting individual installation"
 
     # Install main package first (critical)
-    install_packages_with_retry "php${PHP_VERSION}" || {
+    if ! $STD apt install -y "php${PHP_VERSION}" 2>/dev/null; then
       msg_error "Failed to install php${PHP_VERSION}"
       return 1
-    }
+    fi
 
     # Try to install Apache module individually if requested
     if [[ "$PHP_APACHE" == "YES" ]]; then
-      install_packages_with_retry "libapache2-mod-php${PHP_VERSION}" || {
+      $STD apt install -y "libapache2-mod-php${PHP_VERSION}" 2>/dev/null || {
         msg_warn "Could not install libapache2-mod-php${PHP_VERSION}"
       }
     fi
 
-    # Try to install modules individually - skip those that don't exist
-    for pkg in "${MODULES[@]}"; do
-      if apt-cache search "^php${PHP_VERSION}-${pkg}\$" 2>/dev/null | grep -q "^php${PHP_VERSION}-${pkg}"; then
-        install_packages_with_retry "php${PHP_VERSION}-${pkg}" || {
-          msg_warn "Could not install php${PHP_VERSION}-${pkg}"
+    # Try to install each package individually
+    for pkg in $MODULE_LIST; do
+      [[ "$pkg" == "php${PHP_VERSION}" ]] && continue # Already installed
+      $STD apt install -y "$pkg" 2>/dev/null || {
+        msg_warn "Could not install $pkg - continuing without it"
       }
-      fi
     done
 
-    if [[ "$PHP_FPM" == "YES" ]]; then
-      if apt-cache search "^php${PHP_VERSION}-fpm\$" 2>/dev/null | grep -q "^php${PHP_VERSION}-fpm"; then
-        install_packages_with_retry "php${PHP_VERSION}-fpm" || {
-          msg_warn "Could not install php${PHP_VERSION}-fpm"
-        }
-      fi
-    fi
   fi
   cache_installed_version "php" "$PHP_VERSION"
 
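
The bulk-then-individual fallback can be read as a small reusable pattern; a sketch under the assumption of a plain Debian/Ubuntu system, not the repo's actual helper:

    install_all_or_each() {
      # Try a single transaction first; on failure, retry package by package so
      # one missing module does not abort the whole installation.
      apt install -y "$@" 2>/dev/null && return 0
      local pkg rc=0
      for pkg in "$@"; do
        apt install -y "$pkg" 2>/dev/null || { echo "skipping $pkg" >&2; rc=1; }
      done
      return $rc
    }

    install_all_or_each php8.4 php8.4-cli php8.4-gd php8.4-some-optional-module
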
@@ -5096,6 +5217,146 @@ function setup_ruby() {
   msg_ok "Setup Ruby $RUBY_VERSION"
 }
 
+# ------------------------------------------------------------------------------
+# Installs or updates MeiliSearch search engine.
+#
+# Description:
+# - Fresh install: Downloads binary, creates config/service, starts
+# - Update: Checks for new release, updates binary if available
+# - Waits for service to be ready before returning
+# - Exports API keys for use by caller
+#
+# Variables:
+#   MEILISEARCH_BIND - Bind address (default: 127.0.0.1:7700)
+#   MEILISEARCH_ENV - Environment: production/development (default: production)
+#   MEILISEARCH_DB_PATH - Database path (default: /var/lib/meilisearch/data)
+#
+# Exports:
+#   MEILISEARCH_MASTER_KEY - The master key for admin access
+#   MEILISEARCH_API_KEY - The default search API key
+#   MEILISEARCH_API_KEY_UID - The UID of the default API key
+#
+# Example (install script):
+#   setup_meilisearch
+#
+# Example (CT update_script):
+#   setup_meilisearch
+# ------------------------------------------------------------------------------
+
+function setup_meilisearch() {
+  local MEILISEARCH_BIND="${MEILISEARCH_BIND:-127.0.0.1:7700}"
+  local MEILISEARCH_ENV="${MEILISEARCH_ENV:-production}"
+  local MEILISEARCH_DB_PATH="${MEILISEARCH_DB_PATH:-/var/lib/meilisearch/data}"
+  local MEILISEARCH_DUMP_DIR="${MEILISEARCH_DUMP_DIR:-/var/lib/meilisearch/dumps}"
+  local MEILISEARCH_SNAPSHOT_DIR="${MEILISEARCH_SNAPSHOT_DIR:-/var/lib/meilisearch/snapshots}"
+
+  # Get bind address for health checks
+  local MEILISEARCH_HOST="${MEILISEARCH_BIND%%:*}"
+  local MEILISEARCH_PORT="${MEILISEARCH_BIND##*:}"
+  [[ "$MEILISEARCH_HOST" == "0.0.0.0" ]] && MEILISEARCH_HOST="127.0.0.1"
+
+  # Update mode: MeiliSearch already installed
+  if [[ -f /usr/bin/meilisearch ]]; then
+    if check_for_gh_release "meilisearch" "meilisearch/meilisearch"; then
+      msg_info "Updating MeiliSearch"
+      systemctl stop meilisearch
+      fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary"
+      systemctl start meilisearch
+      msg_ok "Updated MeiliSearch"
+    fi
+    return 0
+  fi
+
+  # Fresh install
+  msg_info "Setup MeiliSearch"
+
+  # Install binary
+  fetch_and_deploy_gh_release "meilisearch" "meilisearch/meilisearch" "binary" || {
+    msg_error "Failed to install MeiliSearch binary"
+    return 1
+  }
+
+  # Download default config
+  curl -fsSL https://raw.githubusercontent.com/meilisearch/meilisearch/latest/config.toml -o /etc/meilisearch.toml || {
+    msg_error "Failed to download MeiliSearch config"
+    return 1
+  }
+
+  # Generate master key
+  MEILISEARCH_MASTER_KEY=$(openssl rand -base64 12)
+  export MEILISEARCH_MASTER_KEY
+
+  # Configure
+  sed -i \
+    -e "s|^env =.*|env = \"${MEILISEARCH_ENV}\"|" \
+    -e "s|^# master_key =.*|master_key = \"${MEILISEARCH_MASTER_KEY}\"|" \
+    -e "s|^db_path =.*|db_path = \"${MEILISEARCH_DB_PATH}\"|" \
+    -e "s|^dump_dir =.*|dump_dir = \"${MEILISEARCH_DUMP_DIR}\"|" \
+    -e "s|^snapshot_dir =.*|snapshot_dir = \"${MEILISEARCH_SNAPSHOT_DIR}\"|" \
+    -e 's|^# no_analytics = true|no_analytics = true|' \
+    -e "s|^http_addr =.*|http_addr = \"${MEILISEARCH_BIND}\"|" \
+    /etc/meilisearch.toml
+
+  # Create data directories
+  mkdir -p "${MEILISEARCH_DB_PATH}" "${MEILISEARCH_DUMP_DIR}" "${MEILISEARCH_SNAPSHOT_DIR}"
+
+  # Create systemd service
+  cat <<EOF >/etc/systemd/system/meilisearch.service
+[Unit]
+Description=Meilisearch
+After=network.target
+
+[Service]
+ExecStart=/usr/bin/meilisearch --config-file-path /etc/meilisearch.toml
+Restart=always
+
+[Install]
+WantedBy=multi-user.target
+EOF
+
+  # Enable and start service
+  systemctl daemon-reload
+  systemctl enable -q --now meilisearch
+
+  # Wait for MeiliSearch to be ready (up to 30 seconds)
+  for i in {1..30}; do
+    if curl -s -o /dev/null -w "%{http_code}" "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/health" 2>/dev/null | grep -q "200"; then
+      break
+    fi
+    sleep 1
+  done
+
+  # Verify service is running
+  if ! systemctl is-active --quiet meilisearch; then
+    msg_error "MeiliSearch service failed to start"
+    return 1
+  fi
+
+  # Get API keys with retry logic
+  MEILISEARCH_API_KEY=""
+  for i in {1..10}; do
+    MEILISEARCH_API_KEY=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
+      -H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
+      grep -o '"key":"[^"]*"' | head -n 1 | sed 's/"key":"//;s/"//') || true
+    [[ -n "$MEILISEARCH_API_KEY" ]] && break
+    sleep 2
+  done
+
+  MEILISEARCH_API_KEY_UID=$(curl -s -X GET "http://${MEILISEARCH_HOST}:${MEILISEARCH_PORT}/keys" \
+    -H "Authorization: Bearer ${MEILISEARCH_MASTER_KEY}" 2>/dev/null | \
+    grep -o '"uid":"[^"]*"' | head -n 1 | sed 's/"uid":"//;s/"//') || true
+
+  export MEILISEARCH_API_KEY
+  export MEILISEARCH_API_KEY_UID
+
+  # Cache version
+  local MEILISEARCH_VERSION
+  MEILISEARCH_VERSION=$(/usr/bin/meilisearch --version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1) || true
+  cache_installed_version "meilisearch" "${MEILISEARCH_VERSION:-unknown}"
+
+  msg_ok "Setup MeiliSearch ${MEILISEARCH_VERSION:-}"
+}
+
 # ------------------------------------------------------------------------------
 # Installs or upgrades ClickHouse database server.
 #
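
A hedged usage sketch for the new helper, based on the variables and exports documented in its header (paths and key names are illustrative):

    # Install MeiliSearch bound to localhost and persist the generated credentials
    MEILISEARCH_BIND="127.0.0.1:7700" MEILISEARCH_ENV="production" setup_meilisearch

    # The function exports the keys; a caller could store them for its application,
    # e.g. in an env file (path illustrative):
    {
      echo "MEILI_URL=http://127.0.0.1:7700"
      echo "MEILI_MASTER_KEY=${MEILISEARCH_MASTER_KEY}"
      echo "MEILI_SEARCH_KEY=${MEILISEARCH_API_KEY}"
    } >>/opt/myapp/.env

    # Quick health check against the bound address
    curl -s http://127.0.0.1:7700/health
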
@@ -595,7 +595,7 @@ EOF
   no)
     whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox --title "Support Subscriptions" "Supporting the software's development team is essential. Check their official website's Support Subscriptions for pricing. Without their dedicated work, we wouldn't have this exceptional software." 10 58
     msg_error "Selected no to Disabling subscription nag"
-    rm /etc/apt/apt.conf.d/no-nag-script 2>/dev/null
+    [[ -f /etc/apt/apt.conf.d/no-nag-script ]] && rm /etc/apt/apt.conf.d/no-nag-script
     ;;
   esac
   apt --reinstall install proxmox-widget-toolkit &>/dev/null || msg_error "Widget toolkit reinstall failed"
@@ -213,7 +213,7 @@ function default_settings() {
   MAC="$GEN_MAC"
   VLAN=""
   MTU=""
-  START_VM="yes"
+  START_VM="no"
   METHOD="default"
   echo -e "${CONTAINERID}${BOLD}${DGN}Virtual Machine ID: ${BGN}${VMID}${CL}"
   echo -e "${CONTAINERTYPE}${BOLD}${DGN}Machine Type: ${BGN}i440fx${CL}"