mirror of
https://github.com/community-scripts/ProxmoxVE.git
synced 2026-02-05 12:53:27 +01:00
* feat(workflow): add GitHub-based versions.json updater Replaces newreleases.io with direct GitHub API queries. Extracts repos from fetch_and_deploy_gh_release calls in install scripts. Runs 2x daily (06:00 and 18:00 UTC). * feat(workflow): extend version crawler with multiple sources - Method 1: fetch_and_deploy_gh_release calls (direct) - Method 2: GitHub URLs extracted from all scripts - Method 3: VM image sources (HAOS) - Method 4: Docker Hub / GHCR versions - Method 5: npm Registry versions Also tries tags fallback when no releases exist. * feat(workflow): rewrite with version-sources.json config - Generates version-sources.json with structured metadata - Each entry has: slug, type, source, script, version, date - Extracts from: fetch_and_deploy_gh_release, GitHub URLs, npm, Docker - Generates versions.json for backward compatibility - Fully automatic, no manual mapping needed * feat(workflow): add manual GitHub mappings and pveam support - Method 5: Manual GitHub mappings for 36 apt-based apps (grafana, redis, postgresql, mariadb, influxdb, etc.) - Method 6: Proxmox LXC templates (debian, ubuntu, alpine) via download.proxmox.com index - Method 7: Special sources (HAOS VM) Total coverage: ~310+ apps * feat(workflow): expand manual GitHub mappings to 75 apps Added mappings for: - Apache projects (cassandra, couchdb, guacamole, tomcat) - Media apps (tdarr, unmanic, shinobi) - DevOps (coolify, dokploy, runtipi, sonarqube) - Databases (mongodb, mysql, neo4j, rabbitmq) - And 30+ more apps Total manual mappings: 75 * feat: add manual placeholders for 34 unknown-source apps - Added 34 apps with 'manual:-' type for apps without known sources - Added manual type handler in version-fetch (returns '-' placeholder) - Added manual counter to summary output - Coverage now 100% (all 405 scripts included) Manual entries can be updated later when sources are discovered. 
* Refactor and update GitHub workflow files Moved several workflow files to a 'bak' backup directory and renamed 'close-ttek-issues.yaml' to 'close-tteck-issues.yaml'. Refactored 'update-versions-github.yml' to focus on extracting and updating GitHub versions, simplified the extraction logic, and updated the workflow schedule to run four times daily. Minor variable and logic improvements were made in 'close-discussion.yml'. * clean file * chore: empty versions.json for workflow test
159 lines
6.1 KiB
YAML
Generated
# Validates that filenames changed in a PR (ct/install shell scripts and
# frontend JSON metadata) are lowercase alphanumeric-with-dashes, and posts
# or updates a summary comment on the pull request.
name: Validate filenames

on:
  pull_request_target:
    paths:
      - "ct/*.sh"
      - "install/*.sh"
      - "frontend/public/json/*.json"

jobs:
  check-files:
    # Only run in the upstream repository, never in forks.
    if: github.repository == 'community-scripts/ProxmoxVE'
    name: Check changed files
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - name: Get pull request information
        if: github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        id: pr
        with:
          script: |
            // Fetch the full PR object so later steps can read merge_commit_sha.
            const { data: pullRequest } = await github.rest.pulls.get({
              ...context.repo,
              pull_number: context.payload.pull_request.number,
            });
            return pullRequest;

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
          # For PRs, check out the merge commit; otherwise the default ref.
          ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}

      - name: Get changed files
        id: changed-files
        run: |
          # NOTE(review): the expression expands to the literal words true/false,
          # which the shell `if` then executes as commands — works, but fragile.
          if ${{ github.event_name == 'pull_request_target' }}; then
            echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | xargs)" >> $GITHUB_OUTPUT
          else
            echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT
          fi

      - name: "Validate filenames in ct and install directory"
        if: always() && steps.changed-files.outputs.files != ''
        id: check-scripts
        run: |
          # NOTE(review): under pull_request_target these filenames are
          # attacker-controlled and interpolated unquoted into the script —
          # confirm the injection surface is acceptable.
          CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^(ct|install)/.*\.sh$' || true; })

          NON_COMPLIANT_FILES=""
          for FILE in $CHANGED_FILES; do
            # Strip the extension, then reduce to the bare filename.
            BASENAME=$(basename "${FILE%.*}")
            if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Non-compliant filenames found, change to lowercase:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: "Validate filenames in json directory."
        if: always() && steps.changed-files.outputs.files != ''
        id: check-json
        run: |
          # Fix: the trigger paths watch frontend/public/json/*.json, but the
          # previous pattern grepped for '^json/' and so never matched anything.
          CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^frontend/public/json/.*\.json$' || true; })

          NON_COMPLIANT_FILES=""
          for FILE in $CHANGED_FILES; do
            BASENAME=$(basename "${FILE%.*}")
            if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Non-compliant filenames found, change to lowercase:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Post results and comment
        # Fix: the original joined the two step outputs with '&&', so the PR
        # comment only appeared when BOTH checks failed; post when EITHER did.
        if: always() && github.event_name == 'pull_request_target' && (steps.check-scripts.outputs.files != '' || steps.check-json.outputs.files != '')
        uses: actions/github-script@v7
        with:
          script: |
            const result = "${{ job.status }}" === "success" ? "success" : "failure";
            const nonCompliantFiles = {
              script: "${{ steps.check-scripts.outputs.files }}",
              JSON: "${{ steps.check-json.outputs.files }}",
            };

            const issueNumber = context.payload.pull_request
              ? context.payload.pull_request.number
              : null;
            const commentIdentifier = "validate-filenames";
            let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Filename validation\n\n`;

            if (result === "failure") {
              newCommentBody += ":x: We found issues in the following changed files:\n\n";
              for (const [check, files] of Object.entries(nonCompliantFiles)) {
                if (files) {
                  newCommentBody += `**${check.charAt(0).toUpperCase() + check.slice(1)} filename invalid:**\n${files
                    .trim()
                    .split(" ")
                    .map((file) => `- ${file}`)
                    .join("\n")}\n\n`;
                }
              }
              newCommentBody +=
                "Please change the filenames to lowercase and use only alphanumeric characters and dashes.\n";
            } else {
              newCommentBody += `:rocket: All files passed filename validation!\n`;
            }

            newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;

            if (issueNumber) {
              const { data: comments } = await github.rest.issues.listComments({
                ...context.repo,
                issue_number: issueNumber,
              });

              // Reuse the bot's existing comment instead of stacking new ones.
              const existingComment = comments.find(
                (comment) => comment.user.login === "github-actions[bot]",
              );

              if (existingComment) {
                if (existingComment.body.includes(commentIdentifier)) {
                  // Replace only our delimited section, keep unrelated content.
                  const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "");
                  newCommentBody = existingComment.body.replace(re, newCommentBody);
                } else {
                  newCommentBody = existingComment.body + '\n\n---\n\n' + newCommentBody;
                }

                await github.rest.issues.updateComment({
                  ...context.repo,
                  comment_id: existingComment.id,
                  body: newCommentBody,
                });
              } else {
                await github.rest.issues.createComment({
                  ...context.repo,
                  issue_number: issueNumber,
                  body: newCommentBody,
                });
              }
            }