mirror of https://github.com/community-scripts/ProxmoxVE.git
synced 2026-02-13 16:53:27 +01:00

Compare commits: fix/debian...fix/dispat (12 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 62ce92e8d2 |  |
|  | dd8c998d43 |  |
|  | b215bac01d |  |
|  | c3cd9df12f |  |
|  | 406d53ea2f |  |
|  | c8b278f26f |  |
|  | a6f0d7233e |  |
|  | 079a436286 |  |
|  | 1c2ed6ff10 |  |
|  | c1d7f23a17 |  |
|  | fdbe48badb |  |
|  | d09dd0b664 |  |
19 CHANGELOG.md
@@ -401,6 +401,20 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
 </details>
 
+## 2026-02-12
+
+### 🚀 Updated Scripts
+
+- #### 🐞 Bug Fixes
+
+  - Pangolin: Update database generation command in install script [@tremor021](https://github.com/tremor021) ([#11825](https://github.com/community-scripts/ProxmoxVE/pull/11825))
+
+### 💾 Core
+
+- #### ✨ New Features
+
+  - core: remove old Go API and extend misc/api.func with new backend [@MickLesk](https://github.com/MickLesk) ([#11822](https://github.com/community-scripts/ProxmoxVE/pull/11822))
+
 ## 2026-02-11
 
 ### 🆕 New Scripts
@@ -411,11 +425,16 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
 - #### 🐞 Bug Fixes
 
+  - dispatcharr: include port 9191 in success-message [@MickLesk](https://github.com/MickLesk) ([#11808](https://github.com/community-scripts/ProxmoxVE/pull/11808))
   - fix: make donetick 0.1.71 compatible [@tomfrenzel](https://github.com/tomfrenzel) ([#11804](https://github.com/community-scripts/ProxmoxVE/pull/11804))
   - Kasm: Support new version URL format without hash suffix [@MickLesk](https://github.com/MickLesk) ([#11787](https://github.com/community-scripts/ProxmoxVE/pull/11787))
   - LibreTranslate: Remove Torch [@tremor021](https://github.com/tremor021) ([#11783](https://github.com/community-scripts/ProxmoxVE/pull/11783))
   - Snowshare: fix update script [@TuroYT](https://github.com/TuroYT) ([#11726](https://github.com/community-scripts/ProxmoxVE/pull/11726))
 
+- #### ✨ New Features
+
+  - [Feature] OpenCloud: support PosixFS Collaborative Mode [@vhsdream](https://github.com/vhsdream) ([#11806](https://github.com/community-scripts/ProxmoxVE/pull/11806))
+
 ### 💾 Core
 
 - #### 🔧 Refactor
 
@@ -1,5 +0,0 @@
-MONGO_USER=
-MONGO_PASSWORD=
-MONGO_IP=
-MONGO_PORT=
-MONGO_DATABASE=
23 api/go.mod (file removed)
@@ -1,23 +0,0 @@
module proxmox-api

go 1.24.0

require (
  github.com/gorilla/mux v1.8.1
  github.com/joho/godotenv v1.5.1
  github.com/rs/cors v1.11.1
  go.mongodb.org/mongo-driver v1.17.2
)

require (
  github.com/golang/snappy v0.0.4 // indirect
  github.com/klauspost/compress v1.16.7 // indirect
  github.com/montanaflynn/stats v0.7.1 // indirect
  github.com/xdg-go/pbkdf2 v1.0.0 // indirect
  github.com/xdg-go/scram v1.1.2 // indirect
  github.com/xdg-go/stringprep v1.0.4 // indirect
  github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
  golang.org/x/crypto v0.45.0 // indirect
  golang.org/x/sync v0.18.0 // indirect
  golang.org/x/text v0.31.0 // indirect
)
56 api/go.sum (file removed)
@@ -1,56 +0,0 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA=
github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU=
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM=
go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
450 api/main.go (file removed)
@@ -1,450 +0,0 @@
// Copyright (c) 2021-2026 community-scripts ORG
// Author: Michel Roegl-Brunner (michelroegl-brunner)
// License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE

package main

import (
  "context"
  "encoding/json"
  "fmt"
  "log"
  "net/http"
  "os"
  "strconv"
  "time"

  "github.com/gorilla/mux"
  "github.com/joho/godotenv"
  "github.com/rs/cors"
  "go.mongodb.org/mongo-driver/bson"
  "go.mongodb.org/mongo-driver/bson/primitive"
  "go.mongodb.org/mongo-driver/mongo"
  "go.mongodb.org/mongo-driver/mongo/options"
)

var client *mongo.Client
var collection *mongo.Collection

func loadEnv() {
  if err := godotenv.Load(); err != nil {
    log.Fatal("Error loading .env file")
  }
}

// DataModel represents a single document in MongoDB
type DataModel struct {
  ID         primitive.ObjectID `json:"id" bson:"_id,omitempty"`
  CT_TYPE    uint               `json:"ct_type" bson:"ct_type"`
  DISK_SIZE  float32            `json:"disk_size" bson:"disk_size"`
  CORE_COUNT uint               `json:"core_count" bson:"core_count"`
  RAM_SIZE   uint               `json:"ram_size" bson:"ram_size"`
  OS_TYPE    string             `json:"os_type" bson:"os_type"`
  OS_VERSION string             `json:"os_version" bson:"os_version"`
  DISABLEIP6 string             `json:"disableip6" bson:"disableip6"`
  NSAPP      string             `json:"nsapp" bson:"nsapp"`
  METHOD     string             `json:"method" bson:"method"`
  CreatedAt  time.Time          `json:"created_at" bson:"created_at"`
  PVEVERSION string             `json:"pve_version" bson:"pve_version"`
  STATUS     string             `json:"status" bson:"status"`
  RANDOM_ID  string             `json:"random_id" bson:"random_id"`
  TYPE       string             `json:"type" bson:"type"`
  ERROR      string             `json:"error" bson:"error"`
}

type StatusModel struct {
  RANDOM_ID string `json:"random_id" bson:"random_id"`
  ERROR     string `json:"error" bson:"error"`
  STATUS    string `json:"status" bson:"status"`
}

type CountResponse struct {
  TotalEntries int64            `json:"total_entries"`
  StatusCount  map[string]int64 `json:"status_count"`
  NSAPPCount   map[string]int64 `json:"nsapp_count"`
}

// ConnectDatabase initializes the MongoDB connection
func ConnectDatabase() {
  loadEnv()

  mongoURI := fmt.Sprintf("mongodb://%s:%s@%s:%s",
    os.Getenv("MONGO_USER"),
    os.Getenv("MONGO_PASSWORD"),
    os.Getenv("MONGO_IP"),
    os.Getenv("MONGO_PORT"))

  database := os.Getenv("MONGO_DATABASE")
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  var err error
  client, err = mongo.Connect(ctx, options.Client().ApplyURI(mongoURI))
  if err != nil {
    log.Fatal("Failed to connect to MongoDB!", err)
  }
  collection = client.Database(database).Collection("data_models")
  fmt.Println("Connected to MongoDB on 10.10.10.18")
}

// UploadJSON handles API requests and stores data as a document in MongoDB
func UploadJSON(w http.ResponseWriter, r *http.Request) {
  var input DataModel

  if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
    http.Error(w, err.Error(), http.StatusBadRequest)
    return
  }
  input.CreatedAt = time.Now()

  _, err := collection.InsertOne(context.Background(), input)
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }

  log.Println("Received data:", input)
  w.WriteHeader(http.StatusCreated)
  json.NewEncoder(w).Encode(map[string]string{"message": "Data saved successfully"})
}

// UpdateStatus updates the status of a record based on RANDOM_ID
func UpdateStatus(w http.ResponseWriter, r *http.Request) {
  var input StatusModel

  if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
    http.Error(w, err.Error(), http.StatusBadRequest)
    return
  }

  filter := bson.M{"random_id": input.RANDOM_ID}
  update := bson.M{"$set": bson.M{"status": input.STATUS, "error": input.ERROR}}

  _, err := collection.UpdateOne(context.Background(), filter, update)
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }

  log.Println("Updated data:", input)
  w.WriteHeader(http.StatusOK)
  json.NewEncoder(w).Encode(map[string]string{"message": "Record updated successfully"})
}

// GetDataJSON fetches all data from MongoDB
func GetDataJSON(w http.ResponseWriter, r *http.Request) {
  var records []DataModel
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  cursor, err := collection.Find(ctx, bson.M{})
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }
    records = append(records, record)
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(records)
}
func GetPaginatedData(w http.ResponseWriter, r *http.Request) {
  page, _ := strconv.Atoi(r.URL.Query().Get("page"))
  limit, _ := strconv.Atoi(r.URL.Query().Get("limit"))
  if page < 1 {
    page = 1
  }
  if limit < 1 {
    limit = 10
  }
  skip := (page - 1) * limit
  var records []DataModel
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  options := options.Find().SetSkip(int64(skip)).SetLimit(int64(limit))
  cursor, err := collection.Find(ctx, bson.M{}, options)
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }
    records = append(records, record)
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(records)
}

func GetSummary(w http.ResponseWriter, r *http.Request) {
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  totalCount, err := collection.CountDocuments(ctx, bson.M{})
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }

  statusCount := make(map[string]int64)
  nsappCount := make(map[string]int64)

  pipeline := []bson.M{
    {"$group": bson.M{"_id": "$status", "count": bson.M{"$sum": 1}}},
  }
  cursor, err := collection.Aggregate(ctx, pipeline)
  if err == nil {
    for cursor.Next(ctx) {
      var result struct {
        ID    string `bson:"_id"`
        Count int64  `bson:"count"`
      }
      if err := cursor.Decode(&result); err == nil {
        statusCount[result.ID] = result.Count
      }
    }
  }

  pipeline = []bson.M{
    {"$group": bson.M{"_id": "$nsapp", "count": bson.M{"$sum": 1}}},
  }
  cursor, err = collection.Aggregate(ctx, pipeline)
  if err == nil {
    for cursor.Next(ctx) {
      var result struct {
        ID    string `bson:"_id"`
        Count int64  `bson:"count"`
      }
      if err := cursor.Decode(&result); err == nil {
        nsappCount[result.ID] = result.Count
      }
    }
  }

  response := CountResponse{
    TotalEntries: totalCount,
    StatusCount:  statusCount,
    NSAPPCount:   nsappCount,
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(response)
}

func GetByNsapp(w http.ResponseWriter, r *http.Request) {
  nsapp := r.URL.Query().Get("nsapp")
  var records []DataModel
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  cursor, err := collection.Find(ctx, bson.M{"nsapp": nsapp})
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }
    records = append(records, record)
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(records)
}

func GetByDateRange(w http.ResponseWriter, r *http.Request) {

  startDate := r.URL.Query().Get("start_date")
  endDate := r.URL.Query().Get("end_date")

  if startDate == "" || endDate == "" {
    http.Error(w, "Both start_date and end_date are required", http.StatusBadRequest)
    return
  }

  start, err := time.Parse("2006-01-02T15:04:05.999999+00:00", startDate+"T00:00:00+00:00")
  if err != nil {
    http.Error(w, "Invalid start_date format", http.StatusBadRequest)
    return
  }

  end, err := time.Parse("2006-01-02T15:04:05.999999+00:00", endDate+"T23:59:59+00:00")
  if err != nil {
    http.Error(w, "Invalid end_date format", http.StatusBadRequest)
    return
  }

  var records []DataModel
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  cursor, err := collection.Find(ctx, bson.M{
    "created_at": bson.M{
      "$gte": start,
      "$lte": end,
    },
  })
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }
    records = append(records, record)
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(records)
}
func GetByStatus(w http.ResponseWriter, r *http.Request) {
  status := r.URL.Query().Get("status")
  var records []DataModel
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  cursor, err := collection.Find(ctx, bson.M{"status": status})
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }
    records = append(records, record)
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(records)
}

func GetByOS(w http.ResponseWriter, r *http.Request) {
  osType := r.URL.Query().Get("os_type")
  osVersion := r.URL.Query().Get("os_version")
  var records []DataModel
  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  cursor, err := collection.Find(ctx, bson.M{"os_type": osType, "os_version": osVersion})
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }
    records = append(records, record)
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(records)
}

func GetErrors(w http.ResponseWriter, r *http.Request) {
  errorCount := make(map[string]int)

  ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
  defer cancel()

  cursor, err := collection.Find(ctx, bson.M{"error": bson.M{"$ne": ""}})
  if err != nil {
    http.Error(w, err.Error(), http.StatusInternalServerError)
    return
  }
  defer cursor.Close(ctx)

  for cursor.Next(ctx) {
    var record DataModel
    if err := cursor.Decode(&record); err != nil {
      http.Error(w, err.Error(), http.StatusInternalServerError)
      return
    }

    if record.ERROR != "" {
      errorCount[record.ERROR]++
    }
  }

  type ErrorCountResponse struct {
    Error string `json:"error"`
    Count int    `json:"count"`
  }

  var errorCounts []ErrorCountResponse
  for err, count := range errorCount {
    errorCounts = append(errorCounts, ErrorCountResponse{
      Error: err,
      Count: count,
    })
  }

  w.Header().Set("Content-Type", "application/json")
  json.NewEncoder(w).Encode(struct {
    ErrorCounts []ErrorCountResponse `json:"error_counts"`
  }{
    ErrorCounts: errorCounts,
  })
}

func main() {
  ConnectDatabase()

  router := mux.NewRouter()
  router.HandleFunc("/upload", UploadJSON).Methods("POST")
  router.HandleFunc("/upload/updatestatus", UpdateStatus).Methods("POST")
  router.HandleFunc("/data/json", GetDataJSON).Methods("GET")
  router.HandleFunc("/data/paginated", GetPaginatedData).Methods("GET")
  router.HandleFunc("/data/summary", GetSummary).Methods("GET")
  router.HandleFunc("/data/nsapp", GetByNsapp).Methods("GET")
  router.HandleFunc("/data/date", GetByDateRange).Methods("GET")
  router.HandleFunc("/data/status", GetByStatus).Methods("GET")
  router.HandleFunc("/data/os", GetByOS).Methods("GET")
  router.HandleFunc("/data/errors", GetErrors).Methods("GET")

  c := cors.New(cors.Options{
    AllowedOrigins:   []string{"*"},
    AllowedMethods:   []string{"GET", "POST"},
    AllowedHeaders:   []string{"Content-Type", "Authorization"},
    AllowCredentials: true,
  })

  handler := c.Handler(router)

  fmt.Println("Server running on port 8080")
  log.Fatal(http.ListenAndServe(":8080", handler))
}
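For reference, the removed service above exposed plain HTTP+JSON endpoints on port 8080; the changelog entry notes its role is being taken over by the new misc/api.func backend. A minimal, hedged sketch of how a client could have reported to it, using only the routes and struct fields visible in the removed main.go (the host below is a placeholder, not a value defined anywhere in this repository):

```bash
# Hedged sketch only: endpoint paths and fields come from the removed main.go above.
# API_HOST is a hypothetical placeholder for wherever the Go service was running.
API_HOST="http://example-api-host:8080"

# /upload stored a JSON document matching the DataModel struct
curl -fsSL -X POST "$API_HOST/upload" \
  -H "Content-Type: application/json" \
  -d '{"ct_type":1,"disk_size":8,"core_count":2,"ram_size":2048,"nsapp":"debian","status":"installing","random_id":"abc123"}'

# /upload/updatestatus matched an existing record by random_id and updated status/error
curl -fsSL -X POST "$API_HOST/upload/updatestatus" \
  -H "Content-Type: application/json" \
  -d '{"random_id":"abc123","status":"done","error":""}'
```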
@@ -104,7 +104,7 @@ function update_script() {
 cd /opt/dispatcharr
 rm -rf .venv
 $STD uv venv --clear
-$STD uv pip install -r requirements.txt --index-strategy unsafe-best-match
+$STD uv sync
 $STD uv pip install gunicorn gevent celery redis daphne
 msg_ok "Updated Dispatcharr Backend"
 

@@ -144,4 +144,4 @@ description
 msg_ok "Completed successfully!\n"
 echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
 echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
+echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9191${CL}"
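The first hunk swaps the flat `uv pip install -r requirements.txt` for `uv sync`, so the environment is now resolved from Dispatcharr's own project metadata/lockfile, with the extra servers and workers still layered on afterwards. A small optional sanity check after such an update, assuming the venv lives at /opt/dispatcharr as in the script (a hedged sketch, not part of the script itself):

```bash
# Optional post-update check: confirm the rebuilt environment contains the add-on packages
cd /opt/dispatcharr
uv pip list | grep -Ei 'gunicorn|gevent|celery|redis|daphne'
```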
@@ -30,7 +30,7 @@ function update_script() {
 fi
 
 RELEASE="v5.0.2"
-if check_for_gh_release "opencloud" "opencloud-eu/opencloud" "${RELEASE}"; then
+if check_for_gh_release "OpenCloud" "opencloud-eu/opencloud" "${RELEASE}"; then
 msg_info "Stopping services"
 systemctl stop opencloud opencloud-wopi
 msg_ok "Stopped services"

@@ -38,9 +38,21 @@ function update_script() {
 msg_info "Updating packages"
 $STD apt-get update
 $STD apt-get dist-upgrade -y
+ensure_dependencies "inotify-tools"
 msg_ok "Updated packages"
 
-CLEAN_INSTALL=1 fetch_and_deploy_gh_release "opencloud" "opencloud-eu/opencloud" "singlefile" "${RELEASE}" "/usr/bin" "opencloud-*-linux-amd64"
+CLEAN_INSTALL=1 fetch_and_deploy_gh_release "OpenCloud" "opencloud-eu/opencloud" "singlefile" "${RELEASE}" "/usr/bin" "opencloud-*-linux-amd64"
 
+if ! grep -q 'POSIX_WATCH' /etc/opencloud/opencloud.env; then
+sed -i '/^## External/i ## Uncomment below to enable PosixFS Collaborative Mode\
+## Increase inotify watch/instance limits on your PVE host:\
+### sysctl -w fs.inotify.max_user_watches=1048576\
+### sysctl -w fs.inotify.max_user_instances=1024\
+# STORAGE_USERS_POSIX_ENABLE_COLLABORATION=true\
+# STORAGE_USERS_POSIX_WATCH_TYPE=inotifywait\
+# STORAGE_USERS_POSIX_WATCH_FS=true\
+# STORAGE_USERS_POSIX_WATCH_PATH=<path-to-storage-or-bind-mount>' /etc/opencloud/opencloud.env
+fi
+
 msg_info "Starting services"
 systemctl start opencloud opencloud-wopi
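The added block relies on GNU sed's `i` (insert-before) command with backslash-continued lines to splice the commented PosixFS settings in front of the existing `## External` marker in opencloud.env. A self-contained illustration of the same idiom, on a hypothetical file and marker rather than the real config (hedged sketch only):

```bash
# Build a tiny demo file with a marker line
printf '%s\n' 'key=1' '## External section' > /tmp/demo.env

# Insert two lines immediately BEFORE the line matching /^## External/
sed -i '/^## External/i ## inserted comment\
# INSERTED_OPTION=true' /tmp/demo.env

cat /tmp/demo.env
# key=1
# ## inserted comment
# # INSERTED_OPTION=true
# ## External section
```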
@@ -1,5 +1,5 @@
 {
-  "generated": "2026-02-11T18:22:25Z",
+  "generated": "2026-02-12T06:25:40Z",
   "versions": [
     {
       "slug": "2fauth",

@@ -571,16 +571,16 @@
     {
       "slug": "invoiceninja",
       "repo": "invoiceninja/invoiceninja",
-      "version": "v5.12.55",
+      "version": "v5.12.57",
       "pinned": false,
-      "date": "2026-02-05T01:06:15Z"
+      "date": "2026-02-11T23:08:56Z"
     },
     {
       "slug": "jackett",
       "repo": "Jackett/Jackett",
-      "version": "v0.24.1094",
+      "version": "v0.24.1098",
       "pinned": false,
-      "date": "2026-02-11T06:01:16Z"
+      "date": "2026-02-12T05:56:25Z"
     },
     {
       "slug": "jellystat",

@@ -998,9 +998,9 @@
     {
       "slug": "pangolin",
       "repo": "fosrl/pangolin",
-      "version": "1.15.2",
+      "version": "1.15.3",
       "pinned": false,
-      "date": "2026-02-05T19:23:58Z"
+      "date": "2026-02-12T06:10:19Z"
     },
     {
       "slug": "paperless-ai",

@@ -1131,9 +1131,9 @@
     {
       "slug": "prometheus-alertmanager",
       "repo": "prometheus/alertmanager",
-      "version": "v0.31.0",
+      "version": "v0.31.1",
       "pinned": false,
-      "date": "2026-02-02T13:34:15Z"
+      "date": "2026-02-11T21:28:26Z"
     },
     {
       "slug": "prometheus-blackbox-exporter",

@@ -1229,9 +1229,9 @@
     {
       "slug": "rdtclient",
       "repo": "rogerfar/rdt-client",
-      "version": "v2.0.119",
+      "version": "v2.0.120",
       "pinned": false,
-      "date": "2025-10-13T23:15:11Z"
+      "date": "2026-02-12T02:53:51Z"
     },
     {
       "slug": "reactive-resume",

@@ -1383,9 +1383,9 @@
     {
       "slug": "stirling-pdf",
       "repo": "Stirling-Tools/Stirling-PDF",
-      "version": "v2.4.5",
+      "version": "v2.4.6",
       "pinned": false,
-      "date": "2026-02-06T23:12:20Z"
+      "date": "2026-02-12T00:01:19Z"
     },
     {
       "slug": "streamlink-webui",

@@ -1656,9 +1656,9 @@
     {
       "slug": "wikijs",
       "repo": "requarks/wiki",
-      "version": "v2.5.311",
+      "version": "v2.5.312",
       "pinned": false,
-      "date": "2026-01-08T09:50:00Z"
+      "date": "2026-02-12T02:45:22Z"
     },
     {
       "slug": "wishlist",
@@ -37,7 +37,7 @@ fetch_and_deploy_gh_release "dispatcharr" "Dispatcharr/Dispatcharr" "tarball"
 msg_info "Installing Python Dependencies with uv"
 cd /opt/dispatcharr
 $STD uv venv --clear
-$STD uv pip install -r requirements.txt --index-strategy unsafe-best-match
+$STD uv sync
 $STD uv pip install gunicorn gevent celery redis daphne
 msg_ok "Installed Python Dependencies"
 
@@ -38,6 +38,10 @@ for server in "${servers[@]}"; do
 fi
 done
 
+msg_info "Installing dependencies"
+$STD apt install -y inotify-tools
+msg_ok "Installed dependencies"
+
 msg_info "Installing Collabora Online"
 curl -fsSL https://collaboraoffice.com/downloads/gpg/collaboraonline-release-keyring.gpg -o /etc/apt/keyrings/collaboraonline-release-keyring.gpg
 cat <<EOF >/etc/apt/sources.list.d/colloboraonline.sources

@@ -148,8 +152,15 @@ COLLABORATION_JWT_SECRET=
 # FRONTEND_FULL_TEXT_SEARCH_ENABLED=true
 # SEARCH_EXTRACTOR_TIKA_TIKA_URL=<your-tika-url>
 
-## External storage test - Only NFS v4.2+ is supported
-## User files
+## Uncomment below to enable PosixFS Collaborative Mode
+## Increase inotify watch/instance limits on your PVE host:
+### sysctl -w fs.inotify.max_user_watches=1048576
+### sysctl -w fs.inotify.max_user_instances=1024
+# STORAGE_USERS_POSIX_ENABLE_COLLABORATION=true
+# STORAGE_USERS_POSIX_WATCH_TYPE=inotifywait
+# STORAGE_USERS_POSIX_WATCH_FS=true
+# STORAGE_USERS_POSIX_WATCH_PATH=<path-to-storage-or-bind-mount>
+## User files location - experimental - use at your own risk! - ZFS, NFS v4.2+ supported - CIFS/SMB not supported
 # STORAGE_USERS_POSIX_ROOT=<path-to-your-bind_mount>
 EOF
 
@@ -36,7 +36,7 @@ $STD npm ci
 $STD npm run set:sqlite
 $STD npm run set:oss
 rm -rf server/private
-$STD npm run db:generate
+$STD npm run db:sqlite:generate
 $STD npm run build
 $STD npm run build:cli
 cp -R .next/standalone ./
930 misc/api.func
(File diff suppressed because it is too large)
@@ -3636,6 +3636,9 @@ $PCT_OPTIONS_STRING"
   exit 214
 fi
 msg_ok "Storage space validated"
+
+# Report installation start to API (early - captures failed installs too)
+post_to_api
 fi
 
 create_lxc_container || exit $?

@@ -4010,6 +4013,9 @@ EOF'
 # Install SSH keys
 install_ssh_keys_into_ct
 
+# Start timer for duration tracking
+start_install_timer
+
 # Run application installer
 # Disable error trap - container errors are handled internally via flag file
 set +Eeuo pipefail # Disable ALL error handling temporarily

@@ -4040,6 +4046,9 @@ EOF'
 if [[ $install_exit_code -ne 0 ]]; then
 msg_error "Installation failed in container ${CTID} (exit code: ${install_exit_code})"
 
+# Report failure to telemetry API
+post_update_to_api "failed" "$install_exit_code"
+
 # Copy both logs from container before potential deletion
 local build_log_copied=false
 local install_log_copied=false

@@ -5123,9 +5132,9 @@ EOF
 # api_exit_script()
 #
 # - Exit trap handler for reporting to API telemetry
-# - Captures exit code and reports to API using centralized error descriptions
-# - Uses explain_exit_code() from error_handler.func for consistent error messages
-# - Posts failure status with exit code to API (error description added automatically)
+# - Captures exit code and reports to PocketBase using centralized error descriptions
+# - Uses explain_exit_code() from api.func for consistent error messages
+# - Posts failure status with exit code to API (error description resolved automatically)
 # - Only executes on non-zero exit codes
 # ------------------------------------------------------------------------------
 api_exit_script() {

@@ -5138,6 +5147,6 @@ api_exit_script() {
 if command -v pveversion >/dev/null 2>&1; then
 trap 'api_exit_script' EXIT
 fi
-trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR
-trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT
-trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM
+trap 'post_update_to_api "failed" "$?"' ERR
+trap 'post_update_to_api "failed" "130"' SIGINT
+trap 'post_update_to_api "failed" "143"' SIGTERM
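The trap changes in the last hunk replace symbolic reasons ("$BASH_COMMAND", "INTERRUPTED", "TERMINATED") with numeric exit codes, so the reporting function always receives something the exit-code mapping below can resolve: `$?` for the failing command, 130 (128+2) for SIGINT, 143 (128+15) for SIGTERM. A reduced, hedged sketch of that wiring, with a stand-in for the real post_update_to_api from misc/api.func:

```bash
#!/usr/bin/env bash
# Hedged sketch only: post_update_to_api here is a local stand-in, not the repo function.
post_update_to_api() { echo "report: status=$1 exit_code=$2"; }

trap 'post_update_to_api "failed" "$?"' ERR      # numeric exit code of the failing command
trap 'post_update_to_api "failed" "130"' SIGINT  # Ctrl+C -> 128 + 2
trap 'post_update_to_api "failed" "143"' SIGTERM # termination -> 128 + 15

false   # fails with exit code 1, which triggers the ERR trap and reports "failed 1"
echo "script continues after reporting"
```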
@@ -27,100 +27,90 @@
 # ------------------------------------------------------------------------------
 # explain_exit_code()
 #
-# - Maps numeric exit codes to human-readable error descriptions
-# - Supports:
-# * Generic/Shell errors (1, 2, 126, 127, 128, 130, 137, 139, 143)
-# * Package manager errors (APT, DPKG: 100, 101, 255)
-# * Node.js/npm errors (243-249, 254)
-# * Python/pip/uv errors (210-212)
-# * PostgreSQL errors (231-234)
-# * MySQL/MariaDB errors (241-244)
-# * MongoDB errors (251-254)
-# * Proxmox custom codes (200-231)
-# - Returns description string for given exit code
+# - Canonical version is defined in api.func (sourced before this file)
+# - This section only provides a fallback if api.func was not loaded
+# - See api.func SECTION 1 for the authoritative exit code mappings
 # ------------------------------------------------------------------------------
-explain_exit_code() {
-local code="$1"
-case "$code" in
-# --- Generic / Shell ---
+if ! declare -f explain_exit_code &>/dev/null; then
+explain_exit_code() {
+local code="$1"
+case "$code" in
 1) echo "General error / Operation not permitted" ;;
 2) echo "Misuse of shell builtins (e.g. syntax error)" ;;
-126) echo "Command invoked cannot execute (permission problem?)" ;;
-127) echo "Command not found" ;;
-128) echo "Invalid argument to exit" ;;
-130) echo "Terminated by Ctrl+C (SIGINT)" ;;
-137) echo "Killed (SIGKILL / Out of memory?)" ;;
-139) echo "Segmentation fault (core dumped)" ;;
-143) echo "Terminated (SIGTERM)" ;;
-
-# --- Package manager / APT / DPKG ---
-100) echo "APT: Package manager error (broken packages / dependency problems)" ;;
-101) echo "APT: Configuration error (bad sources.list, malformed config)" ;;
-255) echo "DPKG: Fatal internal error" ;;
-
-# --- Node.js / npm / pnpm / yarn ---
-243) echo "Node.js: Out of memory (JavaScript heap out of memory)" ;;
-245) echo "Node.js: Invalid command-line option" ;;
-246) echo "Node.js: Internal JavaScript Parse Error" ;;
-247) echo "Node.js: Fatal internal error" ;;
-248) echo "Node.js: Invalid C++ addon / N-API failure" ;;
-249) echo "Node.js: Inspector error" ;;
-254) echo "npm/pnpm/yarn: Unknown fatal error" ;;
-
-# --- Python / pip / uv ---
-210) echo "Python: Virtualenv / uv environment missing or broken" ;;
-211) echo "Python: Dependency resolution failed" ;;
-212) echo "Python: Installation aborted (permissions or EXTERNALLY-MANAGED)" ;;
-
-# --- PostgreSQL ---
-231) echo "PostgreSQL: Connection failed (server not running / wrong socket)" ;;
-232) echo "PostgreSQL: Authentication failed (bad user/password)" ;;
-233) echo "PostgreSQL: Database does not exist" ;;
-234) echo "PostgreSQL: Fatal error in query / syntax" ;;
-
-# --- MySQL / MariaDB ---
-241) echo "MySQL/MariaDB: Connection failed (server not running / wrong socket)" ;;
-242) echo "MySQL/MariaDB: Authentication failed (bad user/password)" ;;
-243) echo "MySQL/MariaDB: Database does not exist" ;;
-244) echo "MySQL/MariaDB: Fatal error in query / syntax" ;;
-
-# --- MongoDB ---
-251) echo "MongoDB: Connection failed (server not running)" ;;
-252) echo "MongoDB: Authentication failed (bad user/password)" ;;
-253) echo "MongoDB: Database not found" ;;
-254) echo "MongoDB: Fatal query error" ;;
-
-# --- Proxmox Custom Codes ---
-200) echo "Proxmox: Failed to create lock file" ;;
-203) echo "Proxmox: Missing CTID variable" ;;
-204) echo "Proxmox: Missing PCT_OSTYPE variable" ;;
-205) echo "Proxmox: Invalid CTID (<100)" ;;
-206) echo "Proxmox: CTID already in use" ;;
-207) echo "Proxmox: Password contains unescaped special characters" ;;
-208) echo "Proxmox: Invalid configuration (DNS/MAC/Network format)" ;;
-209) echo "Proxmox: Container creation failed" ;;
-210) echo "Proxmox: Cluster not quorate" ;;
-211) echo "Proxmox: Timeout waiting for template lock" ;;
-212) echo "Proxmox: Storage type 'iscsidirect' does not support containers (VMs only)" ;;
-213) echo "Proxmox: Storage type does not support 'rootdir' content" ;;
-214) echo "Proxmox: Not enough storage space" ;;
-215) echo "Proxmox: Container created but not listed (ghost state)" ;;
-216) echo "Proxmox: RootFS entry missing in config" ;;
-217) echo "Proxmox: Storage not accessible" ;;
-219) echo "Proxmox: CephFS does not support containers - use RBD" ;;
-224) echo "Proxmox: PBS storage is for backups only" ;;
-218) echo "Proxmox: Template file corrupted or incomplete" ;;
-220) echo "Proxmox: Unable to resolve template path" ;;
-221) echo "Proxmox: Template file not readable" ;;
-222) echo "Proxmox: Template download failed" ;;
-223) echo "Proxmox: Template not available after download" ;;
-225) echo "Proxmox: No template available for OS/Version" ;;
-231) echo "Proxmox: LXC stack upgrade failed" ;;
-
-# --- Default ---
-*) echo "Unknown error" ;;
-esac
-}
+6) echo "curl: DNS resolution failed (could not resolve host)" ;;
+7) echo "curl: Failed to connect (network unreachable / host down)" ;;
+22) echo "curl: HTTP error returned (404, 429, 500+)" ;;
+28) echo "curl: Operation timeout (network slow or server not responding)" ;;
+35) echo "curl: SSL/TLS handshake failed (certificate error)" ;;
+100) echo "APT: Package manager error (broken packages / dependency problems)" ;;
+101) echo "APT: Configuration error (bad sources.list, malformed config)" ;;
+102) echo "APT: Lock held by another process (dpkg/apt still running)" ;;
+124) echo "Command timed out (timeout command)" ;;
+126) echo "Command invoked cannot execute (permission problem?)" ;;
+127) echo "Command not found" ;;
+128) echo "Invalid argument to exit" ;;
+130) echo "Terminated by Ctrl+C (SIGINT)" ;;
+134) echo "Process aborted (SIGABRT - possibly Node.js heap overflow)" ;;
+137) echo "Killed (SIGKILL / Out of memory?)" ;;
+139) echo "Segmentation fault (core dumped)" ;;
+141) echo "Broken pipe (SIGPIPE - output closed prematurely)" ;;
+143) echo "Terminated (SIGTERM)" ;;
+150) echo "Systemd: Service failed to start" ;;
+151) echo "Systemd: Service unit not found" ;;
+152) echo "Permission denied (EACCES)" ;;
+153) echo "Build/compile failed (make/gcc/cmake)" ;;
+154) echo "Node.js: Native addon build failed (node-gyp)" ;;
+160) echo "Python: Virtualenv / uv environment missing or broken" ;;
+161) echo "Python: Dependency resolution failed" ;;
+162) echo "Python: Installation aborted (permissions or EXTERNALLY-MANAGED)" ;;
+170) echo "PostgreSQL: Connection failed (server not running / wrong socket)" ;;
+171) echo "PostgreSQL: Authentication failed (bad user/password)" ;;
+172) echo "PostgreSQL: Database does not exist" ;;
+173) echo "PostgreSQL: Fatal error in query / syntax" ;;
+180) echo "MySQL/MariaDB: Connection failed (server not running / wrong socket)" ;;
+181) echo "MySQL/MariaDB: Authentication failed (bad user/password)" ;;
+182) echo "MySQL/MariaDB: Database does not exist" ;;
+183) echo "MySQL/MariaDB: Fatal error in query / syntax" ;;
+190) echo "MongoDB: Connection failed (server not running)" ;;
+191) echo "MongoDB: Authentication failed (bad user/password)" ;;
+192) echo "MongoDB: Database not found" ;;
+193) echo "MongoDB: Fatal query error" ;;
+200) echo "Proxmox: Failed to create lock file" ;;
+203) echo "Proxmox: Missing CTID variable" ;;
+204) echo "Proxmox: Missing PCT_OSTYPE variable" ;;
+205) echo "Proxmox: Invalid CTID (<100)" ;;
+206) echo "Proxmox: CTID already in use" ;;
+207) echo "Proxmox: Password contains unescaped special characters" ;;
+208) echo "Proxmox: Invalid configuration (DNS/MAC/Network format)" ;;
+209) echo "Proxmox: Container creation failed" ;;
+210) echo "Proxmox: Cluster not quorate" ;;
+211) echo "Proxmox: Timeout waiting for template lock" ;;
+212) echo "Proxmox: Storage type 'iscsidirect' does not support containers (VMs only)" ;;
+213) echo "Proxmox: Storage type does not support 'rootdir' content" ;;
+214) echo "Proxmox: Not enough storage space" ;;
+215) echo "Proxmox: Container created but not listed (ghost state)" ;;
+216) echo "Proxmox: RootFS entry missing in config" ;;
+217) echo "Proxmox: Storage not accessible" ;;
+218) echo "Proxmox: Template file corrupted or incomplete" ;;
+219) echo "Proxmox: CephFS does not support containers - use RBD" ;;
+220) echo "Proxmox: Unable to resolve template path" ;;
+221) echo "Proxmox: Template file not readable" ;;
+222) echo "Proxmox: Template download failed" ;;
+223) echo "Proxmox: Template not available after download" ;;
+224) echo "Proxmox: PBS storage is for backups only" ;;
+225) echo "Proxmox: No template available for OS/Version" ;;
+231) echo "Proxmox: LXC stack upgrade failed" ;;
+243) echo "Node.js: Out of memory (JavaScript heap out of memory)" ;;
+245) echo "Node.js: Invalid command-line option" ;;
+246) echo "Node.js: Internal JavaScript Parse Error" ;;
+247) echo "Node.js: Fatal internal error" ;;
+248) echo "Node.js: Invalid C++ addon / N-API failure" ;;
+249) echo "npm/pnpm/yarn: Unknown fatal error" ;;
+255) echo "DPKG: Fatal internal error" ;;
+*) echo "Unknown error" ;;
+esac
+}
+fi
 
 # ==============================================================================
 # SECTION 2: ERROR HANDLERS

@@ -197,12 +187,7 @@ error_handler() {
 
 # Create error flag file with exit code for host detection
 echo "$exit_code" >"/root/.install-${SESSION_ID:-error}.failed" 2>/dev/null || true
-if declare -f msg_custom >/dev/null 2>&1; then
-msg_custom "📋" "${YW}" "Log saved to: ${container_log}"
-else
-echo -e "${YW}Log saved to:${CL} ${BL}${container_log}${CL}"
-fi
+# Log path is shown by host as combined log - no need to show container path
 else
 # HOST CONTEXT: Show local log path and offer container cleanup
 if declare -f msg_custom >/dev/null 2>&1; then

@@ -213,6 +198,11 @@ error_handler() {
 
 # Offer to remove container if it exists (build errors after container creation)
 if [[ -n "${CTID:-}" ]] && command -v pct &>/dev/null && pct status "$CTID" &>/dev/null; then
+# Report failure to API before container cleanup
+if declare -f post_update_to_api &>/dev/null; then
+post_update_to_api "failed" "$exit_code"
+fi
+
 echo ""
 echo -en "${YW}Remove broken container ${CTID}? (Y/n) [auto-remove in 60s]: ${CL}"
 
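With the new guard, error_handler.func only defines explain_exit_code when api.func has not already provided it, so callers can use the function the same way in either case. A small hedged sketch of the define-only-if-missing pattern and its usage, using mappings taken from the hunk above:

```bash
# Hedged sketch of the fallback pattern used above, reduced to a few codes
if ! declare -f explain_exit_code &>/dev/null; then
  explain_exit_code() {
    case "$1" in
      127) echo "Command not found" ;;
      130) echo "Terminated by Ctrl+C (SIGINT)" ;;
      214) echo "Proxmox: Not enough storage space" ;;
      *) echo "Unknown error" ;;
    esac
  }
fi

echo "Exit 127 means: $(explain_exit_code 127)"
```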