Mirror of https://github.com/community-scripts/ProxmoxVE.git (synced 2025-07-05 05:27:39 +00:00)
Compare commits: 46 commits, 2025-07-02 … 2025-07-04
Commits: 0c98308b60, 9128b9dd12, 301a23e5f4, 2b848ff1d8, 3d69931675, 3e504cf48f, d2cbfcd69a, ebfb6a4e34, 7403470bd7, af9475d280, 8882a17b6f, 32d6194ade, 569089cb73, f494e68016, 7561e26c0a, a95be13c95, 9ab50d4248, f5be1d270a, 524a2a422d, 45fbc30cc5, f8c1d7bde8, 9b8657fbb3, f68f19aa3d, 85758f8b91, e981c42517, 7d6ac73153, aad2dd6232, cb24880115, 0e87c4fe34, 438eddbde1, 729895e2ed, 3371529cce, 1ac1aadbe3, 10127650fa, 3533c896a3, ee6f07b31a, 5ac4818030, c293b058c0, cadeee4deb, f401c7cc4c, a3ea46701c, 009178a3dc, 38e8b70873, 31cda26096, e16aac7a83, 2a52baf57e
CHANGELOG.md (48 lines changed)
@@ -14,8 +14,56 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
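For example, to browse a container's files with Midnight Commander you can attach to the container from the Proxmox VE host and launch `mc` (the container ID `101` below is only an illustration, substitute your own):

```bash
# open a shell inside the container from the Proxmox VE host
pct enter 101
# launch the Midnight Commander file manager inside the container
mc
```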
## 2025-07-05

## 2025-07-04

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

  - Refactor: Mafl [@tremor021](https://github.com/tremor021) ([#5702](https://github.com/community-scripts/ProxmoxVE/pull/5702))
  - Outline: Fix sed command for v0.85.0 [@tremor021](https://github.com/tremor021) ([#5688](https://github.com/community-scripts/ProxmoxVE/pull/5688))
  - Komodo: Update Script to use FerretDB / remove psql & sqlite options [@MickLesk](https://github.com/MickLesk) ([#5690](https://github.com/community-scripts/ProxmoxVE/pull/5690))
  - ESPHome: Fix linking issue to prevent version mismatch [@MickLesk](https://github.com/MickLesk) ([#5685](https://github.com/community-scripts/ProxmoxVE/pull/5685))
  - Cloudflare-DDNS: fix invisible read prompt at install [@MickLesk](https://github.com/MickLesk) ([#5682](https://github.com/community-scripts/ProxmoxVE/pull/5682))

- #### ✨ New Features

  - Core layer refactor: centralized error traps and msg_* consistency [@MickLesk](https://github.com/MickLesk) ([#5705](https://github.com/community-scripts/ProxmoxVE/pull/5705))

- #### 💥 Breaking Changes

  - Update Iptag [@DesertGamer](https://github.com/DesertGamer) ([#5677](https://github.com/community-scripts/ProxmoxVE/pull/5677))

### 🌐 Website

- #### 📝 Script Information

  - MySQL phpMyAdmin Access Information [@austinpilz](https://github.com/austinpilz) ([#5679](https://github.com/community-scripts/ProxmoxVE/pull/5679))

## 2025-07-03

### 🚀 Updated Scripts

- #### 🐞 Bug Fixes

  - Zipline: Fix typo in uploads directory path [@tremor021](https://github.com/tremor021) ([#5662](https://github.com/community-scripts/ProxmoxVE/pull/5662))

- #### ✨ New Features

  - Improve asset matching in fetch_and_deploy_gh_release for prebuild and singlefile modes [@MickLesk](https://github.com/MickLesk) ([#5669](https://github.com/community-scripts/ProxmoxVE/pull/5669))

- #### 🔧 Refactor

  - Refactor: Trilium [@MickLesk](https://github.com/MickLesk) ([#5665](https://github.com/community-scripts/ProxmoxVE/pull/5665))

### 🌐 Website

- #### 📝 Script Information

  - Bump Icons to selfhst repo | switch svg to webp [@MickLesk](https://github.com/MickLesk) ([#5659](https://github.com/community-scripts/ProxmoxVE/pull/5659))

## 2025-07-02

### 🚀 Updated Scripts
@@ -32,6 +32,15 @@ function update_script() {
    exit 1
  fi
  COMPOSE_BASENAME=$(basename "$COMPOSE_FILE")

  if [[ "$COMPOSE_BASENAME" == "sqlite.compose.yaml" || "$COMPOSE_BASENAME" == "postgres.compose.yaml" ]]; then
    msg_error "❌ Detected outdated Komodo setup using SQLite or PostgreSQL (FerretDB v1)."
    echo -e "${YW}This configuration is no longer supported since Komodo v1.18.0.${CL}"
    echo -e "${YW}Please follow the migration guide:${CL}"
    echo -e "${BGN}https://github.com/community-scripts/ProxmoxVE/discussions/5689${CL}\n"
    exit 1
  fi

  BACKUP_FILE="/opt/komodo/${COMPOSE_BASENAME}.bak_$(date +%Y%m%d_%H%M%S)"
  cp "$COMPOSE_FILE" "$BACKUP_FILE" || {
    msg_error "Failed to create backup of ${COMPOSE_BASENAME}!"
@@ -73,6 +73,11 @@ EOF
    msg_ok "Updated systemd service"
  fi

  msg_info "Linking esphome to /usr/local/bin"
  rm -f /usr/local/bin/esphome
  ln -s /opt/esphome/.venv/bin/esphome /usr/local/bin/esphome
  msg_ok "Linked esphome binary"

  msg_info "Starting ${APP}"
  systemctl start esphomeDashboard
  msg_ok "Started ${APP}"
@@ -36,6 +36,15 @@ function update_script() {
    exit 1
  fi
  COMPOSE_BASENAME=$(basename "$COMPOSE_FILE")

  if [[ "$COMPOSE_BASENAME" == "sqlite.compose.yaml" || "$COMPOSE_BASENAME" == "postgres.compose.yaml" ]]; then
    msg_error "❌ Detected outdated Komodo setup using SQLite or PostgreSQL (FerretDB v1)."
    echo -e "${YW}This configuration is no longer supported since Komodo v1.18.0.${CL}"
    echo -e "${YW}Please follow the migration guide:${CL}"
    echo -e "${BGN}https://github.com/community-scripts/ProxmoxVE/discussions/5689${CL}\n"
    exit 1
  fi

  BACKUP_FILE="/opt/komodo/${COMPOSE_BASENAME}.bak_$(date +%Y%m%d_%H%M%S)"
  cp "$COMPOSE_FILE" "$BACKUP_FILE" || {
    msg_error "Failed to create backup of ${COMPOSE_BASENAME}!"
ct/mafl.sh (35 lines changed)
@@ -27,18 +27,31 @@ function update_script() {
    msg_error "No ${APP} Installation Found!"
    exit
  fi

  RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
  msg_info "Updating Mafl to v${RELEASE} (Patience)"
  systemctl stop mafl
  curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o $(basename "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz")
  tar -xzf v${RELEASE}.tar.gz
  cp -r mafl-${RELEASE}/* /opt/mafl/
  rm -rf mafl-${RELEASE}
  cd /opt/mafl
  yarn install
  yarn build
  systemctl start mafl
  msg_ok "Updated Mafl to v${RELEASE}"
  if [[ "${RELEASE}" != "$(cat ~/.mafl 2>/dev/null)" ]] || [[ ! -f ~/.mafl ]]; then
    msg_info "Stopping Mafl service"
    systemctl stop mafl
    msg_ok "Service stopped"

    msg_info "Performing backup"
    mkdir -p /opt/mafl-backup/data
    mv /opt/mafl/data /opt/mafl-backup/data
    rm /opt/mafl
    msg_ok "Backup complete"

    fetch_and_deploy_gh_release "mafl" "hywax/mafl"

    msg_info "Updating Mafl to v${RELEASE}"
    cd /opt/mafl
    yarn install
    yarn build
    mv /opt/mafl-backup/data /opt/mafl/data
    systemctl start mafl
    msg_ok "Updated Mafl to v${RELEASE}"
  else
    msg_ok "No update required. ${APP} is already at v${RELEASE}"
  fi
  exit
}
@@ -31,7 +31,7 @@ function update_script() {
  touch /opt/${APP}_version.txt
  mkdir -p $HOME/.config/qBittorrent/
  mkdir -p /opt/qbittorrent/
  mv /.config/qBittorrent $HOME/.config/
  [ -d "/.config/qBittorrent" ] && mv /.config/qBittorrent "$HOME/.config/"
  $STD apt-get remove --purge -y qbittorrent-nox
  sed -i 's@ExecStart=/usr/bin/qbittorrent-nox@ExecStart=/opt/qbittorrent/qbittorrent-nox@g' /etc/systemd/system/qbittorrent-nox.service
  systemctl daemon-reload
@@ -67,4 +67,4 @@ description
  msg_ok "Completed Successfully!\n"
  echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
  echo -e "${INFO}${YW} Access it using the following URL:${CL}"
  echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8090${CL}"
  echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8090${CL}"
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://triliumnext.github.io/Docs/
# Source: https://github.com/TriliumNext/Trilium

APP="Trilium"
var_tags="${var_tags:-notes}"
@@ -27,57 +27,52 @@ function update_script() {
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  if [[ ! -f /opt/${APP}_version.txt ]]; then touch /opt/${APP}_version.txt; fi
  RELEASE=$(curl -fsSL https://api.github.com/repos/TriliumNext/Notes/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
  if [[ "v${RELEASE}" != "$(cat /opt/${APP}_version.txt 2>/dev/null)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then

    if [[ -d /opt/trilium/db ]]; then
      DB_PATH="/opt/trilium/db"
      DB_RESTORE_PATH="/opt/trilium/db"
    elif [[ -d /opt/trilium/assets/db ]]; then
      DB_PATH="/opt/trilium/assets/db"
      DB_RESTORE_PATH="/opt/trilium/assets/db"
  RELEASE=$(curl -fsSL https://api.github.com/repos/TriliumNext/Trilium/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
  if [[ "${RELEASE}" != "$(cat ~/.Trilium 2>/dev/null)" ]] || [[ ! -f ~/.Trilium ]]; then

    if [[ -d /opt/trilium/db ]]; then
      DB_PATH="/opt/trilium/db"
      DB_RESTORE_PATH="/opt/trilium/db"
    elif [[ -d /opt/trilium/assets/db ]]; then
      DB_PATH="/opt/trilium/assets/db"
      DB_RESTORE_PATH="/opt/trilium/assets/db"
    else
      msg_error "Database not found in either /opt/trilium/db or /opt/trilium/assets/db"
      exit 1
    fi

    msg_info "Stopping ${APP}"
    systemctl stop trilium
    sleep 1
    msg_ok "Stopped ${APP}"

    msg_info "Backing up Database"
    mkdir -p /opt/trilium_backup
    cp -r "${DB_PATH}" /opt/trilium_backup/
    rm -rf /opt/trilium
    msg_ok "Backed up Database"

    fetch_and_deploy_gh_release "Trilium" "TriliumNext/Trilium" "prebuild" "latest" "/opt/trilium" "TriliumNotes-Server-*linux-x64.tar.xz"

    msg_info "Restoring Database"
    mkdir -p "$(dirname "${DB_RESTORE_PATH}")"
    cp -r /opt/trilium_backup/$(basename "${DB_PATH}") "${DB_RESTORE_PATH}"
    msg_ok "Restored Database"

    msg_info "Cleaning up"
    rm -rf /opt/trilium_backup
    msg_ok "Cleaned"

    msg_info "Starting ${APP}"
    systemctl start trilium
    sleep 1
    msg_ok "Started ${APP}"
    msg_ok "Updated Successfully"
  else
    msg_error "Database not found in either /opt/trilium/db or /opt/trilium/assets/db"
    exit 1
    msg_ok "No update required. ${APP} is already at ${RELEASE}"
  fi

  msg_info "Stopping ${APP}"
  systemctl stop trilium
  sleep 1
  msg_ok "Stopped ${APP}"

  msg_info "Updating to ${RELEASE}"
  mkdir -p /opt/trilium_backup
  cp -r "${DB_PATH}" /opt/trilium_backup/
  rm -rf /opt/trilium
  cd /tmp
  curl -fsSL "https://github.com/TriliumNext/trilium/releases/download/v${RELEASE}/TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz" -o "TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz"
  tar -xf "TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz"
  mv "TriliumNextNotes-Server-${RELEASE}-linux-x64" /opt/trilium

  # Restore database
  mkdir -p "$(dirname "${DB_RESTORE_PATH}")"
  cp -r /opt/trilium_backup/$(basename "${DB_PATH}") "${DB_RESTORE_PATH}"

  echo "v${RELEASE}" >/opt/${APP}_version.txt
  msg_ok "Updated to ${RELEASE}"

  msg_info "Cleaning up"
  rm -rf "/tmp/TriliumNextNotes-Server-${RELEASE}-linux-x64.tar.xz"
  rm -rf /opt/trilium_backup
  msg_ok "Cleaned"

  msg_info "Starting ${APP}"
  systemctl start trilium
  sleep 1
  msg_ok "Started ${APP}"
  msg_ok "Updated Successfully"
  else
    msg_ok "No update required. ${APP} is already at ${RELEASE}"
  fi

  exit
  exit
}

start
@@ -40,9 +40,9 @@ function update_script() {

  msg_info "Updating ${APP} to ${RELEASE}"
  cp /opt/zipline/.env /opt/
  mkdir -p /opt/zipline-upload
  if [ -d /opt/zipline/upload ] && [ "$(ls -A /opt/zipline/upload)" ]; then
    cp -R /opt/zipline/upload/* /opt/zipline-upload/
  mkdir -p /opt/zipline-uploads
  if [ -d /opt/zipline/uploads ] && [ "$(ls -A /opt/zipline/uploads)" ]; then
    cp -R /opt/zipline/uploads/* /opt/zipline-uploads/
  fi
  curl -fsSL "https://github.com/diced/zipline/archive/refs/tags/v${RELEASE}.zip" -o $(basename "https://github.com/diced/zipline/archive/refs/tags/v${RELEASE}.zip")
  $STD unzip v"${RELEASE}".zip
frontend/public/json/add-iptag.json (generated, 2 lines changed)
  logo: https://cdn.jsdelivr.net/gh/selfhst/icons/svg/proxmox.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/proxmox.webp

frontend/public/json/babybuddy.json (generated, 2 lines changed)
  logo: https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/baby-buddy.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/baby-buddy.webp

frontend/public/json/convertx.json (generated, 2 lines changed)
  logo: https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/convertx.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/convertx.webp

frontend/public/json/evcc.json (generated, 2 lines changed)
  "updateable": false → true

frontend/public/json/huntarr.json (generated, 2 lines changed)
  logo: https://raw.githubusercontent.com/plexguide/Huntarr.io/refs/heads/main/frontend/static/logo/Huntarr.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/huntarr.webp

frontend/public/json/itsm-ng.json (generated, 2 lines changed)
  logo: https://cdn.jsdelivr.net/gh/selfhst/icons/svg/itsm-ng.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/itsm-ng.webp

frontend/public/json/librespeed-rust.json (generated, 2 lines changed)
  logo: https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/librespeed.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/librespeed.webp

frontend/public/json/lyrionmusicserver.json (generated, 2 lines changed)
  logo: https://cdn.jsdelivr.net/gh/selfhst/icons/webp/lyrion-media-server.webp → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/lyrion-music-server.webp

frontend/public/json/mysql.json (generated, 4 lines changed)
  Adds an info note: "If installed, access phpMyAdmin at `http://<LXC_IP>/phpMyAdmin`, case sensitive."

frontend/public/json/oauth2-proxy.json (generated, 2 lines changed)
  logo: https://raw.githubusercontent.com/oauth2-proxy/oauth2-proxy/f82e90426a1881d36bf995f25de9b7b1db4c2564/docs/static/img/logos/OAuth2_Proxy_icon.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/oauth2-proxy.webp

frontend/public/json/pulse.json (generated, 2 lines changed)
  logo: https://raw.githubusercontent.com/rcourtman/Pulse/main/src/public/logos/pulse-logo-256x256.png → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/pulse.webp

frontend/public/json/rclone.json (generated, 2 lines changed)
  logo: https://cdn.jsdelivr.net/gh/selfhst/icons/svg/rclone.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/rclone.webp

frontend/public/json/streamlink-webui.json (generated, 2 lines changed)
  logo: https://streamlink.github.io/_static/icon.svg → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/streamlink.webp

frontend/public/json/trilium.json (generated, 8 lines changed)
  name: "TriliumNext" → "Trilium Notes"
  documentation: https://triliumnext.github.io/Docs/ → https://github.com/TriliumNext/trilium/wiki
  logo: https://cdn.jsdelivr.net/gh/selfhst/icons/webp/triliumnext.webp → https://cdn.jsdelivr.net/gh/selfhst/icons/webp/trilium-notes.webp
  description: the old "TriliumNext is a newer fork of Trilium …" text is replaced with: "Trilium Notes is the latest and officially maintained version of the powerful, self-hosted note-taking and personal knowledge management application. It enables users to organize information in a hierarchical tree structure and supports rich text editing, internal linking, images, attachments, and powerful scripting capabilities. This version reflects the most current development efforts under the TriliumNext organization and replaces all prior forks or legacy variants. Trilium is ideal for building personal wikis, structured documentation, and long-term knowledge archives, giving users full local control and privacy."
frontend/public/json/versions.json (generated, 306 lines changed)
Auto-generated tracker of upstream release versions. The update prepends the freshly fetched entries (latest releases as of 2025-07-04), for example home-assistant/core 2025.7.1, Luligu/matterbridge 3.1.1, homarr-labs/homarr v1.27.0, bunkerity/bunkerweb v1.6.2, kimai/kimai 2.37.0, Graylog2/graylog2-server 6.3.1, theonedev/onedev v11.11.3, nzbgetcom/nzbget v25.2, Checkmk/checkmk v2.2.0p44, Jackett/Jackett v0.22.2117, outline/outline v0.85.0, keycloak/keycloak 26.3.0, cloudflare/cloudflared 2025.7.0, rabbitmq/rabbitmq-server v4.1.2, influxdata/influxdb v3.2.1, cockpit-project/cockpit 310.5, n8n-io/n8n n8n@1.100.0, Dolibarr/dolibarr 18.0.7, esphome/esphome 2025.6.3, actualbudget/actual v25.7.1, mongodb/mongo r6.0.25-rc0 and documenso/documenso v1.12.2-rc.0, and removes the now-duplicate entries for the same projects further down the list (e.g. home-assistant/core 2025.7.0, esphome 2025.6.2, keycloak 26.2.5, cloudflared 2025.6.1, outline v0.84.0, Jackett v0.22.2107, nzbget v25.1, Graylog 6.2.5, onedev v11.11.2, matterbridge 3.1.0, homarr v1.26.0, actual v25.7.0, kimai 2.36.1, rabbitmq v4.1.1, influxdb v3.2.0, cockpit 341.1).
@@ -28,8 +28,7 @@ msg_ok "Enabled Docker Service"

echo "${TAB3}Choose the database for Komodo installation:"
echo "${TAB3}1) MongoDB (recommended)"
echo "${TAB3}2) SQLite"
echo "${TAB3}3) PostgreSQL"
echo "${TAB3}2) FerretDB"
read -rp "${TAB3}Enter your choice (default: 1): " DB_CHOICE
DB_CHOICE=${DB_CHOICE:-1}

@@ -38,10 +37,7 @@ case $DB_CHOICE in
  DB_COMPOSE_FILE="mongo.compose.yaml"
  ;;
2)
  DB_COMPOSE_FILE="sqlite.compose.yaml"
  ;;
3)
  DB_COMPOSE_FILE="postgres.compose.yaml"
  DB_COMPOSE_FILE="ferretdb.compose.yaml"
  ;;
*)
  echo "Invalid choice. Defaulting to MongoDB."
@@ -15,16 +15,15 @@ update_os

setup_go

msg_info "Configure Application"
var_cf_api_token="default"
read -rp "Enter the Cloudflare API token: " var_cf_api_token
read -rp "${TAB3}Enter the Cloudflare API token: " var_cf_api_token

var_cf_domains="default"
read -rp "Enter the domains separated with a comma (*.example.org,www.example.org) " var_cf_domains
read -rp "${TAB3}Enter the domains separated with a comma (*.example.org,www.example.org) " var_cf_domains

var_cf_proxied="false"
while true; do
  read -rp "Proxied? (y/n): " answer
  read -rp "${TAB3}Proxied? (y/n): " answer
  case "$answer" in
  [Yy]*)
    var_cf_proxied="true"
@@ -39,7 +38,7 @@ while true; do
done
var_cf_ip6_provider="none"
while true; do
  read -rp "Enable IPv6 support? (y/n): " answer
  read -rp "${TAB3}Enable IPv6 support? (y/n): " answer
  case "$answer" in
  [Yy]*)
    var_cf_ip6_provider="auto"
@@ -29,6 +29,11 @@ $STD /opt/esphome/.venv/bin/python -m pip install --upgrade pip
$STD /opt/esphome/.venv/bin/python -m pip install esphome tornado esptool
msg_ok "Setup and Installed ESPHome"

msg_info "Linking esphome to /usr/local/bin"
rm -f /usr/local/bin/esphome
ln -s /opt/esphome/.venv/bin/esphome /usr/local/bin/esphome
msg_ok "Linked esphome binary"

msg_info "Creating Service"
mkdir -p /root/config
cat <<EOF >/etc/systemd/system/esphomeDashboard.service
@@ -39,8 +39,7 @@ msg_ok "Installed Docker"

echo "${TAB3}Choose the database for Komodo installation:"
echo "${TAB3}1) MongoDB (recommended)"
echo "${TAB3}2) SQLite"
echo "${TAB3}3) PostgreSQL"
echo "${TAB3}2) FerretDB"
read -rp "${TAB3}Enter your choice (default: 1): " DB_CHOICE
DB_CHOICE=${DB_CHOICE:-1}

@@ -49,10 +48,7 @@ case $DB_CHOICE in
  DB_COMPOSE_FILE="mongo.compose.yaml"
  ;;
2)
  DB_COMPOSE_FILE="sqlite.compose.yaml"
  ;;
3)
  DB_COMPOSE_FILE="postgres.compose.yaml"
  DB_COMPOSE_FILE="ferretdb.compose.yaml"
  ;;
*)
  echo "Invalid choice. Defaulting to MongoDB."
@@ -14,22 +14,17 @@ network_check
update_os

msg_info "Installing Dependencies"
$STD apt-get install -y make
$STD apt-get install -y g++
$STD apt-get install -y gcc
$STD apt-get install -y ca-certificates
$STD apt-get install -y \
  ca-certificates \
  build-essential
msg_ok "Installed Dependencies"

NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
fetch_and_deploy_gh_release "mafl" "hywax/mafl"

RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Installing Mafl v${RELEASE}"
curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o "v${RELEASE}.tar.gz"
tar -xzf v${RELEASE}.tar.gz
mkdir -p /opt/mafl/data
curl -fsSL "https://raw.githubusercontent.com/hywax/mafl/main/.example/config.yml" -o "/opt/mafl/data/config.yml"
mv mafl-${RELEASE}/* /opt/mafl
rm -rf mafl-${RELEASE}
cd /opt/mafl
export NUXT_TELEMETRY_DISABLED=true
$STD yarn install
@@ -55,7 +55,7 @@ sed -i 's/NODE_ENV=production/NODE_ENV=development/g' /opt/outline/.env
sed -i "s/generate_a_new_key/${SECRET_KEY}/g" /opt/outline/.env
sed -i "s/user:pass@postgres/${DB_USER}:${DB_PASS}@localhost/g" /opt/outline/.env
sed -i 's/redis:6379/localhost:6379/g' /opt/outline/.env
sed -i "32s#URL=#URL=http://${LOCAL_IP}#g" /opt/outline/.env
sed -i "5s#URL=#URL=http://${LOCAL_IP}#g" /opt/outline/.env
sed -i 's/FORCE_HTTPS=true/FORCE_HTTPS=false/g' /opt/outline/.env
$STD yarn install --frozen-lockfile
export NODE_OPTIONS="--max-old-space-size=3584"
@@ -3,7 +3,7 @@
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://triliumnext.github.io/Docs/
# Source: https://github.com/TriliumNext/Trilium

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
@@ -13,14 +13,7 @@ setting_up_container
network_check
update_os

msg_info "Setup TriliumNext"
cd /opt
RELEASE=$(curl -fsSL https://api.github.com/repos/TriliumNext/trilium/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/TriliumNext/trilium/releases/download/v${RELEASE}/TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz" -o "TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz"
tar -xf TriliumNextNotes-Server-v${RELEASE}-linux-x64.tar.xz
mv TriliumNextNotes-Server-$RELEASE-linux-x64 /opt/trilium
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Setup TriliumNext"
fetch_and_deploy_gh_release "Trilium" "TriliumNext/Trilium" "prebuild" "latest" "/opt/trilium" "TriliumNotes-Server-*linux-x64.tar.xz"

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/trilium.service
@@ -46,7 +39,6 @@ motd_ssh
customize

msg_info "Cleaning up"
rm -rf /opt/TriliumNextNotes-Server-${RELEASE}-linux-x64.tar.xz
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
@@ -50,9 +50,9 @@ CORE_HOSTNAME=0.0.0.0
CORE_PORT=3000
CORE_RETURN_HTTPS=false
DATASOURCE_TYPE=local
DATASOURCE_LOCAL_DIRECTORY=/opt/zipline-upload
DATASOURCE_LOCAL_DIRECTORY=/opt/zipline-uploads
EOF
mkdir -p /opt/zipline-upload
mkdir -p /opt/zipline-uploads
$STD pnpm install
$STD pnpm build
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
@@ -83,11 +83,6 @@ update_os() {
  msg_info "Updating Container OS"
  $STD apk -U upgrade
  msg_ok "Updated Container OS"

  msg_info "Installing core dependencies"
  $STD apk update
  $STD apk add newt curl openssh nano mc ncurses gpg
  msg_ok "Core dependencies installed"
}

# This function modifies the message of the day (motd) and SSH settings
misc/build.func (132 lines changed)
@@ -304,13 +304,12 @@ echo_default() {
  fi

  # Output the selected values with icons
  echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os${CL}"
  echo -e "${OSVERSION}${BOLD}${DGN}Version: ${BGN}$var_version${CL}"
  echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
  echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os ($var_version)${CL}"
  echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}"
  echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
  echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}${CORE_COUNT}${CL}"
  echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
  echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
  if [ "$VERB" == "yes" ]; then
    echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}Enabled${CL}"
  fi
@@ -1095,7 +1094,9 @@ build_container() {
  # This executes create_lxc.sh and creates the container and .conf file
  bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/create_lxc.sh)" $?

  LXC_CONFIG=/etc/pve/lxc/${CTID}.conf
  LXC_CONFIG="/etc/pve/lxc/${CTID}.conf"

  # USB passthrough for privileged LXC (CT_TYPE=0)
  if [ "$CT_TYPE" == "0" ]; then
    cat <<EOF >>"$LXC_CONFIG"
# USB passthrough
@@ -1111,38 +1112,98 @@ lxc.mount.entry: /dev/ttyACM1 dev/ttyACM1 none bind,optional,create=
EOF
  fi

  if [ "$CT_TYPE" == "0" ]; then
    if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
      cat <<EOF >>"$LXC_CONFIG"
# VAAPI hardware transcoding
lxc.cgroup2.devices.allow: c 226:0 rwm
lxc.cgroup2.devices.allow: c 226:128 rwm
lxc.cgroup2.devices.allow: c 29:0 rwm
lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file
lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir
lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file
EOF
  # VAAPI passthrough for privileged containers or known apps
  VAAPI_APPS=(
    "immich"
    "Channels"
    "Emby"
    "ErsatzTV"
    "Frigate"
    "Jellyfin"
    "Plex"
    "Scrypted"
    "Tdarr"
    "Unmanic"
    "Ollama"
    "FileFlows"
    "Open WebUI"
  )

  is_vaapi_app=false
  for vaapi_app in "${VAAPI_APPS[@]}"; do
    if [[ "$APP" == "$vaapi_app" ]]; then
      is_vaapi_app=true
      break
    fi
  else
    if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
      if [[ -e "/dev/dri/renderD128" ]]; then
        if [[ -e "/dev/dri/card0" ]]; then
          cat <<EOF >>"$LXC_CONFIG"
# VAAPI hardware transcoding
dev0: /dev/dri/card0,gid=44
dev1: /dev/dri/renderD128,gid=104
EOF
        else
          cat <<EOF >>"$LXC_CONFIG"
# VAAPI hardware transcoding
dev0: /dev/dri/card1,gid=44
dev1: /dev/dri/renderD128,gid=104
EOF
  done

  if ([ "$CT_TYPE" == "0" ] || [ "$is_vaapi_app" == "true" ]) &&
    ([[ -e /dev/dri/renderD128 ]] || [[ -e /dev/dri/card0 ]] || [[ -e /dev/fb0 ]]); then

    echo ""
    msg_custom "⚙️ " "\e[96m" "Configuring VAAPI passthrough for LXC container"

    if [ "$CT_TYPE" != "0" ]; then
      msg_custom "⚠️ " "\e[33m" "Container is unprivileged – VAAPI passthrough may not work without additional host configuration (e.g., idmap)."
    fi

    msg_custom "ℹ️ " "\e[96m" "VAAPI enables GPU hardware acceleration (e.g., for video transcoding in Jellyfin or Plex)."

    echo ""
    read -rp "➤ Automatically mount all available VAAPI devices? [Y/n]: " VAAPI_ALL

    if [[ "$VAAPI_ALL" =~ ^[Yy]$|^$ ]]; then
      # Mount all devices automatically
      if [[ -e /dev/dri/renderD128 ]]; then
        echo "lxc.cgroup2.devices.allow: c 226:128 rwm" >>"$LXC_CONFIG"
        echo "lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file" >>"$LXC_CONFIG"
      fi
      if [[ -e /dev/dri/card0 ]]; then
        echo "lxc.cgroup2.devices.allow: c 226:0 rwm" >>"$LXC_CONFIG"

        echo "lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file" >>"$LXC_CONFIG"
      fi
      if [[ -e /dev/fb0 ]]; then
        echo "lxc.cgroup2.devices.allow: c 29:0 rwm" >>"$LXC_CONFIG"
        echo "lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file" >>"$LXC_CONFIG"
      fi
      if [[ -d /dev/dri ]]; then
        echo "lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir" >>"$LXC_CONFIG"
      fi
    else
      # Manual selection per device
      if [[ -e /dev/dri/renderD128 ]]; then
        read -rp "➤ Mount /dev/dri/renderD128 (GPU rendering)? [y/N]: " MOUNT_D128
        if [[ "$MOUNT_D128" =~ ^[Yy]$ ]]; then
          echo "lxc.cgroup2.devices.allow: c 226:128 rwm" >>"$LXC_CONFIG"
          echo "lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file" >>"$LXC_CONFIG"
        fi
      fi

      if [[ -e /dev/dri/card0 ]]; then
        read -rp "➤ Mount /dev/dri/card0 (GPU hardware interface)? [y/N]: " MOUNT_CARD0
        if [[ "$MOUNT_CARD0" =~ ^[Yy]$ ]]; then
          echo "lxc.cgroup2.devices.allow: c 226:0 rwm" >>"$LXC_CONFIG"
          echo "lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file" >>"$LXC_CONFIG"

        fi
      fi

      if [[ -e /dev/fb0 ]]; then
        read -rp "➤ Mount /dev/fb0 (Framebuffer, GUI)? [y/N]: " MOUNT_FB0
        if [[ "$MOUNT_FB0" =~ ^[Yy]$ ]]; then
          echo "lxc.cgroup2.devices.allow: c 29:0 rwm" >>"$LXC_CONFIG"
          echo "lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file" >>"$LXC_CONFIG"
        fi
      fi

      if [[ -d /dev/dri ]]; then
        echo "lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir" >>"$LXC_CONFIG"
      fi
    fi
  fi

  # TUN device passthrough
  if [ "$ENABLE_TUN" == "yes" ]; then
    cat <<EOF >>"$LXC_CONFIG"
lxc.cgroup2.devices.allow: c 10:200 rwm
@@ -1172,10 +1233,13 @@ EOF'
    locale-gen >/dev/null && \
    export LANG=\$locale_line"

  if [[ -z "${tz:-}" ]]; then
    tz=$(timedatectl show --property=Timezone --value 2>/dev/null || echo "Etc/UTC")
  fi
  if pct exec "$CTID" -- test -e "/usr/share/zoneinfo/$tz"; then
    pct exec "$CTID" -- bash -c "echo $tz >/etc/timezone && ln -sf /usr/share/zoneinfo/$tz /etc/localtime"
    pct exec "$CTID" -- bash -c "tz='$tz'; echo \"\$tz\" >/etc/timezone && ln -sf \"/usr/share/zoneinfo/\$tz\" /etc/localtime"
  else
    msg_info "Skipping timezone setup – zone '$tz' not found in container"
    msg_warn "Skipping timezone setup – zone '$tz' not found in container"
  fi

  pct exec "$CTID" -- bash -c "apt-get update >/dev/null && apt-get install -y sudo curl mc gnupg2 >/dev/null"
@@ -1255,7 +1319,9 @@ api_exit_script() {
  fi
}

trap 'api_exit_script' EXIT
if command -v pveversion >/dev/null 2>&1; then
  trap 'api_exit_script' EXIT
fi
trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR
trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT
trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM
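For reference, when the operator accepts the automatic option and all of the probed devices exist on the host, the lines appended to /etc/pve/lxc/<CTID>.conf amount to the following. This is only a sketch assembled from the echo statements in the hunk above, not additional code from the commit:

  # VAAPI passthrough (assembled example)
  lxc.cgroup2.devices.allow: c 226:128 rwm
  lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file
  lxc.cgroup2.devices.allow: c 226:0 rwm
  lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file
  lxc.cgroup2.devices.allow: c 29:0 rwm
  lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file
  lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir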
400
misc/core.func
400
misc/core.func
@ -1,30 +1,6 @@
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE
|
||||
|
||||
# if ! declare -f wait_for >/dev/null; then
|
||||
# echo "[DEBUG] Undefined function 'wait_for' used from: ${BASH_SOURCE[*]}" >&2
|
||||
# wait_for() {
|
||||
# echo "[DEBUG] Fallback: wait_for called with: $*" >&2
|
||||
# true
|
||||
# }
|
||||
# fi
|
||||
|
||||
trap 'on_error $? $LINENO' ERR
|
||||
trap 'on_exit' EXIT
|
||||
trap 'on_interrupt' INT
|
||||
trap 'on_terminate' TERM
|
||||
|
||||
if ! declare -f wait_for >/dev/null; then
|
||||
wait_for() {
|
||||
true
|
||||
}
|
||||
fi
|
||||
|
||||
declare -A MSG_INFO_SHOWN=()
|
||||
SPINNER_PID=""
|
||||
SPINNER_ACTIVE=0
|
||||
SPINNER_MSG=""
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Loads core utility groups once (colors, formatting, icons, defaults).
# ------------------------------------------------------------------------------
@@ -43,100 +19,51 @@ load_functions() {
# add more
}

on_error() {
local exit_code="$1"
local lineno="$2"
# ============================================================================
# Error & Signal Handling – robust, universal, subshell-safe
# ============================================================================

stop_spinner

case "$exit_code" in
1) msg_error "Generic error occurred (line $lineno)" ;;
2) msg_error "Shell misuse (line $lineno)" ;;
126) msg_error "Command cannot execute (line $lineno)" ;;
127) msg_error "Command not found (line $lineno)" ;;
128) msg_error "Invalid exit argument (line $lineno)" ;;
130) msg_error "Script aborted by user (CTRL+C)" ;;
143) msg_error "Script terminated by SIGTERM" ;;
*) msg_error "Script failed at line $lineno with exit code $exit_code" ;;
esac

exit "$exit_code"
}

on_exit() {
cleanup_spinner || true
[[ "${VERBOSE:-no}" == "yes" ]] && msg_info "Script exited"
}

on_interrupt() {
msg_error "Interrupted by user (CTRL+C)"
exit 130
}

on_terminate() {
msg_error "Terminated by signal (TERM)"
exit 143
}

setup_trap_abort_handling() {
trap '__handle_signal_abort SIGINT' SIGINT
trap '__handle_signal_abort SIGTERM' SIGTERM
trap '__handle_unexpected_error $?' ERR
}

__handle_signal_abort() {
local signal="$1"
echo
[ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null

case "$signal" in
SIGINT)
msg_error "Script aborted by user (CTRL+C)"
exit 130
_tool_error_hint() {
local cmd="$1"
local code="$2"
case "$cmd" in
curl)
case "$code" in
6) echo "Curl: Could not resolve host (DNS problem)" ;;
7) echo "Curl: Failed to connect to host (connection refused)" ;;
22) echo "Curl: HTTP error (404/403 etc)" ;;
28) echo "Curl: Operation timeout" ;;
*) echo "Curl: Unknown error ($code)" ;;
esac
;;
SIGTERM)
msg_error "Script terminated (SIGTERM)"
exit 143
wget)
echo "Wget failed – URL unreachable or permission denied"
;;
*)
msg_error "Script interrupted (unknown signal: $signal)"
exit 1
systemctl)
echo "Systemd unit failure – check service name and permissions"
;;
jq)
echo "jq parse error – malformed JSON or missing key"
;;
mariadb | mysql)
echo "MySQL/MariaDB command failed – check credentials or DB"
;;
unzip)
echo "unzip failed – corrupt file or missing permission"
;;
tar)
echo "tar failed – invalid format or missing binary"
;;
node | npm | pnpm | yarn)
echo "Node tool failed – check version compatibility or package.json"
;;
*) echo "" ;;
esac
}

__handle_unexpected_error() {
local exit_code="$1"
echo
[ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null

case "$exit_code" in
1)
msg_error "Generic error occurred (exit code 1)"
;;
2)
msg_error "Misuse of shell builtins (exit code 2)"
;;
126)
msg_error "Command invoked cannot execute (exit code 126)"
;;
127)
msg_error "Command not found (exit code 127)"
;;
128)
msg_error "Invalid exit argument (exit code 128)"
;;
130)
msg_error "Script aborted by user (CTRL+C)"
;;
143)
msg_error "Script terminated by SIGTERM"
;;
*)
msg_error "Unexpected error occurred (exit code $exit_code)"
;;
esac
exit "$exit_code"
catch_errors() {
set -Eeuo pipefail
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}
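
# Illustrative usage sketch (not part of this diff): a consumer script would
# typically source the core helper file, enable the ERR trap via catch_errors,
# and then wrap its work in msg_info/msg_ok pairs. The path and commands below
# are assumptions for illustration only.
#   source /path/to/core.func   # path assumed
#   catch_errors
#   msg_info "Installing example package"
#   apt-get install -y curl
#   msg_ok "Installed example package"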

# ------------------------------------------------------------------------------
@@ -153,6 +80,13 @@ color() {
CL=$(echo "\033[m")
}

# Special for spinner and colorized output via printf
color_spinner() {
CS_YW=$'\033[33m'
CS_YWB=$'\033[93m'
CS_CL=$'\033[m'
}

# ------------------------------------------------------------------------------
# Defines formatting helpers like tab, bold, and line reset sequences.
# ------------------------------------------------------------------------------
@@ -196,6 +130,7 @@ icons() {
ADVANCED="${TAB}🧩${TAB}${CL}"
FUSE="${TAB}🗂️${TAB}${CL}"
HOURGLASS="${TAB}⏳${TAB}"

}

# ------------------------------------------------------------------------------
@@ -227,7 +162,7 @@ silent() {
# Function to download & save header files
get_header() {
local app_name=$(echo "${APP,,}" | tr -d ' ')
local app_type=${APP_TYPE:-ct} # Default 'ct'
local app_type=${APP_TYPE:-ct}
local header_url="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/${app_type}/headers/${app_name}"
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"

@@ -257,77 +192,39 @@ header_info() {
fi
}

# ------------------------------------------------------------------------------
# Performs a curl request with retry logic and inline feedback.
# ------------------------------------------------------------------------------

run_curl() {
if [ "$VERBOSE" = "no" ]; then
$STD curl "$@"
else
curl "$@"
ensure_tput() {
if ! command -v tput >/dev/null 2>&1; then
if grep -qi 'alpine' /etc/os-release; then
apk add --no-cache ncurses >/dev/null 2>&1
elif command -v apt-get >/dev/null 2>&1; then
apt-get update -qq >/dev/null
apt-get install -y -qq ncurses-bin >/dev/null 2>&1
fi
fi
}

curl_handler() {
set +e
trap 'set -e' RETURN
local args=()
local url=""
local max_retries=3
local delay=2
local attempt=1
local exit_code
local has_output_file=false
local result=""
is_alpine() {
local os_id="${var_os:-${PCT_OSTYPE:-}}"

# Parse arguments
for arg in "$@"; do
if [[ "$arg" != -* && -z "$url" ]]; then
url="$arg"
fi
[[ "$arg" == "-o" || "$arg" == --output ]] && has_output_file=true
args+=("$arg")
done

if [[ -z "$url" ]]; then
msg_error "No valid URL or option entered for curl_handler"
return 1
if [[ -z "$os_id" && -f /etc/os-release ]]; then
os_id="$(
. /etc/os-release 2>/dev/null
echo "${ID:-}"
)"
fi

$STD msg_info "Fetching: $url"
[[ "$os_id" == "alpine" ]]
}
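
# Example usage (illustrative sketch, not part of this diff): callers can
# branch on the detected OS before choosing a package manager.
#   if is_alpine; then
#     apk add --no-cache curl
#   else
#     apt-get install -y curl
#   fi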

while [[ $attempt -le $max_retries ]]; do
if $has_output_file; then
$STD run_curl "${args[@]}"
exit_code=$?
else
result=$(run_curl "${args[@]}")
exit_code=$?
fi

if [[ $exit_code -eq 0 ]]; then
$STD msg_ok "Fetched: $url"
$has_output_file || printf '%s' "$result"
return 0
fi

if ((attempt >= max_retries)); then
# Read error log if it exists
if [ -s /tmp/curl_error.log ]; then
local curl_stderr
curl_stderr=$(</tmp/curl_error.log)
rm -f /tmp/curl_error.log
fi
__curl_err_handler "$exit_code" "$url" "${curl_stderr:-}"
exit
fi

$STD printf "\r\033[K${INFO}${YW}Retry $attempt/$max_retries in ${delay}s...${CL}" >&2
sleep "$delay"
((attempt++))
done
set -e
is_verbose_mode() {
local verbose="${VERBOSE:-${var_verbose:-no}}"
local tty_status
if [[ -t 2 ]]; then
tty_status="interactive"
else
tty_status="not-a-tty"
fi
[[ "$verbose" != "no" || ! -t 2 ]]
}
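
# Example usage (illustrative sketch): stream command output only when the run
# is verbose or stderr is not a TTY; otherwise keep the terminal quiet.
#   if is_verbose_mode; then
#     tar -xzvf release.tar.gz
#   else
#     tar -xzf release.tar.gz >/dev/null 2>&1
#   fi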

# ------------------------------------------------------------------------------
@@ -372,144 +269,93 @@ fatal() {
kill -INT $$
}

# Ensure POSIX compatibility across Alpine and Debian/Ubuntu
# === Spinner Start ===
# Trap cleanup on various signals
trap 'cleanup_spinner' EXIT INT TERM HUP

spinner_frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏')

# === Spinner Start ===
start_spinner() {
local msg="$1"
local spin_i=0
local interval=0.1

stop_spinner
SPINNER_MSG="$msg"
SPINNER_ACTIVE=1

{
while [[ "$SPINNER_ACTIVE" -eq 1 ]]; do
if [[ -t 2 ]]; then
printf "\r\e[2K%s %b" "${TAB}${spinner_frames[spin_i]}${TAB}" "${YW}${SPINNER_MSG}${CL}" >&2
else
printf "%s...\n" "$SPINNER_MSG" >&2
break
fi
spin_i=$(((spin_i + 1) % ${#spinner_frames[@]}))
sleep "$interval"
done
} &

local pid=$!
if ps -p "$pid" >/dev/null 2>&1; then
SPINNER_PID="$pid"
else
SPINNER_ACTIVE=0
SPINNER_PID=""
fi
spinner() {
local chars=(⠋ ⠙ ⠹ ⠸ ⠼ ⠴ ⠦ ⠧ ⠇ ⠏)
local i=0
while true; do
local index=$((i++ % ${#chars[@]}))
printf "\r\033[2K%s %b" "${CS_YWB}${chars[$index]}${CS_CL}" "${CS_YWB}${SPINNER_MSG:-}${CS_CL}"
sleep 0.1
done
}

clear_line() {
tput cr 2>/dev/null || echo -en "\r"
tput el 2>/dev/null || echo -en "\033[K"
}

# === Spinner Stop ===
stop_spinner() {
if [[ "$SPINNER_ACTIVE" -eq 1 && -n "$SPINNER_PID" ]]; then
SPINNER_ACTIVE=0
local pid="${SPINNER_PID:-}"
[[ -z "$pid" && -f /tmp/.spinner.pid ]] && pid=$(</tmp/.spinner.pid)

if kill -0 "$SPINNER_PID" 2>/dev/null; then
kill "$SPINNER_PID" 2>/dev/null || true
for _ in $(seq 1 10); do
sleep 0.05
kill -0 "$SPINNER_PID" 2>/dev/null || break
done
if [[ -n "$pid" && "$pid" =~ ^[0-9]+$ ]]; then
if kill "$pid" 2>/dev/null; then
sleep 0.05
kill -9 "$pid" 2>/dev/null || true
wait "$pid" 2>/dev/null || true
fi

if [[ "$SPINNER_PID" =~ ^[0-9]+$ ]]; then
ps -p "$SPINNER_PID" -o pid= >/dev/null 2>&1 && wait "$SPINNER_PID" 2>/dev/null || true
fi

printf "\r\e[2K" >&2
SPINNER_PID=""
rm -f /tmp/.spinner.pid
fi
}

cleanup_spinner() {
stop_spinner
unset SPINNER_PID SPINNER_MSG
stty sane 2>/dev/null || true
}

msg_info() {
local msg="$1"
[[ -z "$msg" || -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
[[ -z "$msg" ]] && return

if ! declare -p MSG_INFO_SHOWN &>/dev/null || ! declare -A MSG_INFO_SHOWN &>/dev/null; then
declare -gA MSG_INFO_SHOWN=()
fi
[[ -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
MSG_INFO_SHOWN["$msg"]=1

stop_spinner
SPINNER_MSG="$msg"

if [[ "${VERBOSE:-no}" == "no" && -t 2 ]]; then
start_spinner "$msg"
else
if is_verbose_mode || is_alpine; then
local HOURGLASS="${TAB}⏳${TAB}"
printf "\r\e[2K%s %b" "$HOURGLASS" "${YW}${msg}${CL}" >&2
return
fi

color_spinner
spinner &
SPINNER_PID=$!
echo "$SPINNER_PID" >/tmp/.spinner.pid
disown "$SPINNER_PID" 2>/dev/null || true
}

msg_ok() {
local msg="$1"
[[ -z "$msg" ]] && return
stop_spinner
printf "\r\e[2K%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
clear_line
printf "%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
unset MSG_INFO_SHOWN["$msg"]
}

msg_error() {
local msg="$1"
[[ -z "$msg" ]] && return
stop_spinner
printf "\r\e[2K%s %b\n" "$CROSS" "${RD}${msg}${CL}" >&2
local msg="$1"
echo -e "${BFR:-} ${CROSS:-✖️} ${RD}${msg}${CL}"
}

msg_warn() {
local msg="$1"
[[ -z "$msg" ]] && return
stop_spinner
printf "\r\e[2K%s %b\n" "$INFO" "${YWB}${msg}${CL}" >&2
unset MSG_INFO_SHOWN["$msg"]
local msg="$1"
echo -e "${BFR:-} ${INFO:-ℹ️} ${YWB}${msg}${CL}"
}

msg_custom() {
local symbol="${1:-"[*]"}"
local color="${2:-"\e[36m"}" # Default: Cyan
local color="${2:-"\e[36m"}"
local msg="${3:-}"

[[ -z "$msg" ]] && return
stop_spinner 2>/dev/null || true
printf "\r\e[2K%s %b\n" "$symbol" "${color}${msg}${CL:-\e[0m}" >&2
}
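
# Example usage (illustrative sketch): print a status line with a custom
# symbol and color; the values here are made up for demonstration.
#   msg_custom "⚙️" "\e[35m" "Applying custom tweak"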

msg_progress() {
local current="$1"
local total="$2"
local label="$3"
local width=40
local filled percent bar empty
local fill_char="#"
local empty_char="-"

if ! [[ "$current" =~ ^[0-9]+$ ]] || ! [[ "$total" =~ ^[0-9]+$ ]] || [[ "$total" -eq 0 ]]; then
printf "\r\e[2K%s %b\n" "$CROSS" "${RD}Invalid progress input${CL}" >&2
return
fi

percent=$(((current * 100) / total))
filled=$(((current * width) / total))
empty=$((width - filled))

bar=$(printf "%${filled}s" | tr ' ' "$fill_char")
bar+=$(printf "%${empty}s" | tr ' ' "$empty_char")

printf "\r\e[2K%s [%s] %3d%% %s" "${TAB}" "$bar" "$percent" "$label" >&2

if [[ "$current" -eq "$total" ]]; then
printf "\n" >&2
fi
stop_spinner
echo -e "${BFR:-} ${symbol} ${color}${msg}${CL:-\e[0m}"
printf "\r\033[K\e[?25h\n"
}
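
# Example usage (illustrative sketch): drive the progress bar across five
# steps; the step count and label are made up for demonstration.
#   total=5
#   for step in $(seq 1 "$total"); do
#     msg_progress "$step" "$total" "Deploying component $step/$total"
#     sleep 1
#   done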

run_container_safe() {
@@ -560,3 +406,5 @@ check_or_create_swap() {
return 1
fi
}

trap 'stop_spinner' EXIT INT TERM

@@ -21,36 +21,67 @@ fi
# This sets error handling options and defines the error_handler function to handle errors
set -Eeuo pipefail
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
trap on_exit EXIT
trap on_interrupt INT
trap on_terminate TERM

function on_exit() {
local exit_code="$?"
[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
exit "$exit_code"
}

# This function handles errors
function error_handler() {
printf "\e[?25h"

local exit_code="$?"
local line_number="$1"
local command="$2"
local error_message="${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}"
echo -e "\n$error_message\n"
exit 200
printf "\e[?25h"
echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
exit "$exit_code"
}

function on_interrupt() {
echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
exit 130
}

function on_terminate() {
echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
exit 143
}

function check_storage_support() {
local CONTENT="$1"
local -a VALID_STORAGES=()

while IFS= read -r line; do
local STORAGE=$(awk '{print $1}' <<<"$line")
[[ "$STORAGE" == "storage" || -z "$STORAGE" ]] && continue
VALID_STORAGES+=("$STORAGE")
done < <(pvesm status -content "$CONTENT" 2>/dev/null | awk 'NR>1')

[[ ${#VALID_STORAGES[@]} -gt 0 ]]
}

# This checks for the presence of valid Container Storage and Template Storage locations
msg_info "Validating Storage"
VALIDCT=$(pvesm status -content rootdir | awk 'NR>1')
if [ -z "$VALIDCT" ]; then
msg_error "Unable to detect a valid Container Storage location."
if ! check_storage_support "rootdir"; then

msg_error "No valid storage found for 'rootdir' (Container)."
exit 1
fi
VALIDTMP=$(pvesm status -content vztmpl | awk 'NR>1')
if [ -z "$VALIDTMP" ]; then
msg_error "Unable to detect a valid Template Storage location."
if ! check_storage_support "vztmpl"; then

msg_error "No valid storage found for 'vztmpl' (Template)."
exit 1
fi
msg_ok "Validated Storage (rootdir / vztmpl)."

# This function is used to select the storage class and determine the corresponding storage content type and label.
function select_storage() {
local CLASS=$1
local CONTENT
local CONTENT_LABEL
local CLASS=$1 CONTENT CONTENT_LABEL

case $CLASS in
container)
CONTENT='rootdir'
@@ -60,51 +91,72 @@ function select_storage() {
CONTENT='vztmpl'
CONTENT_LABEL='Container template'
;;
*) false || {
msg_error "Invalid storage class."
exit 201
} ;;
iso)
CONTENT='iso'
CONTENT_LABEL='ISO image'
;;
images)
CONTENT='images'
CONTENT_LABEL='VM Disk image'
;;
backup)
CONTENT='backup'
CONTENT_LABEL='Backup'
;;
snippets)
CONTENT='snippets'
CONTENT_LABEL='Snippets'
;;
*)
msg_error "Invalid storage class '$CLASS'"
return 1
;;
esac

# Collect storage options
local -a MENU
local MSG_MAX_LENGTH=0
local -a MENU
local -A STORAGE_MAP
local COL_WIDTH=0

while read -r TAG TYPE _ _ _ FREE _; do
local TYPE_PADDED
local FREE_FMT

TYPE_PADDED=$(printf "%-10s" "$TYPE")
FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.2f <<<"$FREE")B
local ITEM="Type: $TYPE_PADDED Free: $FREE_FMT"

((${#ITEM} + 2 > MSG_MAX_LENGTH)) && MSG_MAX_LENGTH=$((${#ITEM} + 2))

MENU+=("$TAG" "$ITEM" "OFF")
while read -r TAG TYPE _ TOTAL USED FREE _; do
[[ -n "$TAG" && -n "$TYPE" ]] || continue
local DISPLAY="${TAG} (${TYPE})"
local USED_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$USED")
local FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$FREE")
local INFO="Free: ${FREE_FMT}B Used: ${USED_FMT}B"
STORAGE_MAP["$DISPLAY"]="$TAG"
MENU+=("$DISPLAY" "$INFO" "OFF")
((${#DISPLAY} > COL_WIDTH)) && COL_WIDTH=${#DISPLAY}
done < <(pvesm status -content "$CONTENT" | awk 'NR>1')

local OPTION_COUNT=$((${#MENU[@]} / 3))
if [ ${#MENU[@]} -eq 0 ]; then
msg_error "No storage found for content type '$CONTENT'."
return 2
fi

# Auto-select if only one option available
if [[ "$OPTION_COUNT" -eq 1 ]]; then
echo "${MENU[0]}"
if [ $((${#MENU[@]} / 3)) -eq 1 ]; then
STORAGE_RESULT="${STORAGE_MAP[${MENU[0]}]}"
return 0
fi

# Display selection menu
local STORAGE
while [[ -z "${STORAGE:+x}" ]]; do
STORAGE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Storage Pools" --radiolist \
"Select the storage pool to use for the ${CONTENT_LABEL,,}.\nUse the spacebar to make a selection.\n" \
16 $((MSG_MAX_LENGTH + 23)) 6 \
"${MENU[@]}" 3>&1 1>&2 2>&3) || {
msg_error "Storage selection cancelled."
exit 202
}
done
local WIDTH=$((COL_WIDTH + 42))
while true; do
local DISPLAY_SELECTED=$(whiptail --backtitle "Proxmox VE Helper Scripts" \
--title "Storage Pools" \
--radiolist "Which storage pool for ${CONTENT_LABEL,,}?\n(Spacebar to select)" \
16 "$WIDTH" 6 "${MENU[@]}" 3>&1 1>&2 2>&3)

echo "$STORAGE"
[[ $? -ne 0 ]] && return 3

if [[ -z "$DISPLAY_SELECTED" || -z "${STORAGE_MAP[$DISPLAY_SELECTED]+_}" ]]; then
whiptail --msgbox "No valid storage selected. Please try again." 8 58
continue
fi

STORAGE_RESULT="${STORAGE_MAP[$DISPLAY_SELECTED]}"
return 0
done
}
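
# Illustrative usage sketch (matches the calling pattern further below): the
# selected pool is returned via the STORAGE_RESULT variable rather than stdout.
#   if select_storage template; then
#     TEMPLATE_STORAGE="$STORAGE_RESULT"
#   fi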

# Test if required variables are set
[[ "${CTID:-}" ]] || {
msg_error "You need to set 'CTID' variable."
@@ -129,13 +181,55 @@ if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
exit 206
fi

# Get template storage
TEMPLATE_STORAGE=$(select_storage template)
msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# DEFAULT_FILE="/usr/local/community-scripts/default_storage"
# if [[ -f "$DEFAULT_FILE" ]]; then
# source "$DEFAULT_FILE"
# if [[ -n "$TEMPLATE_STORAGE" && -n "$CONTAINER_STORAGE" ]]; then
# msg_info "Using default storage configuration from: $DEFAULT_FILE"
# msg_ok "Template Storage: ${BL}$TEMPLATE_STORAGE${CL} ${GN}|${CL} Container Storage: ${BL}$CONTAINER_STORAGE${CL}"
# else
# msg_warn "Default storage file exists but is incomplete – falling back to manual selection"
# TEMPLATE_STORAGE=$(select_storage template)
# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# CONTAINER_STORAGE=$(select_storage container)
# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# fi
# else
# # TEMPLATE STORAGE SELECTION
# # Template Storage
# while true; do
# TEMPLATE_STORAGE=$(select_storage template)
# if [[ -n "$TEMPLATE_STORAGE" ]]; then
# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# break
# fi
# msg_warn "No valid template storage selected. Please try again."
# done

# Get container storage
CONTAINER_STORAGE=$(select_storage container)
msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# while true; do
# CONTAINER_STORAGE=$(select_storage container)
# if [[ -n "$CONTAINER_STORAGE" ]]; then
# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# break
# fi
# msg_warn "No valid container storage selected. Please try again."
# done

# fi

while true; do
if select_storage template; then
TEMPLATE_STORAGE="$STORAGE_RESULT"
break
fi
done

while true; do
if select_storage container; then
CONTAINER_STORAGE="$STORAGE_RESULT"
break
fi
done

# Check free space on selected container storage
STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }')
@@ -204,7 +298,7 @@ if ! pveam list "$TEMPLATE_STORAGE" | grep -q "$TEMPLATE" || ! zstdcat "$TEMPLAT
done
fi

msg_ok "LXC Template '$TEMPLATE' is ready to use."
msg_info "Creating LXC Container"
# Check and fix subuid/subgid
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
@@ -215,12 +309,15 @@ PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})

# Secure creation of the LXC container with lock and template check
lockfile="/tmp/template.${TEMPLATE}.lock"
exec 9>"$lockfile"
exec 9>"$lockfile" >/dev/null 2>&1 || {
msg_error "Failed to create lock file '$lockfile'."
exit 200
}
flock -w 60 9 || {
msg_error "Timeout while waiting for template lock"
exit 211
}
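
# Sketch of the locking idea (illustrative, not part of this diff): file
# descriptor 9 holds the lock for the lifetime of the script, so concurrent
# runs against the same template wait up to 60 seconds instead of racing.
# The path below is an example only.
#   exec 9>/tmp/template.example.lock
#   flock -w 60 9 && echo "lock acquired"
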
msg_info "Creating LXC Container"

if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed. Checking if template is corrupted or incomplete."

@@ -252,16 +349,23 @@ if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[
sleep 1 # I/O-Sync-Delay

msg_ok "Re-downloaded LXC Template"
fi

if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed after re-downloading template."
exit 200
if ! pct list | awk '{print $1}' | grep -qx "$CTID"; then
msg_error "Container ID $CTID not listed in 'pct list' – unexpected failure."
exit 215
fi

if ! grep -q '^rootfs:' "/etc/pve/lxc/$CTID.conf"; then
msg_error "RootFS entry missing in container config – storage not correctly assigned."
exit 216
fi

if grep -q '^hostname:' "/etc/pve/lxc/$CTID.conf"; then
CT_HOSTNAME=$(grep '^hostname:' "/etc/pve/lxc/$CTID.conf" | awk '{print $2}')
if [[ ! "$CT_HOSTNAME" =~ ^[a-z0-9-]+$ ]]; then
msg_warn "Hostname '$CT_HOSTNAME' contains invalid characters – may cause issues with networking or DNS."
fi
fi

if ! pct status "$CTID" &>/dev/null; then
msg_error "Container not found after pct create – assuming failure."
exit 210
fi

msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."

@@ -62,9 +62,11 @@ setting_up_container() {
rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
systemctl disable -q --now systemd-networkd-wait-online.service
msg_ok "Set up Container OS"
msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
#msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
msg_ok "Network Connected: ${BL}$(hostname -I)"
}

# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
network_check() {
set +e
@@ -72,6 +74,7 @@ network_check() {
ipv4_connected=false
ipv6_connected=false
sleep 1

# Check IPv4 connectivity to Google, Cloudflare & Quad9 DNS servers.
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
msg_ok "IPv4 Internet Connected"
@@ -100,25 +103,26 @@ network_check() {
fi

# DNS resolution checks for GitHub-related domains (IPv4 and/or IPv6)
GITHUB_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com")
GITHUB_STATUS="GitHub DNS:"
GIT_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com" "git.community-scripts.org")
GIT_STATUS="Git DNS:"
DNS_FAILED=false

for HOST in "${GITHUB_HOSTS[@]}"; do
for HOST in "${GIT_HOSTS[@]}"; do
RESOLVEDIP=$(getent hosts "$HOST" | awk '{ print $1 }' | grep -E '(^([0-9]{1,3}\.){3}[0-9]{1,3}$)|(^[a-fA-F0-9:]+$)' | head -n1)
if [[ -z "$RESOLVEDIP" ]]; then
GITHUB_STATUS+="$HOST:($DNSFAIL)"
GIT_STATUS+="$HOST:($DNSFAIL)"
DNS_FAILED=true
else
GITHUB_STATUS+=" $HOST:($DNSOK)"
GIT_STATUS+=" $HOST:($DNSOK)"
fi
done

if [[ "$DNS_FAILED" == true ]]; then
fatal "$GITHUB_STATUS"
fatal "$GIT_STATUS"
else
msg_ok "$GITHUB_STATUS"
msg_ok "$GIT_STATUS"
fi

set -e
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}
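
# Example (illustrative sketch): the same getent-based check can be run by
# hand for a single host to verify DNS resolution from inside the container.
#   getent hosts github.com | awk '{ print $1 }' | head -n1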

@@ -239,10 +239,14 @@ setup_mariadb() {
DISTRO_CODENAME="$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)"
CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"

if ! curl -fsI http://mirror.mariadb.org/repo/ >/dev/null; then
msg_error "MariaDB mirror not reachable"
return 1
fi

msg_info "Setting up MariaDB $MARIADB_VERSION"
# grab dynamic latest LTS version
if [[ "$MARIADB_VERSION" == "latest" ]]; then
$STD msg_info "Resolving latest GA MariaDB version"
MARIADB_VERSION=$(curl -fsSL http://mirror.mariadb.org/repo/ |
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
grep -vE 'rc/|rolling/' |
@@ -253,7 +257,6 @@ setup_mariadb() {
msg_error "Could not determine latest GA MariaDB version"
return 1
fi
$STD msg_ok "Latest GA MariaDB version is $MARIADB_VERSION"
fi

local CURRENT_VERSION=""
@@ -278,7 +281,6 @@ setup_mariadb() {
$STD msg_info "Setup MariaDB $MARIADB_VERSION"
fi

$STD msg_info "Setting up MariaDB Repository"
curl -fsSL "https://mariadb.org/mariadb_release_signing_key.asc" |
gpg --dearmor -o /etc/apt/trusted.gpg.d/mariadb.gpg

@@ -819,6 +821,7 @@ function fetch_and_deploy_gh_release() {

msg_info "Fetching GitHub release: $app ($version)"

### Tarball Mode ###
if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
url=$(echo "$json" | jq -r '.tarball_url // empty')
[[ -z "$url" ]] && url="https://github.com/$repo/archive/refs/tags/v$version.tar.gz"
@@ -839,6 +842,7 @@ function fetch_and_deploy_gh_release() {
cp -r "$unpack_dir"/* "$target/"
shopt -u dotglob nullglob

### Binary Mode ###
elif [[ "$mode" == "binary" ]]; then
local arch
arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
@@ -848,12 +852,14 @@ function fetch_and_deploy_gh_release() {
local assets url_match=""
assets=$(echo "$json" | jq -r '.assets[].browser_download_url')

# If explicit filename pattern is provided (param $6), match that first
if [[ -n "$6" ]]; then
for u in $assets; do
[[ "$u" =~ $6 || "$u" == *"$6" ]] && url_match="$u" && break
done
fi

# If no match via explicit pattern, fall back to architecture heuristic
if [[ -z "$url_match" ]]; then
for u in $assets; do
if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
@@ -863,6 +869,7 @@ function fetch_and_deploy_gh_release() {
done
fi

# Fallback: any .deb file
if [[ -z "$url_match" ]]; then
for u in $assets; do
[[ "$u" =~ \.deb$ ]] && url_match="$u" && break
@@ -891,8 +898,10 @@ function fetch_and_deploy_gh_release() {
}
}

### Prebuild Mode ###
elif [[ "$mode" == "prebuild" ]]; then
local pattern="$6"
local pattern="${6%\"}"
pattern="${pattern#\"}"
[[ -z "$pattern" ]] && {
msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
rm -rf "$tmpdir"
@@ -901,7 +910,14 @@ function fetch_and_deploy_gh_release() {

local asset_url=""
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
[[ "$u" =~ $pattern || "$u" == *"$pattern" ]] && asset_url="$u" && break
filename_candidate="${u##*/}"
case "$filename_candidate" in
$pattern)
asset_url="$u"
break
;;
esac

done
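
# Note (illustrative): the case statement above treats the 6th parameter as a
# shell glob matched against each asset's basename, so a caller might pass a
# pattern such as "*linux-amd64.tar.gz" (example pattern, not from this diff)
# to select the right prebuilt archive.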

[[ -z "$asset_url" ]] && {
@@ -923,16 +939,18 @@ function fetch_and_deploy_gh_release() {
$STD apt-get install -y unzip
fi
$STD unzip "$tmpdir/$filename" -d "$target"
elif [[ "$filename" == *.tar.gz ]]; then
tar -xzf "$tmpdir/$filename" -C "$target"
elif [[ "$filename" == *.tar.* ]]; then
tar --strip-components=1 -xf "$tmpdir/$filename" -C "$target"
else
msg_error "Unsupported archive format: $filename"
rm -rf "$tmpdir"
return 1
fi

### Singlefile Mode ###
elif [[ "$mode" == "singlefile" ]]; then
local pattern="$6"
local pattern="${6%\"}"
pattern="${pattern#\"}"
[[ -z "$pattern" ]] && {
msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
rm -rf "$tmpdir"
@@ -941,7 +959,13 @@ function fetch_and_deploy_gh_release() {

local asset_url=""
for u in $(echo "$json" | jq -r '.assets[].browser_download_url'); do
[[ "$u" =~ $pattern || "$u" == *"$pattern" ]] && asset_url="$u" && break
filename_candidate="${u##*/}"
case "$filename_candidate" in
$pattern)
asset_url="$u"
break
;;
esac
done

[[ -z "$asset_url" ]] && {
File diff suppressed because it is too large